初始版本提交

This commit is contained in:
hzj
2025-01-17 14:58:27 +08:00
parent 92f98231f2
commit 04e3f48da8
123 changed files with 116 additions and 38 deletions

View File

@@ -0,0 +1,24 @@
package com.njcn.dataProcess;
import lombok.extern.slf4j.Slf4j;
import org.mybatis.spring.annotation.MapperScan;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cloud.openfeign.EnableFeignClients;
import org.springframework.context.annotation.DependsOn;
/**
 * Boot entry point for the data-process service.
 * <p>
 * Scans components under {@code com.njcn}, registers MyBatis mappers matching
 * {@code com.njcn.**.mapper}, and waits on the {@code proxyMapperRegister}
 * bean before initializing.
 *
 * @author xy
 */
@Slf4j
@DependsOn("proxyMapperRegister")
@MapperScan("com.njcn.**.mapper")
@SpringBootApplication(scanBasePackages = "com.njcn")
public class DataProcessBootApplication {

    /**
     * Launches the Spring application context.
     *
     * @param args command-line arguments forwarded to Spring Boot
     */
    public static void main(String[] args) {
        SpringApplication application = new SpringApplication(DataProcessBootApplication.class);
        application.run(args);
    }
}

View File

@@ -0,0 +1,20 @@
package com.njcn.dataProcess.annotation;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Marker annotation for fields whose insert-side implementation should be
 * resolved at runtime by the dynamic-bean processor, using the
 * {@code data.source.insert} configuration value as the bean-id prefix.
 * <p>
 * NOTE(review): declared on TYPE/METHOD/FIELD, but the processor only scans
 * fields — confirm the extra targets are intentional.
 *
 * @author hongawen
 * @version 1.0
 * @date 2024/11/7 11:00
 */
@Target({ElementType.TYPE,ElementType.METHOD,ElementType.FIELD})
@Retention(RetentionPolicy.RUNTIME)
public @interface InsertBean {
}

View File

@@ -0,0 +1,20 @@
package com.njcn.dataProcess.annotation;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Marker annotation for fields whose query-side implementation should be
 * resolved at runtime by the dynamic-bean processor, using the
 * {@code data.source.query} configuration value as the bean-id prefix.
 * <p>
 * NOTE(review): declared on TYPE/METHOD/FIELD, but the processor only scans
 * fields — confirm the extra targets are intentional.
 *
 * @author hongawen
 * @version 1.0
 * @date 2024/11/7 11:00
 */
@Target({ElementType.TYPE,ElementType.METHOD,ElementType.FIELD})
@Retention(RetentionPolicy.RUNTIME)
public @interface QueryBean {
}

View File

@@ -0,0 +1,79 @@
package com.njcn.dataProcess.bean;
import com.njcn.common.pojo.exception.BusinessException;
import com.njcn.dataProcess.annotation.InsertBean;
import com.njcn.dataProcess.annotation.QueryBean;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.beans.factory.config.BeanPostProcessor;
import org.springframework.context.ApplicationContext;
import org.springframework.stereotype.Component;
import org.springframework.stereotype.Controller;
import org.springframework.stereotype.Service;
import org.springframework.web.bind.annotation.RestController;
import java.lang.reflect.Field;
/**
 * BeanPostProcessor that injects a configuration-selected implementation into
 * every {@link QueryBean}- or {@link InsertBean}-annotated interface field of
 * controller/service beans, before normal initialization.
 * <p>
 * The target bean id is built as {@code <sourcePrefix><InterfaceNameWithoutI>Impl},
 * e.g. {@code Influxdb} + {@code DataV} + {@code Impl}.
 *
 * @author hongawen
 * @version 1.0
 * @date 2024/11/7 11:31
 */
@Component
public class DynamicBeanProcessor implements BeanPostProcessor {

    @Autowired
    private ApplicationContext context;

    /**
     * Query-side source prefix (default "Influxdb"), read from
     * {@code data.source.query}; prepended to bean ids for {@link QueryBean} fields.
     */
    @Value("${data.source.query:Influxdb}")
    private String queryParam;

    /**
     * Insert-side source prefix (default "Relation"), read from
     * {@code data.source.insert}; prepended to bean ids for {@link InsertBean} fields.
     */
    @Value("${data.source.insert:Relation}")
    private String insertParam;

    @Override
    public Object postProcessBeforeInitialization(Object bean, String beanName) throws BeansException {
        // Only beans directly annotated as controller/service are scanned.
        // NOTE(review): isAnnotationPresent does not follow meta-annotations, which is
        // why @RestController must be checked explicitly in addition to @Controller.
        if (bean.getClass().isAnnotationPresent(Controller.class) || bean.getClass().isAnnotationPresent(RestController.class)||bean.getClass().isAnnotationPresent(Service.class)) {
            processFields(bean);
        }
        return bean;
    }

    private void processFields(Object bean) {
        Field[] fields = bean.getClass().getDeclaredFields();
        for (Field field : fields) {
            String beanId;
            Class<?> type = field.getType();
            // Only interface-typed fields carrying one of the marker annotations are handled.
            if (type.isInterface() && (field.isAnnotationPresent(QueryBean.class) || field.isAnnotationPresent(InsertBean.class))) {
                String name = type.getName();
                // lastIndexOf(".") + 2 skips both the dot and one extra character —
                // i.e. the leading "I" of interfaces like IDataV -> "DataV".
                // NOTE(review): assumes every injected interface follows the I-prefix
                // convention; a differently named interface would be silently truncated.
                beanId = name.substring(name.lastIndexOf(".") + 2);
                if (field.isAnnotationPresent(QueryBean.class)) {
                    // e.g. "Influxdb" + "DataV" + "Impl" -> "InfluxdbDataVImpl"
                    beanId = queryParam + beanId + "Impl";
                } else if (field.isAnnotationPresent(InsertBean.class)) {
                    beanId = insertParam + beanId + "Impl";
                }
                try {
                    field.setAccessible(true);
                    // NOTE(review): getBean(beanId) expects an upper-camel bean id; Spring's
                    // default naming lower-cases the first letter ("influxdbDataVImpl") —
                    // confirm implementations are registered with explicit ids.
                    field.set(bean, context.getBean(beanId));
                } catch (IllegalAccessException e) {
                    // NOTE(review): the original cause is dropped here; consider chaining e
                    // if BusinessException supports a cause.
                    throw new BusinessException("获取动态实现类失败");
                }
            }
        }
    }

    @Override
    public Object postProcessAfterInitialization(Object bean, String beanName) throws BeansException {
        // No post-initialization processing required.
        return bean;
    }
}

View File

@@ -0,0 +1,62 @@
package com.njcn.dataProcess.controller;
import com.njcn.common.pojo.annotation.OperateInfo;
import com.njcn.common.pojo.constant.OperateType;
import com.njcn.common.pojo.enums.common.LogEnum;
import com.njcn.common.pojo.enums.response.CommonResponseEnum;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.common.utils.HttpResultUtil;
import com.njcn.dataProcess.annotation.InsertBean;
import com.njcn.dataProcess.annotation.QueryBean;
import com.njcn.dataProcess.dto.DataFlickerDTO;
import com.njcn.dataProcess.service.IDataFlicker;
import com.njcn.web.controller.BaseController;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Controller;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.util.List;
/**
 * REST endpoints for short-term flicker data (短时闪变数据).
 * <p>
 * The query/insert fields are populated by the dynamic-bean processor, which
 * selects the concrete {@code IDataFlicker} implementation from configuration.
 *
 * @author hongawen
 * @version 1.0
 * @date 2024/11/6 19:48
 */
@Validated
@Slf4j
@RestController
@RequestMapping("/dataFlicker")
@Api(tags = "短时闪变数据")
public class DataFlickerController extends BaseController {

    // Query-side implementation, injected via @QueryBean (currently unused here).
    @QueryBean
    private IDataFlicker dataFlickerQuery;

    // Insert-side implementation, injected via @InsertBean.
    @InsertBean
    private IDataFlicker dataFlickerInsert;

    /**
     * Batch-inserts flicker records.
     *
     * @param dataFlickerDTOList records to persist
     * @return success envelope with no payload
     */
    @OperateInfo(info = LogEnum.BUSINESS_COMMON, operateType = OperateType.ADD)
    @PostMapping("/batchInsertion")
    @ApiOperation("批量插入")
    public HttpResult<String> batchInsertion(@RequestBody List<DataFlickerDTO> dataFlickerDTOList) {
        String methodDescribe = getMethodDescribe("batchInsertion");
        dataFlickerInsert.batchInsertion(dataFlickerDTOList);
        return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, null, methodDescribe);
    }
}

View File

@@ -0,0 +1,61 @@
package com.njcn.dataProcess.controller;
import com.njcn.common.pojo.annotation.OperateInfo;
import com.njcn.common.pojo.constant.OperateType;
import com.njcn.common.pojo.enums.common.LogEnum;
import com.njcn.common.pojo.enums.response.CommonResponseEnum;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.common.utils.HttpResultUtil;
import com.njcn.dataProcess.dto.DataFlucDTO;
import com.njcn.dataProcess.annotation.InsertBean;
import com.njcn.dataProcess.annotation.QueryBean;
import com.njcn.dataProcess.service.IDataFluc;
import com.njcn.web.controller.BaseController;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Controller;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.util.List;
/**
 * REST endpoints for voltage-fluctuation data (电压波动).
 * <p>
 * The query/insert fields are populated by the dynamic-bean processor, which
 * selects the concrete {@code IDataFluc} implementation from configuration.
 *
 * @author hongawen
 * @version 1.0
 * @date 2024/11/6 19:48
 */
@Validated
@Slf4j
@RestController
@RequestMapping("/dataFluc")
@Api(tags = "电压波动")
public class DataFlucController extends BaseController {

    // Query-side implementation, injected via @QueryBean (currently unused here).
    @QueryBean
    private IDataFluc dataFlucQuery;

    // Insert-side implementation, injected via @InsertBean.
    @InsertBean
    private IDataFluc dataFlucInsert;

    /**
     * Batch-inserts voltage-fluctuation records.
     *
     * @param dataFlucDTOList records to persist
     * @return success envelope with no payload
     */
    @OperateInfo(info = LogEnum.BUSINESS_COMMON, operateType = OperateType.ADD)
    @PostMapping("/batchInsertion")
    @ApiOperation("批量插入")
    public HttpResult<String> batchInsertion(@RequestBody List<DataFlucDTO> dataFlucDTOList) {
        String methodDescribe = getMethodDescribe("batchInsertion");
        dataFlucInsert.batchInsertion(dataFlucDTOList);
        return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, null, methodDescribe);
    }
}

View File

@@ -0,0 +1,61 @@
package com.njcn.dataProcess.controller;
import com.njcn.common.pojo.annotation.OperateInfo;
import com.njcn.common.pojo.constant.OperateType;
import com.njcn.common.pojo.enums.common.LogEnum;
import com.njcn.common.pojo.enums.response.CommonResponseEnum;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.common.utils.HttpResultUtil;
import com.njcn.dataProcess.dto.DataHarmphasicIDTO;
import com.njcn.dataProcess.annotation.InsertBean;
import com.njcn.dataProcess.annotation.QueryBean;
import com.njcn.dataProcess.service.IDataHarmphasicI;
import com.njcn.web.controller.BaseController;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Controller;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.util.List;
/**
 * REST endpoints for harmonic-current phase-angle data (谐波电流角度数据).
 * <p>
 * The query/insert fields are populated by the dynamic-bean processor, which
 * selects the concrete {@code IDataHarmphasicI} implementation from configuration.
 *
 * @author hongawen
 * @version 1.0
 * @date 2024/11/6 19:48
 */
@Validated
@Slf4j
@RestController
@RequestMapping("/dataHarmphasicI")
@Api(tags = "谐波电流角度数据")
public class DataHarmphasicIController extends BaseController {

    // Query-side implementation, injected via @QueryBean (currently unused here).
    @QueryBean
    private IDataHarmphasicI dataHarmphasicIQuery;

    // Insert-side implementation, injected via @InsertBean.
    @InsertBean
    private IDataHarmphasicI dataHarmphasicIInsert;

    /**
     * Batch-inserts harmonic-current phase-angle records.
     *
     * @param dataHarmphasicIDTOList records to persist
     * @return success envelope with no payload
     */
    @OperateInfo(info = LogEnum.BUSINESS_COMMON, operateType = OperateType.ADD)
    @PostMapping("/batchInsertion")
    @ApiOperation("批量插入")
    public HttpResult<String> batchInsertion(@RequestBody List<DataHarmphasicIDTO> dataHarmphasicIDTOList) {
        String methodDescribe = getMethodDescribe("batchInsertion");
        dataHarmphasicIInsert.batchInsertion(dataHarmphasicIDTOList);
        return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, null, methodDescribe);
    }
}

View File

@@ -0,0 +1,61 @@
package com.njcn.dataProcess.controller;
import com.njcn.common.pojo.annotation.OperateInfo;
import com.njcn.common.pojo.constant.OperateType;
import com.njcn.common.pojo.enums.common.LogEnum;
import com.njcn.common.pojo.enums.response.CommonResponseEnum;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.common.utils.HttpResultUtil;
import com.njcn.dataProcess.dto.DataHarmphasicVDTO;
import com.njcn.dataProcess.annotation.InsertBean;
import com.njcn.dataProcess.annotation.QueryBean;
import com.njcn.dataProcess.service.IDataHarmphasicV;
import com.njcn.web.controller.BaseController;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Controller;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.util.List;
/**
 * REST endpoints for harmonic-voltage phase-angle data (谐波电压角度数据).
 * <p>
 * The query/insert fields are populated by the dynamic-bean processor, which
 * selects the concrete {@code IDataHarmphasicV} implementation from configuration.
 *
 * @author hongawen
 * @version 1.0
 * @date 2024/11/6 19:48
 */
@Validated
@Slf4j
@RestController
@RequestMapping("/dataHarmphasicV")
@Api(tags = "谐波电压角度数据")
public class DataHarmphasicVController extends BaseController {

    // Query-side implementation, injected via @QueryBean (currently unused here).
    @QueryBean
    private IDataHarmphasicV dataHarmphasicVQuery;

    // Insert-side implementation, injected via @InsertBean.
    @InsertBean
    private IDataHarmphasicV dataHarmphasicVInsert;

    /**
     * Batch-inserts harmonic-voltage phase-angle records.
     *
     * @param dataHarmphasicVDTOList records to persist
     * @return success envelope with no payload
     */
    @OperateInfo(info = LogEnum.BUSINESS_COMMON, operateType = OperateType.ADD)
    @PostMapping("/batchInsertion")
    @ApiOperation("批量插入")
    public HttpResult<String> batchInsertion(@RequestBody List<DataHarmphasicVDTO> dataHarmphasicVDTOList) {
        String methodDescribe = getMethodDescribe("batchInsertion");
        dataHarmphasicVInsert.batchInsertion(dataHarmphasicVDTOList);
        return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, null, methodDescribe);
    }
}

View File

@@ -0,0 +1,61 @@
package com.njcn.dataProcess.controller;
import com.njcn.common.pojo.annotation.OperateInfo;
import com.njcn.common.pojo.constant.OperateType;
import com.njcn.common.pojo.enums.common.LogEnum;
import com.njcn.common.pojo.enums.response.CommonResponseEnum;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.common.utils.HttpResultUtil;
import com.njcn.dataProcess.dto.DataHarmpowerPDTO;
import com.njcn.dataProcess.annotation.InsertBean;
import com.njcn.dataProcess.annotation.QueryBean;
import com.njcn.dataProcess.service.IDataHarmpowerP;
import com.njcn.web.controller.BaseController;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Controller;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.util.List;
/**
 * REST endpoints for active-power data (有功功率数据).
 * <p>
 * The query/insert fields are populated by the dynamic-bean processor, which
 * selects the concrete {@code IDataHarmpowerP} implementation from configuration.
 *
 * @author hongawen
 * @version 1.0
 * @date 2024/11/6 19:48
 */
@Validated
@Slf4j
@RestController
@RequestMapping("/dataHarmpowerP")
@Api(tags = "有功功率数据")
public class DataHarmpowerPController extends BaseController {

    // Query-side implementation, injected via @QueryBean (currently unused here).
    @QueryBean
    private IDataHarmpowerP dataHarmpowerPQuery;

    // Insert-side implementation, injected via @InsertBean.
    @InsertBean
    private IDataHarmpowerP dataHarmpowerPInsert;

    /**
     * Batch-inserts active-power records.
     *
     * @param dataHarmpowerPDTOList records to persist
     * @return success envelope with no payload
     */
    @OperateInfo(info = LogEnum.BUSINESS_COMMON, operateType = OperateType.ADD)
    @PostMapping("/batchInsertion")
    @ApiOperation("批量插入")
    public HttpResult<String> batchInsertion(@RequestBody List<DataHarmpowerPDTO> dataHarmpowerPDTOList) {
        String methodDescribe = getMethodDescribe("batchInsertion");
        dataHarmpowerPInsert.batchInsertion(dataHarmpowerPDTOList);
        return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, null, methodDescribe);
    }
}

View File

@@ -0,0 +1,61 @@
package com.njcn.dataProcess.controller;
import com.njcn.common.pojo.annotation.OperateInfo;
import com.njcn.common.pojo.constant.OperateType;
import com.njcn.common.pojo.enums.common.LogEnum;
import com.njcn.common.pojo.enums.response.CommonResponseEnum;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.common.utils.HttpResultUtil;
import com.njcn.dataProcess.dto.DataHarmpowerQDTO;
import com.njcn.dataProcess.annotation.InsertBean;
import com.njcn.dataProcess.annotation.QueryBean;
import com.njcn.dataProcess.service.IDataHarmpowerQ;
import com.njcn.web.controller.BaseController;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Controller;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.util.List;
/**
 * REST endpoints for reactive-power data (无功功率数据).
 * <p>
 * The query/insert fields are populated by the dynamic-bean processor, which
 * selects the concrete {@code IDataHarmpowerQ} implementation from configuration.
 *
 * @author hongawen
 * @version 1.0
 * @date 2024/11/6 19:48
 */
@Validated
@Slf4j
@RestController
@RequestMapping("/dataHarmpowerQ")
@Api(tags = "无功功率数据")
public class DataHarmpowerQController extends BaseController {

    // Query-side implementation, injected via @QueryBean (currently unused here).
    @QueryBean
    private IDataHarmpowerQ dataHarmpowerQQuery;

    // Insert-side implementation, injected via @InsertBean.
    @InsertBean
    private IDataHarmpowerQ dataHarmpowerQInsert;

    /**
     * Batch-inserts reactive-power records.
     *
     * @param dataHarmpowerQDTOList records to persist
     * @return success envelope with no payload
     */
    @OperateInfo(info = LogEnum.BUSINESS_COMMON, operateType = OperateType.ADD)
    @PostMapping("/batchInsertion")
    @ApiOperation("批量插入")
    public HttpResult<String> batchInsertion(@RequestBody List<DataHarmpowerQDTO> dataHarmpowerQDTOList) {
        String methodDescribe = getMethodDescribe("batchInsertion");
        dataHarmpowerQInsert.batchInsertion(dataHarmpowerQDTOList);
        return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, null, methodDescribe);
    }
}

View File

@@ -0,0 +1,60 @@
package com.njcn.dataProcess.controller;
import com.njcn.common.pojo.annotation.OperateInfo;
import com.njcn.common.pojo.constant.OperateType;
import com.njcn.common.pojo.enums.common.LogEnum;
import com.njcn.common.pojo.enums.response.CommonResponseEnum;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.common.utils.HttpResultUtil;
import com.njcn.dataProcess.dto.DataHarmpowerSDTO;
import com.njcn.dataProcess.annotation.InsertBean;
import com.njcn.dataProcess.annotation.QueryBean;
import com.njcn.dataProcess.service.IDataHarmpowerS;
import com.njcn.web.controller.BaseController;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Controller;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.util.List;
/**
 * REST endpoints for apparent-power data (视在功率数据).
 * <p>
 * The query/insert fields are populated by the dynamic-bean processor, which
 * selects the concrete {@code IDataHarmpowerS} implementation from configuration.
 *
 * @author hongawen
 * @version 1.0
 * @date 2024/11/6 19:48
 */
@Validated
@Slf4j
@RestController
@RequestMapping("/dataHarmpowerS")
@Api(tags = "视在功率数据")
public class DataHarmpowerSController extends BaseController {

    // Query-side implementation, injected via @QueryBean (currently unused here).
    @QueryBean
    private IDataHarmpowerS dataHarmpowerSQuery;

    // Insert-side implementation, injected via @InsertBean.
    @InsertBean
    private IDataHarmpowerS dataHarmpowerSInsert;

    /**
     * Batch-inserts apparent-power records.
     *
     * @param dataHarmpowerSDTOList records to persist
     * @return success envelope with no payload
     */
    @OperateInfo(info = LogEnum.BUSINESS_COMMON, operateType = OperateType.ADD)
    @PostMapping("/batchInsertion")
    @ApiOperation("批量插入")
    public HttpResult<String> batchInsertion(@RequestBody List<DataHarmpowerSDTO> dataHarmpowerSDTOList) {
        String methodDescribe = getMethodDescribe("batchInsertion");
        dataHarmpowerSInsert.batchInsertion(dataHarmpowerSDTOList);
        return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, null, methodDescribe);
    }
}

View File

@@ -0,0 +1,61 @@
package com.njcn.dataProcess.controller;
import com.njcn.common.pojo.annotation.OperateInfo;
import com.njcn.common.pojo.constant.OperateType;
import com.njcn.common.pojo.enums.common.LogEnum;
import com.njcn.common.pojo.enums.response.CommonResponseEnum;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.common.utils.HttpResultUtil;
import com.njcn.dataProcess.dto.DataIDTO;
import com.njcn.dataProcess.annotation.InsertBean;
import com.njcn.dataProcess.annotation.QueryBean;
import com.njcn.dataProcess.service.IDataI;
import com.njcn.web.controller.BaseController;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Controller;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.util.List;
/**
 * REST endpoints for current data (电流数据).
 * <p>
 * The query/insert fields are populated by the dynamic-bean processor, which
 * selects the concrete {@code IDataI} implementation from configuration.
 *
 * @author hongawen
 * @version 1.0
 * @date 2024/11/6 19:48
 */
@Validated
@Slf4j
@RestController
@RequestMapping("/dataI")
@Api(tags = "电流数据")
public class DataIController extends BaseController {

    // Query-side implementation, injected via @QueryBean (currently unused here).
    @QueryBean
    private IDataI dataIQuery;

    // Insert-side implementation, injected via @InsertBean.
    @InsertBean
    private IDataI dataIInsert;

    /**
     * Batch-inserts current records.
     *
     * @param dataIDTOList records to persist
     * @return success envelope with no payload
     */
    @OperateInfo(info = LogEnum.BUSINESS_COMMON, operateType = OperateType.ADD)
    @PostMapping("/batchInsertion")
    @ApiOperation("批量插入")
    public HttpResult<String> batchInsertion(@RequestBody List<DataIDTO> dataIDTOList) {
        String methodDescribe = getMethodDescribe("batchInsertion");
        dataIInsert.batchInsertion(dataIDTOList);
        return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, null, methodDescribe);
    }
}

View File

@@ -0,0 +1,61 @@
package com.njcn.dataProcess.controller;
import com.njcn.common.pojo.annotation.OperateInfo;
import com.njcn.common.pojo.constant.OperateType;
import com.njcn.common.pojo.enums.common.LogEnum;
import com.njcn.common.pojo.enums.response.CommonResponseEnum;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.common.utils.HttpResultUtil;
import com.njcn.dataProcess.dto.DataInharmIDTO;
import com.njcn.dataProcess.annotation.InsertBean;
import com.njcn.dataProcess.annotation.QueryBean;
import com.njcn.dataProcess.service.IDataInharmI;
import com.njcn.web.controller.BaseController;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Controller;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.util.List;
/**
 * REST endpoints for interharmonic-current data (间谐波电流数据).
 * <p>
 * The query/insert fields are populated by the dynamic-bean processor, which
 * selects the concrete {@code IDataInharmI} implementation from configuration.
 *
 * @author hongawen
 * @version 1.0
 * @date 2024/11/6 19:48
 */
@Validated
@Slf4j
@RestController
@RequestMapping("/dataInharmI")
@Api(tags = "间谐波电电流数据")
public class DataInharmIController extends BaseController {

    // Query-side implementation, injected via @QueryBean (currently unused here).
    @QueryBean
    private IDataInharmI dataInharmIQuery;

    // Insert-side implementation, injected via @InsertBean.
    @InsertBean
    private IDataInharmI dataInharmIInsert;

    /**
     * Batch-inserts interharmonic-current records.
     *
     * @param dataInharmIDTOList records to persist
     * @return success envelope with no payload
     */
    @OperateInfo(info = LogEnum.BUSINESS_COMMON, operateType = OperateType.ADD)
    @PostMapping("/batchInsertion")
    @ApiOperation("批量插入")
    public HttpResult<String> batchInsertion(@RequestBody List<DataInharmIDTO> dataInharmIDTOList) {
        String methodDescribe = getMethodDescribe("batchInsertion");
        dataInharmIInsert.batchInsertion(dataInharmIDTOList);
        return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, null, methodDescribe);
    }
}

View File

@@ -0,0 +1,61 @@
package com.njcn.dataProcess.controller;
import com.njcn.common.pojo.annotation.OperateInfo;
import com.njcn.common.pojo.constant.OperateType;
import com.njcn.common.pojo.enums.common.LogEnum;
import com.njcn.common.pojo.enums.response.CommonResponseEnum;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.common.utils.HttpResultUtil;
import com.njcn.dataProcess.dto.DataInharmVDTO;
import com.njcn.dataProcess.annotation.InsertBean;
import com.njcn.dataProcess.annotation.QueryBean;
import com.njcn.dataProcess.service.IDataInharmV;
import com.njcn.web.controller.BaseController;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Controller;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.util.List;
/**
 * REST endpoints for interharmonic-voltage data (间谐波电压数据).
 * <p>
 * The query/insert fields are populated by the dynamic-bean processor, which
 * selects the concrete {@code IDataInharmV} implementation from configuration.
 *
 * @author hongawen
 * @version 1.0
 * @date 2024/11/6 19:48
 */
@Validated
@Slf4j
@RestController
@RequestMapping("/dataInharmV")
@Api(tags = "间谐波电压数据")
public class DataInharmVController extends BaseController {

    // Query-side implementation, injected via @QueryBean (currently unused here).
    @QueryBean
    private IDataInharmV dataInharmVQuery;

    // Insert-side implementation, injected via @InsertBean.
    @InsertBean
    private IDataInharmV dataInharmVInsert;

    /**
     * Batch-inserts interharmonic-voltage records.
     *
     * @param dataInharmVDTOList records to persist
     * @return success envelope with no payload
     */
    @OperateInfo(info = LogEnum.BUSINESS_COMMON, operateType = OperateType.ADD)
    @PostMapping("/batchInsertion")
    @ApiOperation("批量插入")
    public HttpResult<String> batchInsertion(@RequestBody List<DataInharmVDTO> dataInharmVDTOList) {
        String methodDescribe = getMethodDescribe("batchInsertion");
        dataInharmVInsert.batchInsertion(dataInharmVDTOList);
        return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, null, methodDescribe);
    }
}

View File

@@ -0,0 +1,61 @@
package com.njcn.dataProcess.controller;
import com.njcn.common.pojo.annotation.OperateInfo;
import com.njcn.common.pojo.constant.OperateType;
import com.njcn.common.pojo.enums.common.LogEnum;
import com.njcn.common.pojo.enums.response.CommonResponseEnum;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.common.utils.HttpResultUtil;
import com.njcn.dataProcess.dto.DataPltDTO;
import com.njcn.dataProcess.annotation.InsertBean;
import com.njcn.dataProcess.annotation.QueryBean;
import com.njcn.dataProcess.service.IDataPlt;
import com.njcn.web.controller.BaseController;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Controller;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.util.List;
/**
 * REST endpoints for long-term flicker data (长时闪变数据).
 * <p>
 * The query/insert fields are populated by the dynamic-bean processor, which
 * selects the concrete {@code IDataPlt} implementation from configuration.
 *
 * @author hongawen
 * @version 1.0
 * @date 2024/11/6 19:48
 */
@Validated
@Slf4j
@RestController
@RequestMapping("/dataPlt")
@Api(tags = "长时闪变数据")
public class DataPltController extends BaseController {

    // Query-side implementation, injected via @QueryBean (currently unused here).
    @QueryBean
    private IDataPlt dataPltQuery;

    // Insert-side implementation, injected via @InsertBean.
    @InsertBean
    private IDataPlt dataPltInsert;

    /**
     * Batch-inserts long-term flicker records.
     *
     * @param dataPltDTOList records to persist
     * @return success envelope with no payload
     */
    @OperateInfo(info = LogEnum.BUSINESS_COMMON, operateType = OperateType.ADD)
    @PostMapping("/batchInsertion")
    @ApiOperation("批量插入")
    public HttpResult<String> batchInsertion(@RequestBody List<DataPltDTO> dataPltDTOList) {
        String methodDescribe = getMethodDescribe("batchInsertion");
        dataPltInsert.batchInsertion(dataPltDTOList);
        return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, null, methodDescribe);
    }
}

View File

@@ -0,0 +1,81 @@
package com.njcn.dataProcess.controller;
import com.njcn.common.pojo.annotation.OperateInfo;
import com.njcn.common.pojo.constant.OperateType;
import com.njcn.common.pojo.enums.common.LogEnum;
import com.njcn.common.pojo.enums.response.CommonResponseEnum;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.common.utils.HttpResultUtil;
import com.njcn.dataProcess.dto.DataVDTO;
import com.njcn.dataProcess.dto.DataVFiveItemDTO;
import com.njcn.dataProcess.annotation.InsertBean;
import com.njcn.dataProcess.annotation.QueryBean;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.service.IDataV;
import com.njcn.web.controller.BaseController;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiOperation;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Controller;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.*;
import java.time.LocalDateTime;
import java.util.List;
import java.util.Map;
/**
 * REST endpoints for voltage data (电压类数据获取): range statistics, batch
 * insertion, and data-point time lookup used for re-collection (补招).
 * <p>
 * The query/insert fields are populated by the dynamic-bean processor, which
 * selects the concrete {@code IDataV} implementation from configuration.
 *
 * @author hongawen
 * @version 1.0
 * @date 2024/11/6 19:48
 */
@Validated
@Slf4j
@RestController
@RequestMapping("/dataV")
@Api(tags = "电压类数据获取")
public class DataVController extends BaseController {

    // Query-side implementation, injected via @QueryBean.
    @QueryBean
    private IDataV dataVQuery;

    // Insert-side implementation, injected via @InsertBean.
    @InsertBean
    private IDataV dataVInsert;

    /**
     * Returns aggregated statistics for a monitoring point over a time range.
     *
     * @param lineParam common algorithm query parameters
     * @return success envelope carrying the statistics map
     */
    @OperateInfo(info = LogEnum.BUSINESS_COMMON, operateType = OperateType.ADD)
    @PostMapping("/getLineCountEvaluate")
    @ApiOperation("获取监测点时间范围内统计数据")
    @ApiImplicitParam(name = "lineParam", value = "算法通用查询参数", required = true)
    public HttpResult<Map<String, List<DataVFiveItemDTO>>> getLineCountEvaluate(@RequestBody LineCountEvaluateParam lineParam) {
        String methodDescribe = getMethodDescribe("getLineCountEvaluate");
        Map<String, List<DataVFiveItemDTO>> result = dataVQuery.getLineCountEvaluate(lineParam);
        // BUG FIX: the computed result was previously discarded and null returned.
        return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, result, methodDescribe);
    }

    /**
     * Batch-inserts voltage records.
     *
     * @param dataVDTOList records to persist
     * @return success envelope with no payload
     */
    @OperateInfo(info = LogEnum.BUSINESS_COMMON, operateType = OperateType.ADD)
    @PostMapping("/batchInsertion")
    @ApiOperation("批量插入")
    public HttpResult<String> batchInsertion(@RequestBody List<DataVDTO> dataVDTOList) {
        String methodDescribe = getMethodDescribe("batchInsertion");
        dataVInsert.batchInsertion(dataVDTOList);
        return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, null, methodDescribe);
    }

    /**
     * Lists the timestamps at which data exists for a monitoring point on the
     * given date (used when re-requesting missing data).
     *
     * @param lineId    monitoring point id
     * @param localData date string to inspect
     * @return success envelope carrying the list of timestamps
     */
    @OperateInfo(info = LogEnum.BUSINESS_COMMON, operateType = OperateType.ADD)
    @PostMapping("/monitoringTime")
    @ApiOperation("获取监测点数据时间点(补招使用)")
    public HttpResult<List<LocalDateTime>> monitoringTime(@RequestParam("lineId") String lineId, @RequestParam("localData") String localData) {
        String methodDescribe = getMethodDescribe("monitoringTime");
        List<LocalDateTime> localDateTimeList = dataVQuery.monitoringTime(lineId, localData);
        return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, localDateTimeList, methodDescribe);
    }
}

View File

@@ -0,0 +1,50 @@
package com.njcn.dataProcess.controller;
import com.njcn.common.pojo.annotation.OperateInfo;
import com.njcn.common.pojo.constant.OperateType;
import com.njcn.common.pojo.enums.common.LogEnum;
import com.njcn.common.pojo.enums.response.CommonResponseEnum;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.common.utils.HttpResultUtil;
import com.njcn.dataProcess.dto.LnDataDTO;
import com.njcn.dataProcess.service.LnDataDealService;
import com.njcn.web.controller.BaseController;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
/**
 * REST endpoints for data uploaded by the Liaoning front-end processor
 * (辽宁前置上送数据).
 *
 * @author clam
 * @version V1.0.0
 * @since 2024/12/11
 */
@Validated
@Slf4j
@RestController
@RequestMapping("/lndata")
@Api(tags = "辽宁前置上送数据")
public class LnDataDealController extends BaseController {

    @Autowired
    private LnDataDealService lnDataDealService;

    /**
     * Batch-inserts an uploaded data bundle.
     *
     * @param lnDataDTO uploaded data payload
     * @return success envelope with no payload
     */
    @OperateInfo(info = LogEnum.BUSINESS_COMMON, operateType = OperateType.ADD)
    @PostMapping("/batchInsertion")
    @ApiOperation("批量插入")
    public HttpResult<String> batchInsertion(@RequestBody LnDataDTO lnDataDTO) {
        String methodDescribe = getMethodDescribe("batchInsertion");
        lnDataDealService.batchInsertion(lnDataDTO);
        return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, null, methodDescribe);
    }
}

View File

@@ -0,0 +1,59 @@
package com.njcn.dataProcess.controller;
import com.njcn.common.pojo.annotation.OperateInfo;
import com.njcn.common.pojo.constant.OperateType;
import com.njcn.common.pojo.enums.common.LogEnum;
import com.njcn.common.pojo.enums.response.CommonResponseEnum;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.common.utils.HttpResultUtil;
import com.njcn.dataProcess.dto.RmpEventDetailDTO;
import com.njcn.dataProcess.annotation.InsertBean;
import com.njcn.dataProcess.annotation.QueryBean;
import com.njcn.dataProcess.service.IRmpEventDetail;
import com.njcn.web.controller.BaseController;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Controller;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
/**
 * REST endpoints for transient event data (暂态事件).
 * <p>
 * The query/insert fields are populated by the dynamic-bean processor, which
 * selects the concrete {@code IRmpEventDetail} implementation from configuration.
 *
 * @author hongawen
 * @version 1.0
 * @date 2024/11/6 19:48
 */
@Validated
@Slf4j
@RestController
@RequestMapping("/rmpEventDetail")
@Api(tags = "暂态事件")
public class RmpEventDetailController extends BaseController {

    // Query-side implementation, injected via @QueryBean (currently unused here).
    @QueryBean
    private IRmpEventDetail iRmpEventDetailQuery;

    // Insert-side implementation, injected via @InsertBean.
    @InsertBean
    private IRmpEventDetail iRmpEventDetailInsert;

    /**
     * Inserts a transient event record.
     *
     * @param rmpEventDetailDTO event payload to persist
     * @return success envelope with no payload
     */
    @OperateInfo(info = LogEnum.BUSINESS_COMMON, operateType = OperateType.ADD)
    @PostMapping("/batchInsertion")
    @ApiOperation("批量插入")
    public HttpResult<String> batchInsertion(@RequestBody RmpEventDetailDTO rmpEventDetailDTO) {
        String methodDescribe = getMethodDescribe("batchInsertion");
        iRmpEventDetailInsert.batchInsertion(rmpEventDetailDTO);
        return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, null, methodDescribe);
    }
}

View File

@@ -0,0 +1,17 @@
package com.njcn.dataProcess.dao.imapper;
import com.njcn.dataProcess.po.influx.DataFlicker;
import com.njcn.influx.base.InfluxDbBaseMapper;
/**
 * <p>
 * InfluxDB mapper for {@code DataFlicker} (short-term flicker) records.
 * Declares no custom methods; all operations come from {@code InfluxDbBaseMapper}.
 * </p>
 *
 * @author hongawen
 * @since 2023-12-28
 */
public interface DataFlickerMapper extends InfluxDbBaseMapper<DataFlicker> {
}

View File

@@ -0,0 +1,17 @@
package com.njcn.dataProcess.dao.imapper;
import com.njcn.dataProcess.po.influx.DataFluc;
import com.njcn.influx.base.InfluxDbBaseMapper;
/**
 * <p>
 * InfluxDB mapper for {@link DataFluc} measurement points.
 * </p>
 *
 * @author hongawen
 * @since 2023-12-28
 */
public interface DataFlucMapper extends InfluxDbBaseMapper<DataFluc> {
}

View File

@@ -0,0 +1,17 @@
package com.njcn.dataProcess.dao.imapper;
import com.njcn.dataProcess.po.influx.DataHarmphasicI;
import com.njcn.influx.base.InfluxDbBaseMapper;
/**
 * <p>
 * InfluxDB mapper for {@link DataHarmphasicI} measurement points.
 * </p>
 *
 * @author hongawen
 * @since 2023-12-28
 */
public interface DataHarmphasicIMapper extends InfluxDbBaseMapper<DataHarmphasicI> {
}

View File

@@ -0,0 +1,17 @@
package com.njcn.dataProcess.dao.imapper;
import com.njcn.dataProcess.po.influx.DataHarmphasicV;
import com.njcn.influx.base.InfluxDbBaseMapper;
/**
 * <p>
 * InfluxDB mapper for {@link DataHarmphasicV} measurement points.
 * </p>
 *
 * @author hongawen
 * @since 2023-12-28
 */
public interface DataHarmphasicVMapper extends InfluxDbBaseMapper<DataHarmphasicV> {
}

View File

@@ -0,0 +1,17 @@
package com.njcn.dataProcess.dao.imapper;
import com.njcn.dataProcess.po.influx.DataHarmpowerP;
import com.njcn.influx.base.InfluxDbBaseMapper;
/**
 * <p>
 * InfluxDB mapper for {@link DataHarmpowerP} measurement points.
 * </p>
 *
 * @author hongawen
 * @since 2023-12-28
 */
public interface DataHarmpowerPMapper extends InfluxDbBaseMapper<DataHarmpowerP> {
}

View File

@@ -0,0 +1,17 @@
package com.njcn.dataProcess.dao.imapper;
import com.njcn.dataProcess.po.influx.DataHarmpowerQ;
import com.njcn.influx.base.InfluxDbBaseMapper;
/**
 * <p>
 * InfluxDB mapper for {@link DataHarmpowerQ} measurement points.
 * </p>
 *
 * @author hongawen
 * @since 2023-12-28
 */
public interface DataHarmpowerQMapper extends InfluxDbBaseMapper<DataHarmpowerQ> {
}

View File

@@ -0,0 +1,17 @@
package com.njcn.dataProcess.dao.imapper;
import com.njcn.dataProcess.po.influx.DataHarmpowerS;
import com.njcn.influx.base.InfluxDbBaseMapper;
/**
 * <p>
 * InfluxDB mapper for {@link DataHarmpowerS} measurement points.
 * </p>
 *
 * @author hongawen
 * @since 2023-12-28
 */
public interface DataHarmpowerSMapper extends InfluxDbBaseMapper<DataHarmpowerS> {
}

View File

@@ -0,0 +1,16 @@
package com.njcn.dataProcess.dao.imapper;
import com.njcn.dataProcess.po.influx.DataI;
import com.njcn.influx.base.InfluxDbBaseMapper;
/**
 * InfluxDB mapper for {@link DataI} measurement points.
 *
 * @author hongawen
 * @version 1.0
 * @date 2024/11/7 18:49
 */
public interface DataIMapper extends InfluxDbBaseMapper<DataI> {
}

View File

@@ -0,0 +1,17 @@
package com.njcn.dataProcess.dao.imapper;
import com.njcn.dataProcess.po.influx.DataInharmI;
import com.njcn.influx.base.InfluxDbBaseMapper;
/**
 * <p>
 * InfluxDB mapper for {@link DataInharmI} measurement points.
 * </p>
 *
 * @author hongawen
 * @since 2023-12-28
 */
public interface DataInharmIMapper extends InfluxDbBaseMapper<DataInharmI> {
}

View File

@@ -0,0 +1,17 @@
package com.njcn.dataProcess.dao.imapper;
import com.njcn.dataProcess.po.influx.DataInharmV;
import com.njcn.influx.base.InfluxDbBaseMapper;
/**
 * <p>
 * InfluxDB mapper for {@link DataInharmV} measurement points.
 * </p>
 *
 * @author hongawen
 * @since 2023-12-28
 */
public interface DataInharmVMapper extends InfluxDbBaseMapper<DataInharmV> {
}

View File

@@ -0,0 +1,17 @@
package com.njcn.dataProcess.dao.imapper;
import com.njcn.dataProcess.po.influx.DataPlt;
import com.njcn.influx.base.InfluxDbBaseMapper;
/**
 * <p>
 * InfluxDB mapper for {@link DataPlt} measurement points.
 * </p>
 *
 * @author hongawen
 * @since 2023-12-28
 */
public interface DataPltMapper extends InfluxDbBaseMapper<DataPlt> {
}

View File

@@ -0,0 +1,20 @@
package com.njcn.dataProcess.dao.imapper;
import com.njcn.dataProcess.dto.LineDataVFiveItemDTO;
import com.njcn.dataProcess.po.influx.DataV;
import com.njcn.influx.base.InfluxDbBaseMapper;
import com.njcn.influx.query.InfluxQueryWrapper;
import java.util.List;
/**
 * InfluxDB mapper for {@link DataV} measurement points.
 *
 * @author hongawen
 * @version 1.0
 * @date 2024/11/7 18:49
 */
public interface DataVMapper extends InfluxDbBaseMapper<DataV> {

    /**
     * Runs the given query wrapper against the DataV measurement.
     * NOTE(review): presumably returns aggregated five-item statistics per line —
     * confirm against the implementing query and its callers.
     *
     * @param dataVQueryWrapper prepared Influx query conditions
     * @return result rows projected onto {@code LineDataVFiveItemDTO}
     */
    List<LineDataVFiveItemDTO> queryDataValue(InfluxQueryWrapper dataVQueryWrapper);
}

View File

@@ -0,0 +1,21 @@
package com.njcn.dataProcess.dao.relation.mapper;
import com.njcn.dataProcess.po.relation.DataFlicker;
import com.njcn.db.mapper.BatchBaseMapper;
import org.apache.ibatis.annotations.Param;
import java.util.List;
/**
 * <p>
 * Relational-database (MyBatis) mapper for {@link DataFlicker} rows.
 * </p>
 *
 * @author hongawen
 * @since 2023-12-28
 */
public interface DataFlickerRelationMapper extends BatchBaseMapper<DataFlicker> {

    /**
     * Batch upsert backed by the MERGE statement in the corresponding XML mapper
     * (keyed on TIMEID/LINEID/PHASIC_TYPE).
     * NOTE(review): "test" looks like a placeholder name — consider renaming along
     * with the XML statement id once callers allow.
     *
     * @param collect rows to merge into DATA_FLICKER
     */
    void test(@Param("list") List<DataFlicker> collect);
}

View File

@@ -0,0 +1,42 @@
<?xml version="1.0" encoding="UTF-8" ?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd" >
<mapper namespace="com.njcn.dataProcess.dao.relation.mapper.DataFlickerRelationMapper">
<!--
    Batch upsert into DATA_FLICKER keyed by (TIMEID, LINEID, PHASIC_TYPE):
    matched rows get FLUC/PST/PLT/QUALITYFLAG refreshed, unmatched rows are inserted.
    NOTE(review): the statement mixes "MERGE ... AS alias" (SQL Server/DM style)
    with "FROM dual" (Oracle style) - confirm it matches the target database dialect.
-->
<insert id="test" parameterType="com.njcn.dataProcess.po.relation.DataFlicker">
MERGE INTO DATA_FLICKER AS target
USING
(
<foreach collection="list" item="item" separator="union all">
SELECT #{item.timeid} as TIMEID,
#{item.lineid} as LINEID,
#{item.phasicType} as PHASIC_TYPE,
#{item.fluc} as FLUC,
#{item.pst} as PST,
#{item.plt} as PLT,
#{item.qualityflag} as QUALITYFLAG
FROM dual
</foreach>
) AS source
ON (target.TIMEID = source.TIMEID and target.LINEID = source.LINEID and target.PHASIC_TYPE = source.PHASIC_TYPE)
WHEN matched THEN
UPDATE SET
target.FLUC = source.FLUC,
target.PST = source.PST,
target.PLT = source.PLT,
target.QUALITYFLAG = source.QUALITYFLAG
WHEN NOT matched THEN
INSERT
(TIMEID,LINEID,PHASIC_TYPE,FLUC,PST,PLT,QUALITYFLAG)
VALUES
(source.TIMEID,
source.LINEID,
source.PHASIC_TYPE,
source.FLUC,
source.PST,
source.PLT,
source.QUALITYFLAG)
</insert>
</mapper>

View File

@@ -0,0 +1,17 @@
package com.njcn.dataProcess.dao.relation.mapper;
import com.njcn.dataProcess.po.relation.DataFluc;
import com.njcn.db.mapper.BatchBaseMapper;
/**
 * <p>
 * Relational-database (MyBatis) mapper for {@link DataFluc} rows.
 * </p>
 *
 * @author hongawen
 * @since 2023-12-28
 */
public interface DataFlucRelationMapper extends BatchBaseMapper<DataFluc> {
}

View File

@@ -0,0 +1,20 @@
package com.njcn.dataProcess.dao.relation.mapper;
import com.njcn.dataProcess.po.relation.DataHarmphasicI;
import com.njcn.db.mapper.BatchBaseMapper;
import org.apache.ibatis.annotations.Param;
import java.util.List;
/**
 * <p>
 * Relational-database (MyBatis) mapper for {@link DataHarmphasicI} rows.
 * </p>
 *
 * @author hongawen
 * @since 2023-12-28
 */
public interface DataHarmphasicIRelationMapper extends BatchBaseMapper<DataHarmphasicI> {

    /**
     * Batch write defined in the corresponding XML mapper — presumably a MERGE-style
     * upsert like {@code DataFlickerRelationMapper.test}; confirm in its XML file.
     * NOTE(review): "test" looks like a placeholder name.
     *
     * @param collect rows to persist
     */
    void test(@Param("list") List<DataHarmphasicI> collect);
}

View File

@@ -0,0 +1,17 @@
package com.njcn.dataProcess.dao.relation.mapper;
import com.njcn.dataProcess.po.relation.DataHarmphasicV;
import com.njcn.db.mapper.BatchBaseMapper;
/**
 * <p>
 * Relational-database (MyBatis) mapper for {@link DataHarmphasicV} rows.
 * </p>
 *
 * @author hongawen
 * @since 2023-12-28
 */
public interface DataHarmphasicVRelationMapper extends BatchBaseMapper<DataHarmphasicV> {
}

View File

@@ -0,0 +1,17 @@
package com.njcn.dataProcess.dao.relation.mapper;
import com.njcn.dataProcess.po.relation.DataHarmpowerP;
import com.njcn.db.mapper.BatchBaseMapper;
/**
 * <p>
 * Relational-database (MyBatis) mapper for {@link DataHarmpowerP} rows.
 * </p>
 *
 * @author hongawen
 * @since 2023-12-28
 */
public interface DataHarmpowerPRelationMapper extends BatchBaseMapper<DataHarmpowerP> {
}

View File

@@ -0,0 +1,17 @@
package com.njcn.dataProcess.dao.relation.mapper;
import com.njcn.dataProcess.po.relation.DataHarmpowerQ;
import com.njcn.db.mapper.BatchBaseMapper;
/**
 * <p>
 * Relational-database (MyBatis) mapper for {@link DataHarmpowerQ} rows.
 * </p>
 *
 * @author hongawen
 * @since 2023-12-28
 */
public interface DataHarmpowerQRelationMapper extends BatchBaseMapper<DataHarmpowerQ> {
}

View File

@@ -0,0 +1,17 @@
package com.njcn.dataProcess.dao.relation.mapper;
import com.njcn.dataProcess.po.relation.DataHarmpowerS;
import com.njcn.db.mapper.BatchBaseMapper;
/**
 * <p>
 * Relational-database (MyBatis) mapper for {@link DataHarmpowerS} rows.
 * </p>
 *
 * @author hongawen
 * @since 2023-12-28
 */
public interface DataHarmpowerSRelationMapper extends BatchBaseMapper<DataHarmpowerS> {
}

View File

@@ -0,0 +1,17 @@
package com.njcn.dataProcess.dao.relation.mapper;
import com.njcn.dataProcess.po.relation.DataHarmrateI;
import com.njcn.db.mapper.BatchBaseMapper;
/**
 * <p>
 * Relational-database (MyBatis) mapper for {@link DataHarmrateI} rows.
 * </p>
 *
 * @author hongawen
 * @since 2023-12-28
 */
public interface DataHarmrateIRelationMapper extends BatchBaseMapper<DataHarmrateI> {
}

View File

@@ -0,0 +1,17 @@
package com.njcn.dataProcess.dao.relation.mapper;
import com.njcn.dataProcess.po.relation.DataHarmrateV;
import com.njcn.db.mapper.BatchBaseMapper;
/**
 * <p>
 * Relational-database (MyBatis) mapper for {@link DataHarmrateV} rows.
 * </p>
 *
 * @author hongawen
 * @since 2023-12-28
 */
public interface DataHarmrateVRelationMapper extends BatchBaseMapper<DataHarmrateV> {
}

View File

@@ -0,0 +1,17 @@
package com.njcn.dataProcess.dao.relation.mapper;
import com.njcn.dataProcess.po.relation.DataI;
import com.njcn.db.mapper.BatchBaseMapper;
/**
 * <p>
 * Relational-database (MyBatis) mapper for {@link DataI} rows.
 * </p>
 *
 * @author hongawen
 * @since 2023-12-28
 */
public interface DataIRelationMapper extends BatchBaseMapper<DataI> {
}

View File

@@ -0,0 +1,17 @@
package com.njcn.dataProcess.dao.relation.mapper;
import com.njcn.dataProcess.po.relation.DataInharmI;
import com.njcn.db.mapper.BatchBaseMapper;
/**
 * <p>
 * Relational-database (MyBatis) mapper for {@link DataInharmI} rows.
 * </p>
 *
 * @author hongawen
 * @since 2023-12-28
 */
public interface DataInharmIRelationMapper extends BatchBaseMapper<DataInharmI> {
}

View File

@@ -0,0 +1,17 @@
package com.njcn.dataProcess.dao.relation.mapper;
import com.njcn.dataProcess.po.relation.DataPlt;
import com.njcn.db.mapper.BatchBaseMapper;
/**
 * <p>
 * Relational-database (MyBatis) mapper for {@link DataPlt} rows.
 * </p>
 *
 * @author hongawen
 * @since 2023-12-28
 */
public interface DataPltRelationMapper extends BatchBaseMapper<DataPlt> {
}

View File

@@ -0,0 +1,15 @@
package com.njcn.dataProcess.dao.relation.mapper;
import com.njcn.dataProcess.po.relation.DataInharmV;
import com.njcn.db.mapper.BatchBaseMapper;
/**
 * Relational-database (MyBatis) mapper for {@link DataInharmV} rows.
 * NOTE(review): the name "DataVInharmVRelationMapper" breaks the sibling naming
 * pattern (expected "DataInharmVRelationMapper") — renaming would touch callers,
 * so it is only flagged here.
 *
 * @author hongawen
 * @version 1.0
 * @date 2024/11/7 18:49
 */
public interface DataVInharmVRelationMapper extends BatchBaseMapper<DataInharmV> {
}

View File

@@ -0,0 +1,17 @@
package com.njcn.dataProcess.dao.relation.mapper;
import com.njcn.dataProcess.po.relation.DataV;
import com.njcn.db.mapper.BatchBaseMapper;
/**
 * <p>
 * Relational-database (MyBatis) mapper for {@link DataV} rows.
 * </p>
 *
 * @author hongawen
 * @since 2023-12-28
 */
public interface DataVRelationMapper extends BatchBaseMapper<DataV> {
}

View File

@@ -0,0 +1,17 @@
package com.njcn.dataProcess.dao.relation.mapper;
import com.njcn.dataProcess.po.relation.RmpEventDetail;
import com.njcn.db.mapper.BatchBaseMapper;
/**
 * Relational-database (MyBatis) mapper for {@link RmpEventDetail} rows.
 *
 * @author cdf
 * @date 2023/6/19
 */
public interface RmpEventDetailMapper extends BatchBaseMapper<RmpEventDetail> {
}

View File

@@ -0,0 +1,16 @@
package com.njcn.dataProcess.service;
import com.njcn.dataProcess.dto.DataFlickerDTO;
import java.util.List;
/**
 * Description: batch persistence contract for flicker ({@code DataFlickerDTO}) records.
 * Date: 2024/11/18 11:17【需求编号】
 *
 * @author clam
 * @version V1.0.0
 */
public interface IDataFlicker {

    /**
     * Persists the given flicker records in batch.
     *
     * @param dataFlickerDTOList records to insert
     */
    void batchInsertion(List<DataFlickerDTO> dataFlickerDTOList);
}

View File

@@ -0,0 +1,16 @@
package com.njcn.dataProcess.service;
import com.njcn.dataProcess.dto.DataFlucDTO;
import java.util.List;
/**
 * Description: batch persistence contract for fluctuation ({@code DataFlucDTO}) records.
 * Date: 2024/11/18 11:17【需求编号】
 *
 * @author clam
 * @version V1.0.0
 */
public interface IDataFluc {

    /**
     * Persists the given fluctuation records in batch.
     *
     * @param dataFlucDTOList records to insert
     */
    void batchInsertion(List<DataFlucDTO> dataFlucDTOList);
}

View File

@@ -0,0 +1,16 @@
package com.njcn.dataProcess.service;
import com.njcn.dataProcess.dto.DataHarmphasicIDTO;
import java.util.List;
/**
 * Description: batch persistence contract for {@code DataHarmphasicIDTO} records.
 * Date: 2024/11/18 11:17【需求编号】
 *
 * @author clam
 * @version V1.0.0
 */
public interface IDataHarmphasicI {

    /**
     * Persists the given records in batch.
     *
     * @param dataHarmphasicIDTOList records to insert
     */
    void batchInsertion(List<DataHarmphasicIDTO> dataHarmphasicIDTOList);
}

View File

@@ -0,0 +1,16 @@
package com.njcn.dataProcess.service;
import com.njcn.dataProcess.dto.DataHarmphasicVDTO;
import java.util.List;
/**
 * Description: batch persistence contract for {@code DataHarmphasicVDTO} records.
 * Date: 2024/11/18 11:17【需求编号】
 *
 * @author clam
 * @version V1.0.0
 */
public interface IDataHarmphasicV {

    /**
     * Persists the given records in batch.
     *
     * @param dataHarmphasicVDTOList records to insert
     */
    void batchInsertion(List<DataHarmphasicVDTO> dataHarmphasicVDTOList);
}

View File

@@ -0,0 +1,16 @@
package com.njcn.dataProcess.service;
import com.njcn.dataProcess.dto.DataHarmpowerPDTO;
import java.util.List;
/**
 * Description: batch persistence contract for {@code DataHarmpowerPDTO} records.
 * Date: 2024/11/18 11:17【需求编号】
 *
 * @author clam
 * @version V1.0.0
 */
public interface IDataHarmpowerP {

    /**
     * Persists the given records in batch.
     *
     * @param dataHarmpowerPDTOList records to insert
     */
    void batchInsertion(List<DataHarmpowerPDTO> dataHarmpowerPDTOList);
}

View File

@@ -0,0 +1,16 @@
package com.njcn.dataProcess.service;
import com.njcn.dataProcess.dto.DataHarmpowerQDTO;
import java.util.List;
/**
 * Description: batch persistence contract for {@code DataHarmpowerQDTO} records.
 * Date: 2024/11/18 11:17【需求编号】
 *
 * @author clam
 * @version V1.0.0
 */
public interface IDataHarmpowerQ {

    /**
     * Persists the given records in batch.
     *
     * @param dataHarmpowerQDTOList records to insert
     */
    void batchInsertion(List<DataHarmpowerQDTO> dataHarmpowerQDTOList);
}

View File

@@ -0,0 +1,16 @@
package com.njcn.dataProcess.service;
import com.njcn.dataProcess.dto.DataHarmpowerSDTO;
import java.util.List;
/**
 * Description: batch persistence contract for {@code DataHarmpowerSDTO} records.
 * Date: 2024/11/18 11:17【需求编号】
 *
 * @author clam
 * @version V1.0.0
 */
public interface IDataHarmpowerS {

    /**
     * Persists the given records in batch.
     *
     * @param dataHarmpowerSDTOList records to insert
     */
    void batchInsertion(List<DataHarmpowerSDTO> dataHarmpowerSDTOList);
}

View File

@@ -0,0 +1,16 @@
package com.njcn.dataProcess.service;
import com.njcn.dataProcess.dto.DataIDTO;
import java.util.List;
/**
 * Description: batch persistence contract for current ({@code DataIDTO}) records.
 * Date: 2024/11/18 11:17【需求编号】
 *
 * @author clam
 * @version V1.0.0
 */
public interface IDataI {

    /**
     * Persists the given current records in batch.
     *
     * @param dataIDTOList records to insert
     */
    void batchInsertion(List<DataIDTO> dataIDTOList);
}

View File

@@ -0,0 +1,17 @@
package com.njcn.dataProcess.service;
import com.njcn.dataProcess.dto.DataInharmIDTO;
import java.util.List;
/**
 * Description: batch persistence contract for {@code DataInharmIDTO} records.
 * Date: 2024/11/18 13:27【需求编号】
 *
 * @author clam
 * @version V1.0.0
 */
public interface IDataInharmI {

    /**
     * Persists the given records in batch.
     *
     * @param dataInharmIDTOList records to insert
     */
    void batchInsertion(List<DataInharmIDTO> dataInharmIDTOList);
}

View File

@@ -0,0 +1,17 @@
package com.njcn.dataProcess.service;
import com.njcn.dataProcess.dto.DataInharmVDTO;
import java.util.List;
/**
 * Description: batch persistence contract for {@code DataInharmVDTO} records.
 * Date: 2024/11/18 13:27【需求编号】
 *
 * @author clam
 * @version V1.0.0
 */
public interface IDataInharmV {

    /**
     * Persists the given records in batch.
     *
     * @param dataInharmVDTOList records to insert
     */
    void batchInsertion(List<DataInharmVDTO> dataInharmVDTOList);
}

View File

@@ -0,0 +1,16 @@
package com.njcn.dataProcess.service;
import com.njcn.dataProcess.dto.DataPltDTO;
import java.util.List;
/**
 * Description: batch persistence contract for long-term flicker ({@code DataPltDTO}) records.
 * Date: 2024/11/18 11:17【需求编号】
 *
 * @author clam
 * @version V1.0.0
 */
public interface IDataPlt {

    /**
     * Persists the given records in batch.
     *
     * @param dataPltDTOList records to insert
     */
    void batchInsertion(List<DataPltDTO> dataPltDTOList);
}

View File

@@ -0,0 +1,28 @@
package com.njcn.dataProcess.service;
import com.njcn.dataProcess.dto.DataVDTO;
import com.njcn.dataProcess.dto.DataVFiveItemDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import java.time.LocalDateTime;
import java.util.List;
import java.util.Map;
/**
 * Service contract for voltage ({@code DataV}) measurement data.
 *
 * @author hongawen
 * @version 1.0
 * @date 2024/11/7 10:54
 */
public interface IDataV {
    /**
     * Fetches per-monitoring-point statistics for the points and time range
     * carried by the parameter.
     *
     * @param lineParam monitoring-point parameters (ids plus time range)
     * @return statistics keyed by monitoring-point id
     */
    Map<String, List<DataVFiveItemDTO>> getLineCountEvaluate(LineCountEvaluateParam lineParam);

    /**
     * Persists the given voltage records in batch.
     *
     * @param dataVDTOList records to insert
     */
    void batchInsertion(List<DataVDTO> dataVDTOList );

    /**
     * NOTE(review): presumably returns the measurement timestamps recorded for the
     * line around the given date — confirm against the implementation.
     *
     * @param lineId    monitoring-point id
     * @param localData date string (format defined by the implementation)
     * @return matching measurement timestamps
     */
    List<LocalDateTime> monitoringTime(String lineId, String localData);
}

View File

@@ -0,0 +1,14 @@
package com.njcn.dataProcess.service;
import com.njcn.dataProcess.dto.RmpEventDetailDTO;
/**
 * Description: batch persistence contract for transient-event detail records.
 * Date: 2024/11/28 9:04【需求编号】
 *
 * @author clam
 * @version V1.0.0
 */
public interface IRmpEventDetail {

    /**
     * Persists the event rows carried by the DTO in batch.
     *
     * @param rmpEventDetailDTO payload holding the rows to insert
     */
    void batchInsertion(RmpEventDetailDTO rmpEventDetailDTO);
}

View File

@@ -0,0 +1,14 @@
package com.njcn.dataProcess.service;
import com.njcn.dataProcess.dto.LnDataDTO;
/**
 * Description: entry point for persisting a full line-data payload (all measurement kinds).
 * Date: 2024/12/11 18:28【需求编号】
 *
 * @author clam
 * @version V1.0.0
 */
public interface LnDataDealService {

    /**
     * Persists every measurement list carried by the DTO in batch.
     *
     * @param lnDataDTO aggregate payload holding one list per measurement kind
     */
    void batchInsertion(LnDataDTO lnDataDTO);
}

View File

@@ -0,0 +1,58 @@
package com.njcn.dataProcess.service.impl;
import com.njcn.dataProcess.dto.LnDataDTO;
import com.njcn.dataProcess.annotation.InsertBean;
import com.njcn.dataProcess.service.*;
import org.springframework.stereotype.Service;
/**
 * Description: fans one aggregate {@code LnDataDTO} payload out to the per-measurement
 * insert services (voltage, current, flicker, harmonics, ...), each injected via the
 * custom {@code @InsertBean} mechanism.
 * NOTE(review): field suffixes are inconsistent ("*Insert" vs "*Service"); left as-is
 * because the custom injection processor may resolve beans by field name — confirm
 * before renaming.
 * Date: 2024/12/11 18:29【需求编号】
 *
 * @author clam
 * @version V1.0.0
 */
@Service("RelationLnDataDealServiceImpl")
public class LnDataDealServiceImpl implements LnDataDealService {
@InsertBean
private IDataFlicker dataFlickerInsert;
@InsertBean
private IDataFluc dataFlucInsert;
@InsertBean
private IDataHarmphasicI dataHarmphasicIInsert;
@InsertBean
private IDataHarmphasicV dataHarmphasicVInsert;
@InsertBean
private IDataHarmpowerP dataHarmpowerPService;
@InsertBean
private IDataHarmpowerQ dataHarmpowerQService;
@InsertBean
private IDataHarmpowerS dataHarmpowerSService;
@InsertBean
private IDataI dataIService;
@InsertBean
private IDataInharmI dataInharmIService;
@InsertBean
private IDataInharmV dataInharmVService;
@InsertBean
private IDataPlt dataPltService;
@InsertBean
private IDataV dataVService;
// Delegates each list of the payload to its dedicated batch-insert service,
// sequentially and in a fixed order (voltage first).
@Override
public void batchInsertion(LnDataDTO lnDataDTO) {
dataVService.batchInsertion(lnDataDTO.getDataVList());
dataFlickerInsert.batchInsertion(lnDataDTO.getDataFlickerDTOList());
dataFlucInsert.batchInsertion(lnDataDTO.getDataFlucDTOList());
dataHarmphasicIInsert.batchInsertion(lnDataDTO.getDataHarmphasicIDTOList());
dataHarmphasicVInsert.batchInsertion(lnDataDTO.getDataHarmphasicVDTOList());
dataHarmpowerPService.batchInsertion(lnDataDTO.getDataHarmpowerPDTOList());
dataHarmpowerQService.batchInsertion(lnDataDTO.getDataHarmpowerQDTOList());
dataHarmpowerSService.batchInsertion(lnDataDTO.getDataHarmpowerSDTOList());
dataIService.batchInsertion(lnDataDTO.getDataIDTOList());
dataInharmIService.batchInsertion(lnDataDTO.getDataInharmIDTOList());
dataInharmVService.batchInsertion(lnDataDTO.getDataInharmVDTOList());
dataPltService.batchInsertion(lnDataDTO.getDataPltDTOList());
}
}

View File

@@ -0,0 +1,47 @@
package com.njcn.dataProcess.service.impl.influxdb;
import com.njcn.dataProcess.dto.DataFlickerDTO;
import com.njcn.dataProcess.dao.imapper.DataFlickerMapper;
import com.njcn.dataProcess.po.influx.DataFlicker;
import com.njcn.dataProcess.service.IDataFlicker;
import lombok.RequiredArgsConstructor;
import org.apache.commons.collections4.ListUtils;
import org.springframework.stereotype.Service;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
/**
 * Description: writes {@code DataFlicker} measurement points into InfluxDB in batches.
 * Date: 2024/11/18 14:33【需求编号】
 *
 * @author clam
 * @version V1.0.0
 */
@Service("InfluxdbDataFlickerImpl")
@RequiredArgsConstructor
public class InfluxdbDataFlickerImpl implements IDataFlicker {

    /** Upper bound on points handed to a single insertBatch call. */
    private static final int MAX_BATCH_SIZE = 1200000;

    private final DataFlickerMapper dataFlickerMapper;

    /**
     * Converts each relational DTO into its InfluxDB point and writes the points in batches.
     *
     * @param dataFlickerDTOList records to persist; null or empty input is a no-op
     */
    @Override
    public void batchInsertion(List<DataFlickerDTO> dataFlickerDTOList) {
        if (dataFlickerDTOList == null || dataFlickerDTOList.isEmpty()) {
            return;
        }
        List<DataFlicker> points = dataFlickerDTOList.stream()
                .map(DataFlicker::relationToInfluxDB)
                .collect(Collectors.toList());
        // ListUtils.partition already copes with lists shorter than MAX_BATCH_SIZE,
        // so the original Math.min pre-computation is unnecessary.
        for (List<DataFlicker> batch : ListUtils.partition(points, MAX_BATCH_SIZE)) {
            // Copy the partition view into a standalone list before handing it to the mapper.
            dataFlickerMapper.insertBatch(new ArrayList<>(batch));
        }
    }
}

View File

@@ -0,0 +1,47 @@
package com.njcn.dataProcess.service.impl.influxdb;
import com.njcn.dataProcess.dto.DataFlucDTO;
import com.njcn.dataProcess.dao.imapper.DataFlucMapper;
import com.njcn.dataProcess.po.influx.DataFluc;
import com.njcn.dataProcess.service.IDataFluc;
import lombok.RequiredArgsConstructor;
import org.apache.commons.collections4.ListUtils;
import org.springframework.stereotype.Service;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
/**
 * Description: writes {@code DataFluc} measurement points into InfluxDB in batches.
 * Date: 2024/11/18 14:33【需求编号】
 *
 * @author clam
 * @version V1.0.0
 */
@Service("InfluxdbDataFlucImpl")
@RequiredArgsConstructor
public class InfluxdbDataFlucImpl implements IDataFluc {

    /** Upper bound on points handed to a single insertBatch call. */
    private static final int MAX_BATCH_SIZE = 1200000;

    private final DataFlucMapper dataFlucMapper;

    /**
     * Converts each relational DTO into its InfluxDB point and writes the points in batches.
     *
     * @param dataFlucDTOList records to persist; null or empty input is a no-op
     */
    @Override
    public void batchInsertion(List<DataFlucDTO> dataFlucDTOList) {
        if (dataFlucDTOList == null || dataFlucDTOList.isEmpty()) {
            return;
        }
        List<DataFluc> points = dataFlucDTOList.stream()
                .map(DataFluc::relationToInfluxDB)
                .collect(Collectors.toList());
        // ListUtils.partition already copes with short lists; no Math.min needed.
        for (List<DataFluc> batch : ListUtils.partition(points, MAX_BATCH_SIZE)) {
            dataFlucMapper.insertBatch(new ArrayList<>(batch));
        }
    }
}

View File

@@ -0,0 +1,47 @@
package com.njcn.dataProcess.service.impl.influxdb;
import com.njcn.dataProcess.dto.DataHarmphasicIDTO;
import com.njcn.dataProcess.dao.imapper.DataHarmphasicIMapper;
import com.njcn.dataProcess.po.influx.DataHarmphasicI;
import com.njcn.dataProcess.service.IDataHarmphasicI;
import lombok.RequiredArgsConstructor;
import org.apache.commons.collections4.ListUtils;
import org.springframework.stereotype.Service;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
/**
 * Description: writes {@code DataHarmphasicI} measurement points into InfluxDB in batches.
 * Date: 2024/11/18 14:33【需求编号】
 *
 * @author clam
 * @version V1.0.0
 */
@Service("InfluxdbDataHarmphasicIImpl")
@RequiredArgsConstructor
public class InfluxdbDataHarmphasicIImpl implements IDataHarmphasicI {

    /** Upper bound on points handed to a single insertBatch call. */
    private static final int MAX_BATCH_SIZE = 1200000;

    private final DataHarmphasicIMapper dataHarmphasicIMapper;

    /**
     * Converts each relational DTO into its InfluxDB points (one DTO may expand to
     * several points) and writes them in batches.
     *
     * @param dataHarmphasicIDTOList records to persist; null or empty input is a no-op
     */
    @Override
    public void batchInsertion(List<DataHarmphasicIDTO> dataHarmphasicIDTOList) {
        if (dataHarmphasicIDTOList == null || dataHarmphasicIDTOList.isEmpty()) {
            return;
        }
        List<DataHarmphasicI> points = dataHarmphasicIDTOList.stream()
                .flatMap(dto -> DataHarmphasicI.relationToInfluxDB(dto).stream())
                .collect(Collectors.toList());
        // ListUtils.partition already copes with short lists; no Math.min needed.
        for (List<DataHarmphasicI> batch : ListUtils.partition(points, MAX_BATCH_SIZE)) {
            dataHarmphasicIMapper.insertBatch(new ArrayList<>(batch));
        }
    }
}

View File

@@ -0,0 +1,47 @@
package com.njcn.dataProcess.service.impl.influxdb;
import com.njcn.dataProcess.dto.DataHarmphasicVDTO;
import com.njcn.dataProcess.dao.imapper.DataHarmphasicVMapper;
import com.njcn.dataProcess.po.influx.DataHarmphasicV;
import com.njcn.dataProcess.service.IDataHarmphasicV;
import lombok.RequiredArgsConstructor;
import org.apache.commons.collections4.ListUtils;
import org.springframework.stereotype.Service;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
/**
 * Description: writes {@code DataHarmphasicV} measurement points into InfluxDB in batches.
 * Date: 2024/11/18 14:33【需求编号】
 *
 * @author clam
 * @version V1.0.0
 */
@Service("InfluxdbDataHarmphasicVImpl")
@RequiredArgsConstructor
public class InfluxdbDataHarmphasicVImpl implements IDataHarmphasicV {

    /** Upper bound on points handed to a single insertBatch call. */
    private static final int MAX_BATCH_SIZE = 1200000;

    private final DataHarmphasicVMapper dataHarmphasicVMapper;

    /**
     * Converts each relational DTO into its InfluxDB points (one DTO may expand to
     * several points) and writes them in batches.
     *
     * @param dataHarmphasicVDTOList records to persist; null or empty input is a no-op
     */
    @Override
    public void batchInsertion(List<DataHarmphasicVDTO> dataHarmphasicVDTOList) {
        if (dataHarmphasicVDTOList == null || dataHarmphasicVDTOList.isEmpty()) {
            return;
        }
        List<DataHarmphasicV> points = dataHarmphasicVDTOList.stream()
                .flatMap(dto -> DataHarmphasicV.relationToInfluxDB(dto).stream())
                .collect(Collectors.toList());
        // ListUtils.partition already copes with short lists; no Math.min needed.
        for (List<DataHarmphasicV> batch : ListUtils.partition(points, MAX_BATCH_SIZE)) {
            dataHarmphasicVMapper.insertBatch(new ArrayList<>(batch));
        }
    }
}

View File

@@ -0,0 +1,46 @@
package com.njcn.dataProcess.service.impl.influxdb;
import com.njcn.dataProcess.dto.DataHarmpowerPDTO;
import com.njcn.dataProcess.dao.imapper.DataHarmpowerPMapper;
import com.njcn.dataProcess.po.influx.DataHarmpowerP;
import com.njcn.dataProcess.service.IDataHarmpowerP;
import lombok.RequiredArgsConstructor;
import org.apache.commons.collections4.ListUtils;
import org.springframework.stereotype.Service;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
/**
 * Description: writes {@code DataHarmpowerP} measurement points into InfluxDB in batches.
 * Date: 2024/11/18 14:33【需求编号】
 *
 * @author clam
 * @version V1.0.0
 */
@Service("InfluxdbDataHarmpowerPImpl")
@RequiredArgsConstructor
public class InfluxdbDataHarmpowerPImpl implements IDataHarmpowerP {

    /** Upper bound on points handed to a single insertBatch call. */
    private static final int MAX_BATCH_SIZE = 1200000;

    private final DataHarmpowerPMapper dataHarmpowerPMapper;

    /**
     * Converts each relational DTO into its InfluxDB points (one DTO may expand to
     * several points) and writes them in batches.
     *
     * @param dataHarmpowerPDTOList records to persist; null or empty input is a no-op
     */
    @Override
    public void batchInsertion(List<DataHarmpowerPDTO> dataHarmpowerPDTOList) {
        if (dataHarmpowerPDTOList == null || dataHarmpowerPDTOList.isEmpty()) {
            return;
        }
        List<DataHarmpowerP> points = dataHarmpowerPDTOList.stream()
                .flatMap(dto -> DataHarmpowerP.relationToInfluxDB(dto).stream())
                .collect(Collectors.toList());
        // ListUtils.partition already copes with short lists; no Math.min needed.
        for (List<DataHarmpowerP> batch : ListUtils.partition(points, MAX_BATCH_SIZE)) {
            dataHarmpowerPMapper.insertBatch(new ArrayList<>(batch));
        }
    }
}

View File

@@ -0,0 +1,46 @@
package com.njcn.dataProcess.service.impl.influxdb;
import com.njcn.dataProcess.dto.DataHarmpowerQDTO;
import com.njcn.dataProcess.dao.imapper.DataHarmpowerQMapper;
import com.njcn.dataProcess.po.influx.DataHarmpowerQ;
import com.njcn.dataProcess.service.IDataHarmpowerQ;
import lombok.RequiredArgsConstructor;
import org.apache.commons.collections4.ListUtils;
import org.springframework.stereotype.Service;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
/**
 * Description: writes {@code DataHarmpowerQ} measurement points into InfluxDB in batches.
 * Date: 2024/11/18 14:33【需求编号】
 *
 * @author clam
 * @version V1.0.0
 */
@Service("InfluxdbDataHarmpowerQImpl")
@RequiredArgsConstructor
public class InfluxdbDataHarmpowerQImpl implements IDataHarmpowerQ {

    /** Upper bound on points handed to a single insertBatch call. */
    private static final int MAX_BATCH_SIZE = 1200000;

    private final DataHarmpowerQMapper dataHarmpowerQMapper;

    /**
     * Converts each relational DTO into its InfluxDB points (one DTO may expand to
     * several points) and writes them in batches.
     *
     * @param dataHarmpowerQDTOList records to persist; null or empty input is a no-op
     */
    @Override
    public void batchInsertion(List<DataHarmpowerQDTO> dataHarmpowerQDTOList) {
        if (dataHarmpowerQDTOList == null || dataHarmpowerQDTOList.isEmpty()) {
            return;
        }
        List<DataHarmpowerQ> points = dataHarmpowerQDTOList.stream()
                .flatMap(dto -> DataHarmpowerQ.relationToInfluxDB(dto).stream())
                .collect(Collectors.toList());
        // ListUtils.partition already copes with short lists; no Math.min needed.
        for (List<DataHarmpowerQ> batch : ListUtils.partition(points, MAX_BATCH_SIZE)) {
            dataHarmpowerQMapper.insertBatch(new ArrayList<>(batch));
        }
    }
}

View File

@@ -0,0 +1,46 @@
package com.njcn.dataProcess.service.impl.influxdb;
import com.njcn.dataProcess.dto.DataHarmpowerSDTO;
import com.njcn.dataProcess.dao.imapper.DataHarmpowerSMapper;
import com.njcn.dataProcess.po.influx.DataHarmpowerS;
import com.njcn.dataProcess.service.IDataHarmpowerS;
import lombok.RequiredArgsConstructor;
import org.apache.commons.collections4.ListUtils;
import org.springframework.stereotype.Service;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
/**
 * Description: writes {@code DataHarmpowerS} measurement points into InfluxDB in batches.
 * Date: 2024/11/18 14:33【需求编号】
 *
 * @author clam
 * @version V1.0.0
 */
@Service("InfluxdbDataHarmpowerSImpl")
@RequiredArgsConstructor
public class InfluxdbDataHarmpowerSImpl implements IDataHarmpowerS {

    /** Upper bound on points handed to a single insertBatch call. */
    private static final int MAX_BATCH_SIZE = 1200000;

    private final DataHarmpowerSMapper dataHarmpowerSMapper;

    /**
     * Converts each relational DTO into its InfluxDB points (one DTO may expand to
     * several points) and writes them in batches.
     *
     * @param dataHarmpowerSDTOList records to persist; null or empty input is a no-op
     */
    @Override
    public void batchInsertion(List<DataHarmpowerSDTO> dataHarmpowerSDTOList) {
        if (dataHarmpowerSDTOList == null || dataHarmpowerSDTOList.isEmpty()) {
            return;
        }
        List<DataHarmpowerS> points = dataHarmpowerSDTOList.stream()
                .flatMap(dto -> DataHarmpowerS.relationToInfluxDB(dto).stream())
                .collect(Collectors.toList());
        // ListUtils.partition already copes with short lists; no Math.min needed.
        for (List<DataHarmpowerS> batch : ListUtils.partition(points, MAX_BATCH_SIZE)) {
            dataHarmpowerSMapper.insertBatch(new ArrayList<>(batch));
        }
    }
}

View File

@@ -0,0 +1,46 @@
package com.njcn.dataProcess.service.impl.influxdb;
import com.njcn.dataProcess.dto.DataIDTO;
import com.njcn.dataProcess.dao.imapper.DataIMapper;
import com.njcn.dataProcess.po.influx.DataI;
import com.njcn.dataProcess.service.IDataI;
import lombok.RequiredArgsConstructor;
import org.apache.commons.collections4.ListUtils;
import org.springframework.stereotype.Service;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
/**
 * Description: writes {@code DataI} measurement points into InfluxDB in batches.
 * Date: 2024/11/18 14:33【需求编号】
 *
 * @author clam
 * @version V1.0.0
 */
@Service("InfluxdbDataIImpl")
@RequiredArgsConstructor
public class InfluxdbDataIImpl implements IDataI {

    /** Upper bound on points handed to a single insertBatch call. */
    private static final int MAX_BATCH_SIZE = 1200000;

    private final DataIMapper dataIMapper;

    /**
     * Converts each relational DTO into its InfluxDB points (one DTO may expand to
     * several points) and writes them in batches.
     *
     * @param dataIDTOList records to persist; null or empty input is a no-op
     */
    @Override
    public void batchInsertion(List<DataIDTO> dataIDTOList) {
        if (dataIDTOList == null || dataIDTOList.isEmpty()) {
            return;
        }
        List<DataI> points = dataIDTOList.stream()
                .flatMap(dto -> DataI.relationToInfluxDB(dto).stream())
                .collect(Collectors.toList());
        // ListUtils.partition already copes with short lists; no Math.min needed.
        for (List<DataI> batch : ListUtils.partition(points, MAX_BATCH_SIZE)) {
            dataIMapper.insertBatch(new ArrayList<>(batch));
        }
    }
}

View File

@@ -0,0 +1,46 @@
package com.njcn.dataProcess.service.impl.influxdb;
import com.njcn.dataProcess.dto.DataInharmIDTO;
import com.njcn.dataProcess.dao.imapper.DataInharmIMapper;
import com.njcn.dataProcess.po.influx.DataInharmI;
import com.njcn.dataProcess.service.IDataInharmI;
import lombok.RequiredArgsConstructor;
import org.apache.commons.collections4.ListUtils;
import org.springframework.stereotype.Service;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
/**
 * Description: writes {@code DataInharmI} measurement points into InfluxDB in batches.
 * Date: 2024/11/18 14:33【需求编号】
 *
 * @author clam
 * @version V1.0.0
 */
@Service("InfluxdbDataInharmIImpl")
@RequiredArgsConstructor
public class InfluxdbDataInharmIImpl implements IDataInharmI {

    /** Upper bound on points handed to a single insertBatch call. */
    private static final int MAX_BATCH_SIZE = 1200000;

    private final DataInharmIMapper dataInharmIMapper;

    /**
     * Converts each relational DTO into its InfluxDB points (one DTO may expand to
     * several points) and writes them in batches.
     *
     * @param dataInharmIDTOList records to persist; null or empty input is a no-op
     */
    @Override
    public void batchInsertion(List<DataInharmIDTO> dataInharmIDTOList) {
        if (dataInharmIDTOList == null || dataInharmIDTOList.isEmpty()) {
            return;
        }
        List<DataInharmI> points = dataInharmIDTOList.stream()
                .flatMap(dto -> DataInharmI.relationToInfluxDB(dto).stream())
                .collect(Collectors.toList());
        // ListUtils.partition already copes with short lists; no Math.min needed.
        for (List<DataInharmI> batch : ListUtils.partition(points, MAX_BATCH_SIZE)) {
            dataInharmIMapper.insertBatch(new ArrayList<>(batch));
        }
    }
}

View File

@@ -0,0 +1,46 @@
package com.njcn.dataProcess.service.impl.influxdb;
import com.njcn.dataProcess.dto.DataInharmVDTO;
import com.njcn.dataProcess.dao.imapper.DataInharmVMapper;
import com.njcn.dataProcess.po.influx.DataInharmV;
import com.njcn.dataProcess.service.IDataInharmV;
import lombok.RequiredArgsConstructor;
import org.apache.commons.collections4.ListUtils;
import org.springframework.stereotype.Service;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
/**
 * Description: writes {@code DataInharmV} measurement points into InfluxDB in batches.
 * Date: 2024/11/18 14:33【需求编号】
 *
 * @author clam
 * @version V1.0.0
 */
@Service("InfluxdbDataInharmVImpl")
@RequiredArgsConstructor
public class InfluxdbDataInharmVImpl implements IDataInharmV {

    /** Upper bound on points handed to a single insertBatch call. */
    private static final int MAX_BATCH_SIZE = 1200000;

    private final DataInharmVMapper dataInharmVMapper;

    /**
     * Converts each relational DTO into its InfluxDB points (one DTO may expand to
     * several points) and writes them in batches.
     *
     * @param dataInharmVDTOList records to persist; null or empty input is a no-op
     */
    @Override
    public void batchInsertion(List<DataInharmVDTO> dataInharmVDTOList) {
        if (dataInharmVDTOList == null || dataInharmVDTOList.isEmpty()) {
            return;
        }
        List<DataInharmV> points = dataInharmVDTOList.stream()
                .flatMap(dto -> DataInharmV.relationToInfluxDB(dto).stream())
                .collect(Collectors.toList());
        // ListUtils.partition already copes with short lists; no Math.min needed.
        for (List<DataInharmV> batch : ListUtils.partition(points, MAX_BATCH_SIZE)) {
            dataInharmVMapper.insertBatch(new ArrayList<>(batch));
        }
    }
}

View File

@@ -0,0 +1,47 @@
package com.njcn.dataProcess.service.impl.influxdb;
import com.njcn.dataProcess.dto.DataPltDTO;
import com.njcn.dataProcess.dao.imapper.DataPltMapper;
import com.njcn.dataProcess.po.influx.DataPlt;
import com.njcn.dataProcess.service.IDataPlt;
import lombok.RequiredArgsConstructor;
import org.apache.commons.collections4.ListUtils;
import org.springframework.stereotype.Service;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
/**
 * Description: writes {@code DataPlt} measurement points into InfluxDB in batches.
 * Date: 2024/11/18 14:33【需求编号】
 *
 * @author clam
 * @version V1.0.0
 */
@Service("InfluxdbDataPltImpl")
@RequiredArgsConstructor
public class InfluxdbDataPltImpl implements IDataPlt {

    /** Upper bound on points handed to a single insertBatch call. */
    private static final int MAX_BATCH_SIZE = 1200000;

    private final DataPltMapper dataPltMapper;

    /**
     * Converts each relational DTO into its InfluxDB point and writes the points in batches.
     *
     * @param dataPltDTOList records to persist; null or empty input is a no-op
     */
    @Override
    public void batchInsertion(List<DataPltDTO> dataPltDTOList) {
        if (dataPltDTOList == null || dataPltDTOList.isEmpty()) {
            return;
        }
        List<DataPlt> points = dataPltDTOList.stream()
                .map(DataPlt::relationToInfluxDB)
                .collect(Collectors.toList());
        // ListUtils.partition already copes with short lists; no Math.min needed.
        for (List<DataPlt> batch : ListUtils.partition(points, MAX_BATCH_SIZE)) {
            dataPltMapper.insertBatch(new ArrayList<>(batch));
        }
    }
}

View File

@@ -0,0 +1,111 @@
package com.njcn.dataProcess.service.impl.influxdb;
import cn.hutool.core.collection.CollectionUtil;
import cn.hutool.core.collection.ListUtil;
import com.njcn.dataProcess.constant.InfluxDBTableConstant;
import com.njcn.dataProcess.constant.PhaseType;
import com.njcn.dataProcess.dto.DataVDTO;
import com.njcn.dataProcess.dto.DataVFiveItemDTO;
import com.njcn.dataProcess.dto.LineDataVFiveItemDTO;
import com.njcn.dataProcess.dao.imapper.DataVMapper;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.po.influx.DataV;
import com.njcn.dataProcess.service.IDataV;
import com.njcn.influx.query.InfluxQueryWrapper;
import org.apache.commons.collections4.ListUtils;
import org.springframework.stereotype.Service;
import javax.annotation.Resource;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
* @author hongawen
* @version 1.0
* @data 2024/11/7 11:02
*/
@Service("InfluxdbDataVImpl")
public class InfluxdbDataVImpl implements IDataV {
@Resource
private DataVMapper dataVMapper;
/**
* 注意influxdb不推荐采用in函数的方式批量查询监测点的数据效率很低容易造成崩溃故每次单测点查询
* @param lineParam 监测点参数
*/
@Override
public Map<String, List<DataVFiveItemDTO>> getLineCountEvaluate(LineCountEvaluateParam lineParam) {
Map<String, List<DataVFiveItemDTO>> result = new HashMap<>();
List<String> lineId = lineParam.getLineId();
InfluxQueryWrapper dataVQueryWrapper = new InfluxQueryWrapper(DataV.class, LineDataVFiveItemDTO.class);
if(CollectionUtil.isNotEmpty(lineId)){
for (String line : lineId) {
List<DataVFiveItemDTO> dataVFiveItemDTOS = new ArrayList<>();
// 准备查freq,v_thd,v_unbalance 取T项数据
dataVQueryWrapper.initSql();
dataVQueryWrapper
.select(DataV::getTime,DataV::getFreq,DataV::getVThd,DataV::getVUnbalance,DataV::getValueType)
.between(DataV::getTime,lineParam.getStartTime(),lineParam.getEndTime())
.eq(DataV::getLineId,line)
.eq(DataV::getPhasicType, PhaseType.PHASE_T)
.or(DataV::getValueType, ListUtil.of(InfluxDBTableConstant.MAX,InfluxDBTableConstant.MIN));
List<LineDataVFiveItemDTO> lineDataVFiveItemDTOSByT = dataVMapper.queryDataValue(dataVQueryWrapper);
// 准备查rms,rms_lvr 取ABC项数据任意一个数据
dataVQueryWrapper.initSql();
dataVQueryWrapper
.select(DataV::getTime,DataV::getRms,DataV::getRmsLvr)
.between(DataV::getTime,lineParam.getStartTime(),lineParam.getEndTime())
.eq(DataV::getLineId,line)
.eq(DataV::getPhasicType,PhaseType.PHASE_A)
.or(DataV::getValueType, ListUtil.of(InfluxDBTableConstant.MAX,InfluxDBTableConstant.MIN));
List<LineDataVFiveItemDTO> lineDataVFiveItemDTOSByA = dataVMapper.queryDataValue(dataVQueryWrapper);
System.out.println(123);
System.out.println(123);
System.out.println(123);
System.out.println(123);
}
}
System.out.println("InfluxdbDataVImpl");
return result;
}
@Override
public void batchInsertion(List<DataVDTO> dataVDTOList) {
int totalCount = dataVDTOList.size();
if(totalCount<=0){
return;
}
List<DataV> collect = dataVDTOList.stream().flatMap(temp -> DataV.relationToInfluxDB(temp).stream()).collect(Collectors.toList());
int minSize = Math.min(1200000, collect.size());
List<List<DataV>> partition = ListUtils.partition(collect, minSize);
for (List<DataV> dataVList : partition) {
List<DataV> sublistAsOriginalListType = new ArrayList<>(dataVList);
dataVMapper.insertBatch(sublistAsOriginalListType);
}
}
@Override
public List<LocalDateTime> monitoringTime(String lineId, String localData) {
return null;
}
}

View File

@@ -0,0 +1,24 @@
package com.njcn.dataProcess.service.impl.influxdb;
import com.njcn.dataProcess.dto.RmpEventDetailDTO;
import com.njcn.dataProcess.service.IRmpEventDetail;
import lombok.RequiredArgsConstructor;
import org.springframework.stereotype.Service;
/**
* Description:
* Date: 2024/11/28 9:07【需求编号】
*
* @author clam
* @version V1.0.0
*/
@Service("InfluxdbRmpEventDetailImpl")
@RequiredArgsConstructor
public class InfluxdbRmpEventDetailImpl implements IRmpEventDetail {
    /**
     * No-op in the InfluxDB backend: the event detail is accepted and
     * discarded.
     * <p>
     * NOTE(review): the relational implementation of this interface does
     * persist the record — confirm whether the InfluxDB side is intentionally
     * left unimplemented or is still to be written.
     */
    @Override
    public void batchInsertion(RmpEventDetailDTO rmpEventDetailDTO) {
    }
}

View File

@@ -0,0 +1,56 @@
package com.njcn.dataProcess.service.impl.relation;
import com.njcn.dataProcess.dto.DataFlickerDTO;
import com.njcn.dataProcess.dao.relation.mapper.DataFlickerRelationMapper;
import com.njcn.dataProcess.po.relation.DataFlicker;
import com.njcn.dataProcess.service.IDataFlicker;
import com.njcn.dataProcess.util.BeanFeildUtils;
import lombok.RequiredArgsConstructor;
import org.apache.commons.collections4.ListUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;
import java.util.List;
import java.util.stream.Collectors;
/**
* Description:
* Date: 2024/11/18 14:33【需求编号】
*
* @author clam
* @version V1.0.0
*/
@Service("RelationDataFlickerImpl")
@RequiredArgsConstructor
public class RelationDataFlickerImpl implements IDataFlicker {

    private final DataFlickerRelationMapper dataFlickerRelationMapper;

    /**
     * Persists flicker records in batches of at most 120 rows.
     * <p>
     * DTO properties are copied onto {@code DataFlicker} entities (null
     * properties skipped), rows are de-duplicated on the concatenated
     * (timeid, lineid, phasicType) key — the first occurrence wins — and the
     * result is inserted batch by batch.
     *
     * @param dataFlickerDTOList incoming records; an empty list is a no-op
     */
    @Override
    public void batchInsertion(List<DataFlickerDTO> dataFlickerDTOList) {
        if (dataFlickerDTOList.isEmpty()) {
            return;
        }
        List<DataFlicker> rows = dataFlickerDTOList.stream()
                .map(this::toEntity)
                .collect(Collectors.collectingAndThen(
                        Collectors.toMap(
                                row -> row.getTimeid() + row.getLineid() + row.getPhasicType(),
                                row -> row,
                                (first, duplicate) -> first),
                        deduped -> deduped.values().stream().collect(Collectors.toList())));
        int batchSize = Math.min(120, dataFlickerDTOList.size());
        for (List<DataFlicker> batch : ListUtils.partition(rows, batchSize)) {
            dataFlickerRelationMapper.insertBatchSomeColumn(batch);
        }
    }

    /** Copies the non-null DTO properties onto a fresh entity. */
    private DataFlicker toEntity(DataFlickerDTO dto) {
        DataFlicker entity = new DataFlicker();
        BeanUtils.copyProperties(dto, entity, BeanFeildUtils.getNullPropertyNames(dto));
        return entity;
    }
}

View File

@@ -0,0 +1,47 @@
package com.njcn.dataProcess.service.impl.relation;
import com.njcn.dataProcess.dto.DataFlucDTO;
import com.njcn.dataProcess.dao.relation.mapper.DataFlucRelationMapper;
import com.njcn.dataProcess.po.relation.DataFluc;
import com.njcn.dataProcess.service.IDataFluc;
import com.njcn.dataProcess.util.BeanFeildUtils;
import lombok.RequiredArgsConstructor;
import org.apache.commons.collections4.ListUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;
import java.util.List;
import java.util.stream.Collectors;
/**
* Description:
* Date: 2024/11/18 15:06【需求编号】
*
* @author clam
* @version V1.0.0
*/
@Service("RelationDataFlucImpl")
@RequiredArgsConstructor
public class RelationDataFlucImpl implements IDataFluc {

    private final DataFlucRelationMapper dataFlucRelationMapper;

    /**
     * Persists voltage-fluctuation records in batches of at most 120 rows.
     * <p>
     * DTO properties are copied onto {@code DataFluc} entities (null
     * properties skipped) and inserted batch by batch.
     * <p>
     * NOTE(review): unlike the sibling Relation*Impl services, this one does
     * not de-duplicate on (timeid, lineid, phasicType) before inserting —
     * confirm whether that is intentional.
     *
     * @param dataFlucDTOList incoming records; an empty list is a no-op
     */
    @Override
    public void batchInsertion(List<DataFlucDTO> dataFlucDTOList) {
        if (dataFlucDTOList.isEmpty()) {
            return;
        }
        List<DataFluc> rows = dataFlucDTOList.stream()
                .map(this::toEntity)
                .collect(Collectors.toList());
        int batchSize = Math.min(120, dataFlucDTOList.size());
        for (List<DataFluc> batch : ListUtils.partition(rows, batchSize)) {
            dataFlucRelationMapper.insertBatchSomeColumn(batch);
        }
    }

    /** Copies the non-null DTO properties onto a fresh entity. */
    private DataFluc toEntity(DataFlucDTO dto) {
        DataFluc entity = new DataFluc();
        BeanUtils.copyProperties(dto, entity, BeanFeildUtils.getNullPropertyNames(dto));
        return entity;
    }
}

View File

@@ -0,0 +1,57 @@
package com.njcn.dataProcess.service.impl.relation;
import com.njcn.dataProcess.dto.DataHarmphasicIDTO;
import com.njcn.dataProcess.dao.relation.mapper.DataHarmphasicIRelationMapper;
import com.njcn.dataProcess.po.relation.DataHarmphasicI;
import com.njcn.dataProcess.service.IDataHarmphasicI;
import com.njcn.dataProcess.util.BeanFeildUtils;
import lombok.RequiredArgsConstructor;
import org.apache.commons.collections4.ListUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;
import java.util.List;
import java.util.stream.Collectors;
/**
* Description:
* Date: 2024/11/18 15:06【需求编号】
*
* @author clam
* @version V1.0.0
*/
@Service("RelationDataHarmphasicIImpl")
@RequiredArgsConstructor
public class RelationDataHarmphasicIImpl implements IDataHarmphasicI {

    private final DataHarmphasicIRelationMapper dataHarmphasicIRelationMapper;

    /**
     * Persists harmonic-phase current records in batches of at most 120 rows.
     * <p>
     * DTO properties are copied onto entities (null properties skipped), rows
     * are de-duplicated on the (timeid, lineid, phasicType) key — first
     * occurrence wins — then inserted batch by batch.
     *
     * @param dataHarmphasicIDTOList incoming records; an empty list is a no-op
     */
    @Override
    public void batchInsertion(List<DataHarmphasicIDTO> dataHarmphasicIDTOList) {
        if (dataHarmphasicIDTOList.isEmpty()) {
            return;
        }
        List<DataHarmphasicI> rows = dataHarmphasicIDTOList.stream()
                .map(this::toEntity)
                .collect(Collectors.collectingAndThen(
                        Collectors.toMap(
                                row -> row.getTimeid() + row.getLineid() + row.getPhasicType(),
                                row -> row,
                                (first, duplicate) -> first),
                        deduped -> deduped.values().stream().collect(Collectors.toList())));
        int batchSize = Math.min(120, dataHarmphasicIDTOList.size());
        for (List<DataHarmphasicI> batch : ListUtils.partition(rows, batchSize)) {
            dataHarmphasicIRelationMapper.insertBatchSomeColumn(batch);
        }
    }

    /** Copies the non-null DTO properties onto a fresh entity. */
    private DataHarmphasicI toEntity(DataHarmphasicIDTO dto) {
        DataHarmphasicI entity = new DataHarmphasicI();
        BeanUtils.copyProperties(dto, entity, BeanFeildUtils.getNullPropertyNames(dto));
        return entity;
    }
}

View File

@@ -0,0 +1,54 @@
package com.njcn.dataProcess.service.impl.relation;
import com.njcn.dataProcess.dto.DataHarmphasicVDTO;
import com.njcn.dataProcess.dao.relation.mapper.DataHarmphasicVRelationMapper;
import com.njcn.dataProcess.po.relation.DataHarmphasicV;
import com.njcn.dataProcess.service.IDataHarmphasicV;
import com.njcn.dataProcess.util.BeanFeildUtils;
import lombok.RequiredArgsConstructor;
import org.apache.commons.collections4.ListUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;
import java.util.List;
import java.util.stream.Collectors;
/**
* Description:
* Date: 2024/11/18 15:06【需求编号】
*
* @author clam
* @version V1.0.0
*/
@Service("RelationDataHarmphasicVImpl")
@RequiredArgsConstructor
public class RelationDataHarmphasicVImpl implements IDataHarmphasicV {

    private final DataHarmphasicVRelationMapper dataHarmphasicVRelationMapper;

    /**
     * Persists harmonic-phase voltage records in batches of at most 120 rows.
     * <p>
     * DTO properties are copied onto entities (null properties skipped), rows
     * are de-duplicated on the (timeid, lineid, phasicType) key — first
     * occurrence wins — then inserted batch by batch.
     *
     * @param dataHarmphasicVDTOList incoming records; an empty list is a no-op
     */
    @Override
    public void batchInsertion(List<DataHarmphasicVDTO> dataHarmphasicVDTOList) {
        if (dataHarmphasicVDTOList.isEmpty()) {
            return;
        }
        List<DataHarmphasicV> rows = dataHarmphasicVDTOList.stream()
                .map(this::toEntity)
                .collect(Collectors.collectingAndThen(
                        Collectors.toMap(
                                row -> row.getTimeid() + row.getLineid() + row.getPhasicType(),
                                row -> row,
                                (first, duplicate) -> first),
                        deduped -> deduped.values().stream().collect(Collectors.toList())));
        int batchSize = Math.min(120, dataHarmphasicVDTOList.size());
        for (List<DataHarmphasicV> batch : ListUtils.partition(rows, batchSize)) {
            dataHarmphasicVRelationMapper.insertBatchSomeColumn(batch);
        }
    }

    /** Copies the non-null DTO properties onto a fresh entity. */
    private DataHarmphasicV toEntity(DataHarmphasicVDTO dto) {
        DataHarmphasicV entity = new DataHarmphasicV();
        BeanUtils.copyProperties(dto, entity, BeanFeildUtils.getNullPropertyNames(dto));
        return entity;
    }
}

View File

@@ -0,0 +1,56 @@
package com.njcn.dataProcess.service.impl.relation;
import com.njcn.dataProcess.dto.DataHarmpowerPDTO;
import com.njcn.dataProcess.dao.relation.mapper.DataHarmpowerPRelationMapper;
import com.njcn.dataProcess.po.relation.DataHarmpowerP;
import com.njcn.dataProcess.service.IDataHarmpowerP;
import com.njcn.dataProcess.util.BeanFeildUtils;
import lombok.RequiredArgsConstructor;
import org.apache.commons.collections4.ListUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;
import java.util.List;
import java.util.stream.Collectors;
/**
* Description:
* Date: 2024/11/18 15:06【需求编号】
*
* @author clam
* @version V1.0.0
*/
@Service("RelationDataHarmpowerPImpl")
@RequiredArgsConstructor
public class RelationDataHarmpowerPImpl implements IDataHarmpowerP {

    private final DataHarmpowerPRelationMapper dataHarmpowerPRelationMapper;

    /**
     * Persists active harmonic power records in batches of at most 120 rows.
     * <p>
     * DTO properties are copied onto entities (null properties skipped), rows
     * are de-duplicated on the (timeid, lineid, phasicType) key — first
     * occurrence wins — then inserted batch by batch.
     *
     * @param dataHarmpowerPDTOList incoming records; an empty list is a no-op
     */
    @Override
    public void batchInsertion(List<DataHarmpowerPDTO> dataHarmpowerPDTOList) {
        if (dataHarmpowerPDTOList.isEmpty()) {
            return;
        }
        List<DataHarmpowerP> rows = dataHarmpowerPDTOList.stream()
                .map(this::toEntity)
                .collect(Collectors.collectingAndThen(
                        Collectors.toMap(
                                row -> row.getTimeid() + row.getLineid() + row.getPhasicType(),
                                row -> row,
                                (first, duplicate) -> first),
                        deduped -> deduped.values().stream().collect(Collectors.toList())));
        int batchSize = Math.min(120, dataHarmpowerPDTOList.size());
        for (List<DataHarmpowerP> batch : ListUtils.partition(rows, batchSize)) {
            dataHarmpowerPRelationMapper.insertBatchSomeColumn(batch);
        }
    }

    /** Copies the non-null DTO properties onto a fresh entity. */
    private DataHarmpowerP toEntity(DataHarmpowerPDTO dto) {
        DataHarmpowerP entity = new DataHarmpowerP();
        BeanUtils.copyProperties(dto, entity, BeanFeildUtils.getNullPropertyNames(dto));
        return entity;
    }
}

View File

@@ -0,0 +1,59 @@
package com.njcn.dataProcess.service.impl.relation;
import com.njcn.dataProcess.dto.DataHarmpowerQDTO;
import com.njcn.dataProcess.po.relation.DataHarmpowerQ;
import com.njcn.dataProcess.dao.relation.mapper.DataHarmpowerQRelationMapper;
import com.njcn.dataProcess.service.IDataHarmpowerQ;
import com.njcn.dataProcess.util.BeanFeildUtils;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections4.ListUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;
import java.util.List;
import java.util.stream.Collectors;
/**
* Description:
* Date: 2024/11/18 15:06【需求编号】
*
* @author clam
* @version V1.0.0
*/
@Service("RelationDataHarmpowerQImpl")
@RequiredArgsConstructor
@Slf4j
public class RelationDataHarmpowerQImpl implements IDataHarmpowerQ {

    private final DataHarmpowerQRelationMapper dataHarmpowerQRelationMapper;

    /**
     * Persists reactive harmonic power records in batches of at most 120 rows.
     * <p>
     * DTO properties are copied onto entities (null properties skipped), rows
     * are de-duplicated on the (timeid, lineid, phasicType) key — first
     * occurrence wins — then inserted batch by batch. Total insert time is
     * logged.
     *
     * @param dataHarmpowerQDTOList incoming records; an empty list is a no-op
     */
    @Override
    public void batchInsertion(List<DataHarmpowerQDTO> dataHarmpowerQDTOList) {
        int totalCount = dataHarmpowerQDTOList.size();
        if (totalCount <= 0) {
            return;
        }
        int batchSize = Math.min(120, totalCount);
        List<DataHarmpowerQ> collect = dataHarmpowerQDTOList.stream().map(temp -> {
            DataHarmpowerQ dataHarmpowerQ = new DataHarmpowerQ();
            BeanUtils.copyProperties(temp, dataHarmpowerQ, BeanFeildUtils.getNullPropertyNames(temp));
            return dataHarmpowerQ;
        }).collect(Collectors.toList());
        // De-duplicate on the business key; the first occurrence wins.
        collect = collect.stream().collect(Collectors.toMap(
                temp -> temp.getTimeid() + temp.getLineid() + temp.getPhasicType(),
                temp -> temp,
                (exist, replace) -> exist
        )).values().stream().collect(Collectors.toList());
        long startMillis = System.currentTimeMillis();
        for (List<DataHarmpowerQ> dataHarmpowerQList : ListUtils.partition(collect, batchSize)) {
            dataHarmpowerQRelationMapper.insertBatchSomeColumn(dataHarmpowerQList);
        }
        // SLF4J parameterized logging instead of string concatenation.
        log.info("处理时长-----{}", System.currentTimeMillis() - startMillis);
    }
}

View File

@@ -0,0 +1,52 @@
package com.njcn.dataProcess.service.impl.relation;
import com.njcn.dataProcess.dto.DataHarmpowerSDTO;
import com.njcn.dataProcess.dao.relation.mapper.DataHarmpowerSRelationMapper;
import com.njcn.dataProcess.po.relation.DataHarmpowerS;
import com.njcn.dataProcess.service.IDataHarmpowerS;
import com.njcn.dataProcess.util.BeanFeildUtils;
import lombok.RequiredArgsConstructor;
import org.apache.commons.collections4.ListUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;
import java.util.List;
import java.util.stream.Collectors;
/**
* Description:
* Date: 2024/11/18 15:06【需求编号】
*
* @author clam
* @version V1.0.0
*/
@Service("RelationDataHarmpowerSImpl")
@RequiredArgsConstructor
public class RelationDataHarmpowerSImpl implements IDataHarmpowerS {

    private final DataHarmpowerSRelationMapper dataHarmpowerSRelationMapper;

    /**
     * Persists apparent harmonic power records in batches of at most 120 rows.
     * <p>
     * DTO properties are copied onto entities (null properties skipped), rows
     * are de-duplicated on the (timeid, lineid, phasicType) key — first
     * occurrence wins — then inserted batch by batch.
     *
     * @param dataHarmpowerSDTOList incoming records; an empty list is a no-op
     */
    @Override
    public void batchInsertion(List<DataHarmpowerSDTO> dataHarmpowerSDTOList) {
        if (dataHarmpowerSDTOList.isEmpty()) {
            return;
        }
        List<DataHarmpowerS> rows = dataHarmpowerSDTOList.stream()
                .map(this::toEntity)
                .collect(Collectors.collectingAndThen(
                        Collectors.toMap(
                                row -> row.getTimeid() + row.getLineid() + row.getPhasicType(),
                                row -> row,
                                (first, duplicate) -> first),
                        deduped -> deduped.values().stream().collect(Collectors.toList())));
        int batchSize = Math.min(120, dataHarmpowerSDTOList.size());
        for (List<DataHarmpowerS> batch : ListUtils.partition(rows, batchSize)) {
            dataHarmpowerSRelationMapper.insertBatchSomeColumn(batch);
        }
    }

    /** Copies the non-null DTO properties onto a fresh entity. */
    private DataHarmpowerS toEntity(DataHarmpowerSDTO dto) {
        DataHarmpowerS entity = new DataHarmpowerS();
        BeanUtils.copyProperties(dto, entity, BeanFeildUtils.getNullPropertyNames(dto));
        return entity;
    }
}

View File

@@ -0,0 +1,54 @@
package com.njcn.dataProcess.service.impl.relation;
import com.njcn.dataProcess.dto.DataIDTO;
import com.njcn.dataProcess.dao.relation.mapper.DataIRelationMapper;
import com.njcn.dataProcess.po.relation.DataI;
import com.njcn.dataProcess.service.IDataI;
import com.njcn.dataProcess.util.BeanFeildUtils;
import lombok.RequiredArgsConstructor;
import org.apache.commons.collections4.ListUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;
import javax.annotation.Resource;
import java.util.List;
import java.util.stream.Collectors;
/**
* Description:
* Date: 2024/11/18 11:18【需求编号】
*
* @author clam
* @version V1.0.0
*/
@Service("RelationDataIImpl")
@RequiredArgsConstructor
public class RelationDataIImpl implements IDataI {

    // Constructor injection via Lombok's @RequiredArgsConstructor instead of
    // the previous @Resource field injection — consistent with the sibling
    // Relation*Impl services in this package.
    private final DataIRelationMapper dataIRelationMapper;

    /**
     * Persists current records in batches of at most 120 rows.
     * <p>
     * DTO properties are copied onto entities (null properties skipped), rows
     * are de-duplicated on the (timeid, lineid, phasicType) key — first
     * occurrence wins — then inserted batch by batch.
     *
     * @param dataIDTOList incoming records; an empty list is a no-op
     */
    @Override
    public void batchInsertion(List<DataIDTO> dataIDTOList) {
        int totalCount = dataIDTOList.size();
        if (totalCount <= 0) {
            return;
        }
        int minSize = Math.min(120, totalCount);
        List<DataI> collect = dataIDTOList.stream().map(temp -> {
            DataI dataI = new DataI();
            BeanUtils.copyProperties(temp, dataI, BeanFeildUtils.getNullPropertyNames(temp));
            return dataI;
        }).collect(Collectors.toList());
        // De-duplicate on the business key; the first occurrence wins.
        collect = collect.stream().collect(Collectors.toMap(
                temp -> temp.getTimeid() + temp.getLineid() + temp.getPhasicType(),
                temp -> temp,
                (exist, replace) -> exist
        )).values().stream().collect(Collectors.toList());
        for (List<DataI> dataIList : ListUtils.partition(collect, minSize)) {
            dataIRelationMapper.insertBatchSomeColumn(dataIList);
        }
    }
}

View File

@@ -0,0 +1,53 @@
package com.njcn.dataProcess.service.impl.relation;
import com.njcn.dataProcess.dto.DataInharmIDTO;
import com.njcn.dataProcess.dao.relation.mapper.DataInharmIRelationMapper;
import com.njcn.dataProcess.po.relation.DataInharmI;
import com.njcn.dataProcess.service.IDataInharmI;
import com.njcn.dataProcess.util.BeanFeildUtils;
import lombok.RequiredArgsConstructor;
import org.apache.commons.collections4.ListUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;
import java.util.List;
import java.util.stream.Collectors;
/**
* @author hongawen
* @version 1.0
* @data 2024/11/7 11:02
*/
@Service("RelationDataInharmIImpl")
@RequiredArgsConstructor
public class RelationDataInharmIImpl implements IDataInharmI {

    // Renamed from the misleading "dataVInharmIMapper" (private field, safe).
    private final DataInharmIRelationMapper dataInharmIRelationMapper;

    /**
     * Persists inter-harmonic current records in batches of at most 120 rows.
     * <p>
     * DTO properties are copied onto entities (null properties skipped), rows
     * are de-duplicated on the (timeid, lineid, phasicType) key — first
     * occurrence wins — then inserted batch by batch.
     *
     * @param dataInharmIDTOList incoming records; an empty list is a no-op
     */
    @Override
    public void batchInsertion(List<DataInharmIDTO> dataInharmIDTOList) {
        if (dataInharmIDTOList.isEmpty()) {
            return;
        }
        List<DataInharmI> rows = dataInharmIDTOList.stream()
                .map(this::toEntity)
                .collect(Collectors.collectingAndThen(
                        Collectors.toMap(
                                row -> row.getTimeid() + row.getLineid() + row.getPhasicType(),
                                row -> row,
                                (first, duplicate) -> first),
                        deduped -> deduped.values().stream().collect(Collectors.toList())));
        int batchSize = Math.min(120, dataInharmIDTOList.size());
        for (List<DataInharmI> batch : ListUtils.partition(rows, batchSize)) {
            dataInharmIRelationMapper.insertBatchSomeColumn(batch);
        }
    }

    /** Copies the non-null DTO properties onto a fresh entity. */
    private DataInharmI toEntity(DataInharmIDTO dto) {
        DataInharmI entity = new DataInharmI();
        BeanUtils.copyProperties(dto, entity, BeanFeildUtils.getNullPropertyNames(dto));
        return entity;
    }
}

View File

@@ -0,0 +1,54 @@
package com.njcn.dataProcess.service.impl.relation;
import com.njcn.dataProcess.dto.DataInharmVDTO;
import com.njcn.dataProcess.dao.relation.mapper.DataVInharmVRelationMapper;
import com.njcn.dataProcess.po.relation.DataInharmV;
import com.njcn.dataProcess.service.IDataInharmV;
import com.njcn.dataProcess.util.BeanFeildUtils;
import lombok.RequiredArgsConstructor;
import org.apache.commons.collections4.ListUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;
import java.util.List;
import java.util.stream.Collectors;
/**
* @author hongawen
* @version 1.0
* @data 2024/11/7 11:02
*/
@Service("RelationDataInharmVImpl")
@RequiredArgsConstructor
public class RelationDataInharmVImpl implements IDataInharmV {

    private final DataVInharmVRelationMapper dataVInharmVRelationMapper;

    /**
     * Persists inter-harmonic voltage records in batches of at most 120 rows.
     * <p>
     * DTO properties are copied onto entities (null properties skipped), rows
     * are de-duplicated on the (timeid, lineid, phasicType) key — first
     * occurrence wins — then inserted batch by batch.
     *
     * @param dataInharmVDTOList incoming records; an empty list is a no-op
     */
    @Override
    public void batchInsertion(List<DataInharmVDTO> dataInharmVDTOList) {
        if (dataInharmVDTOList.isEmpty()) {
            return;
        }
        List<DataInharmV> rows = dataInharmVDTOList.stream()
                .map(this::toEntity)
                .collect(Collectors.collectingAndThen(
                        Collectors.toMap(
                                row -> row.getTimeid() + row.getLineid() + row.getPhasicType(),
                                row -> row,
                                (first, duplicate) -> first),
                        deduped -> deduped.values().stream().collect(Collectors.toList())));
        int batchSize = Math.min(120, dataInharmVDTOList.size());
        for (List<DataInharmV> batch : ListUtils.partition(rows, batchSize)) {
            dataVInharmVRelationMapper.insertBatchSomeColumn(batch);
        }
    }

    /** Copies the non-null DTO properties onto a fresh entity. */
    private DataInharmV toEntity(DataInharmVDTO dto) {
        DataInharmV entity = new DataInharmV();
        BeanUtils.copyProperties(dto, entity, BeanFeildUtils.getNullPropertyNames(dto));
        return entity;
    }
}

View File

@@ -0,0 +1,54 @@
package com.njcn.dataProcess.service.impl.relation;
import com.njcn.dataProcess.dto.DataPltDTO;
import com.njcn.dataProcess.dao.relation.mapper.DataPltRelationMapper;
import com.njcn.dataProcess.po.relation.DataPlt;
import com.njcn.dataProcess.service.IDataPlt;
import com.njcn.dataProcess.util.BeanFeildUtils;
import lombok.RequiredArgsConstructor;
import org.apache.commons.collections4.ListUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;
import java.util.List;
import java.util.stream.Collectors;
/**
* Description:
* Date: 2024/11/28 14:47【需求编号】
*
* @author clam
* @version V1.0.0
*/
@Service("RelationDataPltImpl")
@RequiredArgsConstructor
public class RelationDataPltImpl implements IDataPlt {

    private final DataPltRelationMapper dataPltRelationMapper;

    /**
     * Persists long-term flicker (Plt) records in batches of at most 120 rows.
     * <p>
     * DTO properties are copied onto entities (null properties skipped), rows
     * are de-duplicated on the (timeid, lineid, phasicType) key — first
     * occurrence wins — then inserted batch by batch.
     *
     * @param dataPltDTOList incoming records; an empty list is a no-op
     */
    @Override
    public void batchInsertion(List<DataPltDTO> dataPltDTOList) {
        if (dataPltDTOList.isEmpty()) {
            return;
        }
        List<DataPlt> rows = dataPltDTOList.stream()
                .map(this::toEntity)
                .collect(Collectors.collectingAndThen(
                        Collectors.toMap(
                                row -> row.getTimeid() + row.getLineid() + row.getPhasicType(),
                                row -> row,
                                (first, duplicate) -> first),
                        deduped -> deduped.values().stream().collect(Collectors.toList())));
        int batchSize = Math.min(120, dataPltDTOList.size());
        for (List<DataPlt> batch : ListUtils.partition(rows, batchSize)) {
            dataPltRelationMapper.insertBatchSomeColumn(batch);
        }
    }

    /** Copies the non-null DTO properties onto a fresh entity. */
    private DataPlt toEntity(DataPltDTO dto) {
        DataPlt entity = new DataPlt();
        BeanUtils.copyProperties(dto, entity, BeanFeildUtils.getNullPropertyNames(dto));
        return entity;
    }
}

View File

@@ -0,0 +1,76 @@
package com.njcn.dataProcess.service.impl.relation;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.njcn.dataProcess.dto.DataVDTO;
import com.njcn.dataProcess.dto.DataVFiveItemDTO;
import com.njcn.dataProcess.dao.relation.mapper.DataVRelationMapper;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.po.relation.DataV;
import com.njcn.dataProcess.service.IDataV;
import com.njcn.dataProcess.util.BeanFeildUtils;
import lombok.RequiredArgsConstructor;
import org.apache.commons.collections4.ListUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;
import javax.annotation.Resource;
import java.time.LocalDateTime;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
* @author hongawen
* @version 1.0
* @data 2024/11/7 11:02
*/
@Service("RelationDataVImpl")
@RequiredArgsConstructor
public class RelationDataVImpl implements IDataV {

    // Constructor injection via Lombok's @RequiredArgsConstructor instead of
    // the previous @Resource field injection — consistent with the sibling
    // Relation*Impl services in this package.
    private final DataVRelationMapper dataVRelationMapper;

    /**
     * Not implemented for the relational backend.
     *
     * @return always {@code null} — callers must guard against it
     *         (TODO(review): consider returning an empty map instead)
     */
    @Override
    public Map<String, List<DataVFiveItemDTO>> getLineCountEvaluate(LineCountEvaluateParam lineParam) {
        return null;
    }

    /**
     * Persists voltage records in batches of at most 120 rows.
     * <p>
     * DTO properties are copied onto entities (null properties skipped), rows
     * are de-duplicated on the (timeid, lineid, phasicType) key — first
     * occurrence wins — then inserted batch by batch.
     *
     * @param dataVDTOList incoming records; an empty list is a no-op
     */
    @Override
    public void batchInsertion(List<DataVDTO> dataVDTOList) {
        int totalCount = dataVDTOList.size();
        if (totalCount <= 0) {
            return;
        }
        int minSize = Math.min(120, totalCount);
        List<DataV> collect = dataVDTOList.stream().map(temp -> {
            DataV dataV = new DataV();
            BeanUtils.copyProperties(temp, dataV, BeanFeildUtils.getNullPropertyNames(temp));
            return dataV;
        }).collect(Collectors.toList());
        // De-duplicate on the business key; the first occurrence wins.
        collect = collect.stream().collect(Collectors.toMap(
                temp -> temp.getTimeid() + temp.getLineid() + temp.getPhasicType(),
                temp -> temp,
                (exist, replace) -> exist
        )).values().stream().collect(Collectors.toList());
        for (List<DataV> dataVList : ListUtils.partition(collect, minSize)) {
            dataVRelationMapper.insertBatchSomeColumn(dataVList);
        }
    }

    /**
     * Lists the distinct A-phase sample timestamps recorded for one line on
     * one day, in ascending order.
     *
     * @param lineId    monitoring point id
     * @param localData day in {@code yyyy-MM-dd} form; the query covers
     *                  00:00:00 through 23:59:59 of that day
     * @return ascending, distinct timestamps (possibly empty)
     */
    @Override
    public List<LocalDateTime> monitoringTime(String lineId, String localData) {
        QueryWrapper<DataV> queryWrapper = new QueryWrapper<>();
        queryWrapper.lambda()
                .select(DataV::getTimeid)
                .eq(DataV::getLineid, lineId)
                .between(DataV::getTimeid, localData + " 00:00:00", localData + " 23:59:59")
                .eq(DataV::getPhasicType, "A")
                .orderByAsc(DataV::getTimeid);
        return dataVRelationMapper.selectList(queryWrapper).stream()
                .map(DataV::getTimeid)
                .distinct()
                .collect(Collectors.toList());
    }
}

View File

@@ -0,0 +1,39 @@
package com.njcn.dataProcess.service.impl.relation;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.njcn.dataProcess.dto.RmpEventDetailDTO;
import com.njcn.dataProcess.dao.relation.mapper.RmpEventDetailMapper;
import com.njcn.dataProcess.po.relation.RmpEventDetail;
import com.njcn.dataProcess.service.IRmpEventDetail;
import com.njcn.dataProcess.util.BeanFeildUtils;
import lombok.RequiredArgsConstructor;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;
/**
* Description:
* Date: 2024/11/28 9:07【需求编号】
*
* @author clam
* @version V1.0.0
*/
@Service("RelationRmpEventDetailImpl")
@RequiredArgsConstructor
public class RelationRmpEventDetailImpl implements IRmpEventDetail {
    private final RmpEventDetailMapper rmpEventDetailMapper;
    /**
     * Upserts one event-detail record: any existing row with the same
     * measurement point id and start time is deleted first, then the new row
     * is inserted (delete-then-insert upsert).
     * <p>
     * NOTE(review): the delete and insert are not wrapped in a visible
     * transaction here — a failure between the two calls would leave the old
     * row deleted without a replacement; confirm whether the caller provides
     * an enclosing transaction.
     */
    @Override
    public void batchInsertion(RmpEventDetailDTO rmpEventDetailDTO) {
        // Copy only the non-null DTO properties onto the entity.
        RmpEventDetail rmpEventDetail = new RmpEventDetail();
        BeanUtils.copyProperties(rmpEventDetailDTO,rmpEventDetail, BeanFeildUtils.getNullPropertyNames(rmpEventDetailDTO));
        // Remove any previous record for the same (point id, start time) pair.
        rmpEventDetailMapper.delete(new QueryWrapper<RmpEventDetail>()
                .lambda()
                .eq(RmpEventDetail::getMeasurementPointId,rmpEventDetail.getMeasurementPointId())
                .eq(RmpEventDetail::getStartTime,rmpEventDetail.getStartTime())
        );
        rmpEventDetailMapper.insert(rmpEventDetail);
    }
}

View File

@@ -0,0 +1,72 @@
#当前服务的基本信息
microservice:
ename: @artifactId@
name: '@name@'
version: @version@
sentinel:
url: @sentinel.url@
gateway:
url: @gateway.url@
server:
port: 10405
#feign接口开启服务熔断降级处理
feign:
sentinel:
enabled: true
spring:
application:
name: @artifactId@
#nacos注册中心以及配置中心的指定
cloud:
nacos:
discovery:
ip: @service.server.url@
server-addr: @nacos.url@
namespace: @nacos.namespace@
config:
server-addr: @nacos.url@
namespace: @nacos.namespace@
file-extension: yaml
shared-configs:
- data-id: share-config.yaml
refresh: true
- data-id: data-platform.yaml
refresh: true
main:
allow-bean-definition-overriding: true
servlet:
multipart:
max-file-size: 100MB
max-request-size: 100MB
jackson:
time-zone: GMT+8
#项目日志的配置
logging:
config: http://@nacos.url@/nacos/v1/cs/configs?tenant=@nacos.namespace@&group=DEFAULT_GROUP&dataId=logback.xml
level:
root: info
mqtt:
client-id: @artifactId@${random.value}
data:
source:
query: Relation
insert: Relation
#mybatis配置信息
mybatis-plus:
configuration:
#配置sql日志输出
log-impl: org.apache.ibatis.logging.nologging.NoLoggingImpl
# type-aliases-package: com.njcn.harmonic.pojo
# type-handlers-package: com.njcn.db.handler
# global-config:
# db-config:
# date-format: yyyy-MM-dd HH:mm:ss
# global-config:
# enable-sql-runner: true

View File

@@ -0,0 +1,38 @@
package com.njcn.dataProcess;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
/**
* Unit test for simple App.
*/
public class AppTest
    extends TestCase
{
    /**
     * Create the test case (JUnit 3 style: tests extend TestCase and are
     * assembled into a suite).
     *
     * @param testName name of the test case
     */
    public AppTest( String testName )
    {
        super( testName );
    }
    /**
     * @return the suite of tests being tested
     */
    public static Test suite()
    {
        return new TestSuite( AppTest.class );
    }
    /**
     * Placeholder smoke test generated by the Maven archetype; always passes.
     */
    public void testApp()
    {
        assertTrue( true );
    }
}