Complete the responsibility quantification feature

2023-07-26 11:20:12 +08:00
parent ae00d7671d
commit c3f8592160
104 changed files with 4709 additions and 1670 deletions

View File

@@ -39,7 +39,20 @@
<version>3.0.9</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>com.njcn</groupId>
<artifactId>common-poi</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.njcn</groupId>
<artifactId>common-oss</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.njcn</groupId>
<artifactId>harmonic-api</artifactId>
<version>${project.version}</version>
</dependency>
</dependencies>
</project>

View File

@@ -0,0 +1,52 @@
package com.njcn.advance.enums;
import lombok.Getter;
/**
* @author hongawen
* @version 1.0.0
* @date 2021年04月13日 10:50
*/
@Getter
public enum AdvanceResponseEnum {
ANALYSIS_USER_DATA_ERROR("A0101","解析用采数据内容失败"),
INTERNAL_ERROR("A0101","系统内部异常"),
USER_DATA_EMPTY("A0101","用采数据内容为空"),
USER_DATA_NOT_FOUND("A0101","未找到用采数据"),
RESP_DATA_NOT_FOUND("A0101","未找到责任划分数据"),
WIN_TIME_ERROR("A0101","限值时间小于窗口"),
CALCULATE_INTERVAL_ERROR("A0101","对齐计算间隔值非法"),
RESP_RESULT_DATA_NOT_FOUND("A0101","未找到责任划分缓存数据"),
USER_DATA_P_NODE_PARAMETER_ERROR("A0101","无用采用户或所有用户的完整性均不满足条件"),
RESPONSIBILITY_PARAMETER_ERROR("A0101","调用接口程序计算失败,参数非法")
;
private final String code;
private final String message;
AdvanceResponseEnum(String code, String message) {
this.code = code;
this.message = message;
}
public static String getCodeByMsg(String msg){
for (AdvanceResponseEnum userCodeEnum : AdvanceResponseEnum.values()) {
if (userCodeEnum.message.equalsIgnoreCase(msg)) {
return userCodeEnum.code;
}
}
return "";
}
}
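The enum is meant to be surfaced through the platform's BusinessException, exactly as the controllers and services added later in this commit do; a minimal usage sketch (the respUserData lookup is illustrative):

if (Objects.isNull(respUserData)) {
    // propagates code "A0101" with message "未找到用采数据" to the caller
    throw new BusinessException(AdvanceResponseEnum.USER_DATA_NOT_FOUND);
}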

View File

@@ -0,0 +1,48 @@
package com.njcn.advance.pojo.bo.responsibility;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.io.Serializable;
import java.util.Arrays;
import java.util.List;
/**
* Caches the parameters that are needed when the per-user responsibility result is rebuilt from the dynamic responsibility data,
* e.g. harmNum, pNode, HKData, FKData, HarmData, the monitoring point's measurement interval, the least common multiple of the win window,
* and the user List<name> that each p value in FKData maps to at every time point
*
* @author hongawen
* @Date: 2019/4/29 16:06
*/
@Data
@NoArgsConstructor
@AllArgsConstructor
public class CacheQvvrData implements Serializable {
private int pNode;
private int harmNum;
private float[] harmData;
private PDataStruct[] FKdata;
private HKDataStruct[] HKdata;
private List<String> names;
private int lineInterval;
private int win;
//least common multiple of the measurement intervals
private int minMultiple;
//timestamps for the horizontal (time) axis
private List<Long> times;
}

View File

@@ -0,0 +1,37 @@
package com.njcn.advance.pojo.bo.responsibility;
import lombok.Data;
import java.io.Serializable;
import java.util.*;
/**
* Result of processing the raw customer metering (用采) data
*
* @author hongawen
* @Date: 2019/4/26 15:57
*/
@Data
public class DealDataResult implements Serializable {
/***
* outer String key: 户号@监测点号@户名 (account no. @ monitoring point no. @ customer name)
* middle String key: yyyy-MM-dd
* Date key: exact timestamp of the reading
* UserDataExcel value: reading details
* grouped first by metering terminal no., then by each day under that terminal
*/
private Map<String, Map<String, Map<Date, UserDataExcel>>> totalData = new HashMap<>();
private List<String> dates = new ArrayList<>();
/***
* outer String key: 户号@监测点号@户名 (account no. @ monitoring point no. @ customer name)
* inner String key: yyyy-MM-dd
* UserDataExcel value: reading details
* grouped first by metering terminal no., then by each day under that terminal
*/
private Map<String, Map<String, List<UserDataExcel>>> totalListData = new HashMap<>();
}
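A short sketch of how the nested totalListData map is meant to be walked, first by the 户号@监测点号@户名 key and then by day, mirroring the completeness check performed later in RespDataServiceImpl (the userDataExcels input is illustrative):

DealDataResult dealDataResult = RespUserDataServiceImpl.getStanderData(userDataExcels, 1);
Map<String, Map<String, List<UserDataExcel>>> byUser = dealDataResult.getTotalListData();
for (Map.Entry<String, Map<String, List<UserDataExcel>>> userEntry : byUser.entrySet()) {
    int readings = 0;
    for (List<UserDataExcel> day : userEntry.getValue().values()) {
        readings += day.size();
    }
    // a user is treated as complete when it has 96 readings for every covered day
}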

View File

@@ -0,0 +1,31 @@
package com.njcn.advance.pojo.bo.responsibility;
import com.njcn.advance.pojo.po.responsibility.RespUserDataIntegrity;
import lombok.Data;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
/**
* Result entity for processing the customer metering data day by day
* @author hongawen
* @Date: 2019/4/19 14:38
*/
@Data
public class DealUserDataResult implements Serializable {
//data that was processed successfully
private List<UserDataExcel> completed = new ArrayList<>();
//returned as-is because the day's completeness was below 90%
private List<UserDataExcel> lack = new ArrayList<>();
//description of the user when completeness is insufficient
private String detail;
//details of the insufficient completeness
private RespUserDataIntegrity respUserDataIntegrity;
}

View File

@@ -0,0 +1,37 @@
package com.njcn.advance.pojo.bo.responsibility;
import com.sun.jna.Structure;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.io.Serializable;
import java.util.List;
@Data
@NoArgsConstructor
public class HKDataStruct extends Structure implements Serializable {
public float hk[] = new float[QvvrStruct.MAX_P_NODE + 1];
@Override
protected List getFieldOrder() {
return null;
}
public HKDataStruct(double[] hk) {
for (int i = 0; i < hk.length; i++) {
this.hk[i] = (float) hk[i];
}
}
public static class ByReference extends HKDataStruct implements Structure.ByReference {
public ByReference(double[] p) {
super(p);
}
}
public static class ByValue extends HKDataStruct implements Structure.ByValue {
public ByValue(double[] p) {
super(p);
}
}
}

View File

@@ -0,0 +1,41 @@
package com.njcn.advance.pojo.bo.responsibility;
import com.sun.jna.Structure;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.io.Serializable;
import java.util.Arrays;
import java.util.List;
@Data
@NoArgsConstructor
public class PDataStruct extends Structure implements Serializable {
public float p[] = new float[QvvrStruct.MAX_P_NODE];
@Override
protected List getFieldOrder() {
return null;
}
public PDataStruct(double[] p) {
for (int i = 0; i < p.length; i++) {
this.p[i] = (float) p[i];
}
}
public static class ByReference extends PDataStruct implements Structure.ByReference {
public ByReference(double[] p) {
super(p);
}
}
public static class ByValue extends PDataStruct implements Structure.ByValue {
public ByValue(double[] p) {
super(p);
}
}
}

View File

@@ -0,0 +1,200 @@
package com.njcn.advance.pojo.bo.responsibility;
import com.sun.jna.Structure;
import java.io.Serializable;
import java.util.Arrays;
import java.util.List;
public class QvvrStruct extends Structure implements Serializable {
public static final int MAX_P_NODE = 200; //cap on the number of power nodes (200)
public static final int MAX_P_NUM = 96 * 100; //power data: 15-minute interval over 100 days
public static final int MAX_HARM_NUM = 1440 * 100; //harmonic data: 1-minute interval over 100 days
public static final int MAX_WIN_LEN = 96 * 10; //10 days at 15-minute intervals
public static final int MIN_WIN_LEN = 4; //1 hour at 15-minute intervals
//input parameters
public int cal_flag; //calculation flag: 0 (default) derives the correlation coefficients and responsibility from voltage and power data, 1 computes responsibility from the supplied dynamic correlation coefficients
public int harm_num; //number of harmonic data points
public int p_num; //number of power data points
public int p_node; //number of power load nodes
public int win; //data window size
public int res_num; //number of supplied responsibility data points
public float harm_mk; //harmonic voltage threshold
public float harm_data[]; //harmonic data series
public PDataStruct p_data[]; //power data series
public PDataStruct sim_data[]; //dynamic correlation coefficient series, usable as input or output
public PDataStruct FKdata[]; //dynamic harmonic responsibility series excluding background, usable as input or output
public HKDataStruct HKdata[]; //dynamic harmonic responsibility series including background, usable as input or output
public float Core[]; //canonical correlation coefficients
public float BjCore[]; //canonical correlation coefficients including background
//output results
public int cal_ok; //calculation status flag: 0 means not yet calculated, 1 means the calculation finished
public float sumFKdata[]; //harmonic responsibility excluding background
public float sumHKdata[]; //harmonic responsibility including background
public QvvrStruct() {
cal_flag = 0;
harm_data = new float[MAX_HARM_NUM];
p_data = new PDataStruct[MAX_P_NUM];
sim_data = new PDataStruct[MAX_P_NUM];
FKdata = new PDataStruct[MAX_P_NUM];
HKdata = new HKDataStruct[MAX_P_NUM];
Core = new float[MAX_P_NUM];
BjCore = new float[MAX_P_NUM];
sumFKdata = new float[MAX_P_NODE];
sumHKdata = new float[MAX_P_NODE + 1];
}
public static class ByReference extends QvvrStruct implements Structure.ByReference {
}
public static class ByValue extends QvvrStruct implements Structure.ByValue {
}
public PDataStruct[] getFKdata() {
return FKdata;
}
public void setFKdata(PDataStruct[] FKdata) {
this.FKdata = FKdata;
}
public HKDataStruct[] getHKdata() {
return HKdata;
}
public void setHKdata(HKDataStruct[] HKdata) {
this.HKdata = HKdata;
}
public float[] getSumFKdata() {
return sumFKdata;
}
public void setSumFKdata(float[] sumFKdata) {
this.sumFKdata = sumFKdata;
}
public float[] getSumHKdata() {
return sumHKdata;
}
public void setSumHKdata(float[] sumHKdata) {
this.sumHKdata = sumHKdata;
}
public int getCal_flag() {
return cal_flag;
}
public void setCal_flag(int cal_flag) {
this.cal_flag = cal_flag;
}
public int getHarm_num() {
return harm_num;
}
public void setHarm_num(int harm_num) {
this.harm_num = harm_num;
}
public float getHarm_mk() {
return harm_mk;
}
public void setHarm_mk(float harm_mk) {
this.harm_mk = harm_mk;
}
public float[] getHarm_data() {
return harm_data;
}
public void setHarm_data(float[] harm_data) {
this.harm_data = harm_data;
}
public float[] getCore() {
return Core;
}
public void setCore(float[] core) {
Core = core;
}
public float[] getBjCore() {
return BjCore;
}
public void setBjCore(float[] bjCore) {
BjCore = bjCore;
}
public int getCal_ok() {
return cal_ok;
}
public void setCal_ok(int cal_ok) {
this.cal_ok = cal_ok;
}
public int getP_num() {
return p_num;
}
public void setP_num(int p_num) {
this.p_num = p_num;
}
public int getP_node() {
return p_node;
}
public void setP_node(int p_node) {
this.p_node = p_node;
}
public int getWin() {
return win;
}
public void setWin(int win) {
this.win = win;
}
public int getRes_num() {
return res_num;
}
public void setRes_num(int res_num) {
this.res_num = res_num;
}
public PDataStruct[] getP_data() {
return p_data;
}
public void setP_data(PDataStruct[] p_data) {
this.p_data = p_data;
}
public PDataStruct[] getSim_data() {
return sim_data;
}
public void setSim_data(PDataStruct[] sim_data) {
this.sim_data = sim_data;
}
@Override
protected List getFieldOrder() {
return Arrays.asList(new String[]{"sumFKdata", "sumHKdata"});
}
}
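Read together, the field comments describe the calling convention for the native library: fill the input block, invoke harm_response, then check cal_ok before reading the sum arrays. A condensed sketch of the cal_flag = 0 path used later in this commit (pNode, pNum, win, pData and harmData are assumed to be prepared beforehand):

QvvrStruct qvvrStruct = new QvvrStruct();
qvvrStruct.cal_flag = 0;      // derive correlation and responsibility from voltage and power data
qvvrStruct.p_node = pNode;    // number of participating users
qvvrStruct.p_num = pNum;      // power samples per user
qvvrStruct.harm_num = pNum;   // harmonic samples, kept aligned with the power series
qvvrStruct.win = win;         // sliding-window length
qvvrStruct.harm_mk = 0f;      // harmonic voltage threshold
qvvrStruct.p_data = pData;
qvvrStruct.harm_data = harmData;
JnaLibrary.INSTANCE.harm_response(qvvrStruct);
if (qvvrStruct.cal_ok == 0) {
    throw new BusinessException(AdvanceResponseEnum.RESPONSIBILITY_PARAMETER_ERROR);
}
float[] responsibilityWithoutBackground = qvvrStruct.getSumFKdata();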

View File

@@ -0,0 +1,47 @@
package com.njcn.advance.pojo.bo.responsibility;
import cn.afterturn.easypoi.excel.annotation.Excel;
import cn.hutool.core.date.DatePattern;
import cn.hutool.core.date.DateUtil;
import lombok.Data;
import java.io.Serializable;
import java.math.BigDecimal;
import java.util.Date;
/**
* Entity used to read customer metering (用采) data from, or write it into, an Excel sheet
*
* @author hongawen
* @date 2019/4/11 10:43
*/
@Data
public class UserDataExcel implements Serializable, Comparable<UserDataExcel> {
@Excel(name = "时间")
private String time;
@Excel(name = "瞬时功率")
private BigDecimal work;
@Excel(name = "户号")
private String userId;
@Excel(name = "测量点局号")
private String line;
@Excel(name = "户名")
private String userName;
@Override
public int compareTo(UserDataExcel o) {
long thisTime = DateUtil.parse(this.time, DatePattern.NORM_DATETIME_PATTERN).getTime();
long otherTime = DateUtil.parse(o.getTime(), DatePattern.NORM_DATETIME_PATTERN).getTime();
return Long.compare(thisTime, otherTime);
}
}
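Because the class orders itself by the parsed 时间 value, a user's readings can be put into chronological order with a plain sort before the power series is assembled, as the service does later (dayReadings is illustrative):

List<UserDataExcel> readings = new ArrayList<>(dayReadings);
Collections.sort(readings); // ascending by timestamp, oldest reading first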

View File

@@ -0,0 +1,27 @@
package com.njcn.advance.pojo.dto.responsibility;
import lombok.Data;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
/**
* @author hongawen
* @Date: 2019/4/3 13:34
*/
@Data
public class CustomerData implements Serializable {
/***
* customer name
*/
private String customerName;
/***
* data value at each time point
*/
private List<Float> valueDatas=new ArrayList<>();
}

View File

@@ -0,0 +1,28 @@
package com.njcn.advance.pojo.dto.responsibility;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.io.Serializable;
/**
* @author hongawen
* @Date: 2019/4/3 13:35
*/
@Data
@NoArgsConstructor
@AllArgsConstructor
public class CustomerResponsibility implements Serializable {
/***
* customer name
*/
private String customerName;
/***
* responsibility value
*/
private float responsibilityData;
}

View File

@@ -0,0 +1,29 @@
package com.njcn.advance.pojo.dto.responsibility;
import com.njcn.advance.pojo.po.responsibility.RespData;
import lombok.Data;
import java.io.Serializable;
/**
* @author hongawen
* @version 1.0.0
* @date 2023年07月24日 17:49
*/
@Data
public class RespDataDTO extends RespData implements Serializable {
private String userDataName;
private String gdName;
private String subName;
private String devName;
private String ip;
private String lineName;
}

View File

@@ -0,0 +1,53 @@
package com.njcn.advance.pojo.dto.responsibility;
import lombok.Data;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
/**
* Final harmonic responsibility quantification result, containing both the dynamic data and the quantified responsibility values
*
* @author hongawen
* @Date: 2019/4/3 15:00
*/
@Data
public class ResponsibilityResult implements Serializable {
/***
* limit value
*/
private String limitValue;
/***
* specified start time
*/
private String limitSTime;
/***
* specified end time
*/
private String limitETime;
/***
* database index of the stored responsibility-division result
*/
private String responsibilityDataIndex;
/***
* per-user responsibility data for every time point
*/
private List<CustomerData> datas;
/***
* time axis
*/
private List<Long> timeDatas=new ArrayList<>();
/***
* tabular per-user responsibility data
*/
private List<CustomerResponsibility> responsibilities;
}

View File

@@ -0,0 +1,50 @@
package com.njcn.advance.pojo.param;
import com.njcn.common.pojo.constant.PatternRegex;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import javax.validation.constraints.Max;
import javax.validation.constraints.Min;
import javax.validation.constraints.NotBlank;
import javax.validation.constraints.Pattern;
import java.io.Serializable;
/**
* @author hongawen
* @version 1.0.0
* @date 2023年07月21日 10:20
*/
@Data
public class ResponsibilityCalculateParam implements Serializable {
@ApiModelProperty("开始时间")
@NotBlank(message = "参数不能为空")
@Pattern(regexp = PatternRegex.TIME_FORMAT, message = "时间格式错误")
private String searchBeginTime;
@ApiModelProperty("结束时间")
@NotBlank(message = "参数不能为空")
@Pattern(regexp = PatternRegex.TIME_FORMAT, message = "时间格式错误")
private String searchEndTime;
@NotBlank(message = "参数不能为空")
@ApiModelProperty("监测点索引")
private String lineId;
@NotBlank(message = "参数不能为空")
@ApiModelProperty("用采数据索引")
private String userDataId;
@Min(0)
@Max(1)
@ApiModelProperty("0-电流 1-电压")
private int type;
@Min(2)
@Max(50)
@ApiModelProperty("谐波次数")
private Integer time;
}

View File

@@ -0,0 +1,46 @@
package com.njcn.advance.pojo.param;
import com.njcn.common.pojo.constant.PatternRegex;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import javax.validation.constraints.Max;
import javax.validation.constraints.Min;
import javax.validation.constraints.NotBlank;
import javax.validation.constraints.Pattern;
import java.io.Serializable;
/**
* @author hongawen
* @version 1.0.0
* @date 2023年07月24日 15:47
*/
@Data
public class ResponsibilitySecondCalParam implements Serializable {
@NotBlank(message = "参数不能为空")
@ApiModelProperty("责任数据索引")
private String resDataId;
@Min(2)
@Max(50)
@ApiModelProperty("谐波次数")
private Integer time;
@Min(0)
@Max(1)
@ApiModelProperty("0-电流 1-电压")
private int type;
@ApiModelProperty("限值")
private float limitValue;
@ApiModelProperty("开始时间(yyyy-MM-dd HH:mm:ss)")
@NotBlank(message = "参数不能为空")
private String limitStartTime;
@ApiModelProperty("结束时间(yyyy-MM-dd HH:mm:ss)")
@NotBlank(message = "参数不能为空")
private String limitEndTime;
}

View File

@@ -0,0 +1,19 @@
package com.njcn.advance.pojo.param;
import com.njcn.web.pojo.param.BaseParam;
import lombok.Data;
import java.io.Serializable;
/**
* @author hongawen
* @version 1.0.0
* @date 2023年07月25日 14:14
*/
@Data
public class UserDataIntegrityParam extends BaseParam implements Serializable {
private String userDataId;
}

View File

@@ -0,0 +1,57 @@
package com.njcn.advance.pojo.po.responsibility;
import com.baomidou.mybatisplus.annotation.TableName;
import com.njcn.db.bo.BaseEntity;
import java.io.Serializable;
import java.time.LocalDateTime;
import lombok.Getter;
import lombok.Setter;
/**
*
* @author hongawen
* @since 2023-07-21
*/
@Getter
@Setter
@TableName("pqs_resp_data")
public class RespData extends BaseEntity {
private static final long serialVersionUID = 1L;
/**
* 责任量化数据结果
*/
private String id;
/**
* 监测点索引
*/
private String lineId;
/**
* 用采数据索引
*/
private String userDataId;
/**
* 谐波类型(谐波电压、谐波电流)
*/
private String dataType;
/**
* 谐波次数
*/
private String dataTimes;
/**
* 计算的时间窗口
*/
private String timeWindow;
/**
* 状态0 删除 1正常
*/
private Integer state;
}

View File

@@ -0,0 +1,82 @@
package com.njcn.advance.pojo.po.responsibility;
import com.baomidou.mybatisplus.annotation.TableName;
import com.njcn.db.bo.BaseEntity;
import java.io.Serializable;
import java.time.LocalDateTime;
import java.util.Date;
import lombok.Getter;
import lombok.Setter;
/**
* <p>
*
* </p>
*
* @author hongawen
* @since 2023-07-24
*/
@Getter
@Setter
@TableName("pqs_resp_data_result")
public class RespDataResult extends BaseEntity {
private static final long serialVersionUID = 1L;
/**
* 责任划分结果数据文件保存记录表
*/
private String id;
/**
* 责任划分结果表id
*/
private String resDataId;
/**
* 限值
*/
private Float limitValue;
/***
* 起始时间
*/
private Date startTime;
/***
* 结束时间
*/
private Date endTime;
/**
* 谐波次数
*/
private Integer time;
/**
* 用户责任数据地址
*/
private String userDetailData;
/**
* 用户责任时间数据地址
*/
private String timeData;
/**
* 前10用户的每刻对应的责任数据地址
*/
private String userResponsibility;
/**
* 调用高级算法后的数据结果地址,提供二次计算
*/
private String qvvrData;
/**
* 状态0 删除 1正常
*/
private Integer state;
}

View File

@@ -0,0 +1,60 @@
package com.njcn.advance.pojo.po.responsibility;
import com.baomidou.mybatisplus.annotation.TableName;
import com.njcn.db.bo.BaseEntity;
import java.io.Serializable;
import java.time.LocalDate;
import java.time.LocalDateTime;
import lombok.Getter;
import lombok.Setter;
/**
*
* @author hongawen
* @since 2023-07-13
*/
@Getter
@Setter
@TableName("pqs_resp_user_data")
public class RespUserData extends BaseEntity {
private static final long serialVersionUID = 1L;
/**
* 用采数据表id
*/
private String id;
/**
* 用采数据名称
*/
private String name;
/**
* 起始日期
*/
private LocalDate startTime;
/**
* 截止日期
*/
private LocalDate endTime;
/**
* 0: some of the data is incomplete; 1: the data is complete
*/
private Integer integrity = 1;
/**
* 用采数据存放地址
*/
private String dataPath;
/**
* 状态0 删除 1正常
*/
private Integer state;
}

View File

@@ -0,0 +1,64 @@
package com.njcn.advance.pojo.po.responsibility;
import com.baomidou.mybatisplus.annotation.TableName;
import com.njcn.db.bo.BaseEntity;
import java.io.Serializable;
import java.math.BigDecimal;
import java.time.LocalDate;
import java.time.LocalDateTime;
import lombok.Getter;
import lombok.Setter;
/**
*
* @author hongawen
* @since 2023-07-13
*/
@Getter
@Setter
@TableName("pqs_resp_user_data_integrity")
public class RespUserDataIntegrity extends BaseEntity {
private static final long serialVersionUID = 1L;
/**
* 用采数据完整不足表Id
*/
private String id;
/**
* 用采数据表id
*/
private String userDataId;
/**
* 用户名称
*/
private String userName;
/**
* 用户户号
*/
private String userNo;
/**
* 测量点局号
*/
private String lineNo;
/**
* 数据不完整的日期
*/
private LocalDate lackDate;
/**
* completeness rate (recorded only when below 90%)
*/
private BigDecimal integrity;
/**
* 状态0 删除 1正常
*/
private Integer state;
}

View File

@@ -0,0 +1,95 @@
package com.njcn.advance.controller.responsibility;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.njcn.advance.pojo.dto.responsibility.RespDataDTO;
import com.njcn.advance.pojo.dto.responsibility.ResponsibilityResult;
import com.njcn.advance.pojo.param.ResponsibilityCalculateParam;
import com.njcn.advance.pojo.param.ResponsibilitySecondCalParam;
import com.njcn.advance.service.responsibility.IRespDataResultService;
import com.njcn.advance.service.responsibility.IRespDataService;
import com.njcn.common.pojo.annotation.OperateInfo;
import com.njcn.common.pojo.enums.common.LogEnum;
import com.njcn.common.pojo.enums.response.CommonResponseEnum;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.common.utils.HttpResultUtil;
import com.njcn.web.controller.BaseController;
import com.njcn.web.pojo.param.BaseParam;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiOperation;
import lombok.RequiredArgsConstructor;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.*;
import java.util.List;
/**
* @author hongawen
* @version 1.0.0
* @date 2023年07月21日 10:06
*/
@RestController
@RequestMapping("responsibility")
@Api(tags = "谐波责任划分-谐波责任数据处理")
@RequiredArgsConstructor
public class ResponsibilityController extends BaseController {
private final IRespDataService respDataService;
private final IRespDataResultService respDataResultService;
@OperateInfo(info = LogEnum.BUSINESS_COMMON)
@PostMapping("/responsibilityList")
@ApiOperation("列表分页")
@ApiImplicitParam(name = "queryParam", value = "查询参数", required = true)
public HttpResult<Page<RespDataDTO>> responsibilityList(@RequestBody @Validated BaseParam queryParam) {
String methodDescribe = getMethodDescribe("responsibilityList");
Page<RespDataDTO> list = respDataService.responsibilityList(queryParam);
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, list, methodDescribe);
}
@OperateInfo(info = LogEnum.BUSINESS_COMMON)
@PostMapping("/deleteByIds")
@ApiOperation("删除责任划分数据")
@ApiImplicitParam(name = "ids", value = "待删除的责任id集合", required = true)
public HttpResult<Page<RespDataDTO>> deleteByIds(@RequestBody List<String> ids) {
String methodDescribe = getMethodDescribe("deleteByIds");
respDataService.deleteByIds(ids);
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, null, methodDescribe);
}
@PostMapping("getDynamicData")
@OperateInfo(info = LogEnum.BUSINESS_COMMON)
@ApiOperation("动态谐波责任划分")
@ApiImplicitParam(name = "responsibilityCalculateParam", value = "谐波责任动态划分参数", required = true)
public HttpResult<ResponsibilityResult> getDynamicData(@RequestBody @Validated ResponsibilityCalculateParam responsibilityCalculateParam) {
String methodDescribe = getMethodDescribe("getDynamicData");
ResponsibilityResult datas = respDataService.getDynamicData(responsibilityCalculateParam);
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, datas, methodDescribe);
}
@PostMapping("getResponsibilityData")
@OperateInfo(info = LogEnum.BUSINESS_COMMON)
@ApiOperation("二次计算责任划分")
@ApiImplicitParam(name = "responsibilitySecondCalParam", value = "二次计算责任划分参数", required = true)
public HttpResult<ResponsibilityResult> getResponsibilityData(@RequestBody @Validated ResponsibilitySecondCalParam responsibilitySecondCalParam) {
String methodDescribe = getMethodDescribe("getResponsibilityData");
ResponsibilityResult datas = respDataService.getResponsibilityData(responsibilitySecondCalParam);
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, datas, methodDescribe);
}
@GetMapping("displayHistoryData")
@OperateInfo(info = LogEnum.BUSINESS_COMMON)
@ApiOperation("回显历史责任划分结果")
@ApiImplicitParam(name = "id", value = "责任数据id", required = true)
public HttpResult<List<ResponsibilityResult>> displayHistoryData(String id,Integer time) {
String methodDescribe = getMethodDescribe("displayHistoryData");
List<ResponsibilityResult> datas = respDataResultService.displayHistoryData(id,time);
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, datas, methodDescribe);
}
}

View File

@@ -0,0 +1,112 @@
package com.njcn.advance.controller.responsibility;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.njcn.advance.pojo.dto.responsibility.RespDataDTO;
import com.njcn.advance.pojo.param.UserDataIntegrityParam;
import com.njcn.advance.pojo.po.responsibility.RespUserData;
import com.njcn.advance.pojo.po.responsibility.RespUserDataIntegrity;
import com.njcn.advance.service.responsibility.IRespUserDataIntegrityService;
import com.njcn.advance.service.responsibility.IRespUserDataService;
import com.njcn.common.pojo.annotation.OperateInfo;
import com.njcn.common.pojo.dto.SelectOption;
import com.njcn.common.pojo.enums.common.LogEnum;
import com.njcn.common.pojo.enums.response.CommonResponseEnum;
import com.njcn.common.pojo.exception.BusinessException;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.common.utils.HttpResultUtil;
import com.njcn.web.controller.BaseController;
import com.njcn.web.pojo.param.BaseParam;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import lombok.RequiredArgsConstructor;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.multipart.MultipartFile;
import javax.servlet.http.HttpServletResponse;
import java.util.List;
/**
* @author hongawen
* @version 1.0.0
* @date 2023年07月13日 14:11
*/
@RestController
@RequestMapping("responsibility")
@Api(tags = "谐波责任划分-用采数据处理")
@RequiredArgsConstructor
public class UserDataController extends BaseController {
private final IRespUserDataService respUserDataService;
private final IRespUserDataIntegrityService respUserDataIntegrityService;
@OperateInfo(info = LogEnum.BUSINESS_COMMON)
@PostMapping("/userDataList")
@ApiOperation("列表分页")
@ApiImplicitParam(name = "queryParam", value = "查询参数", required = true)
public HttpResult<Page<RespUserData>> userDataList(@RequestBody @Validated BaseParam queryParam) {
String methodDescribe = getMethodDescribe("userDataList");
Page<RespUserData> list = respUserDataService.userDataList(queryParam);
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, list, methodDescribe);
}
@OperateInfo(info = LogEnum.BUSINESS_COMMON)
@GetMapping("/userDataIntegrityList")
@ApiOperation("用采完整性不足列表")
@ApiImplicitParam(name = "userDataIntegrityParam", value = "查询参数", required = true)
public HttpResult<Page<RespUserDataIntegrity>> userDataIntegrityList(@RequestBody @Validated UserDataIntegrityParam userDataIntegrityParam) {
String methodDescribe = getMethodDescribe("userDataIntegrityList");
Page<RespUserDataIntegrity> list = respUserDataIntegrityService.userDataIntegrityList(userDataIntegrityParam);
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, list, methodDescribe);
}
@OperateInfo(info = LogEnum.BUSINESS_COMMON)
@PostMapping("/deleteUserDataByIds")
@ApiOperation("删除用采数据")
@ApiImplicitParam(name = "ids", value = "待删除用采数据id集合", required = true)
public HttpResult<Page<RespDataDTO>> deleteUserDataByIds(@RequestBody List<String> ids) {
String methodDescribe = getMethodDescribe("deleteUserDataByIds");
respUserDataService.deleteUserDataByIds(ids);
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, null, methodDescribe);
}
@OperateInfo(info = LogEnum.BUSINESS_COMMON)
@GetMapping("/userDataSelect")
@ApiOperation("用采数据下拉")
public HttpResult<List<SelectOption>> userDataSelect() {
String methodDescribe = getMethodDescribe("userDataSelect");
List<SelectOption> listOption = respUserDataService.userDataSelect();
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, listOption, methodDescribe);
}
/**
* 上传用采数据,并对用采数据进行数据分析并缓存
*
* @param file 上传的表格
*/
@PostMapping("uploadUserData")
@OperateInfo(info = LogEnum.BUSINESS_COMMON)
@ApiOperation("上传用采数据")
public HttpResult<Object> uploadUserData(@ApiParam(value = "文件", required = true) @RequestPart("file") MultipartFile file, HttpServletResponse response) {
String methodDescribe = getMethodDescribe("uploadUserData");
String fileName = file.getOriginalFilename();
long fileSize = file.getSize() / 1024;
//reject files larger than 3 MB (fileSize is in KB here)
if (fileSize > 3072) {
throw new BusinessException(CommonResponseEnum.FILE_SIZE_ERROR);
}
if (!fileName.matches("^.+\\.(?i)(xlsx)$") && !fileName.matches("^.+\\.(?i)(xls)$")) {
throw new BusinessException(CommonResponseEnum.FILE_XLSX_ERROR);
}
respUserDataService.uploadUserData(file, response);
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, null, methodDescribe);
}
}

View File

@@ -0,0 +1,25 @@
package com.njcn.advance.mapper.responsibility;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.njcn.advance.pojo.dto.responsibility.RespDataDTO;
import com.njcn.advance.pojo.po.responsibility.RespData;
import org.apache.ibatis.annotations.Param;
import java.util.List;
/**
* <p>
* Mapper 接口
* </p>
*
* @author hongawen
* @since 2023-07-21
*/
public interface RespDataMapper extends BaseMapper<RespData> {
Page<RespDataDTO> page(@Param("page") Page<Object> objectPage, @Param("ew")QueryWrapper<RespDataDTO> queryWrapper);
void deleteByIds(@Param("ids") List<String> ids);
}

View File

@@ -0,0 +1,16 @@
package com.njcn.advance.mapper.responsibility;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.njcn.advance.pojo.po.responsibility.RespDataResult;
/**
* <p>
* Mapper 接口
* </p>
*
* @author hongawen
* @since 2023-07-24
*/
public interface RespDataResultMapper extends BaseMapper<RespDataResult> {
}

View File

@@ -0,0 +1,21 @@
package com.njcn.advance.mapper.responsibility;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.njcn.advance.pojo.po.responsibility.RespUserDataIntegrity;
import org.apache.ibatis.annotations.Param;
/**
* <p>
* Mapper 接口
* </p>
*
* @author hongawen
* @since 2023-07-13
*/
public interface RespUserDataIntegrityMapper extends BaseMapper<RespUserDataIntegrity> {
Page<RespUserDataIntegrity> page(@Param("page") Page<Object> objectPage, @Param("ew") QueryWrapper<RespUserDataIntegrity> lambdaQueryWrapper);
}

View File

@@ -0,0 +1,24 @@
package com.njcn.advance.mapper.responsibility;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.njcn.advance.pojo.po.responsibility.RespUserData;
import org.apache.ibatis.annotations.Param;
import java.util.List;
/**
* <p>
* Mapper 接口
* </p>
*
* @author hongawen
* @since 2023-07-13
*/
public interface RespUserDataMapper extends BaseMapper<RespUserData> {
Page<RespUserData> page(@Param("page")Page<Object> objectPage, @Param("ew")QueryWrapper<RespUserData> respUserDataQueryWrapper);
void deleteUserDataByIds(@Param("ids") List<String> ids);
}

View File

@@ -0,0 +1,26 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.njcn.advance.mapper.responsibility.RespDataMapper">
<select id="page" resultType="RespDataDTO">
SELECT pqs_resp_data.*,
pqs_resp_user_data.name userDataName
FROM pqs_resp_data pqs_resp_data
,pqs_resp_user_data pqs_resp_user_data
WHERE pqs_resp_data.user_data_id = pqs_resp_user_data.id
AND ${ew.sqlSegment}
</select>
<update id="deleteByIds" >
update
pqs_resp_data
set state = 0
where
id
in
<foreach collection="ids" item="item" separator="," open="(" close=")">
#{item}
</foreach>
</update>
</mapper>
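The ${ew.sqlSegment} placeholder above is expanded from the QueryWrapper built in the service layer: the wrapper contributes only the predicates and ORDER BY, while the join stays in this XML. A trimmed sketch of the call site, mirroring RespDataServiceImpl further down (the mapper handle and searchValue are illustrative):

QueryWrapper<RespDataDTO> queryWrapper = new QueryWrapper<>();
queryWrapper.like("pqs_resp_user_data.name", searchValue); // rendered into ${ew.sqlSegment}
queryWrapper.eq("pqs_resp_data.state", DataStateEnum.ENABLE.getCode());
Page<RespDataDTO> page = respDataMapper.page(new Page<>(1, 10), queryWrapper);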

View File

@@ -0,0 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.njcn.advance.mapper.responsibility.RespDataResultMapper">
</mapper>

View File

@@ -0,0 +1,11 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.njcn.advance.mapper.responsibility.RespUserDataIntegrityMapper">
<select id="page" resultType="RespUserDataIntegrity">
SELECT pqs_resp_user_data_integrity.*
FROM pqs_resp_user_data_integrity pqs_resp_user_data_integrity
WHERE ${ew.sqlSegment}
</select>
</mapper>

View File

@@ -0,0 +1,24 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.njcn.advance.mapper.responsibility.RespUserDataMapper">
<select id="page" resultType="RespUserData">
SELECT pqs_resp_user_data.*
FROM pqs_resp_user_data pqs_resp_user_data
WHERE ${ew.sqlSegment}
</select>
<update id="deleteUserDataByIds" >
update
pqs_resp_user_data
set state = 0
where
id
in
<foreach collection="ids" item="item" separator="," open="(" close=")">
#{item}
</foreach>
</update>
</mapper>

View File

@@ -0,0 +1,20 @@
package com.njcn.advance.service.responsibility;
import com.baomidou.mybatisplus.extension.service.IService;
import com.njcn.advance.pojo.dto.responsibility.ResponsibilityResult;
import com.njcn.advance.pojo.po.responsibility.RespDataResult;
import java.util.List;
/**
* <p>
* 服务类
* </p>
*
* @author hongawen
* @since 2023-07-24
*/
public interface IRespDataResultService extends IService<RespDataResult> {
List<ResponsibilityResult> displayHistoryData(String id, Integer time);
}

View File

@@ -0,0 +1,32 @@
package com.njcn.advance.service.responsibility;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.baomidou.mybatisplus.extension.service.IService;
import com.njcn.advance.pojo.dto.responsibility.RespDataDTO;
import com.njcn.advance.pojo.dto.responsibility.ResponsibilityResult;
import com.njcn.advance.pojo.param.ResponsibilityCalculateParam;
import com.njcn.advance.pojo.param.ResponsibilitySecondCalParam;
import com.njcn.advance.pojo.po.responsibility.RespData;
import com.njcn.web.pojo.param.BaseParam;
import java.util.List;
/**
* <p>
* 服务类
* </p>
*
* @author hongawen
* @since 2023-07-21
*/
public interface IRespDataService extends IService<RespData> {
ResponsibilityResult getDynamicData(ResponsibilityCalculateParam responsibilityCalculateParam);
ResponsibilityResult getResponsibilityData(ResponsibilitySecondCalParam responsibilitySecondCalParam);
Page<RespDataDTO> responsibilityList(BaseParam queryParam);
void deleteByIds(List<String> ids);
}

View File

@@ -0,0 +1,19 @@
package com.njcn.advance.service.responsibility;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.baomidou.mybatisplus.extension.service.IService;
import com.njcn.advance.pojo.param.UserDataIntegrityParam;
import com.njcn.advance.pojo.po.responsibility.RespUserDataIntegrity;
/**
* <p>
* 服务类
* </p>
*
* @author hongawen
* @since 2023-07-13
*/
public interface IRespUserDataIntegrityService extends IService<RespUserDataIntegrity> {
Page<RespUserDataIntegrity> userDataIntegrityList(UserDataIntegrityParam userDataIntegrityParam);
}

View File

@@ -0,0 +1,36 @@
package com.njcn.advance.service.responsibility;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.baomidou.mybatisplus.extension.service.IService;
import com.njcn.advance.pojo.po.responsibility.RespUserData;
import com.njcn.common.pojo.dto.SelectOption;
import com.njcn.web.pojo.param.BaseParam;
import org.springframework.web.multipart.MultipartFile;
import javax.servlet.http.HttpServletResponse;
import java.util.List;
/**
* <p>
* 服务类
* </p>
*
* @author hongawen
* @since 2023-07-13
*/
public interface IRespUserDataService extends IService<RespUserData> {
/**
* 解析用采数据并保存
* @author hongawen
* @date 2023/7/13 19:48
* @param file 用采数据
*/
void uploadUserData(MultipartFile file, HttpServletResponse response);
Page<RespUserData> userDataList(BaseParam queryParam);
List<SelectOption> userDataSelect();
void deleteUserDataByIds(List<String> ids);
}

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,21 @@
package com.njcn.advance.service.responsibility.impl;
import com.njcn.advance.pojo.bo.responsibility.QvvrStruct;
import com.sun.jna.Library;
import com.sun.jna.Native;
/**
* @author hongawen
* @version 1.0.0
* @date 2021年07月14日 16:17
*/
public interface JnaLibrary extends Library {
JnaLibrary INSTANCE = (JnaLibrary)
Native.loadLibrary(JnaLibrary.class.getResource("/harm_response.dll")
.getPath()
.substring(1),// substring(1) because on Windows the resource path carries a leading slash (e.g. /C:/...), which is not the case on Linux
JnaLibrary.class);
void harm_response(QvvrStruct outData);
}
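The substring(1) above strips the leading slash that URL.getPath() yields on Windows. A platform-neutral sketch of the same resolution via java.nio, under the same assumption that the DLL sits unpacked on disk (not part of this commit):

// resolve the bundled native library to a filesystem path without manual slash handling
static String nativeLibraryPath() throws java.net.URISyntaxException {
    return java.nio.file.Paths.get(
            JnaLibrary.class.getResource("/harm_response.dll").toURI()).toString();
}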

View File

@@ -0,0 +1,90 @@
package com.njcn.advance.service.responsibility.impl;
import cn.hutool.core.collection.CollectionUtil;
import cn.hutool.core.date.DatePattern;
import cn.hutool.core.date.DateUtil;
import cn.hutool.core.io.IoUtil;
import cn.hutool.core.text.StrPool;
import cn.hutool.core.util.CharsetUtil;
import com.alibaba.fastjson.JSONArray;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.njcn.advance.mapper.responsibility.RespDataResultMapper;
import com.njcn.advance.pojo.bo.responsibility.UserDataExcel;
import com.njcn.advance.pojo.dto.responsibility.CustomerData;
import com.njcn.advance.pojo.dto.responsibility.CustomerResponsibility;
import com.njcn.advance.pojo.dto.responsibility.ResponsibilityResult;
import com.njcn.advance.pojo.po.responsibility.RespData;
import com.njcn.advance.pojo.po.responsibility.RespDataResult;
import com.njcn.advance.service.responsibility.IRespDataResultService;
import com.njcn.advance.service.responsibility.IRespDataService;
import com.njcn.oss.utils.FileStorageUtil;
import lombok.RequiredArgsConstructor;
import org.springframework.context.annotation.Lazy;
import org.springframework.stereotype.Service;
import javax.annotation.Resource;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
/**
* <p>
* 服务实现类
* </p>
*
* @author hongawen
* @since 2023-07-24
*/
@Service
public class RespDataResultServiceImpl extends ServiceImpl<RespDataResultMapper, RespDataResult> implements IRespDataResultService {
@Resource
private FileStorageUtil fileStorageUtil;
@Lazy
@Resource
private IRespDataService respDataService;
@Override
public List<ResponsibilityResult> displayHistoryData(String id, Integer time) {
List<ResponsibilityResult> responsibilityResults = new ArrayList<>();
if (Objects.isNull(time)) {
RespData respData = respDataService.getById(id);
String[] split = respData.getDataTimes().split(StrPool.COMMA);
time = Integer.parseInt(split[0]);
}
LambdaQueryWrapper<RespDataResult> respDataResultLambdaQueryWrapper = new LambdaQueryWrapper<>();
respDataResultLambdaQueryWrapper.eq(RespDataResult::getResDataId, id)
.eq(RespDataResult::getTime, time);
List<RespDataResult> respDataResults = this.baseMapper.selectList(respDataResultLambdaQueryWrapper);
if (CollectionUtil.isNotEmpty(respDataResults)) {
ResponsibilityResult responsibilityResult;
for (RespDataResult respDataResult : respDataResults) {
responsibilityResult = new ResponsibilityResult();
responsibilityResult.setLimitValue(String.valueOf(respDataResult.getLimitValue()));
responsibilityResult.setLimitSTime(DateUtil.format(respDataResult.getStartTime(), DatePattern.NORM_DATETIME_PATTERN));
responsibilityResult.setLimitETime(DateUtil.format(respDataResult.getEndTime(), DatePattern.NORM_DATETIME_PATTERN));
responsibilityResult.setResponsibilityDataIndex(respDataResult.getResDataId());
//处理时间轴数据
InputStream timeDataStream = fileStorageUtil.getFileStream(respDataResult.getTimeData());
String timeDataStr = IoUtil.read(timeDataStream, CharsetUtil.UTF_8);
List<Long> timeData = JSONArray.parseArray(timeDataStr, Long.class);
responsibilityResult.setTimeDatas(timeData);
//处理用户详细数据
InputStream userDetailStream = fileStorageUtil.getFileStream(respDataResult.getUserDetailData());
String userDetailStr = IoUtil.read(userDetailStream, CharsetUtil.UTF_8);
List<CustomerData> customerData = JSONArray.parseArray(userDetailStr, CustomerData.class);
responsibilityResult.setDatas(customerData);
//处理排名前10数据
InputStream respStream = fileStorageUtil.getFileStream(respDataResult.getUserResponsibility());
String respStr = IoUtil.read(respStream, CharsetUtil.UTF_8);
List<CustomerResponsibility> respData = JSONArray.parseArray(respStr, CustomerResponsibility.class);
responsibilityResult.setResponsibilities(respData);
responsibilityResults.add(responsibilityResult);
}
}
return responsibilityResults;
}
}

View File

@@ -0,0 +1,876 @@
package com.njcn.advance.service.responsibility.impl;
import cn.hutool.core.bean.BeanUtil;
import cn.hutool.core.collection.CollectionUtil;
import cn.hutool.core.date.DatePattern;
import cn.hutool.core.date.DateUtil;
import cn.hutool.core.io.IoUtil;
import cn.hutool.core.text.StrPool;
import cn.hutool.core.util.CharsetUtil;
import cn.hutool.core.util.ObjectUtil;
import cn.hutool.core.util.StrUtil;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.njcn.advance.enums.AdvanceResponseEnum;
import com.njcn.advance.mapper.responsibility.RespDataMapper;
import com.njcn.advance.pojo.bo.responsibility.*;
import com.njcn.advance.pojo.dto.responsibility.CustomerData;
import com.njcn.advance.pojo.dto.responsibility.CustomerResponsibility;
import com.njcn.advance.pojo.dto.responsibility.RespDataDTO;
import com.njcn.advance.pojo.dto.responsibility.ResponsibilityResult;
import com.njcn.advance.pojo.param.ResponsibilityCalculateParam;
import com.njcn.advance.pojo.param.ResponsibilitySecondCalParam;
import com.njcn.advance.pojo.po.responsibility.RespData;
import com.njcn.advance.pojo.po.responsibility.RespDataResult;
import com.njcn.advance.pojo.po.responsibility.RespUserData;
import com.njcn.advance.service.responsibility.IRespDataResultService;
import com.njcn.advance.service.responsibility.IRespDataService;
import com.njcn.advance.service.responsibility.IRespUserDataService;
import com.njcn.advance.utils.JnaCallBalance;
import com.njcn.advance.utils.JnaCallDllOrSo;
import com.njcn.advance.utils.ResponsibilityCallDllOrSo;
import com.njcn.common.pojo.enums.common.DataStateEnum;
import com.njcn.common.pojo.exception.BusinessException;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.common.utils.FileUtil;
import com.njcn.common.utils.PubUtils;
import com.njcn.db.constant.DbConstant;
import com.njcn.device.biz.pojo.po.Overlimit;
import com.njcn.device.pq.api.LineFeignClient;
import com.njcn.device.pq.pojo.vo.LineDetailDataVO;
import com.njcn.device.pq.pojo.vo.LineDetailVO;
import com.njcn.harmonic.api.HarmDataFeignClient;
import com.njcn.harmonic.pojo.param.HistoryHarmParam;
import com.njcn.influx.pojo.dto.HarmData;
import com.njcn.influx.pojo.dto.HarmHistoryDataDTO;
import com.njcn.oss.constant.OssPath;
import com.njcn.oss.enums.OssResponseEnum;
import com.njcn.oss.utils.FileStorageUtil;
import com.njcn.system.pojo.vo.DictDataVO;
import com.njcn.web.factory.PageFactory;
import com.njcn.web.pojo.param.BaseParam;
import lombok.RequiredArgsConstructor;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils;
import java.io.*;
import java.math.BigDecimal;
import java.text.DecimalFormat;
import java.util.*;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
* <p>
* 服务实现类
* </p>
*
* @author hongawen
* @since 2023-07-21
*/
@Service
@RequiredArgsConstructor
public class RespDataServiceImpl extends ServiceImpl<RespDataMapper, RespData> implements IRespDataService {
private final IRespUserDataService iRespUserDataService;
private final FileStorageUtil fileStorageUtil;
private final LineFeignClient lineFeignClient;
private final HarmDataFeignClient harmDataFeignClient;
private final GetQvvrData getQvvrData;
private final IRespDataResultService respDataResultService;
@Override
public Page<RespDataDTO> responsibilityList(BaseParam queryParam) {
QueryWrapper<RespDataDTO> queryWrapper = new QueryWrapper<>();
if (ObjectUtil.isNotNull(queryParam)) {
//查询参数不为空,进行条件填充
if (StrUtil.isNotBlank(queryParam.getSearchValue())) {
//仅提供用采名称
queryWrapper.and(param -> param.like("pqs_resp_user_data.name", queryParam.getSearchValue()));
}
//排序
if (ObjectUtil.isAllNotEmpty(queryParam.getSortBy(), queryParam.getOrderBy())) {
queryWrapper.orderBy(true, queryParam.getOrderBy().equals(DbConstant.ASC), StrUtil.toUnderlineCase(queryParam.getSortBy()));
} else {
//没有排序参数默认根据sort字段排序没有排序字段的根据updateTime更新时间排序
queryWrapper.orderBy(true, false, "pqs_resp_data.create_time");
}
queryWrapper.between("pqs_resp_data.create_time",queryParam.getSearchBeginTime(),queryParam.getSearchEndTime());
}
queryWrapper.eq("pqs_resp_data.state", DataStateEnum.ENABLE.getCode());
Page<RespDataDTO> page = this.baseMapper.page(new Page<>(PageFactory.getPageNum(queryParam), PageFactory.getPageSize(queryParam)), queryWrapper);
List<RespDataDTO> records = page.getRecords();
if(CollectionUtil.isNotEmpty(records)){
//获取该监测点的详细信息
for (RespDataDTO respDataDTO : records) {
LineDetailVO lineSubGdDetail = lineFeignClient.getLineSubGdDetail(respDataDTO.getLineId()).getData();
BeanUtil.copyProperties(lineSubGdDetail,respDataDTO);
}
}
return page.setRecords(records);
}
/***
* 批量逻辑删除责任划分数据
* @author hongawen
* @date 2023/7/24 19:16
*/
@Override
public void deleteByIds(List<String> ids) {
this.baseMapper.deleteByIds(ids);
}
@Override
public ResponsibilityResult getDynamicData(ResponsibilityCalculateParam responsibilityCalculateParam) {
ResponsibilityResult result = new ResponsibilityResult();
//调用c++依赖需要待初始化的参数
int pNode, pNum, win, harmNum;
float harmMk;
LambdaQueryWrapper<RespUserData> userDataLambdaQueryWrapper = new LambdaQueryWrapper<>();
userDataLambdaQueryWrapper.eq(RespUserData::getId, responsibilityCalculateParam.getUserDataId()).eq(RespUserData::getState, DataStateEnum.ENABLE.getCode());
RespUserData respUserData = iRespUserDataService.getOne(userDataLambdaQueryWrapper);
if (Objects.isNull(respUserData)) {
throw new BusinessException(AdvanceResponseEnum.USER_DATA_NOT_FOUND);
}
InputStream fileStream = fileStorageUtil.getFileStream(respUserData.getDataPath());
String excelDataStr = IoUtil.read(fileStream, CharsetUtil.UTF_8);
//将文件流转为list集合
List<UserDataExcel> userDataExcels = JSONArray.parseArray(excelDataStr, UserDataExcel.class);
if (CollectionUtils.isEmpty(userDataExcels)) {
throw new BusinessException(AdvanceResponseEnum.USER_DATA_NOT_FOUND);
}
//开始处理,根据接口参数需求,需要节点数(用户数,用户名+监测点号为一个用户),时间范围内功率数据
DealDataResult dealDataResult = RespUserDataServiceImpl.getStanderData(userDataExcels, 1);
Map<String/*户号@监测点号@户名*/, Map<String/*yyyy-MM-dd天日期*/, List<UserDataExcel>>> totalData = dealDataResult.getTotalListData();
Map<String/*户号@监测点号@户名*/, Map<String/*yyyy-MM-dd天日期*/, List<UserDataExcel>>> finalData = new HashMap<>();
/*第一个参数pNode 如果时间范围内完整性不足90%的节点,不参与责任量化统计,因为之前处理过用采数据此时只需要判断是否满足100%就可以判断*/
//根据时间天数,获取理论上多少次用采数据
List<String> dateStr = PubUtils.getTimes(DateUtil.parse(responsibilityCalculateParam.getSearchBeginTime(), DatePattern.NORM_DATE_PATTERN), DateUtil.parse(responsibilityCalculateParam.getSearchEndTime(), DatePattern.NORM_DATE_PATTERN));
int dueCounts = dateStr.size() * 96;
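//e.g. a three-day query window should therefore contain 3 * 96 = 288 quarter-hour readings per user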
Set<String> userNames = totalData.keySet();
for (String userName : userNames) {
int realCounts = 0;
Map<String, List<UserDataExcel>> temp = totalData.get(userName);
for (String date : dateStr) {
if (CollectionUtil.isNotEmpty(temp.get(date))) {
realCounts = realCounts + temp.get(date).size();
}
}
if (realCounts == dueCounts) {
//只有期望和实际数量一致的时候才作为计算用户
finalData.put(userName, temp);
}
}
//至此finalData便是我们最终获得的用于计算责任数据,第一个参数节点数值pNode获取到
pNode = finalData.size();
if (pNode < 1) {
//没有合理的用采数据直接返回
throw new BusinessException(AdvanceResponseEnum.USER_DATA_P_NODE_PARAMETER_ERROR);
}
//第二个参数pNum根据起始时间和截止时间以及监测点测量间隔计算数量
LineDetailDataVO lineDetailData = lineFeignClient.getLineDetailData(responsibilityCalculateParam.getLineId()).getData();
int lineInterval = lineDetailData.getTimeInterval();
int userIntervalTime;
if (lineInterval == 1 || lineInterval == 3 || lineInterval == 5) {
userIntervalTime = 15;
pNum = dateStr.size() * 96;
} else {
userIntervalTime = 30;
pNum = dateStr.size() * 48;
finalData = dealFinalDataByLineInterval(finalData);
}
//第三个参数win,根据起始时间和截止时间的间隔
if (dateStr.size() > 1) {
win = 96;
} else {
win = 4;
}
//第四个参数harmMk默认为0f
harmMk = 0f;
//第五个参数harmNum与功率数据保持一致
harmNum = pNum;
//至此基础数据组装完毕,开始组装功率数据和谐波数据
//先做谐波数据理论上到这步的时候谐波数据是满足完整性并已经补充完整性到100%,此处需要将谐波数据与功率数据长度匹配上
HarmHistoryDataDTO data = harmDataFeignClient.getHistoryHarmData(new HistoryHarmParam(responsibilityCalculateParam.getSearchBeginTime(), responsibilityCalculateParam.getSearchEndTime(), responsibilityCalculateParam.getLineId(), responsibilityCalculateParam.getType(), responsibilityCalculateParam.getTime())).getData();
List<HarmData> historyData = data.getHistoryData();
historyData = getDataWithLineInterval(historyData, lineInterval);
//理论上此处的historyData的长度等于pNum,开始填充harm_data
float[] harmData = new float[144000];
//谐波波形的横轴时间集合
List<Long> harmTime = new ArrayList<>();
for (int i = 0; i < historyData.size(); i++) {
Float value = historyData.get(i).getValue();
if (value != null) {
value = value * 1000;
}
harmData[i] = value;
harmTime.add(PubUtils.instantToDate(historyData.get(i).getTime()).getTime());
}
//harmData填充完毕后开始组装功率数据
//首先获取当前时间内的各个用户的数据
Map<String/*用户名*/, List<UserDataExcel>> originalPData = new HashMap<>();
List<String> names = new ArrayList<>();
Set<String> userNamesFinal = finalData.keySet();
for (String userName : userNamesFinal) {
List<UserDataExcel> tempData = new ArrayList<>();
//根据日期将日期数据全部获取出来
Map<String, List<UserDataExcel>> tempResult = finalData.get(userName);
for (String date : dateStr) {
tempData.addAll(tempResult.get(date));
}
//按日期排序
Collections.sort(tempData);
originalPData.put(userName, tempData);
names.add(userName);
}
//然后开始组装数据
PDataStruct[] pData = new PDataStruct[QvvrStruct.MAX_P_NUM];
for (int i = 0; i < names.size(); i++) {
//当前某用户测量节点的所有数据
List<UserDataExcel> userDataExcelBodies1 = originalPData.get(names.get(i));
for (int k = 0; k < userDataExcelBodies1.size(); k++) {
PDataStruct pDataStruct = pData[k];
if (pDataStruct == null) {
pDataStruct = new PDataStruct();
}
float[] p = pDataStruct.getP();
p[i] = userDataExcelBodies1.get(k).getWork().floatValue();
pData[k] = pDataStruct;
}
}
//至此功率数据也组装完毕,调用友谊提供的接口
QvvrStruct qvvrStruct = new QvvrStruct();
qvvrStruct.cal_flag = 0;
qvvrStruct.p_node = pNode;
qvvrStruct.p_num = pNum;
qvvrStruct.win = win;
qvvrStruct.harm_num = harmNum;
qvvrStruct.harm_mk = harmMk;
qvvrStruct.p_data = pData;
qvvrStruct.harm_data = harmData;
ResponsibilityCallDllOrSo responsibilityCallDllOrSo = new ResponsibilityCallDllOrSo("harm_response.dll");
responsibilityCallDllOrSo.setPath();
ResponsibilityCallDllOrSo.ResponsibilityLibrary responsibilityLibrary = ResponsibilityCallDllOrSo.ResponsibilityLibrary.INSTANTCE;
responsibilityLibrary.harm_response(qvvrStruct);
//至此接口调用结束,开始组装动态责任数据和用户责任量化结果
//首先判断cal_ok的标识位是否为1为0表示程序没有计算出结果
if (qvvrStruct.cal_ok == 0) {
throw new BusinessException(AdvanceResponseEnum.RESPONSIBILITY_PARAMETER_ERROR);
}
//once the flag is OK, first assemble the dynamic responsibility data
CustomerData[] customerDatas = new CustomerData[qvvrStruct.p_node];
PDataStruct[] fKdata/*无背景的动态责任数据*/ = qvvrStruct.getFKdata();
//第一个时间节点是起始时间+win窗口得到的时间
Date sTime = DateUtil.parse(dateStr.get(0).concat(" 00:00:00"), DatePattern.NORM_DATETIME_PATTERN);
Calendar calendar = Calendar.getInstance();
calendar.setTime(sTime);
calendar.add(Calendar.MINUTE, (win - 1) * userIntervalTime);
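//e.g. with win = 96 and a 15-minute interval the cursor now points at 23:45 of day one, so the first point recorded in the loop below falls on 00:00 of day two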
List<Long> timeDatas = new ArrayList<>();
for (int i = 0; i < qvvrStruct.p_num - qvvrStruct.win; i++) {
calendar.add(Calendar.MINUTE, userIntervalTime);
//一个时间点所有的用户数据
PDataStruct fKdatum = fKdata[i];
for (int k = 0; k < qvvrStruct.p_node; k++) {
CustomerData customerData = customerDatas[k];
if (null == customerData) {
customerData = new CustomerData();
customerData.setCustomerName(names.get(k));
}
List<Float> valueDatas = customerData.getValueDatas();
Float valueTemp = fKdatum.getP()[k];
if (valueTemp.isNaN()) {
valueTemp = 0.0f;
}
valueDatas.add(valueTemp);
customerData.setValueDatas(valueDatas);
customerDatas[k] = customerData;
}
timeDatas.add(calendar.getTimeInMillis());
}
//OK拿到所有测量点的数据了现在就是看如何将相同户号的动态数据进行算术和求值之前的用户name为户号@测量点号@用户名
Map<String/*用户名(户号)*/, List<CustomerData>> customerDataTemp = new HashMap<>();
for (int i = 0; i < customerDatas.length; i++) {
String customerName = customerDatas[i].getCustomerName();
String[] customerInfo = customerName.split("@");
String name = customerInfo[2] + "(" + customerInfo[0] + ")";
List<CustomerData> customerData = customerDataTemp.get(name);
CustomerData temp = customerDatas[i];
temp.setCustomerName(name);
if (CollectionUtils.isEmpty(customerData)) {
customerData = new ArrayList<>();
}
customerData.add(temp);
customerDataTemp.put(name, customerData);
}
//动态数据组装完成后,开始组装责任数据
List<CustomerResponsibility> customerResponsibilities = getCustomerResponsibilityData(names, qvvrStruct.sumFKdata, qvvrStruct.p_node);
//根据前十的用户数据,获取这些用户的动态责任数据
List<CustomerData> customerData = new ArrayList<>();
for (CustomerResponsibility customerResponsibility : customerResponsibilities) {
String cusName = customerResponsibility.getCustomerName();
List<CustomerData> customerData1 = customerDataTemp.get(cusName);
if (CollectionUtils.isEmpty(customerData1)) {
continue;
}
if (customerData1.size() == 1) {
//表示用户唯一的
customerData.add(customerData1.get(0));
} else {
//表示用户可能包含多个监测点号,需要进行数据累加
CustomerData customerDataT = new CustomerData();
customerDataT.setCustomerName(cusName);
//进行数值累加
List<Float> valueDatas = new ArrayList<>();
for (int i = 0; i < customerData1.get(0).getValueDatas().size(); i++) {
float original = 0.0f;
for (int k = 0; k < customerData1.size(); k++) {
original = original + customerData1.get(k).getValueDatas().get(i);
}
valueDatas.add(original);
}
customerDataT.setValueDatas(valueDatas);
customerData.add(customerDataT);
}
}
result.setDatas(customerData);
result.setTimeDatas(timeDatas);
result.setResponsibilities(customerResponsibilities);
//此次的操作进行入库操作responsibilityData表数据
//根据监测点名称+谐波框选的时间来查询,是否做过责任量化
String timeWin = responsibilityCalculateParam.getSearchBeginTime().replaceAll(StrPool.DASHED, "").concat(StrPool.DASHED).concat(responsibilityCalculateParam.getSearchEndTime().replaceAll(StrPool.DASHED, ""));
String type = responsibilityCalculateParam.getType() == 0 ? "谐波电流" : "谐波电压";
//为了避免有监测点名称重复的,最终还是选择使用监测点索引来判断唯一性
LambdaQueryWrapper<RespData> respDataLambdaQueryWrapper = new LambdaQueryWrapper<>();
respDataLambdaQueryWrapper.eq(RespData::getLineId, responsibilityCalculateParam.getLineId())
.eq(RespData::getUserDataId, responsibilityCalculateParam.getUserDataId())
.eq(RespData::getTimeWindow, timeWin)
.eq(RespData::getDataType, type)
.eq(RespData::getState, DataStateEnum.ENABLE.getCode());
List<RespData> responsibilityDataTemp = this.baseMapper.selectList(respDataLambdaQueryWrapper);
RespData responsibilityData;
if (CollectionUtils.isEmpty(responsibilityDataTemp)) {
responsibilityData = new RespData();
//库中没有记录则可以新建数据进行插入
responsibilityData.setLineId(responsibilityCalculateParam.getLineId());
responsibilityData.setUserDataId(responsibilityCalculateParam.getUserDataId());
responsibilityData.setDataType(type);
responsibilityData.setDataTimes(responsibilityCalculateParam.getTime().toString());
responsibilityData.setTimeWindow(timeWin);
responsibilityData.setState(DataStateEnum.ENABLE.getCode());
//进行插入操作
this.baseMapper.insert(responsibilityData);
} else {
//库中存在记录只需要判断次数进行数据更新
responsibilityData = responsibilityDataTemp.get(0);
String times = responsibilityData.getDataTimes();
List<String> timesList = Stream.of(times.split(StrPool.COMMA)).collect(Collectors.toList());
Integer time = responsibilityCalculateParam.getTime();
if (!timesList.contains(time.toString())) {
timesList.add(time.toString());
timesList = timesList.stream().sorted().collect(Collectors.toList());
responsibilityData.setDataTimes(String.join(StrPool.COMMA, timesList));
}
//执行更新操作
this.baseMapper.updateById(responsibilityData);
}
//入库完毕之后,需要将必要数据进行序列化存储,方便后期的重复利用
/**
* Three data structures need to be serialized: 1. CacheQvvrData, the series of parameters needed when cal_flag is set to 1;
* 2. the dynamic result produced when cal_flag is 0; 3. the per-user responsibility quantification result.
* 1 and 2 each need only a single file; 3 may produce many files, because the limit value can be adjusted depending on what the user selects.
*
* The path layout is temPath+userData+excelName+type+timeWin+lineIndex+time+fileName
* The per-user responsibility result is further broken down by limit value.
*/
//首先判断有没有存储记录,没有则存储,有就略过 指定测点、时间窗口、谐波类型、谐波次数判断唯一性
LambdaQueryWrapper<RespDataResult> respDataResultLambdaQueryWrapper = new LambdaQueryWrapper<>();
respDataResultLambdaQueryWrapper.eq(RespDataResult::getResDataId, responsibilityData.getId())
.eq(RespDataResult::getTime, responsibilityCalculateParam.getTime())
.eq(RespDataResult::getStartTime, DateUtil.parse(responsibilityCalculateParam.getSearchBeginTime()+" 00:00:00",DatePattern.NORM_DATETIME_PATTERN))
.eq(RespDataResult::getEndTime, DateUtil.parse(responsibilityCalculateParam.getSearchEndTime()+" 23:59:59",DatePattern.NORM_DATETIME_PATTERN))
.eq(RespDataResult::getLimitValue, data.getOverLimit());
RespDataResult respDataResult = respDataResultService.getOne(respDataResultLambdaQueryWrapper);
if (Objects.isNull(respDataResult)) {
respDataResult = new RespDataResult();
respDataResult.setResDataId(responsibilityData.getId());
respDataResult.setTime(responsibilityCalculateParam.getTime());
respDataResult.setStartTime(DateUtil.parse(responsibilityCalculateParam.getSearchBeginTime()+" 00:00:00",DatePattern.NORM_DATETIME_PATTERN));
respDataResult.setEndTime(DateUtil.parse(responsibilityCalculateParam.getSearchEndTime()+" 23:59:59",DatePattern.NORM_DATETIME_PATTERN));
respDataResult.setLimitValue(data.getOverLimit());
//时间横轴数据 timeDatas
JSONArray timeDataJson = JSONArray.parseArray(JSON.toJSONString(timeDatas));
InputStream timeDataStream = IoUtil.toStream(timeDataJson.toString(), CharsetUtil.UTF_8);
String timeDataPath = fileStorageUtil.uploadStream(timeDataStream, OssPath.RESPONSIBILITY_USER_RESULT_DATA, FileUtil.generateFileName("json"));
respDataResult.setTimeData(timeDataPath);
//用户每时刻对应的责任数据
JSONArray customerDataJson = JSONArray.parseArray(JSON.toJSONString(customerData));
InputStream customerStream = IoUtil.toStream(customerDataJson.toString(), CharsetUtil.UTF_8);
String customerPath = fileStorageUtil.uploadStream(customerStream, OssPath.RESPONSIBILITY_USER_RESULT_DATA, FileUtil.generateFileName("json"));
respDataResult.setUserDetailData(customerPath);
//调用qvvr生成的中间数据
CacheQvvrData cacheQvvrData = new CacheQvvrData(qvvrStruct.getP_node(), qvvrStruct.getHarm_num(), qvvrStruct.getHarm_data(), qvvrStruct.FKdata, qvvrStruct.HKdata, names, userIntervalTime, qvvrStruct.win, userIntervalTime, harmTime);
JSONObject cacheQvvrDataDataJson = (JSONObject) JSONObject.toJSON(cacheQvvrData);
InputStream cacheQvvrDataStream = IoUtil.toStream(cacheQvvrDataDataJson.toString(), CharsetUtil.UTF_8);
String cacheQvvrDataPath = fileStorageUtil.uploadStream(cacheQvvrDataStream, OssPath.RESPONSIBILITY_USER_RESULT_DATA, FileUtil.generateFileName("json"));
respDataResult.setQvvrData(cacheQvvrDataPath);
//用户前10数据存储
JSONArray customerResJson = JSONArray.parseArray(JSON.toJSONString(customerResponsibilities));
InputStream customerResStream = IoUtil.toStream(customerResJson.toString(), CharsetUtil.UTF_8);
String customerResPath = fileStorageUtil.uploadStream(customerResStream, OssPath.RESPONSIBILITY_USER_RESULT_DATA, FileUtil.generateFileName("json"));
respDataResult.setUserResponsibility(customerResPath);
respDataResultService.save(respDataResult);
}
//过程中创建了大量临时对象,主动调用一次GC处理
System.gc();
result.setResponsibilityDataIndex(responsibilityData.getId());
return result;
}
@Override
public ResponsibilityResult getResponsibilityData(ResponsibilitySecondCalParam responsibilitySecondCalParam) {
ResponsibilityResult result = new ResponsibilityResult();
//根据时间天数,获取理论上多少次用采数据
RespData responsibilityData = this.baseMapper.selectById(responsibilitySecondCalParam.getResDataId());
if (Objects.isNull(responsibilityData)) {
throw new BusinessException(AdvanceResponseEnum.RESP_DATA_NOT_FOUND);
}
Overlimit overlimit = lineFeignClient.getOverLimitData(responsibilityData.getLineId()).getData();
//获取总数据
LambdaQueryWrapper<RespDataResult> respDataResultLambdaQueryWrapper = new LambdaQueryWrapper<>();
respDataResultLambdaQueryWrapper.eq(RespDataResult::getResDataId, responsibilityData.getId())
.eq(RespDataResult::getTime, responsibilitySecondCalParam.getTime());
if (responsibilitySecondCalParam.getType() == 0) {
respDataResultLambdaQueryWrapper.eq(RespDataResult::getLimitValue, PubUtils.getValueByMethod(overlimit, "getIharm", responsibilitySecondCalParam.getTime()));
} else {
respDataResultLambdaQueryWrapper.eq(RespDataResult::getLimitValue, PubUtils.getValueByMethod(overlimit, "getUharm", responsibilitySecondCalParam.getTime()));
}
RespDataResult respDataResultTemp = respDataResultService.getOne(respDataResultLambdaQueryWrapper);
if (Objects.isNull(respDataResultTemp)) {
throw new BusinessException(AdvanceResponseEnum.RESP_DATA_NOT_FOUND);
}
CacheQvvrData cacheQvvrData;
try {
InputStream fileStream = fileStorageUtil.getFileStream(respDataResultTemp.getQvvrData());
String qvvrDataStr = IoUtil.read(fileStream, CharsetUtil.UTF_8);
cacheQvvrData = JSONObject.parseObject(qvvrDataStr, CacheQvvrData.class);
} catch (Exception exception) {
throw new BusinessException(AdvanceResponseEnum.RESP_RESULT_DATA_NOT_FOUND);
}
//获取成功后,延长该缓存的生命周期为初始生命时长
int win = cacheQvvrData.getWin();
//不管窗口为4或者96都需要考虑最小公倍数
//最小公倍数根据监测点测量间隔来获取,可以考虑也由第一步操作缓存起来
int minMultiple = cacheQvvrData.getMinMultiple();
//谐波横轴所有的时间
List<Long> times = cacheQvvrData.getTimes();
//首先根据窗口判断限值时间范围是否满足最小窗口
Long limitSL = DateUtil.parse(responsibilitySecondCalParam.getLimitStartTime(),DatePattern.NORM_DATETIME_PATTERN).getTime();
Long limitEL = DateUtil.parse(responsibilitySecondCalParam.getLimitEndTime(),DatePattern.NORM_DATETIME_PATTERN).getTime();
List<Integer> temp = getTimes(times, limitSL, limitEL);
//在动态责任数据中,时间的起始索引位置和截止索引位置
Integer timeStartIndex = temp.get(0);
Integer timeEndIndex = temp.get(1);
//间隔中的时间长度
int minus = timeEndIndex - timeStartIndex + 1;
//组装参数
QvvrStruct qvvrStruct = new QvvrStruct();
qvvrStruct.cal_flag = 1;
qvvrStruct.p_node = cacheQvvrData.getPNode();
qvvrStruct.harm_mk = responsibilitySecondCalParam.getLimitValue();
qvvrStruct.win = win;
int resNum;
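//以下数组按固定容量预分配,假定9600与1440*100为底层算法库接口约定的最大数据点数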
PDataStruct[] FKdata = new PDataStruct[9600];
HKDataStruct[] HKdata = new HKDataStruct[9600];
float[] harmData = new float[1440 * 100];
PDataStruct[] fKdataOriginal = cacheQvvrData.getFKdata();
HKDataStruct[] hKdataOriginal = cacheQvvrData.getHKdata();
float[] harmDataOriginal = cacheQvvrData.getHarmData();
//如果起止索引覆盖了整个时间轴(索引差值+1等于时间轴长度),则说明用户没有选择限值时间,直接带入全部原始数据参与计算即可
if (minus == times.size()) {
qvvrStruct.harm_num = cacheQvvrData.getHarmNum();
qvvrStruct.res_num = cacheQvvrData.getHarmNum() - cacheQvvrData.getWin();
qvvrStruct.setFKdata(cacheQvvrData.getFKdata());
qvvrStruct.setHKdata(cacheQvvrData.getHKdata());
qvvrStruct.harm_data = cacheQvvrData.getHarmData();
} else {
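//窗口win以15分钟点数表示;当测量间隔最小公倍数为30分钟时,同一时间窗口对应的点数减半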
if (win == 4) {
//当窗口为4时:最小公倍数为15时,限值时间范围内最少需有5个有效时间点;最小公倍数为30时,最少需有3个有效时间点
if (minMultiple == 15) {
if (minus < 5) {
throw new BusinessException(AdvanceResponseEnum.WIN_TIME_ERROR);
}
resNum = minus - 4;
} else if (minMultiple == 30) {
if (minus < 3) {
throw new BusinessException(AdvanceResponseEnum.WIN_TIME_ERROR);
}
resNum = minus - 2;
} else {
throw new BusinessException(AdvanceResponseEnum.CALCULATE_INTERVAL_ERROR);
}
} else if (win == 96) {
//当窗口为96时:最小公倍数为15时,限值时间范围内最少需有97个有效时间点;最小公倍数为30时,最少需有49个有效时间点
if (minMultiple == 15) {
if (minus < 97) {
throw new BusinessException(AdvanceResponseEnum.WIN_TIME_ERROR);
}
resNum = minus - 96;
} else if (minMultiple == 30) {
if (minus < 49) {
throw new BusinessException(AdvanceResponseEnum.WIN_TIME_ERROR);
}
resNum = minus - 48;
} else {
throw new BusinessException(AdvanceResponseEnum.CALCULATE_INTERVAL_ERROR);
}
} else {
throw new BusinessException(AdvanceResponseEnum.CALCULATE_INTERVAL_ERROR);
}
qvvrStruct.res_num = resNum;
qvvrStruct.harm_num = minus;
//因为限值时间实际是含头含尾的,所以harmNum等于索引差值+1
for (int i = timeStartIndex; i <= timeEndIndex; i++) {
harmData[i - timeStartIndex] = harmDataOriginal[i];
}
qvvrStruct.harm_data = harmData;
//FKData与HKData的有效长度则等于resNum
for (int i = timeStartIndex; i < timeStartIndex + resNum; i++) {
FKdata[i - timeStartIndex] = fKdataOriginal[i];
HKdata[i - timeStartIndex] = hKdataOriginal[i];
}
qvvrStruct.setFKdata(FKdata);
qvvrStruct.setHKdata(HKdata);
}
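//调用底层算法库计算责任量化结果,cal_ok为0表示参数非法或计算失败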
qvvrStruct = getQvvrData.getResponsibilityResult(qvvrStruct);
if (qvvrStruct.cal_ok == 0) {
throw new BusinessException(AdvanceResponseEnum.RESPONSIBILITY_PARAMETER_ERROR);
}
//计算无误后,先组装动态责任数据
List<String> names = cacheQvvrData.getNames();
CustomerData[] customerDatas = new CustomerData[qvvrStruct.p_node];
PDataStruct[] fKdata/*无背景的动态责任数据*/ = qvvrStruct.getFKdata();
//第一个时间节点是起始时间+win窗口得到的时间
Date sTime = new Date();
sTime.setTime(times.get(timeStartIndex));
Calendar calendar = Calendar.getInstance();
calendar.setTime(sTime);
calendar.add(Calendar.MINUTE, (win - 1) * minMultiple);
List<Long> timeDatas = new ArrayList<>();
for (int i = 0; i < qvvrStruct.harm_num - qvvrStruct.win; i++) {
calendar.add(Calendar.MINUTE, minMultiple);
//一个时间点所有的用户数据
PDataStruct fKdatum = fKdata[i];
for (int k = 0; k < qvvrStruct.p_node; k++) {
CustomerData customerData = customerDatas[k];
if (null == customerData) {
customerData = new CustomerData();
customerData.setCustomerName(names.get(k));
}
List<Float> valueDatas = customerData.getValueDatas();
Float valueTemp = fKdatum.getP()[k];
if (valueTemp.isNaN()) {
valueTemp = 0.0f;
}
valueDatas.add(valueTemp);
customerData.setValueDatas(valueDatas);
customerDatas[k] = customerData;
}
timeDatas.add(calendar.getTimeInMillis());
}
//拿到所有测量点的数据后,需将相同户号的动态数据进行算术求和;此前的用户name格式为户号@测量点号@用户名
Map<String/*用户名(户号)*/, List<CustomerData>> customerDataTemp = new HashMap<>();
for (int i = 0; i < customerDatas.length; i++) {
String customerName = customerDatas[i].getCustomerName();
String[] customerInfo = customerName.split("@");
String name = customerInfo[2] + "(" + customerInfo[0] + ")";
List<CustomerData> customerData = customerDataTemp.get(name);
CustomerData customerTemp = customerDatas[i];
customerTemp.setCustomerName(name);
if (CollectionUtils.isEmpty(customerData)) {
customerData = new ArrayList<>();
}
customerData.add(customerTemp);
customerDataTemp.put(name, customerData);
}
//调用程序接口后,首先组装责任量化结果
float[] sumFKdata = qvvrStruct.sumFKdata;
List<CustomerResponsibility> customerResponsibilities = getCustomerResponsibilityData(names, sumFKdata, qvvrStruct.p_node);
//根据前十的用户数据,获取这些用户的动态责任数据
List<CustomerData> customerData = new ArrayList<>();
for (CustomerResponsibility customerResponsibility : customerResponsibilities) {
String cusName = customerResponsibility.getCustomerName();
List<CustomerData> customerData1 = customerDataTemp.get(cusName);
if (CollectionUtils.isEmpty(customerData1)) {
continue;
}
if (customerData1.size() == 1) {
//表示该用户只对应一个监测点
customerData.add(customerData1.get(0));
} else {
//表示用户可能包含多个监测点号,需要进行数据累加
CustomerData customerDataT = new CustomerData();
customerDataT.setCustomerName(cusName);
//进行数值累加
List<Float> valueDatas = new ArrayList<>();
for (int i = 0; i < customerData1.get(0).getValueDatas().size(); i++) {
float original = 0.0f;
for (int k = 0; k < customerData1.size(); k++) {
original = original + customerData1.get(k).getValueDatas().get(i);
}
valueDatas.add(original);
}
customerDataT.setValueDatas(valueDatas);
customerData.add(customerDataT);
}
}
//接着组装动态数据结果
result.setResponsibilities(customerResponsibilities);
result.setDatas(customerData);
result.setTimeDatas(timeDatas);
//首先判断有没有存储记录:没有则存储,有则略过;以指定测点、时间窗口、谐波类型、谐波次数判断唯一性
LambdaQueryWrapper<RespDataResult> respDataResultLambdaQueryWrapper1 = new LambdaQueryWrapper<>();
respDataResultLambdaQueryWrapper1.eq(RespDataResult::getResDataId, responsibilityData.getId())
.eq(RespDataResult::getTime, responsibilitySecondCalParam.getTime())
.eq(RespDataResult::getStartTime, DateUtil.parse(responsibilitySecondCalParam.getLimitStartTime(),DatePattern.NORM_DATETIME_PATTERN))
.eq(RespDataResult::getEndTime, DateUtil.parse(responsibilitySecondCalParam.getLimitEndTime(),DatePattern.NORM_DATETIME_PATTERN))
.eq(RespDataResult::getLimitValue, responsibilitySecondCalParam.getLimitValue());
RespDataResult respDataResult = respDataResultService.getOne(respDataResultLambdaQueryWrapper1);
if (Objects.isNull(respDataResult)) {
respDataResult = new RespDataResult();
respDataResult.setResDataId(responsibilityData.getId());
respDataResult.setTime(responsibilitySecondCalParam.getTime());
respDataResult.setStartTime(DateUtil.parse(responsibilitySecondCalParam.getLimitStartTime(),DatePattern.NORM_DATETIME_PATTERN));
respDataResult.setEndTime(DateUtil.parse(responsibilitySecondCalParam.getLimitEndTime(),DatePattern.NORM_DATETIME_PATTERN));
respDataResult.setLimitValue(responsibilitySecondCalParam.getLimitValue());
//时间横轴数据 timeDatas
JSONArray timeDataJson = JSONArray.parseArray(JSON.toJSONString(timeDatas));
InputStream timeDataStream = IoUtil.toStream(timeDataJson.toString(), CharsetUtil.UTF_8);
String timeDataPath = fileStorageUtil.uploadStream(timeDataStream, OssPath.RESPONSIBILITY_USER_RESULT_DATA, FileUtil.generateFileName("json"));
respDataResult.setTimeData(timeDataPath);
//用户每时刻对应的责任数据
JSONArray customerDataJson = JSONArray.parseArray(JSON.toJSONString(customerData));
InputStream customerStream = IoUtil.toStream(customerDataJson.toString(), CharsetUtil.UTF_8);
String customerPath = fileStorageUtil.uploadStream(customerStream, OssPath.RESPONSIBILITY_USER_RESULT_DATA, FileUtil.generateFileName("json"));
respDataResult.setUserDetailData(customerPath);
//用户前10数据存储
JSONArray customerResJson = JSONArray.parseArray(JSON.toJSONString(customerResponsibilities));
InputStream customerResStream = IoUtil.toStream(customerResJson.toString(), CharsetUtil.UTF_8);
String customerResPath = fileStorageUtil.uploadStream(customerResStream, OssPath.RESPONSIBILITY_USER_RESULT_DATA, FileUtil.generateFileName("json"));
respDataResult.setUserResponsibility(customerResPath);
respDataResultService.save(respDataResult);
}
//过程中创建了大量临时对象,主动调用一次GC处理
System.gc();
result.setResponsibilityDataIndex(responsibilityData.getId());
return result;
}
/**
* 根据监测点测量间隔获取最终用于计算的功率数据
*
* @param finalData 参与计算的功率数据
*/
private Map<String, Map<String, List<UserDataExcel>>> dealFinalDataByLineInterval(Map<String, Map<String, List<UserDataExcel>>> finalData) {
DecimalFormat decimalFormat = new DecimalFormat("0.0000");
Map<String, Map<String, List<UserDataExcel>>> result;
//当监测点测量间隔为10分钟时功率数据需要调整为30分钟数据
result = new HashMap<>();
Set<String> userNames = finalData.keySet();
for (String userName : userNames) {
Map<String, List<UserDataExcel>> temp = new HashMap<>();
Map<String, List<UserDataExcel>> original = finalData.get(userName);
Set<String> dates = original.keySet();
for (String date : dates) {
List<UserDataExcel> single = original.get(date);//当天的数据
//先根据时间排序
Collections.sort(single);
//根据当天所有的数据重新计算出各时间点的数据(该过程可能耗时较长)
List<UserDataExcel> tempDatas = new ArrayList<>();
for (int i = 0; i < 96; i = i + 2) {
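//假定经前序补齐后每天均为96个15分钟数据点,按相邻两点合并为一个30分钟数据点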
//30分钟内的2个15分钟功率数据相加取平均,作为该30分钟的功率数据,最终数据序列的时间间隔为30分钟(by 友谊文档)
UserDataExcel tempData = new UserDataExcel();
tempData.setUserName(single.get(i).getUserName());
tempData.setUserId(single.get(i).getUserId());
tempData.setLine(single.get(i).getLine());
tempData.setTime(single.get(i).getTime());
//功率为 2个15分钟功率数据相加作平均
double work = single.get(i).getWork().doubleValue() + single.get(i + 1).getWork().doubleValue();
tempData.setWork(new BigDecimal(decimalFormat.format(work / 2.0)));
tempDatas.add(tempData);
}
temp.put(date, tempDatas);
}
result.put(userName, temp);
}
return result;
}
/**
* 通过监测点测量间隔计算对齐后的谐波数据
* 暂且认为最小公倍数只有15、30两种可能
*
* @param historyData 原始的谐波数据
* @param lineInterval 测量间隔
*/
private List<HarmData> getDataWithLineInterval(List<HarmData> historyData, int lineInterval) {
List<HarmData> result = new ArrayList<>();
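//测量间隔为1/3/5分钟时,与15分钟用采数据的最小公倍数为15,分别按15/5/3个点求均值;为10分钟时按3个点求均值,得到30分钟数据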
switch (lineInterval) {
case 1:
result = getHarmResultByTimes(historyData, 15);
break;
case 3:
result = getHarmResultByTimes(historyData, 5);
break;
case 5:
case 10:
result = getHarmResultByTimes(historyData, 3);
break;
}
return result.stream().sorted(Comparator.comparing(HarmData::getTime)).collect(Collectors.toList());
}
/**
* 按指定点数对原始谐波数据求均值,得到对齐后的谐波数据
*
* @param historyData 原始的谐波数据
* @param times 参与求均值的相邻数据点个数
*/
private List<HarmData> getHarmResultByTimes(List<HarmData> historyData, int times) {
List<HarmData> result = new ArrayList<>();
DecimalFormat decimalFormat = new DecimalFormat("0.0000");
for (int i = 0; i < historyData.size(); i = i + times) {
float temp = 0.0f;
for (int j = 0; j < times; j++) {
int index = i + j;
temp = temp + historyData.get(index).getValue();
}
//求平均值
temp = Float.parseFloat(decimalFormat.format(temp / (float) times));
HarmData resTemp = new HarmData();
resTemp.setTime(historyData.get(i).getTime());
resTemp.setValue(temp);
result.add(resTemp);
}
return result;
}
/**
* 根据接口返回值组装需要显示的责任量化数据
*/
private List<CustomerResponsibility> getCustomerResponsibilityData(List<String> names, float[] sumFKdata, int pNode) {
Map<String/*用户名(户号)*/, CustomerResponsibility> customerResponsibilityMap = new HashMap<>();
for (int i = 0; i < pNode; i++) {
String[] customerInfo = names.get(i).split("@");/*用户ID 测量点ID 用户名*/
String name = customerInfo[2] + "(" + customerInfo[0] + ")";//括号使用全角,与动态责任数据中的用户命名保持一致,否则后续按名称取数会失败
CustomerResponsibility customerResponsibility;
if (customerResponsibilityMap.containsKey(name)) {
customerResponsibility = customerResponsibilityMap.get(name);
customerResponsibility.setResponsibilityData(customerResponsibility.getResponsibilityData() + sumFKdata[i]);
} else {
customerResponsibility = new CustomerResponsibility();
customerResponsibility.setCustomerName(name);
customerResponsibility.setResponsibilityData(sumFKdata[i]);
}
customerResponsibilityMap.put(name, customerResponsibility);
}
//map转为list
List<CustomerResponsibility> customerResponsibilities = new ArrayList<>();
Set<String> cusNames = customerResponsibilityMap.keySet();
for (String cusName : cusNames) {
customerResponsibilities.add(customerResponsibilityMap.get(cusName));
}
//取出前十的用户责任数据
customerResponsibilities = customerResponsibilities.stream().sorted(Comparator.comparing(CustomerResponsibility::getResponsibilityData).reversed()).collect(Collectors.toList());
if (customerResponsibilities.size() > 10) {
//当用户超出10个时,保留前十用户,其余归类为其他用户
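//此处默认责任数据为百分比(总和为100),其他用户的责任取100减去前十之和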
float tenTotal = 0.0f;
for (int i = 0; i < 10; i++) {
float temp = PubUtils.floatRound(3, customerResponsibilities.get(i).getResponsibilityData());
tenTotal = tenTotal + temp;
}
int size = customerResponsibilities.size() - 10;
customerResponsibilities = customerResponsibilities.subList(0, 10);
CustomerResponsibility others = new CustomerResponsibility();
others.setCustomerName("其他用户(" + size + ")");
others.setResponsibilityData(PubUtils.floatRound(3, 100.0f - tenTotal));
customerResponsibilities.add(others);
}
return customerResponsibilities;
}
/**
* 根据起始与截止时间,获取其在时间集合中最接近的起始和截止索引
*
* @param times 时间集合
* @param limitSL 起始值
* @param limitEL 截止值
*/
private List<Integer> getTimes(List<Long> times, Long limitSL, Long limitEL) {
List<Integer> result = new ArrayList<>();
Integer temps = null;
Integer tempe = null;
//已知times的长度为4的倍数,长度必为偶数,不会出现索引越界的异常
if (limitSL < times.get(0)) {
temps = 0;
}
if (limitEL > times.get(times.size() - 1)) {
tempe = times.size() - 1;
}
for (int i = 0; i < times.size() - 1; i++) {
if (temps != null && tempe != null) {
//判断都已经赋值后,跳出循环
break;
}
//锁定前值
if (times.get(i).equals(limitSL)) {
//相等则给起始时间赋值
temps = i;
} else if (times.get(i + 1).equals(limitSL)) {
temps = i + 1;
} else if (times.get(i) < limitSL && times.get(i + 1) > limitSL) {
//当起始时间处于中间时,将后值赋值给temps
temps = i + 1;
}
//锁定后值
if (times.get(i).equals(limitEL)) {
//相等则给截止时间赋值
tempe = i;
} else if (times.get(i + 1).equals(limitEL)) {
tempe = i + 1;
} else if (times.get(i) < limitEL && times.get(i + 1) > limitEL) {
//当截止时间处于中间时,将前值赋值给tempe
tempe = i;
}
}
if (temps == null) {
temps = 0;
}
if (tempe == null) {
tempe = times.size() - 1;
}
result.add(temps);
result.add(tempe);
return result;
}
}

View File

@@ -0,0 +1,33 @@
package com.njcn.advance.service.responsibility.impl;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.njcn.advance.mapper.responsibility.RespUserDataIntegrityMapper;
import com.njcn.advance.pojo.param.UserDataIntegrityParam;
import com.njcn.advance.pojo.po.responsibility.RespUserDataIntegrity;
import com.njcn.advance.service.responsibility.IRespUserDataIntegrityService;
import com.njcn.web.factory.PageFactory;
import org.springframework.stereotype.Service;
/**
* <p>
* 服务实现类
* </p>
*
* @author hongawen
* @since 2023-07-13
*/
@Service
public class RespUserDataIntegrityServiceImpl extends ServiceImpl<RespUserDataIntegrityMapper, RespUserDataIntegrity> implements IRespUserDataIntegrityService {
@Override
public Page<RespUserDataIntegrity> userDataIntegrityList(UserDataIntegrityParam userDataIntegrityParam) {
QueryWrapper<RespUserDataIntegrity> lambdaQueryWrapper = new QueryWrapper<>();
lambdaQueryWrapper.eq("pqs_resp_user_data_integrity.user_data_id", userDataIntegrityParam.getUserDataId())
.orderByDesc("pqs_resp_user_data_integrity.create_time");
return this.baseMapper.page(new Page<>(PageFactory.getPageNum(userDataIntegrityParam), PageFactory.getPageSize(userDataIntegrityParam)), lambdaQueryWrapper);
}
}

View File

@@ -0,0 +1,465 @@
package com.njcn.advance.service.responsibility.impl;
import cn.afterturn.easypoi.excel.ExcelImportUtil;
import cn.afterturn.easypoi.excel.entity.ImportParams;
import cn.afterturn.easypoi.excel.entity.result.ExcelImportResult;
import cn.afterturn.easypoi.handler.inter.IReadHandler;
import cn.hutool.core.collection.CollectionUtil;
import cn.hutool.core.date.DatePattern;
import cn.hutool.core.date.DateUtil;
import cn.hutool.core.date.LocalDateTimeUtil;
import cn.hutool.core.io.IoUtil;
import cn.hutool.core.text.StrPool;
import cn.hutool.core.util.CharsetUtil;
import cn.hutool.core.util.ObjectUtil;
import cn.hutool.core.util.StrUtil;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.njcn.advance.enums.AdvanceResponseEnum;
import com.njcn.advance.mapper.responsibility.RespUserDataMapper;
import com.njcn.advance.pojo.bo.responsibility.DealDataResult;
import com.njcn.advance.pojo.bo.responsibility.DealUserDataResult;
import com.njcn.advance.pojo.bo.responsibility.UserDataExcel;
import com.njcn.advance.pojo.po.responsibility.RespUserData;
import com.njcn.advance.pojo.po.responsibility.RespUserDataIntegrity;
import com.njcn.advance.service.responsibility.IRespUserDataIntegrityService;
import com.njcn.advance.service.responsibility.IRespUserDataService;
import com.njcn.common.pojo.dto.SelectOption;
import com.njcn.common.pojo.enums.common.DataStateEnum;
import com.njcn.common.pojo.exception.BusinessException;
import com.njcn.common.utils.FileUtil;
import com.njcn.common.utils.PubUtils;
import com.njcn.db.constant.DbConstant;
import com.njcn.oss.constant.OssPath;
import com.njcn.oss.utils.FileStorageUtil;
import com.njcn.poi.util.PoiUtil;
import com.njcn.web.factory.PageFactory;
import com.njcn.web.pojo.param.BaseParam;
import lombok.RequiredArgsConstructor;
import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils;
import org.springframework.web.multipart.MultipartFile;
import javax.servlet.http.HttpServletResponse;
import java.io.InputStream;
import java.math.BigDecimal;
import java.text.SimpleDateFormat;
import java.time.LocalDate;
import java.util.*;
import java.util.stream.Collectors;
/**
* <p>
* 服务实现类
* </p>
*
* @author hongawen
* @since 2023-07-13
*/
@Service
@RequiredArgsConstructor
public class RespUserDataServiceImpl extends ServiceImpl<RespUserDataMapper, RespUserData> implements IRespUserDataService {
private final FileStorageUtil fileStorageUtil;
private final IRespUserDataIntegrityService respUserDataIntegrityService;
@Override
public void uploadUserData(MultipartFile file, HttpServletResponse response) {
ImportParams params = new ImportParams();
List<UserDataExcel> userDataExcels = new ArrayList<>();
try {
ExcelImportUtil.importExcelBySax(file.getInputStream(), UserDataExcel.class, params, new IReadHandler<UserDataExcel>() {
@Override
public void handler(UserDataExcel o) {
userDataExcels.add(o);
}
@Override
public void doAfterAll() {
}
});
//处理用户上传的用采数据内容
analysisUserData(userDataExcels, file.getOriginalFilename());
} catch (Exception e) {
throw new BusinessException(AdvanceResponseEnum.ANALYSIS_USER_DATA_ERROR);
}
}
@Override
public Page<RespUserData> userDataList(BaseParam queryParam) {
QueryWrapper<RespUserData> respUserDataQueryWrapper = new QueryWrapper<>();
if (ObjectUtil.isNotNull(queryParam)) {
//查询参数不为空,进行条件填充
if (StrUtil.isNotBlank(queryParam.getSearchValue())) {
//仅提供用采名称
respUserDataQueryWrapper.and(param -> param.like("pqs_resp_user_data.name", queryParam.getSearchValue()));
}
//排序
if (ObjectUtil.isAllNotEmpty(queryParam.getSortBy(), queryParam.getOrderBy())) {
respUserDataQueryWrapper.orderBy(true, queryParam.getOrderBy().equals(DbConstant.ASC), StrUtil.toUnderlineCase(queryParam.getSortBy()));
} else {
//没有排序参数默认根据sort字段排序没有排序字段的根据updateTime更新时间排序
respUserDataQueryWrapper.orderBy(true, false, "pqs_resp_user_data.update_time");
}
} else {
respUserDataQueryWrapper.orderBy(true, false, "pqs_resp_user_data.update_time");
}
respUserDataQueryWrapper.eq("pqs_resp_user_data.state", DataStateEnum.ENABLE.getCode());
return this.baseMapper.page(new Page<>(PageFactory.getPageNum(queryParam), PageFactory.getPageSize(queryParam)), respUserDataQueryWrapper);
}
@Override
public List<SelectOption> userDataSelect() {
List<SelectOption> selectOptions = new ArrayList<>();
LambdaQueryWrapper<RespUserData> respUserDataLambdaQueryWrapper = new LambdaQueryWrapper<>();
respUserDataLambdaQueryWrapper.eq(RespUserData::getState, DataStateEnum.ENABLE.getCode())
.orderByDesc(RespUserData::getUpdateTime);
List<RespUserData> respUserData = this.baseMapper.selectList(respUserDataLambdaQueryWrapper);
if (CollectionUtil.isNotEmpty(respUserData)) {
selectOptions = respUserData.stream().map(temp -> new SelectOption(temp.getName(), temp.getId())).collect(Collectors.toList());
}
return selectOptions;
}
@Override
public void deleteUserDataByIds(List<String> ids) {
this.baseMapper.deleteUserDataByIds(ids);
}
/**
* 根据流获取出用采有功功率数据
*/
private void analysisUserData(List<UserDataExcel> userDataExcelList, String fileName) {
List<UserDataExcel> exportExcelList = new ArrayList<>();
RespUserData respUserData;
//判断数据提取情况
if (CollectionUtils.isEmpty(userDataExcelList)) {
throw new BusinessException(AdvanceResponseEnum.USER_DATA_EMPTY);
}
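//flag为0:以Map结构按时间点组织数据,便于后续补齐缺失数据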
DealDataResult dealDataResult = getStanderData(userDataExcelList, 0);
Map<String/*户号@监测点号@户名*/, Map<String/*yyyy-MM-dd天日期*/, Map<Date/*yyyy-MM-dd HH:mm:ss日期格式*/, UserDataExcel>>> totalData = dealDataResult.getTotalData();
//收集所有的日期,以便获取起始日期和截止日期
List<String> dates = dealDataResult.getDates();
//将前面获取出来的日期进行排序,提供入库
List<LocalDate> resultDates = getSortDate(dates);
LocalDate endTime = resultDates.get(resultDates.size() - 1);
LocalDate startTime = resultDates.get(0);
//针对每个用户的数据进行完整度判断 todo 暂且认为所有用户的时间跨度一样,比如都是15天或都是30天,不存在有的用户5天数据、有的用户10天数据的情况
Map<String, List<UserDataExcel>> tempResult = new HashMap<>();
List<RespUserDataIntegrity> respUserDataIntegrities = new ArrayList<>();
Set<String> userNames = totalData.keySet();
for (String name : userNames) {
Map<String, Map<Date, UserDataExcel>> userDataTemp = totalData.get(name);
//数据已拿到,但由于是HashMap的key,日期顺序是乱的,后续按时间排序处理
Set<String> times = userDataTemp.keySet();
//循环日期处理数据
for (String time : times) {
DealUserDataResult dealtData = dealUserData(name, userDataTemp.get(time), time, 15);
List<UserDataExcel> UserDataExcelTemp = dealtData.getCompleted();
List<UserDataExcel> UserDataExcel;
if (CollectionUtils.isEmpty(UserDataExcelTemp) && Objects.nonNull(dealtData.getRespUserDataIntegrity())) {
//补齐结果为空,说明完整度不足、未进行补齐,记录完整性信息并仅保留已有数据
respUserDataIntegrities.add(dealtData.getRespUserDataIntegrity());
UserDataExcel = dealtData.getLack();
} else {
//使用补齐后的完整数据
UserDataExcel = UserDataExcelTemp;
}
List<UserDataExcel> userDatas = tempResult.get(name);
if (CollectionUtil.isNotEmpty(UserDataExcel)) {
if (CollectionUtils.isEmpty(userDatas)) {
userDatas = new ArrayList<>(UserDataExcel);
} else {
userDatas.addAll(UserDataExcel);
}
}
tempResult.put(name, userDatas);
}
}
//完成后,将各用户数据按时间排序并汇总,随后序列化存储,方便下次使用
for (String name : userNames) {
List<UserDataExcel> tempUserData = tempResult.get(name);
//按时间排序
Collections.sort(tempUserData);
exportExcelList.addAll(tempUserData);
}
//组装输出文件名(原文件名+起止日期)
String fileNameWithOutSuffix = fileName.substring(0, fileName.indexOf('.'));
fileNameWithOutSuffix = fileNameWithOutSuffix.concat(LocalDateTimeUtil.format(startTime, DatePattern.PURE_DATE_PATTERN)).concat(StrPool.DASHED).concat(LocalDateTimeUtil.format(endTime, DatePattern.PURE_DATE_PATTERN));
//处理完后的用采数据生成json文件流到oss服务器
JSONArray finalUserData = JSONArray.parseArray(JSON.toJSONString(exportExcelList));
InputStream reportStream = IoUtil.toStream(finalUserData.toString(), CharsetUtil.UTF_8);
String ossPath = fileStorageUtil.uploadStream(reportStream, OssPath.RESPONSIBILITY_USER_DATA, FileUtil.generateFileName("json"));
//入库前进行查询操作,存在则更新,不存在则插入
LambdaQueryWrapper<RespUserData> respUserDataLambdaQueryWrapper = new LambdaQueryWrapper<>();
respUserDataLambdaQueryWrapper.eq(RespUserData::getName, fileNameWithOutSuffix)
.eq(RespUserData::getStartTime, startTime)
.eq(RespUserData::getEndTime, endTime)
.eq(RespUserData::getState, DataStateEnum.ENABLE.getCode());
respUserData = this.baseMapper.selectOne(respUserDataLambdaQueryWrapper);
//不存在则插入
if (Objects.isNull(respUserData)) {
respUserData = new RespUserData();
respUserData.setEndTime(endTime);
respUserData.setStartTime(startTime);
respUserData.setName(fileNameWithOutSuffix);
respUserData.setDataPath(ossPath);
respUserData.setState(DataStateEnum.ENABLE.getCode());
this.baseMapper.insert(respUserData);
if (CollectionUtil.isNotEmpty(respUserDataIntegrities)) {
//关联插入数据 户号,监测点号,户名,时间,完整性
for (RespUserDataIntegrity respUserDataIntegrity : respUserDataIntegrities) {
respUserDataIntegrity.setUserDataId(respUserData.getId());
}
//插入操作
respUserDataIntegrityService.saveBatch(respUserDataIntegrities);
respUserData.setIntegrity(1);
} else {
respUserData.setIntegrity(0);
}
this.baseMapper.updateById(respUserData);
} else {
//存在则更新,需要删除之前的oss文件
fileStorageUtil.deleteFile(respUserData.getDataPath());
if (CollectionUtil.isNotEmpty(respUserDataIntegrities)) {
LambdaQueryWrapper<RespUserDataIntegrity> respUserDataIntegrityLambdaQueryWrapper = new LambdaQueryWrapper<>();
respUserDataIntegrityLambdaQueryWrapper.eq(RespUserDataIntegrity::getUserDataId, respUserData.getId());
respUserDataIntegrityService.remove(respUserDataIntegrityLambdaQueryWrapper);
for (RespUserDataIntegrity respUserDataIntegrity : respUserDataIntegrities) {
respUserDataIntegrity.setUserDataId(respUserData.getId());
}
//插入操作
respUserDataIntegrityService.saveBatch(respUserDataIntegrities);
respUserData.setIntegrity(1);
} else {
respUserData.setIntegrity(0);
}
respUserData.setDataPath(ossPath);
this.baseMapper.updateById(respUserData);
}
}
/**
* 解析用采数据为一个标准格式
*
* @param userDataExcelBodies 原始用采数据
* @param flag 0-以Map结构组织,便于后续补齐缺失数据;1-以List结构组织,便于后续责任数据提取
*/
public static DealDataResult getStanderData(List<UserDataExcel> userDataExcelBodies, int flag) {
DealDataResult result = new DealDataResult();
//收集所有的日期,以便获取起始日期和截止日期
List<String> dates = new ArrayList<>();
Map<String, Map<String, Map<Date, UserDataExcel>>> totalData = new HashMap<>();
Map<String, Map<String, List<UserDataExcel>>> totalListData = new HashMap<>();
for (UserDataExcel UserDataExcel : userDataExcelBodies) {
//第一个key
String key = UserDataExcel.getUserId() + "@" + UserDataExcel.getLine() + "@" + UserDataExcel.getUserName();
String time = UserDataExcel.getTime().substring(0, 10);
if (!dates.contains(time)) {
dates.add(time);
}
if (!totalData.containsKey(key)) {
if (flag == 0) {
//Map形式避免后面补齐数据嵌套循环
Map<Date, UserDataExcel> userDatas = new HashMap<>();
userDatas.put(PubUtils.getSecondsAsZero(DateUtil.parse(UserDataExcel.getTime(), DatePattern.NORM_DATETIME_PATTERN)), UserDataExcel);
Map<String, Map<Date, UserDataExcel>> dataToUserDatas = new HashMap<>();
dataToUserDatas.put(time, userDatas);
totalData.put(key, dataToUserDatas);
} else if (flag == 1) {
//List形式,避免后面责任数据提取嵌套循环
List<UserDataExcel> userListDatas = new ArrayList<>();
userListDatas.add(UserDataExcel);
Map<String, List<UserDataExcel>> dataToUserListDatas = new HashMap<>();
dataToUserListDatas.put(time, userListDatas);
totalData.put(key, new HashMap<>());
totalListData.put(key, dataToUserListDatas);
}
} else {
if (flag == 0) {
//Map形式避免后面补齐数据嵌套循环
Map<String, Map<Date, UserDataExcel>> dataToUserDatas = totalData.get(key);
Map<Date, UserDataExcel> userDatas = dataToUserDatas.get(time);
//该日的数据尚未存放,先初始化再放入
if (CollectionUtils.isEmpty(userDatas)) {
userDatas = new HashMap<>();
userDatas.put(PubUtils.getSecondsAsZero(DateUtil.parse(UserDataExcel.getTime(), DatePattern.NORM_DATETIME_PATTERN)), UserDataExcel);
dataToUserDatas.put(time, userDatas);
} else {
//累加该日的数据
userDatas.put(PubUtils.getSecondsAsZero(DateUtil.parse(UserDataExcel.getTime(), DatePattern.NORM_DATETIME_PATTERN)), UserDataExcel);
dataToUserDatas.put(time, userDatas);
}
totalData.put(key, dataToUserDatas);
} else if (flag == 1) {
//List形式,避免后面责任数据提取嵌套循环
Map<String, List<UserDataExcel>> dataToUserListDatas = totalListData.get(key);
List<UserDataExcel> userListDatas = dataToUserListDatas.get(time);
if (CollectionUtils.isEmpty(userListDatas)) {
userListDatas = new ArrayList<>();
userListDatas.add(UserDataExcel);
dataToUserListDatas.put(time, userListDatas);
} else {
userListDatas.add(UserDataExcel);
dataToUserListDatas.put(time, userListDatas);
}
totalListData.put(key, dataToUserListDatas);
}
}
}
result.setDates(dates);
result.setTotalData(totalData);
result.setTotalListData(totalListData);
return result;
}
/**
* 将日期排序后返回
*/
private List<LocalDate> getSortDate(List<String> dates) {
List<LocalDate> result = new ArrayList<>();
for (String date : dates) {
LocalDate temp = LocalDateTimeUtil.parseDate(date, DatePattern.NORM_DATE_PATTERN);
result.add(temp);
}
if (!CollectionUtils.isEmpty(result)) {
Collections.sort(result);
}
return result;
}
/**
* 处理用户每日数据
*
* @param name 用户名
* @param beforeDeal 处理前的用户数据
* @param time 日期,格式yyyy-MM-dd
* @param step 数据点间隔分钟数
*/
private DealUserDataResult dealUserData(String name, Map<Date, UserDataExcel> beforeDeal, String time, int step) {
DealUserDataResult result = new DealUserDataResult();
String[] userFlag = name.split("@");
//每天最开始的数据是从00:00:00开始的,所以起始时间为time + 00:00:00
List<UserDataExcel> completed = new ArrayList<>();
List<UserDataExcel> lack = new ArrayList<>();
if (CollectionUtils.isEmpty(beforeDeal)) {
return result;
} else {
String timeTemp = time + " 00:00:00";
Date date = DateUtil.parse(timeTemp, DatePattern.NORM_DATETIME_PATTERN);
int count = 24 * 60 / 15;
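//一天按15分钟间隔共96个点;完整度低于90%时不做补齐,仅记录完整性并保留已有数据,否则用前后相邻值的均值补齐缺失点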
if ((float) beforeDeal.size() / (float) count < 0.9) {
Set<Date> dates = beforeDeal.keySet();
for (Date tempDate : dates) {
UserDataExcel UserDataExcel = beforeDeal.get(tempDate);
if (UserDataExcel.getWork() != null) {
lack.add(UserDataExcel);
}
}
RespUserDataIntegrity respUserDataIntegrity = new RespUserDataIntegrity();
respUserDataIntegrity.setIntegrity(BigDecimal.valueOf((double) lack.size() / 96.0));
respUserDataIntegrity.setLackDate(LocalDateTimeUtil.parseDate(time, DatePattern.NORM_DATE_PATTERN));
respUserDataIntegrity.setUserName(userFlag[2]);
respUserDataIntegrity.setLineNo(userFlag[1]);
respUserDataIntegrity.setUserNo(userFlag[0]);
result.setLack(lack);
result.setRespUserDataIntegrity(respUserDataIntegrity);
return result;
} else {
for (int i = 0; i < count; i++) {
Calendar calendar = Calendar.getInstance();
calendar.setTime(date);
calendar.add(Calendar.MINUTE, step * i);
UserDataExcel UserDataExcel = beforeDeal.get(calendar.getTime());
if (UserDataExcel != null && UserDataExcel.getWork() != null) {
completed.add(UserDataExcel);
} else {
//找到前一个时间点值
Float perValue = getPreValue(date, calendar.getTime(), beforeDeal);
//找到后一个时间点值
Float appendValue = getAppendValue(date, count, step, calendar.getTime(), beforeDeal);
UserDataExcel temp = new UserDataExcel();
SimpleDateFormat sdf = new SimpleDateFormat(DatePattern.NORM_DATETIME_PATTERN);
temp.setTime(sdf.format(calendar.getTime()));
temp.setUserId(userFlag[0]);
temp.setLine(userFlag[1]);
temp.setUserName(userFlag[2]);
//还需要判断前值和后值为空的情况
if (null == perValue && null == appendValue) {
temp.setWork(new BigDecimal("0.0"));
} else if (null == perValue) {
temp.setWork(new BigDecimal(appendValue));
} else if (null == appendValue) {
temp.setWork(new BigDecimal(perValue));
} else {
temp.setWork(BigDecimal.valueOf((perValue + appendValue) / 2));
}
completed.add(temp);
}
}
}
}
result.setCompleted(completed);
return result;
}
/**
* 递归找前值
*
* @param date 起始时间
* @param time 当前时间
* @param beforeDeal 处理前的数据
*/
private Float getPreValue(Date date, Date time, Map<Date, UserDataExcel> beforeDeal) {
Float result;
if (date.equals(time)) {
return null;
} else {
Calendar calendar = Calendar.getInstance();
calendar.setTime(time);
calendar.add(Calendar.MINUTE, -15);
UserDataExcel temp = beforeDeal.get(calendar.getTime());
if (temp == null || temp.getWork() == null) {
result = getPreValue(date, calendar.getTime(), beforeDeal);
} else {
result = temp.getWork().floatValue();
}
}
return result;
}
/**
* 递归找后值
*
* @param date 起始时间
* @param count 一天时间的总计数
* @param step 间隔分钟
* @param time 当前时间
* @param beforeDeal 处理前的数据
*/
private Float getAppendValue(Date date, int count, int step, Date time, Map<Date, UserDataExcel> beforeDeal) {
Float result;
Calendar calendar = Calendar.getInstance();
calendar.setTime(date);
calendar.set(Calendar.MINUTE, (count - 1) * step);
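//Calendar默认为宽松模式,分钟字段设为(count - 1) * step后取时间时会自动进位,得到当天最后一个时间点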
if (time.equals(calendar.getTime())) {
return null;
} else {
Calendar calendar1 = Calendar.getInstance();
calendar1.setTime(time);
calendar1.add(Calendar.MINUTE, 15);
UserDataExcel temp = beforeDeal.get(calendar1.getTime());
if (temp == null || temp.getWork() == null) {
result = getAppendValue(date, count, step, calendar1.getTime(), beforeDeal);
} else {
result = temp.getWork().floatValue();
}
}
return result;
}
}

View File

@@ -0,0 +1,51 @@
package com.njcn.advance.utils;
import cn.hutool.core.util.StrUtil;
import com.njcn.advance.enums.AdvanceResponseEnum;
import com.njcn.common.pojo.enums.response.CommonResponseEnum;
import com.njcn.common.pojo.exception.BusinessException;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.common.utils.EnumUtils;
import com.njcn.device.biz.enums.DeviceResponseEnum;
import javax.validation.constraints.NotNull;
import java.util.Objects;
/**
* @author hongawen
* @version 1.0.0
* @date 2021年12月20日 10:03
*/
public class AdvanceEnumUtil {
/**
* 获取AdvanceResponseEnum实例
*/
public static AdvanceResponseEnum getDeviceEnumResponseEnumByMessage(@NotNull Object value) {
AdvanceResponseEnum advanceResponseEnum;
try {
String message = value.toString();
if(message.indexOf(StrUtil.C_COMMA)>0){
value = message.substring(message.indexOf(StrUtil.C_COMMA)+1);
}
advanceResponseEnum = EnumUtils.valueOf(AdvanceResponseEnum.class, value, AdvanceResponseEnum.class.getMethod(BusinessException.GET_MESSAGE_METHOD));
return Objects.isNull(advanceResponseEnum) ? AdvanceResponseEnum.INTERNAL_ERROR : advanceResponseEnum;
} catch (NoSuchMethodException e) {
throw new BusinessException(CommonResponseEnum.INTERNAL_ERROR);
}
}
public static Enum<?> getExceptionEnum(HttpResult<Object> result){
//如果返回错误,且为内部错误,则直接抛出异常
CommonResponseEnum commonResponseEnum = EnumUtils.getCommonResponseEnumByCode(result.getCode());
if (commonResponseEnum == CommonResponseEnum.ADVANCE_RESPONSE_ENUM) {
return getDeviceEnumResponseEnumByMessage(result.getMessage());
}
return commonResponseEnum;
}
}

View File

@@ -0,0 +1,31 @@
package com.njcn.advance.utils;
import com.njcn.advance.pojo.bo.responsibility.QvvrStruct;
import com.sun.jna.Library;
import com.sun.jna.Native;
/**
* @author hongawen
* @version 1.0.0
* @date 2023年07月24日 14:12
*/
public class ResponsibilityCallDllOrSo extends JnaCallDllOrSo {
public static String strpath;
public ResponsibilityCallDllOrSo(String name) {
super(name);
}
@Override
public void setPath() {
ResponsibilityCallDllOrSo.strpath = super.getStrpath();
}
public interface ResponsibilityLibrary extends Library {
// 加载Lib库
ResponsibilityCallDllOrSo.ResponsibilityLibrary INSTANTCE = (ResponsibilityCallDllOrSo.ResponsibilityLibrary) Native.loadLibrary(ResponsibilityCallDllOrSo.strpath, ResponsibilityCallDllOrSo.ResponsibilityLibrary.class);
// 定义方法--->与C方法相对应
void harm_response(QvvrStruct outData);
}
}

View File

@@ -1,7 +1,9 @@
package com.njcn.auth.utils;
import cn.hutool.core.util.RandomUtil;
import okhttp3.*;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;

View File

@@ -0,0 +1,23 @@
package com.njcn.common.pojo.dto;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.io.Serializable;
/**
* @author hongawen
* @version 1.0.0
* @date 2023年07月25日 09:40
*/
@Data
@NoArgsConstructor
@AllArgsConstructor
public class SelectOption implements Serializable {
private String name;
private String value;
}

View File

@@ -82,6 +82,10 @@ public enum CommonResponseEnum {
FILE_EXIST("A0096", "文件已存在"),
FILE_SIZE_ERROR("A0096", "文件过大"),
FILE_XLSX_ERROR("A0096", "请上传excel文件"),
DEPT_EXIST("A0097", "部门id已存在"),
DEPT_NOT_EXIST("A0098", "部门id不存在"),
@@ -95,6 +99,8 @@ public enum CommonResponseEnum {
MATH_ERROR("A0103","比例总和大于100%"),
CS_DEVICE_RESPONSE_ENUM("A0104", "治理终端响应枚举类型"),
ADVANCE_RESPONSE_ENUM("A00105", "终端响应枚举类型"),
;

View File

@@ -15,15 +15,18 @@ import com.njcn.common.pojo.exception.BusinessException;
import java.io.IOException;
import java.lang.reflect.Method;
import java.lang.reflect.Type;
import java.math.BigDecimal;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.format.DateTimeFormatter;
import java.util.*;
import java.util.concurrent.TimeUnit;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@@ -307,6 +310,78 @@ public class PubUtils {
}
}
/**
* 将当前时间的秒数置为0
*
* @param date 时间
*/
public static Date getSecondsAsZero(Date date) {
Calendar calendar = Calendar.getInstance();
calendar.setTime(date);
calendar.set(Calendar.SECOND, 0);
return calendar.getTime();
}
/**
* 根据起始时间和截止时间返回yyyy-MM-dd的日期
*
* @param startTime 起始时间
* @param endTime 截止时间
*/
public static List<String> getTimes(Date startTime, Date endTime) {
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
List<String> result = new ArrayList<>();
Calendar start = Calendar.getInstance();
start.setTime(startTime);
Calendar end = Calendar.getInstance();
end.setTime(endTime);
end.set(end.get(Calendar.YEAR), end.get(Calendar.MONTH), end.get(Calendar.DAY_OF_MONTH), 0, 0, 0);
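//将截止日期的时分秒置零,按自然日差值计算需要追加的天数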
long interval = end.getTimeInMillis() - start.getTimeInMillis();
result.add(sdf.format(start.getTime()));
if (interval > 0) {
int days = (int) (interval / 86400000);
for (int i = 0; i < days; i++) {
start.add(Calendar.DAY_OF_MONTH, 1);
result.add(sdf.format(start.getTime()));
}
}
return result;
}
/***
* 将instant转为date 处理8小时误差
* @author hongawen
* @date 2023/7/20 15:58
* @param instant 日期
* @return Date
*/
public static Date instantToDate(Instant instant){
return Date.from(instant.minusMillis(TimeUnit.HOURS.toMillis(8)));
}
/***
* 将date转为instant 处理8小时误差
* @author hongawen
* @date 2023/7/20 15:58
* @param date 日期
* @return Instant
*/
public static Instant dateToInstant(Date date){
return date.toInstant().plusMillis(TimeUnit.HOURS.toMillis(8));
}
/**
* 根据参数返回float的四舍五入值
*
* @param i 保留的位数
* @param value float原值
*/
public static Float floatRound(int i, float value) {
BigDecimal bp = new BigDecimal(value);
return bp.setScale(i, BigDecimal.ROUND_HALF_UP).floatValue();
}
//*****************************************xuyang添加,用于App********************************************************
/**
* 正则表达式字符串

View File

@@ -0,0 +1,39 @@
package com.njcn.common.utils.serializer;
import cn.hutool.core.date.DatePattern;
import cn.hutool.core.date.DateUtil;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.deser.std.StdDeserializer;
import com.njcn.common.utils.PubUtils;
import org.springframework.stereotype.Component;
import java.io.IOException;
import java.time.Instant;
/**
* @author hongawen
* @version 1.0.0
* @date 2023年07月24日 13:33
*/
@Component
public class InstantDateDeserializer extends StdDeserializer<Instant> {
public InstantDateDeserializer() {
this(null);
}
protected InstantDateDeserializer(Class<?> vc) {
super(vc);
}
@Override
public Instant deserialize(JsonParser p, DeserializationContext ctxt) throws IOException, JsonProcessingException {
String text = p.getValueAsString();
return PubUtils.dateToInstant(DateUtil.parse(text,DatePattern.NORM_DATETIME_PATTERN));
}
}

View File

@@ -42,7 +42,7 @@ public class InstantDateSerializer extends StdSerializer<Instant> {
if (instant == null) {
return;
}
String jsonValue = format.format(instant.atZone(ZoneId.systemDefault()));
String jsonValue = format.format(instant.atZone(ZoneId.of("+00:00")));
jsonGenerator.writeString(jsonValue);
}
}

View File

@@ -27,6 +27,11 @@ public interface OssPath {
*/
String ALGORITHM="algorithm/";
/***
* 装置模板
*/
String DEV_MODEL = "algorithm/devModel/";
/***
* process模块中干扰源入网报告的上传路径
*/
@@ -63,10 +68,6 @@ public interface OssPath {
*/
String EDDATA = "edData/";
/***
* 装置模板
*/
String DEV_MODEL = "algorithm/devModel/";
/***
* 资源管理文件
@@ -93,7 +94,15 @@ public interface OssPath {
*/
String WIRING_DIAGRAM = "wiringDiagram/";
/***
* 高级算法责任量化用采数据保存地址
*/
String RESPONSIBILITY_USER_DATA="advance/responsibility/userData/";
/***
* 高级算法责任量化结果数据保存地址
*/
String RESPONSIBILITY_USER_RESULT_DATA="advance/responsibility/userData/result/";
}

View File

@@ -15,7 +15,7 @@ public enum OssResponseEnum {
*/
UPLOAD_FILE_ERROR("A00551","上传文件服务器错误,请检查数据"),
DOWNLOAD_FILE_URL_ERROR("A00554","下载文件URL不存在请检查数据"),
DOWNLOAD_FILE_STREAM_ERROR("A00555","文件服务器下载文件流异常"),
DOWNLOAD_FILE_STREAM_ERROR("A00555","文件服务器下载文件流为空"),
DOWNLOAD_FILE_ERROR("A00556","文件服务器下载异常")
;

View File

@@ -1,7 +1,5 @@
package com.njcn.poi.util;
import cn.afterturn.easypoi.excel.ExcelExportUtil;
import cn.afterturn.easypoi.excel.entity.ExportParams;
import cn.hutool.core.io.FileUtil;
import cn.hutool.core.util.CharsetUtil;
import com.njcn.common.pojo.enums.response.CommonResponseEnum;
@@ -12,10 +10,7 @@ import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServletResponse;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.URLEncoder;
import java.util.Collection;
/**
* @author hongawen

View File

@@ -163,7 +163,7 @@ public class LineDetail{
/**
* 接线图
*/
private String wiringDiagram;
private String wireDiagram;
}

View File

@@ -18,6 +18,12 @@ public class LineDetailVO implements Serializable {
@ApiModelProperty("变电站名称")
private String subName;
@ApiModelProperty("终端名称")
private String devName;
@ApiModelProperty("网络参数")
private String ip;
@ApiModelProperty("监测点名称")
private String lineName;
}

View File

@@ -612,16 +612,20 @@
<select id="getLineSubGdDetail" resultType="com.njcn.device.pq.pojo.vo.LineDetailVO">
SELECT DISTINCT gd.NAME gdName,
substation.NAME subName,
device.NAME devName,
deviceDetail.ip ip,
line.NAME lineName
FROM pq_line line,
pq_line voltage,
pq_line device,
pq_device deviceDetail,
pq_line substation,
pq_line gd
WHERE line.pid = voltage.id
AND voltage.pid = device.id
AND device.pid = substation.id
AND substation.pid = gd.id
AND device.id = deviceDetail.id
AND line.id = #{id}
</select>

View File

@@ -145,7 +145,7 @@ public class LineServiceImpl extends ServiceImpl<LineMapper, Line> implements Li
lineDetailDataVO.setOwner(lineDetail.getOwner());
lineDetailDataVO.setOwnerDuty(lineDetail.getOwnerDuty());
lineDetailDataVO.setOwnerTel(lineDetail.getOwnerTel());
lineDetailDataVO.setWiringDiagram(lineDetail.getWiringDiagram());
lineDetailDataVO.setWiringDiagram(lineDetail.getWireDiagram());
return lineDetailDataVO;
}

View File

@@ -1678,8 +1678,8 @@ public class TerminalBaseServiceImpl extends ServiceImpl<LineMapper, Line> imple
throw new BusinessException(DeviceResponseEnum.LINE_NO);
}
String[] urls = new String[0];
if(StrUtil.isNotBlank(lineDetailValid.getWiringDiagram())){
urls = lineDetailValid.getWiringDiagram().split(StrUtil.COMMA);
if(StrUtil.isNotBlank(lineDetailValid.getWireDiagram())){
urls = lineDetailValid.getWireDiagram().split(StrUtil.COMMA);
}
if (urls.length + files.length > 2) {
@@ -1699,7 +1699,7 @@ public class TerminalBaseServiceImpl extends ServiceImpl<LineMapper, Line> imple
}
LineDetail lineDetail = new LineDetail();
lineDetail.setWiringDiagram(strBuilder.toString());
lineDetail.setWireDiagram(strBuilder.toString());
lineDetail.setId(lineId);
lineDetailMapper.updateById(lineDetail);
return true;
@@ -1712,11 +1712,11 @@ public class TerminalBaseServiceImpl extends ServiceImpl<LineMapper, Line> imple
if (Objects.isNull(detail)) {
throw new BusinessException(DeviceResponseEnum.LINE_NO);
}
if (StrUtil.isBlank(detail.getWiringDiagram())) {
if (StrUtil.isBlank(detail.getWireDiagram())) {
return result;
}
String[] urlStr = detail.getWiringDiagram().split(StrUtil.COMMA);
String[] urlStr = detail.getWireDiagram().split(StrUtil.COMMA);
for (String url : urlStr) {
LineWiringDiagramVO lineWiringDiagramVO = new LineWiringDiagramVO();
String realUrl = fileStorageUtil.getFileUrl(url);
@@ -1734,14 +1734,14 @@ public class TerminalBaseServiceImpl extends ServiceImpl<LineMapper, Line> imple
if (Objects.isNull(detail)) {
throw new BusinessException(DeviceResponseEnum.LINE_NO);
}
if (StrUtil.isBlank(detail.getWiringDiagram())) {
if (StrUtil.isBlank(detail.getWireDiagram())) {
throw new BusinessException(DeviceResponseEnum.LINE_WIRING_DEL);
}
if (!detail.getWiringDiagram().contains(wiringDiagramName)) {
if (!detail.getWireDiagram().contains(wiringDiagramName)) {
throw new BusinessException(DeviceResponseEnum.LINE_WIRING_NO);
}
String[] urlArr = detail.getWiringDiagram().split(StrUtil.COMMA);
String[] urlArr = detail.getWireDiagram().split(StrUtil.COMMA);
List<String> newUrlList = Arrays.stream(urlArr).filter(s -> !s.equalsIgnoreCase(wiringDiagramName)).collect(Collectors.toList());
if (newUrlList.size() > 1) {
@@ -1763,7 +1763,7 @@ public class TerminalBaseServiceImpl extends ServiceImpl<LineMapper, Line> imple
LineDetail lineDetail = new LineDetail();
lineDetail.setId(detail.getId());
lineDetail.setWiringDiagram(strBuilder.toString());
lineDetail.setWireDiagram(strBuilder.toString());
lineDetailMapper.updateById(lineDetail);
return true;
}
@@ -1774,15 +1774,15 @@ public class TerminalBaseServiceImpl extends ServiceImpl<LineMapper, Line> imple
if (Objects.isNull(detail)) {
throw new BusinessException(DeviceResponseEnum.LINE_NO);
}
if (StrUtil.isBlank(detail.getWiringDiagram())) {
if (StrUtil.isBlank(detail.getWireDiagram())) {
throw new BusinessException(DeviceResponseEnum.LINE_WIRING_DEL);
}
if (!detail.getWiringDiagram().contains(wiringDiagramName)) {
if (!detail.getWireDiagram().contains(wiringDiagramName)) {
throw new BusinessException(DeviceResponseEnum.LINE_WIRING_NO);
}
fileStorageUtil.deleteFile(wiringDiagramName);
String[] urlArr = detail.getWiringDiagram().split(StrUtil.COMMA);
String[] urlArr = detail.getWireDiagram().split(StrUtil.COMMA);
List<String> newUrlList = Arrays.stream(urlArr).filter(s -> !s.equalsIgnoreCase(wiringDiagramName)).collect(Collectors.toList());
StrBuilder strBuilder = new StrBuilder();
for (int i = 0; i < newUrlList.size(); i++) {
@@ -1794,7 +1794,7 @@ public class TerminalBaseServiceImpl extends ServiceImpl<LineMapper, Line> imple
}
LineDetail lineDetail = new LineDetail();
lineDetail.setId(detail.getId());
lineDetail.setWiringDiagram(strBuilder.toString());
lineDetail.setWireDiagram(strBuilder.toString());
lineDetailMapper.updateById(lineDetail);
return true;
}

View File

@@ -109,7 +109,6 @@ spring:
filters:
- SwaggerHeaderFilter
- StripPrefix=1
- id: process-boot
uri: lb://process-boot
predicates:
@@ -117,7 +116,6 @@ spring:
filters:
- SwaggerHeaderFilter
- StripPrefix=1
- id: prepare-boot
uri: lb://prepare-boot
predicates:
@@ -167,10 +165,10 @@ spring:
filters:
- SwaggerHeaderFilter
- StripPrefix=1
- id: access-boot
uri: lb://access-boot
- id: advance-boot
uri: lb://advance-boot
predicates:
- Path=/access-boot/**
- Path=/advance-boot/**
filters:
- SwaggerHeaderFilter
- StripPrefix=1

View File

@@ -49,8 +49,16 @@
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.influxdb</groupId>
<artifactId>influxdb-java</artifactId>
<groupId>com.njcn</groupId>
<artifactId>pqs-influx</artifactId>
<version>0.0.1-SNAPSHOT</version>
<!--排除okhttp3的依赖-->
<exclusions>
<exclusion>
<groupId>com.squareup.okhttp3</groupId>
<artifactId>*</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>com.njcn</groupId>

View File

@@ -0,0 +1,28 @@
package com.njcn.harmonic.api;
import com.njcn.common.pojo.constant.ServerInfo;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.harmonic.api.fallback.HarmDataFeignClientFallbackFactory;
import com.njcn.harmonic.pojo.param.HistoryHarmParam;
import com.njcn.influx.pojo.dto.HarmHistoryDataDTO;
import org.springframework.cloud.openfeign.FeignClient;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
@FeignClient(
value = ServerInfo.HARMONIC,
path = "/harmonic",
fallbackFactory = HarmDataFeignClientFallbackFactory.class,
contextId = "harmonic")
public interface HarmDataFeignClient {
/**
* 获取谐波历史数据
* @param historyHarmParam 请求查询参数
* @return 结果
*/
@PostMapping("/getHistoryHarmData")
HttpResult<HarmHistoryDataDTO> getHistoryHarmData(@RequestBody HistoryHarmParam historyHarmParam);
}

View File

@@ -0,0 +1,39 @@
package com.njcn.harmonic.api.fallback;
import com.njcn.common.pojo.enums.response.CommonResponseEnum;
import com.njcn.common.pojo.exception.BusinessException;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.device.biz.utils.DeviceEnumUtil;
import com.njcn.harmonic.api.HarmDataFeignClient;
import com.njcn.harmonic.pojo.param.HistoryHarmParam;
import com.njcn.influx.pojo.dto.HarmHistoryDataDTO;
import feign.hystrix.FallbackFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
/**
* @author hongawen
* @version 1.0.0
* @date 2023年07月21日 14:31
*/
@Slf4j
@Component
public class HarmDataFeignClientFallbackFactory implements FallbackFactory<HarmDataFeignClient> {
@Override
public HarmDataFeignClient create(Throwable throwable) {
//判断抛出异常是否为解码器抛出的业务异常
Enum<?> exceptionEnum = CommonResponseEnum.SERVICE_FALLBACK;
if (throwable.getCause() instanceof BusinessException) {
BusinessException businessException = (BusinessException) throwable.getCause();
exceptionEnum = DeviceEnumUtil.getExceptionEnum(businessException.getResult());
}
Enum<?> finalExceptionEnum = exceptionEnum;
return new HarmDataFeignClient() {
@Override
public HttpResult<HarmHistoryDataDTO> getHistoryHarmData(HistoryHarmParam historyHarmParam) {
log.error("{}异常,降级处理,异常为:{}", "获取谐波历史数据", throwable.toString());
throw new BusinessException(finalExceptionEnum);
}
};
}
}

View File

@@ -30,6 +30,8 @@ public enum HarmonicResponseEnum {
REPORT_DOWNLOAD_ERROR("A00559","报表文件下载异常"),
REPORT_TEMPLATE_DOWNLOAD_ERROR("A00560","报表模板下载异常"),
NO_DATA("A00561","时间范围内暂无谐波数据"),
INSUFFICIENCY_OF_INTEGRITY("A00561","时间范围内谐波数据完整性不足"),
;
private final String code;

View File

@@ -0,0 +1,54 @@
package com.njcn.harmonic.pojo.param;
import com.njcn.common.pojo.constant.PatternRegex;
import com.njcn.harmonic.constant.HarmonicValidMessage;
import io.swagger.annotations.ApiModelProperty;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import javax.validation.constraints.Max;
import javax.validation.constraints.Min;
import javax.validation.constraints.NotBlank;
import javax.validation.constraints.Pattern;
import java.io.Serializable;
/**
* @author hongawen
* @version 1.0.0
* @date 2023年07月19日 09:23
*/
@Data
@NoArgsConstructor
@AllArgsConstructor
public class HistoryHarmParam implements Serializable {
@ApiModelProperty("开始时间")
@NotBlank(message = HarmonicValidMessage.DATA_NOT_BLANK)
@Pattern(regexp = PatternRegex.TIME_FORMAT, message = "时间格式错误")
private String searchBeginTime;
@ApiModelProperty("结束时间")
@NotBlank(message = HarmonicValidMessage.DATA_NOT_BLANK)
@Pattern(regexp = PatternRegex.TIME_FORMAT, message = "时间格式错误")
private String searchEndTime;
@NotBlank(message = HarmonicValidMessage.DATA_NOT_BLANK)
@ApiModelProperty("监测点索引")
private String lineId;
@Max(1)
@Min(0)
@ApiModelProperty("0-电流 1-电压")
private int type;
@Max(50)
@Min(2)
@ApiModelProperty("谐波次数")
private Integer time;
}

View File

@@ -1,5 +1,7 @@
package com.njcn.harmonic.pojo.po;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.njcn.common.utils.serializer.InstantDateSerializer;
import lombok.Data;
import org.influxdb.annotation.Column;
import org.influxdb.annotation.Measurement;
@@ -18,6 +20,7 @@ import java.time.Instant;
public class LimitTarget {
@Column(name = "time")
@JsonSerialize(using = InstantDateSerializer.class)
private Instant time;
@Column(name = "line_id")

View File

@@ -27,32 +27,12 @@
<groupId>com.njcn</groupId>
<artifactId>common-web</artifactId>
<version>${project.version}</version>
<!-- <exclusions>-->
<!-- <exclusion>-->
<!-- <groupId>org.apache.logging.log4j</groupId>-->
<!-- <artifactId>log4j-to-slf4j</artifactId>-->
<!-- </exclusion>-->
<!-- <exclusion>-->
<!-- <groupId>org.slf4j</groupId>-->
<!-- <artifactId>jul-to-slf4j</artifactId>-->
<!-- </exclusion>-->
<!-- <exclusion>-->
<!-- <groupId>ch.qos.logback</groupId>-->
<!-- <artifactId>logback-classic</artifactId>-->
<!-- </exclusion>-->
<!-- </exclusions>-->
</dependency>
<dependency>
<groupId>com.njcn</groupId>
<artifactId>common-swagger</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.njcn</groupId>
<artifactId>common-influxDB</artifactId>
<version>${project.version}</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>com.njcn</groupId>
<artifactId>pq-device-api</artifactId>
@@ -67,28 +47,34 @@
<groupId>com.njcn</groupId>
<artifactId>user-api</artifactId>
<version>1.0.0</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>com.njcn</groupId>
<artifactId>event-api</artifactId>
<version>${project.version}</version>
</dependency>
<!--避免idea后端配置类报红-->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-configuration-processor</artifactId>
<optional>true</optional>
</dependency>
<!--华为obs工具包-->
<dependency>
<groupId>com.njcn</groupId>
<artifactId>common-oss</artifactId>
<version>${project.version}</version>
<exclusions>
<exclusion>
<groupId>com.squareup.okhttp3</groupId>
<artifactId>okhttp</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- https://mvnrepository.com/artifact/com.squareup.okhttp3/okhttp -->
<dependency>
<groupId>com.squareup.okhttp3</groupId>
<artifactId>okhttp</artifactId>
<version>4.8.1</version>
</dependency>
<dependency>
<groupId>com.squareup.okhttp3</groupId>
<artifactId>logging-interceptor</artifactId>
<version>4.8.1</version>
</dependency>
</dependencies>

View File

@@ -5,6 +5,7 @@ import org.mybatis.spring.annotation.MapperScan;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cloud.openfeign.EnableFeignClients;
import org.springframework.context.annotation.DependsOn;
/**
@@ -16,6 +17,7 @@ import org.springframework.cloud.openfeign.EnableFeignClients;
* @date 2021/12/27 11:38
*/
@Slf4j
@DependsOn("proxyMapperRegister")
@MapperScan("com.njcn.**.mapper")
@EnableFeignClients(basePackages = "com.njcn")
@SpringBootApplication(scanBasePackages = "com.njcn")

View File

@@ -8,12 +8,10 @@ import com.njcn.common.pojo.enums.response.CommonResponseEnum;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.common.utils.HttpResultUtil;
import com.njcn.common.utils.LogUtil;
import com.njcn.device.pq.pojo.dto.PollutionLineDTO;
import com.njcn.harmonic.pojo.param.HarmonicPublicParam;
import com.njcn.harmonic.pojo.vo.*;
import com.njcn.harmonic.service.IAnalyzeService;
import com.njcn.harmonic.service.IHarmonicService;
import com.njcn.harmonic.service.IPollutionService;
import com.njcn.poi.util.PoiUtil;
import com.njcn.web.controller.BaseController;
import io.swagger.annotations.Api;
@@ -44,8 +42,6 @@ public class AnalyzeController extends BaseController {
private final IAnalyzeService IAnalyzeService;
private final IPollutionService pollutionService;
private final IHarmonicService harmonicService;

View File

@@ -6,11 +6,13 @@ import com.njcn.common.pojo.enums.response.CommonResponseEnum;
import com.njcn.common.pojo.param.StatisticsBizBaseParam;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.common.utils.HttpResultUtil;
import com.njcn.harmonic.pojo.param.HistoryHarmParam;
import com.njcn.harmonic.pojo.param.HistoryParam;
import com.njcn.harmonic.pojo.param.NormHistoryParam;
import com.njcn.harmonic.pojo.vo.HistoryDataResultVO;
import com.njcn.harmonic.pojo.vo.StatHarmonicOrgVO;
import com.njcn.harmonic.service.HistoryResultService;
import com.njcn.influx.pojo.dto.HarmHistoryDataDTO;
import com.njcn.web.controller.BaseController;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam;
@@ -70,4 +72,14 @@ public class HistoryResultController extends BaseController {
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, list, methodDescribe);
}
@OperateInfo(info = LogEnum.BUSINESS_COMMON)
@PostMapping("/getHistoryHarmData")
@ApiOperation("获取谐波历史数据")
@ApiImplicitParam(name = "historyHarmParam", value = "谐波历史数据请求参数", required = true)
public HttpResult<HarmHistoryDataDTO> getHistoryHarmData(@RequestBody @Validated HistoryHarmParam historyHarmParam) {
String methodDescribe = getMethodDescribe("getHistoryHarmData");
HarmHistoryDataDTO harmHistoryDataDTO = historyResultService.getHistoryHarmData(historyHarmParam);
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, harmHistoryDataDTO, methodDescribe);
}
}
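
For downstream services, the endpoint added above is reachable through the HarmDataFeignClient whose fallback factory appears earlier in this commit. A minimal consumer sketch follows; the consuming class, its method name and the client's package are hypothetical, while HttpResult#getData() is used the same way as elsewhere in this diff.

import com.njcn.harmonic.api.HarmDataFeignClient;   // package assumed
import com.njcn.harmonic.pojo.param.HistoryHarmParam;
import com.njcn.influx.pojo.dto.HarmHistoryDataDTO;
import lombok.RequiredArgsConstructor;
import org.springframework.stereotype.Service;

@Service
@RequiredArgsConstructor
public class HarmHistoryQueryDemo {   // hypothetical consumer, for illustration only

    private final HarmDataFeignClient harmDataFeignClient;

    public HarmHistoryDataDTO loadFifthHarmonicVoltage(String lineId) {
        HistoryHarmParam param = new HistoryHarmParam(
                "2023-07-19 00:00:00", "2023-07-20 00:00:00", lineId, 1, 5);
        // On downstream failure the fallback factory converts the error into a BusinessException.
        return harmDataFeignClient.getHistoryHarmData(param).getData();
    }
}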

View File

@@ -12,7 +12,6 @@ import com.njcn.harmonic.pojo.param.HarmonicPublicParam;
import com.njcn.harmonic.pojo.param.PollutionSubstationQuryParam;
import com.njcn.harmonic.pojo.vo.PollutionSubstationVO;
import com.njcn.harmonic.pojo.vo.PollutionVO;
import com.njcn.harmonic.service.IPollutionService;
import com.njcn.harmonic.service.PollutionSubstationService;
import com.njcn.web.controller.BaseController;
import io.swagger.annotations.Api;
@@ -46,8 +45,6 @@ public class PollutionSubstationController extends BaseController {
private final PollutionSubstationService pollutionSubstationService;
private final IPollutionService pollutionService;
@OperateInfo(info = LogEnum.BUSINESS_COMMON)
@PostMapping("/getPollutionSubstationData")

View File

@@ -1,5 +1,6 @@
package com.njcn.harmonic.pojo.vo;
package com.njcn.harmonic.pojo;
import com.njcn.influx.pojo.bo.HarmonicHistoryData;
import lombok.Data;
import org.influxdb.dto.QueryResult;
@@ -14,6 +15,7 @@ import java.util.List;
@Data
public class QueryResultLimitVO implements Serializable {
private QueryResult queryResult;
private List<HarmonicHistoryData> harmonicHistoryDataList;
private Float topLimit;
private Float lowerLimit;
private String lineName;

View File

@@ -1,10 +1,12 @@
package com.njcn.harmonic.service;
import com.njcn.common.pojo.param.StatisticsBizBaseParam;
import com.njcn.harmonic.pojo.param.HistoryHarmParam;
import com.njcn.harmonic.pojo.param.HistoryParam;
import com.njcn.harmonic.pojo.param.NormHistoryParam;
import com.njcn.harmonic.pojo.vo.HistoryDataResultVO;
import com.njcn.harmonic.pojo.vo.StatHarmonicOrgVO;
import com.njcn.influx.pojo.dto.HarmHistoryDataDTO;
import java.util.List;
@@ -36,4 +38,13 @@ public interface HistoryResultService {
* @return 结果
*/
List<StatHarmonicOrgVO> getHarmonicProportion(StatisticsBizBaseParam statisticsBizBaseParam);
/***
     * Fetch the specified historical harmonic data for a monitoring point by harmonic order
* @author hongawen
* @date 2023/7/19 9:56
     * @param historyHarmParam request parameters for the historical harmonic data query
* @return HarmHistoryDataDTO
*/
HarmHistoryDataDTO getHistoryHarmData(HistoryHarmParam historyHarmParam);
}

View File

@@ -1,53 +0,0 @@
package com.njcn.harmonic.service;
import com.njcn.device.pq.pojo.dto.PollutionLineDTO;
import com.njcn.device.pq.pojo.dto.PollutionSubstationDTO;
import com.njcn.harmonic.pojo.param.HarmonicPublicParam;
import com.njcn.harmonic.pojo.vo.PollutionVO;
import java.util.List;
/**
* @author 徐扬
*/
public interface IPollutionService {
/**
* 功能描述: 获取部门变电站关系
*
* @param param
* @return java.util.List<java.lang.Object>
* @author xy
* @date 2022/2/21 16:48
*/
// List<PollutionVO> getDeptSubstationRelations(HarmonicPublicParam param);
/**
* 功能描述: 根据部门获取变电站详情
*
* @param param 部门参数
* @return java.util.List<com.njcn.harmonic.pojo.vo.PollutionVO>
* @author xy
* @date 2022/2/21 20:51
*/
//List<PollutionSubstationDTO> getSubstationInfoById(HarmonicPublicParam param);
/**
* 功能描述: 根据变电站获取监测点详情
* @param param
* @return java.util.List<com.njcn.harmonic.pojo.vo.PollutionVO>
* @author xy
* @date 2022/2/21 20:51
*/
// List<PollutionLineDTO> getLineInfoById(HarmonicPublicParam param);
/**
* 功能描述:获取排名前10的监测点
* @param param
*
* @return java.util.List<com.njcn.harmonic.pojo.vo.LoadTypeVO>
* @author xy
* @date 2022/2/22 10:04
*/
// List<PollutionLineDTO> getLineRank(HarmonicPublicParam param);
}

View File

@@ -56,15 +56,6 @@ public interface PollutionSubstationService extends IService<RStatPollutionSubst
*/
List<PollutionLineDTO> getLineInfoById(HarmonicPublicParam param);
/***
* 获取前十监测点排名
* @author wr
* @date 2023-03-03 16:21
* @param param
* @return List<PollutionLineDTO>
*/
List<PollutionLineDTO> getLineRank(HarmonicPublicParam param);
List<PollutionLineDTO> getLineRankTop10(HarmonicPublicParam param);

View File

@@ -36,7 +36,6 @@ import com.njcn.harmonic.pojo.vo.OverAreaLimitVO;
import com.njcn.harmonic.pojo.vo.OverAreaVO;
import com.njcn.harmonic.pojo.vo.WarningSubstationVO;
import com.njcn.harmonic.service.IAnalyzeService;
import com.njcn.influxdb.utils.InfluxDbUtils;
import com.njcn.poi.excel.ExcelUtil;
import com.njcn.system.api.DicDataFeignClient;
import com.njcn.system.enums.DicDataEnum;
@@ -76,8 +75,6 @@ public class AnalyzeServiceImpl implements IAnalyzeService {
private final HarmonicServiceImpl harmonicService;
private final InfluxDbUtils influxDbUtils;
private final LineFeignClient lineFeignClient;
private final RStatLimitRateDMapper rateDMapper;
@@ -371,147 +368,6 @@ public class AnalyzeServiceImpl implements IAnalyzeService {
return page;
}
return page;
// if (!CollectionUtils.isEmpty(lineList)){
// page.setTotal(lineList.size());
// int pages = (int)Math.ceil(lineList.size()*1.0/param.getPageSize());
// page.setPages(pages);
// List<List<String>> pageList = Lists.partition(lineList,param.getPageSize());
// List<String> temList = pageList.get(param.getPageNum()-1);
// List<LimitTarget> limitTargetList = getOverDays(temList,param.getSearchBeginTime(),param.getSearchEndTime());
// Map<String,List<LimitTarget>> map = limitTargetList.stream().collect(Collectors.groupingBy(LimitTarget::getLineId));
// PollutionParamDTO pollutionParamDTO = new PollutionParamDTO();
// pollutionParamDTO.setLineList(temList);
// List<OverLimitLineDTO> overLimitLineList = lineFeignClient.getOverLimitLineInfo(pollutionParamDTO).getData();
// overLimitLineList.forEach(item->{
// int overDay=0,freqOverDay=0,volDevOverDay=0,threeUnbalance=0,flickerOverDay=0,negativeOverDay=0,harmVolOverDay=0,volDisOverDay=0,volContainOverDay=0,harmCurOverDay=0,intHarmOverDay=0;
// int overVolThreeTimes=0,overVolFiveTimes=0,overVolSevenTimes=0,overVolElevenTimes=0,overVolThirteenTimes=0,overVolTwentyThreeTimes=0,overVolTwentyFiveTimes=0,overVolOtherTimes=0;
// int overCurThreeTimes=0,overCurFiveTimes=0,overCurSevenTimes=0,overCurElevenTimes=0,overCurThirteenTimes=0,overCurTwentyThreeTimes=0,overCurTwentyFiveTimes=0,overCurOtherTimes=0;
// MonitorOverLimitVO monitorOverLimitVO = new MonitorOverLimitVO();
// BeanUtil.copyProperties(item,monitorOverLimitVO);
// List<LimitTarget> l1 = map.get(item.getId());
// if (!CollectionUtils.isEmpty(l1)) {
// for (LimitTarget item2 : l1) {
// if (item2.getFreqDevOverTime()+item2.getVoltageDevOverTime()+item2.getUBalanceOverTime()+item2.getFlickerOverTime()+item2.getINegOverTime()+item2.getUAberranceOverTime()+item2.getUHarmAllOverTime()+item2.getIHarmAllOverTime()+item2.getInuharmAllOverTime() > 0){
// overDay++;
// }
// if (item2.getFreqDevOverTime()>0){
// freqOverDay++;
// }
// if (item2.getVoltageDevOverTime()>0){
// volDevOverDay++;
// }
// if (item2.getUBalanceOverTime()>0){
// threeUnbalance++;
// }
// if (item2.getFlickerOverTime()>0){
// flickerOverDay++;
// }
// if (item2.getINegOverTime()>0){
// negativeOverDay++;
// }
// if (item2.getUAberranceOverTime()+item2.getUHarmAllOverTime()>0){
// harmVolOverDay++;
// }
// if (item2.getUAberranceOverTime()>0){
// volDisOverDay++;
// }
// if (item2.getUHarmAllOverTime()>0){
// volContainOverDay++;
// }
// if (item2.getIHarmAllOverTime()>0){
// harmCurOverDay++;
// }
// if (item2.getInuharmAllOverTime()>0){
// intHarmOverDay++;
// }
// if (item2.getUHarm3OverTime()>0){
// overVolThreeTimes++;
// }
// if (item2.getUHarm5OverTime()>0){
// overVolFiveTimes++;
// }
// if (item2.getUHarm7OverTime()>0){
// overVolSevenTimes++;
// }
// if (item2.getUHarm11OverTime()>0){
// overVolElevenTimes++;
// }
// if (item2.getUHarm13OverTime()>0){
// overVolThirteenTimes++;
// }
// if (item2.getUHarm23OverTime()>0){
// overVolTwentyThreeTimes++;
// }
// if (item2.getUHarm25OverTime()>0){
// overVolTwentyFiveTimes++;
// }
// if (item2.getUHarmAllOverTime()>0){
// overVolOtherTimes++;
// }
// if (item2.getIHarm3OverTime()>0){
// overCurThreeTimes++;
// }
// if (item2.getIHarm5OverTime()>0){
// overCurFiveTimes++;
// }
// if (item2.getIHarm7OverTime()>0){
// overCurSevenTimes++;
// }
// if (item2.getIHarm11OverTime()>0){
// overCurElevenTimes++;
// }
// if (item2.getIHarm13OverTime()>0){
// overCurThirteenTimes++;
// }
// if (item2.getIHarm23OverTime()>0){
// overCurTwentyThreeTimes++;
// }
// if (item2.getIHarm25OverTime()>0){
// overCurTwentyFiveTimes++;
// }
// if (item2.getIHarmAllOverTime()>0){
// overCurOtherTimes++;
// }
// }
// }
// monitorOverLimitVO.setOverDay(overDay);
// monitorOverLimitVO.setFreqOverDay(freqOverDay);
// monitorOverLimitVO.setVolDevOverDay(volDevOverDay);
// monitorOverLimitVO.setThreeUnbalance(threeUnbalance);
// monitorOverLimitVO.setFlickerOverDay(flickerOverDay);
// monitorOverLimitVO.setNegativeOverDay(negativeOverDay);
// monitorOverLimitVO.setHarmVolOverDay(harmVolOverDay);
// monitorOverLimitVO.setVolDisOverDay(volDisOverDay);
// monitorOverLimitVO.setVolContainOverDay(volContainOverDay);
// monitorOverLimitVO.setHarmCurOverDay(harmCurOverDay);
// monitorOverLimitVO.setIntHarmOverDay(intHarmOverDay);
// monitorOverLimitVO.setOverVolThreeTimes(overVolThreeTimes);
// monitorOverLimitVO.setOverVolFiveTimes(overVolFiveTimes);
// monitorOverLimitVO.setOverVolSevenTimes(overVolSevenTimes);
// monitorOverLimitVO.setOverVolElevenTimes(overVolElevenTimes);
// monitorOverLimitVO.setOverVolThirteenTimes(overVolThirteenTimes);
// monitorOverLimitVO.setOverVolTwentyThreeTimes(overVolTwentyThreeTimes);
// monitorOverLimitVO.setOverVolTwentyFiveTimes(overVolTwentyFiveTimes);
// monitorOverLimitVO.setOverVolOtherTimes(overVolOtherTimes);
// monitorOverLimitVO.setOverCurThreeTimes(overCurThreeTimes);
// monitorOverLimitVO.setOverCurFiveTimes(overCurFiveTimes);
// monitorOverLimitVO.setOverCurSevenTimes(overCurSevenTimes);
// monitorOverLimitVO.setOverCurElevenTimes(overCurElevenTimes);
// monitorOverLimitVO.setOverCurThirteenTimes(overCurThirteenTimes);
// monitorOverLimitVO.setOverCurTwentyThreeTimes(overCurTwentyThreeTimes);
// monitorOverLimitVO.setOverCurTwentyFiveTimes(overCurTwentyFiveTimes);
// monitorOverLimitVO.setOverCurOtherTimes(overCurOtherTimes);
// result.add(monitorOverLimitVO);
// });
// }
// if (!CollectionUtils.isEmpty(result)){
// List<MonitorOverLimitVO> recordList = new ArrayList<>();
// //默认 根据在线监测点个数 倒叙排序
// recordList = result.stream().sorted(Comparator.comparing(MonitorOverLimitVO::getOverDay).reversed()).collect(Collectors.toList());
// page.setRecords(recordList);
// return page;
// }
// return page;
}
private boolean filterNotQualifiedHarmVolData(RStatLimitTargetDPO t) {
@@ -781,64 +637,4 @@ public class AnalyzeServiceImpl implements IAnalyzeService {
condMap.put(DicDataEnum.VOLTAGE_RISE.getCode(), dictMap.get(DicDataEnum.VOLTAGE_RISE.getCode())); //电压暂升
return eventDetailFeignClient.getEventDetailCount(condMap).getData();
}
public List<LimitTarget> getOverDays(List<String> list, String startTime, String endTime) {
StringBuilder stringBuilder = new StringBuilder();
StringBuilder stringBuilder1 = new StringBuilder();
stringBuilder.append(Param.TIME + " >= '").append(startTime).append("' and ").append(Param.TIME).append(" <= '").append(endTime).append("' and (");
for (int i = 0; i < list.size(); i++) {
if (list.size() - i != 1) {
stringBuilder.append(Param.LINE_ID + "='").append(list.get(i)).append("' or ");
} else {
stringBuilder.append(Param.LINE_ID + "='").append(list.get(i)).append("')");
}
}
stringBuilder.append(" group by line_id tz('Asia/Shanghai')");
stringBuilder1.append("freq_dev_overtime AS freq_dev_overtime,voltage_dev_overtime AS voltage_dev_overtime,ubalance_overtime AS ubalance_overtime,flicker_overtime AS flicker_overtime,i_neg_overtime AS i_neg_overtime,uaberrance_overtime AS uaberrance_overtime,");
for (int i = 2; i <= 25; i++) {
if (i == 25) {
stringBuilder1.append("uharm_").append(i).append("_overtime AS uharm_all_overtime,");
} else {
stringBuilder1.append("uharm_").append(i).append("_overtime+");
}
}
for (int i = 2; i <= 25; i++) {
if (i == 25) {
stringBuilder1.append("iharm_").append(i).append("_overtime AS iharm_all_overtime,");
} else {
stringBuilder1.append("iharm_").append(i).append("_overtime+");
}
}
for (int i = 1; i <= 16; i++) {
if (i == 16) {
stringBuilder1.append("inuharm_").append(i).append("_overtime AS inuharm_all_overtime,");
} else {
stringBuilder1.append("inuharm_").append(i).append("_overtime+");
}
}
stringBuilder1.append("uharm_3_overtime,uharm_5_overtime,uharm_7_overtime,uharm_11_overtime,uharm_13_overtime,uharm_23_overtime,uharm_25_overtime,");
for (int i = 2; i <= 25; i++) {
if (i != 3 && i != 5 && i != 7 && i != 11 && i != 13 && i != 23 && i != 25) {
if (i == 24) {
stringBuilder1.append("uharm_").append(i).append("_overtime AS uOtherTimes,");
} else {
stringBuilder1.append("uharm_").append(i).append("_overtime+");
}
}
}
stringBuilder1.append("iharm_3_overtime,iharm_5_overtime,iharm_7_overtime,iharm_11_overtime,iharm_13_overtime,iharm_23_overtime,iharm_25_overtime,");
for (int i = 2; i <= 25; i++) {
if (i != 3 && i != 5 && i != 7 && i != 11 && i != 13 && i != 23 && i != 25) {
if (i == 24) {
stringBuilder1.append("iharm_").append(i).append("_overtime AS iOtherTimes");
} else {
stringBuilder1.append("iharm_").append(i).append("_overtime+");
}
}
}
String sql = "select " + stringBuilder1 + " from limit_target where " + stringBuilder;
QueryResult sqlResult = influxDbUtils.query(sql);
InfluxDBResultMapper resultMapper = new InfluxDBResultMapper();
return resultMapper.toPOJO(sqlResult, LimitTarget.class);
}
}

View File

@@ -7,15 +7,10 @@ import com.njcn.harmonic.mapper.RStatComassesDMapper;
import com.njcn.harmonic.pojo.po.PQSComAssesPO;
import com.njcn.harmonic.pojo.vo.AssesVO;
import com.njcn.harmonic.service.AssesService;
import com.njcn.harmonic.service.ComAssessService;
import com.njcn.influxdb.param.InfluxDBPublicParam;
import com.njcn.influxdb.utils.InfluxDbUtils;
import com.njcn.web.utils.ComAssesUtil;
import com.njcn.web.utils.PqsComasses;
import lombok.AllArgsConstructor;
import org.apache.commons.lang.StringUtils;
import org.influxdb.dto.QueryResult;
import org.influxdb.impl.InfluxDBResultMapper;
import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils;
@@ -32,7 +27,6 @@ import java.util.List;
@AllArgsConstructor
public class AssesServiceImpl implements AssesService {
private final InfluxDbUtils influxDbUtils;
private final ComAssesUtil comAssesUtil;
@@ -51,12 +45,9 @@ public class AssesServiceImpl implements AssesService {
}
/**
* influxDB相关操作
* 查询监测点的数据完整性
*/
private Float getCondition(String lineList, String startTime, String endTime) {
float synData;
if (!lineList.isEmpty()) {
List<PQSComAssesPO> avgCount = rStatComassesDMapper.getAvgCount(Arrays.asList(lineList),

View File

@@ -3,7 +3,6 @@ package com.njcn.harmonic.service.impl;
import cn.hutool.core.collection.CollUtil;
import cn.hutool.core.date.DateUtil;
import cn.hutool.core.io.IoUtil;
import cn.hutool.core.text.StrPool;
import cn.hutool.core.util.CharsetUtil;
import cn.hutool.core.util.StrUtil;
import cn.hutool.json.*;
@@ -20,6 +19,10 @@ import com.njcn.harmonic.mapper.ExcelRptTempMapper;
import com.njcn.harmonic.pojo.dto.ReportTemplateDTO;
import com.njcn.harmonic.pojo.param.ReportSearchParam;
import com.njcn.harmonic.pojo.param.ReportTemplateParam;
import com.njcn.influx.constant.InfluxDbSqlConstant;
import com.njcn.influx.pojo.constant.InfluxDBTableConstant;
import com.njcn.influx.pojo.dto.StatisticalDataDTO;
import com.njcn.influx.service.CommonService;
import com.njcn.system.pojo.po.EleEpdPqd;
import com.njcn.harmonic.pojo.po.ExcelRpt;
import com.njcn.harmonic.pojo.po.ExcelRptTemp;
@@ -28,34 +31,22 @@ import com.njcn.harmonic.pojo.vo.ReportTemplateVO;
import com.njcn.harmonic.pojo.vo.ReportTreeVO;
import com.njcn.harmonic.pojo.vo.SysDeptTempVO;
import com.njcn.harmonic.service.CustomReportService;
import com.njcn.influxdb.param.InfluxDBSqlConstant;
import com.njcn.influxdb.param.InfluxDBTableConstant;
import com.njcn.influxdb.utils.InfluxDbUtils;
import com.njcn.oss.constant.OssPath;
import com.njcn.oss.enums.OssResponseEnum;
import com.njcn.oss.utils.FileStorageUtil;
import com.njcn.system.api.DicDataFeignClient;
import com.njcn.user.api.DeptFeignClient;
import com.njcn.user.pojo.dto.DeptDTO;
import com.njcn.web.utils.WebUtil;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.fileupload.FileItem;
import org.apache.commons.fileupload.disk.DiskFileItemFactory;
import org.apache.tomcat.util.http.fileupload.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.influxdb.dto.QueryResult;
import org.springframework.beans.BeanUtils;
import org.springframework.http.MediaType;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.multipart.MultipartFile;
import org.springframework.web.multipart.commons.CommonsMultipartFile;
import javax.servlet.http.HttpServletResponse;
import java.io.*;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.List;
@@ -83,16 +74,12 @@ public class CustomReportServiceImpl implements CustomReportService {
private final EleEpdMapper eleEpdMapper;
private final InfluxDbUtils influxDbUtils;
private final DicDataFeignClient dicDataFeignClient;
private final DeptTempMapper deptTempMapper;
private final GeneralInfo generalInfo;
private final FileStorageUtil fileStorageUtil;
private final CommonService commonService;
@Override
public boolean addCustomReportTemplate(ReportTemplateParam reportTemplateParam) {
@@ -406,7 +393,6 @@ public class CustomReportServiceImpl implements CustomReportService {
reportTemplateDTO.setStatMethod(vItem[1].toUpperCase());
reportTemplateDTO.setClassId(vItem[2]);
}
reportTemplateDTOList.add(reportTemplateDTO);
}
}
@@ -421,15 +407,15 @@ public class CustomReportServiceImpl implements CustomReportService {
if (CollUtil.isNotEmpty(reportTemplateDTOList)) {
//开始组织sql
reportTemplateDTOList.stream().forEach(data -> {
StringBuilder sql = new StringBuilder(InfluxDBSqlConstant.SELECT);
if (InfluxDBTableConstant.MAX.equalsIgnoreCase(data.getStatMethod())) {
assSql(data, sql, endList, InfluxDBSqlConstant.MAX, reportSearchParam);
} else if (InfluxDBTableConstant.MIN.equalsIgnoreCase(data.getStatMethod())) {
assSql(data, sql, endList, InfluxDBSqlConstant.MIN, reportSearchParam);
} else if (InfluxDBTableConstant.AVG.equalsIgnoreCase(data.getStatMethod())) {
assSql(data, sql, endList, InfluxDBSqlConstant.AVG, reportSearchParam);
} else if (InfluxDBTableConstant.CP95.equalsIgnoreCase(data.getStatMethod())) {
assSql(data, sql, endList, InfluxDBSqlConstant.CP95, reportSearchParam);
StringBuilder sql = new StringBuilder(InfluxDbSqlConstant.SELECT);
if (InfluxDbSqlConstant.MAX.equalsIgnoreCase(data.getStatMethod())) {
assSql(data, sql, endList, InfluxDbSqlConstant.MAX, reportSearchParam);
} else if (InfluxDbSqlConstant.MIN.equalsIgnoreCase(data.getStatMethod())) {
assSql(data, sql, endList, InfluxDbSqlConstant.MIN, reportSearchParam);
} else if (InfluxDbSqlConstant.AVG.equalsIgnoreCase(data.getStatMethod())) {
assSql(data, sql, endList, InfluxDbSqlConstant.AVG, reportSearchParam);
} else if (InfluxDbSqlConstant.CP95.equalsIgnoreCase(data.getStatMethod())) {
assSql(data, sql, endList, InfluxDbSqlConstant.CP95, reportSearchParam);
}
});
}
@@ -492,93 +478,68 @@ public class CustomReportServiceImpl implements CustomReportService {
//sql拼接示例select MAX(IHA2) as IHA2 from power_quality_data where Phase = 'A' and LineId='1324564568' and Stat_Method='max' tz('Asia/Shanghai')
//cp95函数特殊处理 PERCENTILE(field_key, N)
if (InfluxDBSqlConstant.CP95.equals(method)) {
if (InfluxDbSqlConstant.CP95.equals(method)) {
sql.append(method)
.append(InfluxDBSqlConstant.LBK)
.append(InfluxDbSqlConstant.LBK)
.append(data.getTemplateName())
.append(InfluxDBSqlConstant.NUM_95)
.append(InfluxDBSqlConstant.RBK)
.append(InfluxDBSqlConstant.AS_VALUE);
.append(InfluxDbSqlConstant.NUM_95)
.append(InfluxDbSqlConstant.RBK)
.append(InfluxDbSqlConstant.AS_VALUE);
} else {
sql.append(method)
.append(InfluxDBSqlConstant.LBK)
.append(InfluxDbSqlConstant.LBK)
.append(data.getTemplateName())
.append(InfluxDBSqlConstant.RBK)
.append(InfluxDBSqlConstant.AS_VALUE);
.append(InfluxDbSqlConstant.RBK)
.append(InfluxDbSqlConstant.AS_VALUE);
}
sql.append(InfluxDBSqlConstant.FROM)
sql.append(InfluxDbSqlConstant.FROM)
.append(data.getClassId())
.append(InfluxDBSqlConstant.WHERE)
.append(InfluxDbSqlConstant.WHERE)
.append(InfluxDBTableConstant.LINE_ID)
.append(InfluxDBSqlConstant.EQ)
.append(InfluxDBSqlConstant.QM)
.append(InfluxDbSqlConstant.EQ)
.append(InfluxDbSqlConstant.QM)
.append(reportSearchParam.getLineId())
.append(InfluxDBSqlConstant.QM);
.append(InfluxDbSqlConstant.QM);
//相别特殊处理
if (!InfluxDBTableConstant.NO_PHASE.equals(data.getPhase())) {
sql.append(InfluxDBSqlConstant.AND)
sql.append(InfluxDbSqlConstant.AND)
.append(InfluxDBTableConstant.PHASIC_TYPE)
.append(InfluxDBSqlConstant.EQ)
.append(InfluxDBSqlConstant.QM)
.append(InfluxDbSqlConstant.EQ)
.append(InfluxDbSqlConstant.QM)
.append(data.getPhase())
.append(InfluxDBSqlConstant.QM);
.append(InfluxDbSqlConstant.QM);
}
//data_flicker、data_fluc、data_plt 无 value_type
if (!InfluxDBTableConstant.DATA_FLICKER.equals(data.getClassId()) && !InfluxDBTableConstant.DATA_FLUC.equals(data.getClassId()) && !InfluxDBTableConstant.DATA_PLT.equals(data.getClassId())) {
sql.append(InfluxDBSqlConstant.AND)
sql.append(InfluxDbSqlConstant.AND)
.append(InfluxDBTableConstant.VALUE_TYPE)
.append(InfluxDBSqlConstant.EQ)
.append(InfluxDBSqlConstant.QM)
.append(InfluxDbSqlConstant.EQ)
.append(InfluxDbSqlConstant.QM)
.append(data.getStatMethod())
.append(InfluxDBSqlConstant.QM);
.append(InfluxDbSqlConstant.QM);
}
//频率和频率偏差仅统计T相
if(data.getTemplateName().equals("freq_dev") || data.getTemplateName().equals("freq")){
sql.append(InfluxDBSqlConstant.AND)
sql.append(InfluxDbSqlConstant.AND)
.append(InfluxDBTableConstant.PHASIC_TYPE)
.append(InfluxDBSqlConstant.EQ)
.append(InfluxDbSqlConstant.EQ)
.append(InfluxDBTableConstant.PHASE_TYPE_T);
}
//时间范围处理
sql
.append(InfluxDBSqlConstant.AND)
.append(InfluxDBSqlConstant.TIME).append(InfluxDBSqlConstant.GE).append(InfluxDBSqlConstant.QM).append(reportSearchParam.getStartTime()).append(InfluxDBSqlConstant.START_TIME).append(InfluxDBSqlConstant.QM)
.append(InfluxDBSqlConstant.AND)
.append(InfluxDBSqlConstant.TIME).append(InfluxDBSqlConstant.LT).append(InfluxDBSqlConstant.QM).append(reportSearchParam.getEndTime()).append(InfluxDBSqlConstant.END_TIME).append(InfluxDBSqlConstant.QM);
.append(InfluxDbSqlConstant.AND)
.append(InfluxDbSqlConstant.TIME).append(InfluxDbSqlConstant.GE).append(InfluxDbSqlConstant.QM).append(reportSearchParam.getStartTime()).append(InfluxDbSqlConstant.START_TIME).append(InfluxDbSqlConstant.QM)
.append(InfluxDbSqlConstant.AND)
.append(InfluxDbSqlConstant.TIME).append(InfluxDbSqlConstant.LT).append(InfluxDbSqlConstant.QM).append(reportSearchParam.getEndTime()).append(InfluxDbSqlConstant.END_TIME).append(InfluxDbSqlConstant.QM);
sql.append(InfluxDBSqlConstant.TZ);
if(data.getClassId().equals(InfluxDBTableConstant.DATA_PLT)){
System.out.println(sql);
}
sql.append(InfluxDbSqlConstant.TZ);
//根据不同的库表赋值
QueryResult queryResult = influxDbUtils.query(String.valueOf(sql));
//剖析influx数据填入value
List<QueryResult.Result> results = queryResult.getResults();
if (results.size() != 0) {
QueryResult.Result result = results.get(0);
if (result.getSeries() != null) {
List<QueryResult.Series> seriess = result.getSeries();
if (seriess.size() != 0) {
QueryResult.Series series = seriess.get(0);
List<String> columns = series.getColumns();
List<List<Object>> values = series.getValues();
for (List<Object> columnValue : values) {
for (int i = 0; i < columnValue.size(); i++) {
if (columns.get(i).equals("value")) {
data.setValue(String.format("%.3f", columnValue.get(i)));
}
}
}
}
}
}
StatisticalDataDTO statisticalDataDTO = commonService.selectBySql(sql);
data.setValue(String.format("%.3f",statisticalDataDTO.getValue()));
endList.add(data);
}
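
With commonService.selectBySql in place of the hand-rolled QueryResult parsing, each assembled statement now comes back as a single StatisticalDataDTO. For orientation, a CP95 row produced by assSql should come out roughly as in the sketch below; the exact tokens come from InfluxDbSqlConstant, which this diff does not show, so the string is only an approximation of the generated InfluxQL.

// Sketch of the statement shape for one CP95 template row (not the literal generated text).
StringBuilder sql = new StringBuilder(
        "select PERCENTILE(v_thd,95) as value from data_v"
        + " where line_id='1324564568' and phasic_type='A' and value_type='CP95'"
        + " and time >= '2023-07-01 00:00:00' and time < '2023-07-02 00:00:00' tz('Asia/Shanghai')");
StatisticalDataDTO statisticalData = commonService.selectBySql(sql);
String cellValue = String.format("%.3f", statisticalData.getValue());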

View File

@@ -1,5 +1,7 @@
package com.njcn.harmonic.service.impl;
import cn.hutool.core.util.StrUtil;
import com.njcn.common.utils.HarmonicTimesUtil;
import com.njcn.common.utils.PubUtils;
import com.njcn.device.pq.api.LineFeignClient;
import com.njcn.device.biz.pojo.po.Overlimit;
@@ -7,7 +9,12 @@ import com.njcn.harmonic.constant.Param;
import com.njcn.harmonic.pojo.param.HarmInHarmParam;
import com.njcn.harmonic.pojo.vo.HarmInHarmVO;
import com.njcn.harmonic.service.HarmInHarmService;
import com.njcn.influxdb.utils.InfluxDbUtils;
import com.njcn.influx.pojo.constant.InfluxDBTableConstant;
import com.njcn.influx.pojo.po.DataHarmRateV;
import com.njcn.influx.pojo.po.DataI;
import com.njcn.influx.query.InfluxQueryWrapper;
import com.njcn.influx.service.DataHarmRateVService;
import com.njcn.influx.service.IDataIService;
import lombok.AllArgsConstructor;
import org.influxdb.dto.QueryResult;
import org.springframework.stereotype.Service;
@@ -15,6 +22,7 @@ import org.springframework.stereotype.Service;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
/**
* 类的介绍
@@ -29,7 +37,10 @@ public class HarmInHarmServiceImpl implements HarmInHarmService {
private final LineFeignClient lineFeignClient;
private final InfluxDbUtils influxDbUtils;
private final DataHarmRateVService dataHarmRateVService;
private final IDataIService dataIService;
@Override
public HarmInHarmVO getHarmInHarmData(HarmInHarmParam harmInHarmParam) {
@@ -64,47 +75,35 @@ public class HarmInHarmServiceImpl implements HarmInHarmService {
*/
private List<Float> getCondition(String startTime, String endTime, String lineId, Integer harmState) {
List<Float> floatList = new ArrayList<>();
QueryResult queryResult;
if (!lineId.isEmpty()) {
//组装sql语句
StringBuilder stringBuilder = new StringBuilder();
stringBuilder.append(Param.TIME + " >= '").append(startTime).append(Param.START_TIME).append("' and ").append(Param.TIME).append(" <= '").append(endTime).append(Param.END_TIME).append("' and (");
//sql语句
stringBuilder.append(Param.LINE_ID + "='").append(lineId).append("')");
stringBuilder.append(" and ").append(Param.VALUETYPE + "='").append("CP95").append("'");
String sql = "";
if (StrUtil.isNotBlank(lineId)) {
if (harmState == 0) {
sql = "SELECT MEAN(v_2) as V2, MEAN(v_3) as V3, MEAN(v_4) as V4, MEAN(v_5) as V5, MEAN(v_6) as V6, MEAN(v_7) as V7," +
" MEAN(v_8) as V8, MEAN(v_9) as V9, MEAN(v_10) as V10, MEAN(v_11) as V11, MEAN(v_12) as V12, MEAN(v_13) as V13," +
" MEAN(v_14) as V14, MEAN(v_15) as V15, MEAN(v_16) as V16, MEAN(v_17) as V17, MEAN(v_18) as V18," +
" MEAN(v_19) as V19, MEAN(v_20) as V20, MEAN(v_21) as V21," +
" MEAN(v_22) as V22, MEAN(v_23) as V23, MEAN(v_24) as V24, MEAN(v_25) as V25, MEAN(v_26) as V25, MEAN(v_27) as V27, MEAN(v_28) as V28," +
" MEAN(v_29) as V29, MEAN(v_30) as V30, MEAN(v_31) as V31, MEAN(v_32) as V32, MEAN(v_33) as V33, MEAN(v_34) as V34, MEAN(v_35) as V35," +
" MEAN(v_36) as V36, MEAN(v_37) as V37, MEAN(v_38) as V38, MEAN(v_39) as V39, MEAN(v_40) as V40, MEAN(v_41) as V41, MEAN(v_42) as V42," +
" MEAN(v_43) as V43, MEAN(v_44) as V44, MEAN(v_45) as V45, MEAN(v_46) as V46, MEAN(v_47) as V47, MEAN(v_48) as V48, MEAN(v_49) as V49," +
" MEAN(v_50) as V50 FROM data_harmrate_v WHERE " + stringBuilder.toString() + " and phasic_type !='T' order by time asc tz('Asia/Shanghai');";
InfluxQueryWrapper influxQueryWrapper = new InfluxQueryWrapper(DataHarmRateV.class);
influxQueryWrapper.meanSamePrefixAndSuffix("v_","", HarmonicTimesUtil.harmonicTimesList(2,50,1))
.between(DataHarmRateV::getTime,startTime,endTime)
.eq(DataHarmRateV::getLineId,lineId)
.eq(DataHarmRateV::getValueType, InfluxDBTableConstant.CP95)
.ne(DataHarmRateV::getPhaseType,InfluxDBTableConstant.PHASE_TYPE_T);
DataHarmRateV dataHarmRateV = dataHarmRateVService.getMeanAllTimesData(influxQueryWrapper);
if(Objects.nonNull(dataHarmRateV)){
for (int i = 2; i < 51; i++) {
floatList.add(PubUtils.getValueByMethod(dataHarmRateV, "getV", i));
}
}
} else {
sql = "SELECT MEAN(i_2) as I2, MEAN(i_3) as I3, MEAN(i_4) as I4, MEAN(i_5) as I5, MEAN(i_6) as I6, MEAN(i_7) as I7," +
" MEAN(i_8) as I8, MEAN(i_9) as I9, MEAN(i_10) as I10, MEAN(i_11) as I11, MEAN(i_12) as I12, MEAN(i_13) as I13," +
" MEAN(i_14) as I14, MEAN(i_15) as I15, MEAN(i_16) as I16, MEAN(i_17) as I17, MEAN(i_18) as I18," +
" MEAN(i_19) as I19, MEAN(i_20) as I20, MEAN(i_21) as I21," +
" MEAN(i_22) as I22, MEAN(i_23) as I23, MEAN(i_24) as I24, MEAN(i_25) as I25, MEAN(i_26) as I25, MEAN(i_27) as I27, MEAN(i_28) as I28," +
" MEAN(i_29) as I29, MEAN(i_30) as I30, MEAN(i_31) as I31, MEAN(i_32) as I32, MEAN(i_33) as I33, MEAN(i_34) as I34, MEAN(i_35) as I35," +
" MEAN(i_36) as I36, MEAN(i_37) as I37, MEAN(i_38) as I38, MEAN(i_39) as I39, MEAN(i_40) as I40, MEAN(i_41) as I41, MEAN(i_42) as I42," +
" MEAN(i_43) as I43, MEAN(i_44) as I44, MEAN(i_45) as I45, MEAN(i_46) as I46, MEAN(i_47) as I47, MEAN(i_48) as I48, MEAN(i_49) as I49," +
" MEAN(i_50) as I50 FROM data_i WHERE " + stringBuilder.toString() + " and phasic_type !='T' order by time asc tz('Asia/Shanghai');";
}
queryResult = influxDbUtils.query(sql);
if (queryResult.getResults().get(0).getSeries() != null) {
List<Object> resultList = queryResult.getResults().get(0).getSeries().get(0).getValues().get(0);
if (resultList.size() != 0) {
for (int i = 1; i < resultList.size(); i++) {
floatList.add(BigDecimal.valueOf(Float.parseFloat(resultList.get(i).toString())).setScale(4, BigDecimal.ROUND_HALF_UP).floatValue());
InfluxQueryWrapper influxQueryWrapper = new InfluxQueryWrapper(DataI.class);
influxQueryWrapper.meanSamePrefixAndSuffix("i_","", HarmonicTimesUtil.harmonicTimesList(2,50,1))
                        .between(DataI::getTime,startTime,endTime)
                        .eq(DataI::getLineId,lineId)
                        .eq(DataI::getValueType, InfluxDBTableConstant.CP95)
                        .ne(DataI::getPhaseType,InfluxDBTableConstant.PHASE_TYPE_T);
DataI dataI = dataIService.getMeanAllTimesData(influxQueryWrapper);
if(Objects.nonNull(dataI)){
for (int i = 2; i < 51; i++) {
floatList.add(PubUtils.getValueByMethod(dataI, "getI", i));
}
}
}
}
return floatList;
}
}
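
The InfluxQueryWrapper calls above replace the fifty hand-written MEAN(...) projections of the old SQL. The wrapper's rendering lives in common-influxDB and is not shown here, but given meanSamePrefixAndSuffix("v_", "", harmonicTimesList(2, 50, 1)) and the chained filters, the generated statement for the voltage branch should look roughly like this sketch:

// Approximate shape only; aliasing and spacing are assumptions, the filters mirror the wrapper calls above.
String roughQuery =
        "SELECT MEAN(v_2), MEAN(v_3), MEAN(v_4), /* orders 5..49 */ MEAN(v_50)"
        + " FROM data_harmrate_v"
        + " WHERE time >= '<startTime>' AND time <= '<endTime>'"
        + " AND line_id = '<lineId>' AND value_type = 'CP95' AND phasic_type != 'T'";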

View File

@@ -25,7 +25,6 @@ import com.njcn.harmonic.pojo.vo.HarmonicLineVO;
import com.njcn.harmonic.pojo.vo.HarmonicSubstationVO;
import com.njcn.harmonic.pojo.vo.PollutionVO;
import com.njcn.harmonic.service.IHarmonicService;
import com.njcn.influxdb.utils.InfluxDbUtils;
import com.njcn.system.enums.DicDataEnum;
import com.njcn.web.utils.RequestUtil;
import lombok.AllArgsConstructor;

View File

@@ -1,8 +1,12 @@
package com.njcn.harmonic.service.impl;
import cn.hutool.core.bean.BeanUtil;
import cn.hutool.core.collection.CollectionUtil;
import cn.hutool.core.date.DatePattern;
import cn.hutool.core.date.DateUtil;
import cn.hutool.core.util.StrUtil;
import com.njcn.common.pojo.constant.BizParamConstant;
import com.njcn.common.pojo.exception.BusinessException;
import com.njcn.common.pojo.param.StatisticsBizBaseParam;
import com.njcn.common.utils.PubUtils;
import com.njcn.device.pq.api.LineFeignClient;
@@ -10,46 +14,55 @@ import com.njcn.device.biz.pojo.po.Overlimit;
import com.njcn.device.pq.pojo.vo.LineDetailDataVO;
import com.njcn.event.api.EventDetailFeignClient;
import com.njcn.event.pojo.po.EventDetail;
import com.njcn.harmonic.constant.Param;
import com.njcn.harmonic.enums.HarmonicResponseEnum;
import com.njcn.harmonic.mapper.StatHarmonicOrgDMapper;
import com.njcn.harmonic.mapper.StatHarmonicOrgMMapper;
import com.njcn.harmonic.mapper.StatHarmonicOrgQMapper;
import com.njcn.harmonic.mapper.StatHarmonicOrgYMapper;
import com.njcn.harmonic.pojo.QueryResultLimitVO;
import com.njcn.harmonic.pojo.param.HistoryHarmParam;
import com.njcn.harmonic.pojo.param.HistoryParam;
import com.njcn.harmonic.pojo.param.NormHistoryParam;
import com.njcn.harmonic.pojo.vo.EventDetailVO;
import com.njcn.harmonic.pojo.vo.HistoryDataResultVO;
import com.njcn.harmonic.pojo.vo.QueryResultLimitVO;
import com.njcn.harmonic.pojo.vo.StatHarmonicOrgVO;
import com.njcn.harmonic.service.HistoryResultService;
import com.njcn.influxdb.param.InfluxDBPublicParam;
import com.njcn.influxdb.utils.InfluxDbUtils;
import com.njcn.influx.imapper.CommonMapper;
import com.njcn.influx.imapper.DataHarmRateVMapper;
import com.njcn.influx.imapper.IDataIMapper;
import com.njcn.influx.pojo.bo.HarmonicHistoryData;
import com.njcn.influx.pojo.constant.InfluxDBTableConstant;
import com.njcn.influx.pojo.dto.HarmData;
import com.njcn.influx.pojo.dto.HarmHistoryDataDTO;
import com.njcn.influx.pojo.po.DataHarmRateV;
import com.njcn.influx.pojo.po.DataI;
import com.njcn.influx.query.InfluxQueryWrapper;
import com.njcn.user.api.DeptFeignClient;
import com.njcn.user.pojo.dto.DeptDTO;
import com.njcn.web.utils.WebUtil;
import lombok.AllArgsConstructor;
import lombok.SneakyThrows;
import org.influxdb.dto.QueryResult;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils;
import java.math.BigDecimal;
import java.text.DateFormat;
import java.math.RoundingMode;
import java.text.SimpleDateFormat;
import java.time.format.DateTimeFormatter;
import java.util.*;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
* @author denghuajun
* @date 2022/3/14
*/
@Slf4j
@Service
@AllArgsConstructor
public class HistoryResultServiceImpl implements HistoryResultService {
private final InfluxDbUtils influxDbUtils;
private final LineFeignClient lineFeignClient;
private final EventDetailFeignClient eventDetailFeignClient;
@@ -64,6 +77,13 @@ public class HistoryResultServiceImpl implements HistoryResultService {
private final StatHarmonicOrgDMapper statHarmonicOrgDMapper;
private final CommonMapper commonMapper;
private final IDataIMapper dataIMapper;
private final DataHarmRateVMapper dataHarmRateVMapper;
@Override
public List<HistoryDataResultVO> getHistoryResult(HistoryParam historyParam) {
List<HistoryDataResultVO> historyDataResultVOList = new ArrayList<>();
@@ -123,31 +143,32 @@ public class HistoryResultServiceImpl implements HistoryResultService {
private HistoryDataResultVO getCondition(String startTime, String endTime, String lineId, String contion, Integer number, Integer valueType, Integer ptType) {
HistoryDataResultVO historyDataResultVO = new HistoryDataResultVO();
QueryResultLimitVO queryResultLimitVO = getQueryResult(startTime, endTime, lineId, contion, number, valueType, ptType);
QueryResult queryResult = queryResultLimitVO.getQueryResult();
List<HarmonicHistoryData> harmonicHistoryDataList = queryResultLimitVO.getHarmonicHistoryDataList();
BeanUtil.copyProperties(queryResultLimitVO, historyDataResultVO);
List<QueryResult.Series> list = queryResult.getResults().get(0).getSeries();
//时间轴
List<Date> time = new ArrayList<>();
List<Float> aValue = new ArrayList<>();
//A相值
List<Float> aValue;
//B相值
List<Float> bValue = new ArrayList<>();
//C相值
List<Float> cValue = new ArrayList<>();
//针对统计相别为T时存放的数据
List<Float> fValue = new ArrayList<>();
List<List<Object>> objectListData = new ArrayList<>();
if (!CollectionUtils.isEmpty(list)) {
if (list.size() == 1) {
List<List<Object>> listData = queryResult.getResults().get(0).getSeries().get(0).getValues();
for (int i = 0; i < listData.size(); i++) {
List<Object> objectList = listData.get(i);
DateFormat formatter = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssX");
TimeZone tx = TimeZone.getTimeZone("Asia/Shanghai");
formatter.setTimeZone(tx);
Date d = formatter.parse(objectList.get(0).toString());
time.add(d);
fValue.add(BigDecimal.valueOf(Float.parseFloat(objectList.get(1).toString())).setScale(4, BigDecimal.ROUND_HALF_UP).floatValue());
if (CollectionUtil.isNotEmpty(harmonicHistoryDataList)) {
            //Business-data handling when the statistics phase is T
if (StrUtil.isBlank(harmonicHistoryDataList.get(0).getPhasicType()) || harmonicHistoryDataList.get(0).getPhasicType().equalsIgnoreCase("t")) {
for (HarmonicHistoryData harmonicHistoryData : harmonicHistoryDataList) {
time.add(new Date(harmonicHistoryData.getTime().toEpochMilli()));
fValue.add(BigDecimal.valueOf(harmonicHistoryData.getAValue()).setScale(4, RoundingMode.HALF_UP).floatValue());
//返回结果有多个值,需要额外处理下
if (Integer.parseInt(contion) == 14) {
bValue.add(BigDecimal.valueOf(Float.parseFloat(objectList.get(2).toString())).setScale(4, BigDecimal.ROUND_HALF_UP).floatValue());
cValue.add(BigDecimal.valueOf(Float.parseFloat(objectList.get(3).toString())).setScale(4, BigDecimal.ROUND_HALF_UP).floatValue());
bValue.add(BigDecimal.valueOf(harmonicHistoryData.getBValue()).setScale(4, RoundingMode.HALF_UP).floatValue());
cValue.add(BigDecimal.valueOf(harmonicHistoryData.getCValue()).setScale(4, RoundingMode.HALF_UP).floatValue());
}
}
//组装二维数组
for (int i = 0; i < time.size(); i++) {
List<Object> objects = new ArrayList<>();
objects.add(time.get(i));
@@ -163,28 +184,33 @@ public class HistoryResultServiceImpl implements HistoryResultService {
historyDataResultVO.setMinValue(Collections.min(fValue));
historyDataResultVO.setMaxValue(Collections.max(fValue));
historyDataResultVO.setValue(objectListData);
} else {
for (int i = 0; i < list.size(); i++) {
List<List<Object>> values = list.get(i).getValues();
for (int j = 0; j < values.size(); j++) {
List<Object> objectList = values.get(j);
if (i == 0) {
SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssX");
TimeZone tx = TimeZone.getTimeZone("Asia/Shanghai");
formatter.setTimeZone(tx);
Date d = formatter.parse(objectList.get(0).toString());
time.add(d);
aValue.add(BigDecimal.valueOf(Float.parseFloat(objectList.get(1).toString())).setScale(4, BigDecimal.ROUND_HALF_UP).floatValue());
} else if (i == 1) {
bValue.add(BigDecimal.valueOf(Float.parseFloat(objectList.get(1).toString())).setScale(4, BigDecimal.ROUND_HALF_UP).floatValue());
} else {
cValue.add(BigDecimal.valueOf(Float.parseFloat(objectList.get(1).toString())).setScale(4, BigDecimal.ROUND_HALF_UP).floatValue());
}
}
}
                //Split the records into three groups by phase
List<HarmonicHistoryData> aList = harmonicHistoryDataList.stream()
.filter(temp -> temp.getPhasicType().equalsIgnoreCase(InfluxDBTableConstant.PHASE_TYPE_A))
.collect(Collectors.toList());
List<HarmonicHistoryData> bList = harmonicHistoryDataList.stream()
.filter(temp -> temp.getPhasicType().equalsIgnoreCase(InfluxDBTableConstant.PHASE_TYPE_B))
.collect(Collectors.toList());
List<HarmonicHistoryData> cList = harmonicHistoryDataList.stream()
.filter(temp -> temp.getPhasicType().equalsIgnoreCase(InfluxDBTableConstant.PHASE_TYPE_C))
.collect(Collectors.toList());
time = aList.stream()
.map(temp -> new Date(temp.getTime().toEpochMilli()))
.collect(Collectors.toList());
aValue = aList.stream()
.map(temp -> BigDecimal.valueOf(temp.getAValue()).setScale(4, RoundingMode.HALF_UP).floatValue())
.collect(Collectors.toList());
bValue = bList.stream()
.map(temp -> BigDecimal.valueOf(temp.getAValue()).setScale(4, RoundingMode.HALF_UP).floatValue())
.collect(Collectors.toList());
cValue = cList.stream()
.map(temp -> BigDecimal.valueOf(temp.getAValue()).setScale(4, RoundingMode.HALF_UP).floatValue())
.collect(Collectors.toList());
//组装二维数组
for (int i = 0; i < time.size(); i++) {
List<Object> objects = new ArrayList<>();
//指定
objects.add(time.get(i));
objects.add(aValue.get(i));
objects.add(bValue.get(i));
@@ -212,7 +238,6 @@ public class HistoryResultServiceImpl implements HistoryResultService {
private QueryResultLimitVO getQueryResult(String startTime, String endTime, String lineList, String contion, Integer number, Integer valueType, Integer ptType) {
QueryResultLimitVO queryResultLimitVO = new QueryResultLimitVO();
QueryResult queryResult;
if (!lineList.isEmpty()) {
Float topLimit = 0f;
Float lowerLimit = 0f;
@@ -222,9 +247,9 @@ public class HistoryResultServiceImpl implements HistoryResultService {
LineDetailDataVO lineDetailDataVO = lineFeignClient.getLineDetailData(lineList).getData();
//组装sql语句
StringBuilder stringBuilder = new StringBuilder();
stringBuilder.append(InfluxDBPublicParam.TIME + " >= '").append(startTime).append(InfluxDBPublicParam.START_TIME).append("' and ").append(InfluxDBPublicParam.TIME).append(" <= '").append(endTime).append(InfluxDBPublicParam.END_TIME).append("' and (");
stringBuilder.append(InfluxDBTableConstant.TIME + " >= '").append(startTime).append(InfluxDBTableConstant.START_TIME).append("' and ").append(InfluxDBTableConstant.TIME).append(" <= '").append(endTime).append(InfluxDBTableConstant.END_TIME).append("' and (");
//sql语句
stringBuilder.append(InfluxDBPublicParam.LINE_ID + "='").append(lineList).append("')");
stringBuilder.append(InfluxDBTableConstant.LINE_ID + "='").append(lineList).append("')");
String valueTypeName = "";
switch (valueType) {
case 1:
@@ -243,7 +268,7 @@ public class HistoryResultServiceImpl implements HistoryResultService {
break;
}
if (!Integer.valueOf(contion).equals(60) && !Integer.valueOf(contion).equals(61) && !Integer.valueOf(contion).equals(62)) {
stringBuilder.append(" and ").append(InfluxDBPublicParam.VALUETYPE + "='").append(valueTypeName).append("'");
stringBuilder.append(" and ").append(InfluxDBTableConstant.VALUE_TYPE + "='").append(valueTypeName).append("'");
}
String sql = "";
List<String> phasicType = new ArrayList<>();
@@ -252,7 +277,7 @@ public class HistoryResultServiceImpl implements HistoryResultService {
switch (Integer.parseInt(contion)) {
case 10:
//相电压有效值
sql = "SELECT time as time, rms as aValue FROM data_v WHERE " + stringBuilder.toString() +
sql = "SELECT time as time, rms as aValue ," + InfluxDBTableConstant.PHASIC_TYPE + " FROM data_v WHERE " + stringBuilder +
" and (phasic_type ='A' or phasic_type ='B' or phasic_type ='C') group by phasic_type order by time asc tz('Asia/Shanghai');";
phasicType.add("A相");
phasicType.add("B相");
@@ -262,7 +287,7 @@ public class HistoryResultServiceImpl implements HistoryResultService {
break;
case 11:
//线电压有效值
sql = "SELECT time as time, rms_lvr as aValue FROM data_v WHERE " + stringBuilder.toString() +
sql = "SELECT time as time, rms_lvr as aValue ," + InfluxDBTableConstant.PHASIC_TYPE + " FROM data_v WHERE " + stringBuilder +
" and (phasic_type ='A' or phasic_type ='B' or phasic_type ='C') group by phasic_type order by time asc tz('Asia/Shanghai');";
phasicType.add("AB相");
phasicType.add("BC相");
@@ -272,7 +297,7 @@ public class HistoryResultServiceImpl implements HistoryResultService {
break;
case 12:
//电压偏差
sql = "SELECT time as time, vu_dev as aValue FROM data_v WHERE " + stringBuilder.toString() +
sql = "SELECT time as time, vu_dev as aValue ," + InfluxDBTableConstant.PHASIC_TYPE + " FROM data_v WHERE " + stringBuilder +
" and (phasic_type ='A' or phasic_type ='B' or phasic_type ='C') group by phasic_type order by time asc tz('Asia/Shanghai');";
topLimit = overlimit.getVoltageDev();
lowerLimit = overlimit.getUvoltageDev();
@@ -290,7 +315,7 @@ public class HistoryResultServiceImpl implements HistoryResultService {
break;
case 13:
//三相电压不平衡度
sql = "SELECT time as time, v_unbalance as aValue FROM data_v WHERE " + stringBuilder.toString() +
sql = "SELECT time as time, v_unbalance as aValue ," + InfluxDBTableConstant.PHASIC_TYPE + " FROM data_v WHERE " + stringBuilder +
" and phasic_type ='T' order by time asc tz('Asia/Shanghai');";
topLimit = overlimit.getUbalance();
phasicType.add("三相电压不平衡度");
@@ -299,7 +324,7 @@ public class HistoryResultServiceImpl implements HistoryResultService {
break;
case 14:
//电压不平衡
sql = "SELECT time as time, v_zero as aValue, v_pos as bValue, v_neg as cValue FROM data_v WHERE " + stringBuilder.toString() +
sql = "SELECT time as time, v_zero as aValue, v_pos as bValue, v_neg as cValue ," + InfluxDBTableConstant.PHASIC_TYPE + "FROM data_v WHERE " + stringBuilder +
" and (phasic_type ='T') group by phasic_type order by time asc tz('Asia/Shanghai');";
phasicType.add("零序电压");
phasicType.add("正序电压");
@@ -311,7 +336,7 @@ public class HistoryResultServiceImpl implements HistoryResultService {
break;
case 15:
//电压总谐波畸变率
sql = "SELECT time as time, v_thd as aValue FROM data_v WHERE " + stringBuilder.toString() +
sql = "SELECT time as time, v_thd as aValue ," + InfluxDBTableConstant.PHASIC_TYPE + " FROM data_v WHERE " + stringBuilder +
" and (phasic_type ='A' or phasic_type ='B' or phasic_type ='C') group by phasic_type order by time asc tz('Asia/Shanghai');";
topLimit = overlimit.getUaberrance();
if (ptType == 0) {
@@ -328,7 +353,7 @@ public class HistoryResultServiceImpl implements HistoryResultService {
break;
case 20:
//电流有效值
sql = "SELECT time as time, rms as aValue FROM data_i WHERE " + stringBuilder.toString() +
sql = "SELECT time as time, rms as aValue ," + InfluxDBTableConstant.PHASIC_TYPE + " FROM data_i WHERE " + stringBuilder +
" and (phasic_type ='A' or phasic_type ='B' or phasic_type ='C') group by phasic_type order by time asc tz('Asia/Shanghai');";
phasicType.add("A相");
phasicType.add("B相");
@@ -338,7 +363,7 @@ public class HistoryResultServiceImpl implements HistoryResultService {
break;
case 21:
//电流总畸变率
sql = "SELECT time as time, i_thd as aValue FROM data_i WHERE " + stringBuilder.toString() +
sql = "SELECT time as time, i_thd as aValue ," + InfluxDBTableConstant.PHASIC_TYPE + " FROM data_i WHERE " + stringBuilder +
" and (phasic_type ='A' or phasic_type ='B' or phasic_type ='C') group by phasic_type order by time asc tz('Asia/Shanghai');";
phasicType.add("A相");
phasicType.add("B相");
@@ -348,7 +373,7 @@ public class HistoryResultServiceImpl implements HistoryResultService {
break;
case 22:
//负序电流
sql = "SELECT time as time, i_neg as aValue FROM data_i WHERE " + stringBuilder.toString() +
sql = "SELECT time as time, i_neg as aValue ," + InfluxDBTableConstant.PHASIC_TYPE + " FROM data_i WHERE " + stringBuilder +
" and (phasic_type ='A' or phasic_type ='B' or phasic_type ='C') group by phasic_type order by time asc tz('Asia/Shanghai');";
topLimit = overlimit.getINeg();
phasicType.add("负序电流");
@@ -357,7 +382,7 @@ public class HistoryResultServiceImpl implements HistoryResultService {
break;
case 30:
//频率 V9暂时代表Freq
sql = "SELECT time as time, freq as aValue FROM data_v WHERE " + stringBuilder.toString() +
sql = "SELECT time as time, freq as aValue ," + InfluxDBTableConstant.PHASIC_TYPE + " FROM data_v WHERE " + stringBuilder +
" and phasic_type ='T' order by time asc tz('Asia/Shanghai');";
topLimit = 50 + overlimit.getFreqDev();
lowerLimit = 50 - overlimit.getFreqDev();
@@ -368,10 +393,10 @@ public class HistoryResultServiceImpl implements HistoryResultService {
case 40:
//谐波电压含有率
if (number == 1) {
sql = "SELECT time as time, v as aValue FROM data_harmrate_v WHERE " + stringBuilder.toString() +
sql = "SELECT time as time, v as aValue ," + InfluxDBTableConstant.PHASIC_TYPE + " FROM data_harmrate_v WHERE " + stringBuilder +
" and (phasic_type ='A' or phasic_type ='B' or phasic_type ='C') group by phasic_type order by time asc tz('Asia/Shanghai');";
} else {
sql = "SELECT time as time, v_" + number + " as aValue FROM data_harmrate_v WHERE " + stringBuilder.toString() +
sql = "SELECT time as time, v_" + number + " as aValue ," + InfluxDBTableConstant.PHASIC_TYPE + " FROM data_harmrate_v WHERE " + stringBuilder +
" and (phasic_type ='A' or phasic_type ='B' or phasic_type ='C') group by phasic_type order by time asc tz('Asia/Shanghai');";
if (number < 26) {
topLimit = PubUtils.getValueByMethod(overlimit, "getUharm", number);
@@ -392,10 +417,10 @@ public class HistoryResultServiceImpl implements HistoryResultService {
case 41:
//谐波电流含有率
if (number == 1) {
sql = "SELECT time as time, i as aValue FROM data_harmrate_i WHERE " + stringBuilder.toString() +
sql = "SELECT time as time, i as aValue ," + InfluxDBTableConstant.PHASIC_TYPE + " FROM data_harmrate_i WHERE " + stringBuilder +
" and (phasic_type ='A' or phasic_type ='B' or phasic_type ='C') group by phasic_type order by time asc tz('Asia/Shanghai');";
} else {
sql = "SELECT time as time, i_" + number + " as aValue FROM data_harmrate_i WHERE " + stringBuilder.toString() +
sql = "SELECT time as time, i_" + number + " as aValue ," + InfluxDBTableConstant.PHASIC_TYPE + " FROM data_harmrate_i WHERE " + stringBuilder +
" and (phasic_type ='A' or phasic_type ='B' or phasic_type ='C') group by phasic_type order by time asc tz('Asia/Shanghai');";
}
phasicType.add("A相");
@@ -407,10 +432,10 @@ public class HistoryResultServiceImpl implements HistoryResultService {
case 42:
//谐波电压幅值
if (number == 1) {
sql = "SELECT time as time, v as aValue FROM data_v WHERE " + stringBuilder.toString() +
sql = "SELECT time as time, v as aValue ," + InfluxDBTableConstant.PHASIC_TYPE + " FROM data_v WHERE " + stringBuilder +
" and (phasic_type ='A' or phasic_type ='B' or phasic_type ='C') group by phasic_type order by time asc tz('Asia/Shanghai');";
} else {
sql = "SELECT time as time, v_" + number + " as aValue FROM data_v WHERE " + stringBuilder.toString() +
sql = "SELECT time as time, v_" + number + " as aValue ," + InfluxDBTableConstant.PHASIC_TYPE + " FROM data_v WHERE " + stringBuilder +
" and (phasic_type ='A' or phasic_type ='B' or phasic_type ='C') group by phasic_type order by time asc tz('Asia/Shanghai');";
}
if (ptType == 0) {
@@ -432,10 +457,10 @@ public class HistoryResultServiceImpl implements HistoryResultService {
case 43:
//谐波电流幅值
if (number == 1) {
sql = "SELECT time as time, i as aValue FROM data_i WHERE " + stringBuilder.toString() +
sql = "SELECT time as time, i as aValue ," + InfluxDBTableConstant.PHASIC_TYPE + " FROM data_i WHERE " + stringBuilder +
" and (phasic_type ='A' or phasic_type ='B' or phasic_type ='C') group by phasic_type order by time asc tz('Asia/Shanghai');";
} else {
sql = "SELECT time as time, i_" + number + " as aValue FROM data_i WHERE " + stringBuilder.toString() +
sql = "SELECT time as time, i_" + number + " as aValue ," + InfluxDBTableConstant.PHASIC_TYPE + " FROM data_i WHERE " + stringBuilder +
" and (phasic_type ='A' or phasic_type ='B' or phasic_type ='C') group by phasic_type order by time asc tz('Asia/Shanghai');";
if (number < 26) {
topLimit = PubUtils.getValueByMethod(overlimit, "getIharm", number);
@@ -450,10 +475,10 @@ public class HistoryResultServiceImpl implements HistoryResultService {
case 44:
//谐波电压相角
if (number == 1) {
sql = "SELECT time as time, v as aValue FROM data_harmphasic_v WHERE " + stringBuilder.toString() +
sql = "SELECT time as time, v as aValue ," + InfluxDBTableConstant.PHASIC_TYPE + " FROM data_harmphasic_v WHERE " + stringBuilder +
" and (phasic_type ='A' or phasic_type ='B' or phasic_type ='C') group by phasic_type order by time asc tz('Asia/Shanghai');";
} else {
sql = "SELECT time as time, v_" + number + " as aValue FROM data_harmphasic_v WHERE " + stringBuilder.toString() +
sql = "SELECT time as time, v_" + number + " as aValue ," + InfluxDBTableConstant.PHASIC_TYPE + " FROM data_harmphasic_v WHERE " + stringBuilder +
" and (phasic_type ='A' or phasic_type ='B' or phasic_type ='C') group by phasic_type order by time asc tz('Asia/Shanghai');";
}
if (ptType == 0) {
@@ -471,10 +496,10 @@ public class HistoryResultServiceImpl implements HistoryResultService {
case 45:
//谐波电流相角
if (number == 1) {
sql = "SELECT time as time, i as aValue FROM data_harmphasic_i WHERE " + stringBuilder.toString() +
sql = "SELECT time as time, i as aValue ," + InfluxDBTableConstant.PHASIC_TYPE + " FROM data_harmphasic_i WHERE " + stringBuilder +
" and (phasic_type ='A' or phasic_type ='B' or phasic_type ='C') group by phasic_type order by time asc tz('Asia/Shanghai');";
} else {
sql = "SELECT time as time, i_" + number + " as aValue FROM data_harmphasic_i WHERE " + stringBuilder.toString() +
sql = "SELECT time as time, i_" + number + " as aValue ," + InfluxDBTableConstant.PHASIC_TYPE + " FROM data_harmphasic_i WHERE " + stringBuilder +
" and (phasic_type ='A' or phasic_type ='B' or phasic_type ='C') group by phasic_type order by time asc tz('Asia/Shanghai');";
}
phasicType.add("A相");
@@ -485,7 +510,7 @@ public class HistoryResultServiceImpl implements HistoryResultService {
break;
case 46:
//间谐波电压含有率
sql = "SELECT time as time, v_" + number + " as aValue FROM data_inharmrate_v WHERE " + stringBuilder.toString() +
sql = "SELECT time as time, v_" + number + " as aValue ," + InfluxDBTableConstant.PHASIC_TYPE + " FROM data_inharmrate_v WHERE " + stringBuilder +
" and (phasic_type ='A' or phasic_type ='B' or phasic_type ='C') group by phasic_type order by time asc tz('Asia/Shanghai');";
topLimit = PubUtils.getValueByMethod(overlimit, "getInuharm", number);
if (ptType == 0) {
@@ -502,7 +527,7 @@ public class HistoryResultServiceImpl implements HistoryResultService {
break;
case 47:
//间谐波电流含有率
sql = "SELECT time as time, i_" + number + " as aValue FROM data_inharmrate_i WHERE " + stringBuilder.toString() +
sql = "SELECT time as time, i_" + number + " as aValue ," + InfluxDBTableConstant.PHASIC_TYPE + " FROM data_inharmrate_i WHERE " + stringBuilder +
" and (phasic_type ='A' or phasic_type ='B' or phasic_type ='C') group by phasic_type order by time asc tz('Asia/Shanghai');";
phasicType.add("A相");
phasicType.add("B相");
@@ -512,7 +537,7 @@ public class HistoryResultServiceImpl implements HistoryResultService {
break;
case 48:
//间谐波电压幅值
sql = "SELECT time as time, v_" + number + " as aValue FROM data_inharm_v WHERE " + stringBuilder.toString() +
sql = "SELECT time as time, v_" + number + " as aValue ," + InfluxDBTableConstant.PHASIC_TYPE + " FROM data_inharm_v WHERE " + stringBuilder +
" and (phasic_type ='A' or phasic_type ='B' or phasic_type ='C') group by phasic_type order by time asc tz('Asia/Shanghai');";
targetName = "间谐波电压幅值";
if (ptType == 0) {
@@ -528,7 +553,7 @@ public class HistoryResultServiceImpl implements HistoryResultService {
break;
case 49:
//间谐波电流幅值
sql = "SELECT time as time, i_" + number + " as aValue FROM data_inharm_i WHERE " + stringBuilder.toString() +
sql = "SELECT time as time, i_" + number + " as aValue ," + InfluxDBTableConstant.PHASIC_TYPE + " FROM data_inharm_i WHERE " + stringBuilder +
" and (phasic_type ='A' or phasic_type ='B' or phasic_type ='C') group by phasic_type order by time asc tz('Asia/Shanghai');";
phasicType.add("A相");
phasicType.add("B相");
@@ -538,7 +563,7 @@ public class HistoryResultServiceImpl implements HistoryResultService {
break;
case 50:
//谐波有功功率
sql = "SELECT time as time, p_" + number + " as aValue FROM data_harmpower_p WHERE " + stringBuilder.toString() +
sql = "SELECT time as time, p_" + number + " as aValue ," + InfluxDBTableConstant.PHASIC_TYPE + " FROM data_harmpower_p WHERE " + stringBuilder +
" and (phasic_type ='A' or phasic_type ='B' or phasic_type ='C') group by phasic_type order by time asc tz('Asia/Shanghai');";
phasicType.add("A相");
phasicType.add("B相");
@@ -552,7 +577,7 @@ public class HistoryResultServiceImpl implements HistoryResultService {
break;
case 51:
//谐波无功功率
sql = "SELECT time as time, q_" + number + " as aValue FROM data_harmpower_q WHERE " + stringBuilder.toString() +
sql = "SELECT time as time, q_" + number + " as aValue ," + InfluxDBTableConstant.PHASIC_TYPE + " FROM data_harmpower_q WHERE " + stringBuilder +
" and (phasic_type ='A' or phasic_type ='B' or phasic_type ='C') group by phasic_type order by time asc tz('Asia/Shanghai');";
phasicType.add("A相");
phasicType.add("B相");
@@ -566,7 +591,7 @@ public class HistoryResultServiceImpl implements HistoryResultService {
break;
case 52:
//谐波视在功率
sql = "SELECT time as time, s_" + number + " as aValue FROM data_harmpower_s WHERE " + stringBuilder.toString() +
sql = "SELECT time as time, s_" + number + " as aValue ," + InfluxDBTableConstant.PHASIC_TYPE + " FROM data_harmpower_s WHERE " + stringBuilder +
" and (phasic_type ='A' or phasic_type ='B' or phasic_type ='C') group by phasic_type order by time asc tz('Asia/Shanghai');";
phasicType.add("A相");
phasicType.add("B相");
@@ -580,7 +605,7 @@ public class HistoryResultServiceImpl implements HistoryResultService {
break;
case 53:
//三相有功功率
sql = "SELECT time as time, p as aValue FROM data_harmpower_p WHERE " + stringBuilder.toString() +
sql = "SELECT time as time, p as aValue ," + InfluxDBTableConstant.PHASIC_TYPE + " FROM data_harmpower_p WHERE " + stringBuilder +
" and (phasic_type ='A' or phasic_type ='B' or phasic_type ='C') group by phasic_type order by time asc tz('Asia/Shanghai');";
phasicType.add("A相");
phasicType.add("B相");
@@ -590,7 +615,7 @@ public class HistoryResultServiceImpl implements HistoryResultService {
break;
case 54:
//三相无功功率
sql = "SELECT time as time, q as aValue FROM data_harmpower_q WHERE " + stringBuilder.toString() +
sql = "SELECT time as time, q as aValue ," + InfluxDBTableConstant.PHASIC_TYPE + " FROM data_harmpower_q WHERE " + stringBuilder +
" and (phasic_type ='A' or phasic_type ='B' or phasic_type ='C') group by phasic_type order by time asc tz('Asia/Shanghai');";
phasicType.add("A相");
phasicType.add("B相");
@@ -600,7 +625,7 @@ public class HistoryResultServiceImpl implements HistoryResultService {
break;
case 55:
//三相视在功率
sql = "SELECT time as time, s as aValue FROM data_harmpower_s WHERE " + stringBuilder.toString() +
sql = "SELECT time as time, s as aValue ," + InfluxDBTableConstant.PHASIC_TYPE + " FROM data_harmpower_s WHERE " + stringBuilder +
" and (phasic_type ='A' or phasic_type ='B' or phasic_type ='C') group by phasic_type order by time asc tz('Asia/Shanghai');";
phasicType.add("A相");
phasicType.add("B相");
@@ -610,7 +635,7 @@ public class HistoryResultServiceImpl implements HistoryResultService {
break;
case 56:
//三相总有功功率
sql = "SELECT time as time, p as aValue FROM data_harmpower_p WHERE " + stringBuilder.toString() +
sql = "SELECT time as time, p as aValue ," + InfluxDBTableConstant.PHASIC_TYPE + " FROM data_harmpower_p WHERE " + stringBuilder +
" and phasic_type ='T' order by time asc tz('Asia/Shanghai');";
phasicType.add("三相总有功功率");
unit.add("kW");
@@ -618,7 +643,7 @@ public class HistoryResultServiceImpl implements HistoryResultService {
break;
case 57:
//三相总无功功率
sql = "SELECT time as time, q as aValue FROM data_harmpower_q WHERE " + stringBuilder.toString() +
sql = "SELECT time as time, q as aValue ," + InfluxDBTableConstant.PHASIC_TYPE + " FROM data_harmpower_q WHERE " + stringBuilder +
" and phasic_type ='T' order by time asc tz('Asia/Shanghai');";
phasicType.add("三相总无功功率");
unit.add("kVar");
@@ -626,7 +651,7 @@ public class HistoryResultServiceImpl implements HistoryResultService {
break;
case 58:
//三相总视在功率
sql = "SELECT time as time, s as aValue FROM data_harmpower_s WHERE " + stringBuilder.toString() +
sql = "SELECT time as time, s as aValue ," + InfluxDBTableConstant.PHASIC_TYPE + " FROM data_harmpower_s WHERE " + stringBuilder +
" and phasic_type ='T' order by time asc tz('Asia/Shanghai');";
phasicType.add("三相总视在功率");
unit.add("kVA");
@@ -634,7 +659,7 @@ public class HistoryResultServiceImpl implements HistoryResultService {
break;
case 59:
//视在功率因数
sql = "SELECT time as time, pf as aValue FROM data_harmpower_p WHERE " + stringBuilder.toString() +
sql = "SELECT time as time, pf as aValue ," + InfluxDBTableConstant.PHASIC_TYPE + " FROM data_harmpower_p WHERE " + stringBuilder +
" and (phasic_type ='A' or phasic_type ='B' or phasic_type ='C') group by phasic_type order by time asc tz('Asia/Shanghai');";
phasicType.add("A相");
phasicType.add("B相");
@@ -643,7 +668,7 @@ public class HistoryResultServiceImpl implements HistoryResultService {
break;
case 591:
//位移功率因数
sql = "SELECT time as time, df as aValue FROM data_harmpower_p WHERE " + stringBuilder.toString() +
sql = "SELECT time as time, df as aValue ," + InfluxDBTableConstant.PHASIC_TYPE + " FROM data_harmpower_p WHERE " + stringBuilder +
" and (phasic_type ='A' or phasic_type ='B' or phasic_type ='C') group by phasic_type order by time asc tz('Asia/Shanghai');";
phasicType.add("A相");
phasicType.add("B相");
@@ -652,21 +677,21 @@ public class HistoryResultServiceImpl implements HistoryResultService {
break;
case 592:
//总视在功率因数
sql = "SELECT time as time, pf as aValue FROM data_harmpower_p WHERE " + stringBuilder.toString() +
sql = "SELECT time as time, pf as aValue ," + InfluxDBTableConstant.PHASIC_TYPE + " FROM data_harmpower_p WHERE " + stringBuilder +
" and phasic_type ='T' order by time asc tz('Asia/Shanghai');";
phasicType.add("总视在功率因数");
targetName = "总视在功率因数";
break;
case 593:
//总位移功率因数
sql = "SELECT time as time, df as aValue FROM data_harmpower_p WHERE " + stringBuilder.toString() +
sql = "SELECT time as time, df as aValue ," + InfluxDBTableConstant.PHASIC_TYPE + " FROM data_harmpower_p WHERE " + stringBuilder +
" and phasic_type ='T' order by time asc tz('Asia/Shanghai');";
phasicType.add("总位移功率因数");
targetName = "总位移功率因数";
break;
case 61:
//长时闪变
sql = "SELECT time as time, plt as aValue FROM data_plt WHERE " + stringBuilder.toString() +
sql = "SELECT time as time, plt as aValue ," + InfluxDBTableConstant.PHASIC_TYPE + " FROM data_plt WHERE " + stringBuilder +
" and (phasic_type ='A' or phasic_type ='B' or phasic_type ='C') group by phasic_type order by time asc tz('Asia/Shanghai');";
if (ptType == 0) {
phasicType.add("A相");
@@ -682,7 +707,7 @@ public class HistoryResultServiceImpl implements HistoryResultService {
break;
case 60:
//短时闪变
sql = "SELECT time as time, pst as aValue FROM data_flicker WHERE " + stringBuilder.toString() +
sql = "SELECT time as time, pst as aValue ," + InfluxDBTableConstant.PHASIC_TYPE + " FROM data_flicker WHERE " + stringBuilder +
" and (phasic_type ='A' or phasic_type ='B' or phasic_type ='C') group by phasic_type order by time asc tz('Asia/Shanghai');";
topLimit = overlimit.getFlicker();
if (ptType == 0) {
@@ -698,7 +723,7 @@ public class HistoryResultServiceImpl implements HistoryResultService {
break;
case 62:
//电压波动
sql = "SELECT time as time, fluc as aValue FROM data_fluc WHERE " + stringBuilder.toString() +
sql = "SELECT time as time, fluc as aValue ," + InfluxDBTableConstant.PHASIC_TYPE + " FROM data_fluc WHERE " + stringBuilder +
" and (phasic_type ='A' or phasic_type ='B' or phasic_type ='C') group by phasic_type order by time asc tz('Asia/Shanghai');";
if (ptType == 0) {
phasicType.add("A相");
@@ -714,8 +739,12 @@ public class HistoryResultServiceImpl implements HistoryResultService {
default:
break;
}
queryResult = influxDbUtils.query(sql);
queryResultLimitVO.setQueryResult(queryResult);
//大致有3种类型:
//1、一次查询返回3条记录,分别为A/B/C三相的结果
//2、一次查询返回一条记录,以T相为条件,返回某3个指标值
//3、一次查询返回一条记录,以T相为条件,返回某1个指标值
List<HarmonicHistoryData> harmonicHistoryData = commonMapper.getHistoryResult(sql);
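//Illustrative sketch (not part of this commit): one hedged way to separate the three shapes above,
//assuming HarmonicHistoryData#getPhasicType carries the phasic_type tag selected in the SQL:
//Map<String, List<HarmonicHistoryData>> byPhase = harmonicHistoryData.stream()
//        .collect(Collectors.groupingBy(HarmonicHistoryData::getPhasicType));
//three keys ("A"/"B"/"C") correspond to shape 1, a single "T" key to shapes 2 and 3.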
queryResultLimitVO.setHarmonicHistoryDataList(harmonicHistoryData);
queryResultLimitVO.setTopLimit(topLimit);
queryResultLimitVO.setLowerLimit(lowerLimit);
queryResultLimitVO.setPhaiscType(phasicType);
@@ -733,7 +762,6 @@ public class HistoryResultServiceImpl implements HistoryResultService {
private QueryResultLimitVO getQueryNormDataResult(NormHistoryParam normHistoryParam) {
QueryResultLimitVO queryResultLimitVO = new QueryResultLimitVO();
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
QueryResult queryResult = null;
if (!normHistoryParam.getLineId().isEmpty()) {
Float topLimit = 0f;
Float lowerLimit = 0f;
@@ -747,9 +775,9 @@ public class HistoryResultServiceImpl implements HistoryResultService {
LineDetailDataVO lineDetailDataVO = lineFeignClient.getLineDetailData(lineList).getData();
//组装sql语句
StringBuilder stringBuilder = new StringBuilder();
stringBuilder.append(Param.TIME + " >= '").append(startTime).append("' and ").append(Param.TIME).append(" <= '").append(endTime).append("' and (");
stringBuilder.append(InfluxDBTableConstant.TIME + " >= '").append(startTime).append("' and ").append(InfluxDBTableConstant.TIME).append(" <= '").append(endTime).append("' and (");
//sql语句
stringBuilder.append(Param.LINE_ID + "='").append(lineList).append("')");
stringBuilder.append(InfluxDBTableConstant.LINE_ID + "='").append(lineList).append("')");
String valueTypeName = "";
String phasicType = "";
switch (valueType) {
@@ -768,15 +796,15 @@ public class HistoryResultServiceImpl implements HistoryResultService {
default:
break;
}
if ("A".equals(normHistoryParam.getPhaseType()) || "AB".equals(normHistoryParam.getPhaseType())) {
if ("A".equalsIgnoreCase(normHistoryParam.getPhaseType()) || "AB".equalsIgnoreCase(normHistoryParam.getPhaseType())) {
phasicType = "A";
} else if ("B".equals(normHistoryParam.getPhaseType()) || "BC".equals(normHistoryParam.getPhaseType())) {
} else if ("B".equalsIgnoreCase(normHistoryParam.getPhaseType()) || "BC".equalsIgnoreCase(normHistoryParam.getPhaseType())) {
phasicType = "B";
} else if ("C".equals(normHistoryParam.getPhaseType()) || "CA".equals(normHistoryParam.getPhaseType())) {
} else if ("C".equalsIgnoreCase(normHistoryParam.getPhaseType()) || "CA".equalsIgnoreCase(normHistoryParam.getPhaseType())) {
phasicType = "C";
}
if (Integer.parseInt(normHistoryParam.getTargetCode()) != 60 && Integer.parseInt(normHistoryParam.getTargetCode()) != 61 && Integer.parseInt(normHistoryParam.getTargetCode()) != 62) {
stringBuilder.append(" and ").append(Param.VALUETYPE + "='").append(valueTypeName).append("'");
stringBuilder.append(" and ").append(InfluxDBTableConstant.VALUE_TYPE + "='").append(valueTypeName).append("'");
}
stringBuilder.append(" and ").append("phasic_type ='").append(phasicType).append("'");
String sql = "";
@@ -786,7 +814,7 @@ public class HistoryResultServiceImpl implements HistoryResultService {
switch (Integer.parseInt(normHistoryParam.getTargetCode())) {
case 12:
//电压偏差
sql = "SELECT time as time, vu_dev as aValue FROM data_v WHERE " + stringBuilder.toString() +
sql = "SELECT time as time, vu_dev as aValue FROM data_v WHERE " + stringBuilder +
" order by time asc tz('Asia/Shanghai');";
topLimit = overlimit.getVoltageDev();
lowerLimit = overlimit.getUvoltageDev();
@@ -795,7 +823,7 @@ public class HistoryResultServiceImpl implements HistoryResultService {
break;
case 13:
//三相电压不平衡度
sql = "SELECT time as time, v_unbalance as aValue FROM data_v WHERE " + stringBuilder.toString() +
sql = "SELECT time as time, v_unbalance as aValue FROM data_v WHERE " + stringBuilder +
" order by time asc tz('Asia/Shanghai');";
topLimit = overlimit.getUbalance();
unit.add("%");
@@ -803,7 +831,7 @@ public class HistoryResultServiceImpl implements HistoryResultService {
break;
case 15:
//电压总谐波畸变率
sql = "SELECT time as time, v_thd as aValue FROM data_v WHERE " + stringBuilder.toString() +
sql = "SELECT time as time, v_thd as aValue FROM data_v WHERE " + stringBuilder +
" order by time asc tz('Asia/Shanghai');";
topLimit = overlimit.getUaberrance();
unit.add("%");
@@ -811,7 +839,7 @@ public class HistoryResultServiceImpl implements HistoryResultService {
break;
case 22:
//负序电流
sql = "SELECT time as time, i_neg as aValue FROM data_i WHERE " + stringBuilder.toString() +
sql = "SELECT time as time, i_neg as aValue FROM data_i WHERE " + stringBuilder +
" order by time asc tz('Asia/Shanghai');";
topLimit = overlimit.getINeg();
unit.add("A");
@@ -819,8 +847,8 @@ public class HistoryResultServiceImpl implements HistoryResultService {
break;
case 30:
//频率 V9暂时代表Freq
sql = "SELECT time as time, freq as aValue FROM data_v WHERE " + stringBuilder.toString() +
" and phasic_type ='T' order by time asc tz('Asia/Shanghai');";
sql = "SELECT time as time, freq as aValue FROM data_v WHERE " + stringBuilder +
" order by time asc tz('Asia/Shanghai');";
topLimit = 50 + overlimit.getFreqDev();
lowerLimit = 50 - overlimit.getFreqDev();
unit.add("Hz");
@@ -828,23 +856,23 @@ public class HistoryResultServiceImpl implements HistoryResultService {
break;
case 40:
//谐波电压含有率
sql = "SELECT time as time, v_" + number + " as aValue FROM data_harmrate_v WHERE " + stringBuilder.toString() +
" and (phasic_type ='A' or phasic_type ='B' or phasic_type ='C') group by phasic_type order by time asc tz('Asia/Shanghai');";
sql = "SELECT time as time, v_" + number + " as aValue FROM data_harmrate_v WHERE " + stringBuilder +
" order by time asc tz('Asia/Shanghai');";
topLimit = PubUtils.getValueByMethod(overlimit, "getUharm", number);
unit.add("%");
targetName = "谐波电压含有率";
break;
case 41:
//谐波电流含有率
sql = "SELECT time as time, i_" + number + " as aValue FROM data_harmrate_i WHERE " + stringBuilder.toString() +
" and (phasic_type ='A' or phasic_type ='B' or phasic_type ='C') group by phasic_type order by time asc tz('Asia/Shanghai');";
sql = "SELECT time as time, i_" + number + " as aValue FROM data_harmrate_i WHERE " + stringBuilder +
" order by time asc tz('Asia/Shanghai');";
unit.add("%");
targetName = "谐波电流含有率";
break;
case 43:
//谐波电流幅值
sql = "SELECT time as time, i_" + number + " as aValue FROM data_i WHERE " + stringBuilder.toString() +
" and (phasic_type ='A' or phasic_type ='B' or phasic_type ='C') group by phasic_type order by time asc tz('Asia/Shanghai');";
sql = "SELECT time as time, i_" + number + " as aValue FROM data_i WHERE " + stringBuilder +
" order by time asc tz('Asia/Shanghai');";
topLimit = PubUtils.getValueByMethod(overlimit, "getIharm", number);
unit.add("A");
targetName = "谐波电流幅值";
@@ -852,24 +880,24 @@ public class HistoryResultServiceImpl implements HistoryResultService {
case 46:
//间谐波电压含有率
sql = "SELECT time as time, v_" + number + " as aValue FROM data_inharmrate_v WHERE " + stringBuilder.toString() +
" and (phasic_type ='A' or phasic_type ='B' or phasic_type ='C') group by phasic_type order by time asc tz('Asia/Shanghai');";
sql = "SELECT time as time, v_" + number + " as aValue FROM data_inharmrate_v WHERE " + stringBuilder +
" order by time asc tz('Asia/Shanghai');";
topLimit = PubUtils.getValueByMethod(overlimit, "getInuharm", number);
unit.add("%");
targetName = "间谐波电压含有率";
break;
case 61:
//长时闪变
sql = "SELECT time as time, plt as aValue FROM data_plt WHERE " + stringBuilder.toString() +
" and (phasic_type ='A' or phasic_type ='B' or phasic_type ='C') group by phasic_type order by time asc tz('Asia/Shanghai');";
sql = "SELECT time as time, plt as aValue FROM data_plt WHERE " + stringBuilder +
" order by time asc tz('Asia/Shanghai');";
targetName = "长时闪变";
topLimit = overlimit.getFlicker();
break;
default:
break;
}
queryResult = influxDbUtils.query(sql);
queryResultLimitVO.setQueryResult(queryResult);
List<HarmonicHistoryData> harmonicHistoryData = commonMapper.getHistoryResult(sql);
queryResultLimitVO.setHarmonicHistoryDataList(harmonicHistoryData);
queryResultLimitVO.setTopLimit(topLimit);
queryResultLimitVO.setLowerLimit(lowerLimit);
queryResultLimitVO.setPhaiscType(Collections.singletonList(phasicType));
@@ -891,22 +919,16 @@ public class HistoryResultServiceImpl implements HistoryResultService {
private HistoryDataResultVO getNormCondition(NormHistoryParam normHistoryParam) {
HistoryDataResultVO historyDataResultVO = new HistoryDataResultVO();
QueryResultLimitVO queryResultLimitVO = getQueryNormDataResult(normHistoryParam);
QueryResult queryResult = queryResultLimitVO.getQueryResult();
BeanUtil.copyProperties(queryResultLimitVO, historyDataResultVO);
List<QueryResult.Series> list = queryResult.getResults().get(0).getSeries();
//获取查询返回的结果集
List<HarmonicHistoryData> harmonicHistoryDataList = queryResultLimitVO.getHarmonicHistoryDataList();
List<Date> time = new ArrayList<>();
List<Float> fValue = new ArrayList<>();
List<List<Object>> objectListData = new ArrayList<>();
if (!CollectionUtils.isEmpty(list)) {
List<List<Object>> listData = queryResult.getResults().get(0).getSeries().get(0).getValues();
for (int i = 0; i < listData.size(); i++) {
List<Object> objectList = listData.get(i);
DateFormat formatter = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssX");
TimeZone tx = TimeZone.getTimeZone("Asia/Kolkata");
formatter.setTimeZone(tx);
Date d = formatter.parse(objectList.get(0).toString());
time.add(d);
fValue.add(BigDecimal.valueOf(Float.parseFloat(objectList.get(1).toString())).setScale(4, BigDecimal.ROUND_HALF_UP).floatValue());
if (CollectionUtil.isNotEmpty(harmonicHistoryDataList)) {
for (HarmonicHistoryData harmonicHistoryData : harmonicHistoryDataList) {
time.add(new Date(harmonicHistoryData.getTime().toEpochMilli()));
fValue.add(BigDecimal.valueOf(harmonicHistoryData.getAValue()).setScale(4, BigDecimal.ROUND_HALF_UP).floatValue());
}
for (int i = 0; i < time.size(); i++) {
List<Object> objects = new ArrayList<>();
@@ -961,6 +983,103 @@ public class HistoryResultServiceImpl implements HistoryResultService {
return statHarmonicOrgVOS;
}
@Override
public HarmHistoryDataDTO getHistoryHarmData(HistoryHarmParam historyHarmParam) {
List<HarmData> historyData;
float overLimit;
Overlimit overlimit = lineFeignClient.getOverLimitData(historyHarmParam.getLineId()).getData();
//判断是电流还是电压谐波
if (historyHarmParam.getType() == 0) {
historyData = getIHistoryData(historyHarmParam);
overLimit = PubUtils.getValueByMethod(overlimit, "getIharm", historyHarmParam.getTime());
} else {
historyData = getVHistoryData(historyHarmParam);
overLimit = PubUtils.getValueByMethod(overlimit, "getUharm", historyHarmParam.getTime());
}
return new HarmHistoryDataDTO(historyData, overLimit);
}
/***
* 获取指定次数 监测点的历史谐波电流数据
* @author hongawen
* @date 2023/7/19 10:03
*/
private List<HarmData> getIHistoryData(HistoryHarmParam historyHarmParam) {
LineDetailDataVO lineDetailData = lineFeignClient.getLineDetailData(historyHarmParam.getLineId()).getData();
List<HarmData> historyData;
InfluxQueryWrapper influxQueryWrapper = new InfluxQueryWrapper(DataI.class, HarmData.class);
influxQueryWrapper
.select(DataI::getTime)
.max("i_" + historyHarmParam.getTime(), "value")
.between(DataI::getTime, historyHarmParam.getSearchBeginTime().concat(" 00:00:00"), historyHarmParam.getSearchEndTime().concat(" 23:59:59"))
.eq(DataI::getLineId, historyHarmParam.getLineId())
.or(DataI::getPhaseType, Stream.of(InfluxDBTableConstant.PHASE_TYPE_A, InfluxDBTableConstant.PHASE_TYPE_B, InfluxDBTableConstant.PHASE_TYPE_C).collect(Collectors.toList()))
//以时间分组时,需要加上时间间隔,比如此处需要加上监测点的采样间隔
.groupBy("time(" + lineDetailData.getTimeInterval() + "m)")
.timeAsc();
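//Illustrative note (assumption about the wrapper's output): with lineDetailData.getTimeInterval() == 3
//the groupBy argument becomes "time(3m)", i.e. the generated query is expected to carry an InfluxQL
//"GROUP BY time(3m)" clause, so at most one max() value comes back per 3-minute bucket per phase.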
historyData = dataIMapper.getIHistoryData(influxQueryWrapper);
if (CollectionUtils.isEmpty(historyData)) {
//如果数据为空,则提示给用户暂无数据
throw new BusinessException(HarmonicResponseEnum.NO_DATA);
}
//最新两条数据的间隔与监测点查出的间隔做对比,返回一个合理的间隔
int lineInterval = getInterval(lineDetailData.getTimeInterval(), PubUtils.instantToDate(historyData.get(historyData.size() - 1).getTime()), PubUtils.instantToDate(historyData.get(historyData.size() - 2).getTime()));
historyData = dealHistoryData(historyData, lineInterval);
if (CollectionUtils.isEmpty(historyData)) {
//如果数据为空,则提示给用户暂无数据
throw new BusinessException(HarmonicResponseEnum.NO_DATA);
}
//根据查询天数,计算理论上应有多少条谐波数据
List<String> dateStr = PubUtils.getTimes(DateUtil.beginOfDay(DateUtil.parse(historyHarmParam.getSearchBeginTime())), DateUtil.endOfDay(DateUtil.parse(historyHarmParam.getSearchEndTime())));
int dueTimes = dateStr.size() * 1440 / lineInterval;
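//Worked example (hypothetical numbers): a 3-day range at a 3-minute interval gives dueTimes = 3 * 1440 / 3 = 1440,
//so exactly 1440 points must survive dealHistoryData for the completeness check below to pass.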
int realTimes = historyData.size();
if (dueTimes != realTimes) {
//期待值与实际值不等,则提示用户时间范围内谐波数据完整性不足
throw new BusinessException(HarmonicResponseEnum.INSUFFICIENCY_OF_INTEGRITY);
}
return historyData.stream().sorted(Comparator.comparing(HarmData::getTime)).collect(Collectors.toList());
}
/**
* 获取谐波电压的数据
* <p>
* 数据取自历史谐波电压含有率表 data_harmrate_v
*/
private List<HarmData> getVHistoryData(HistoryHarmParam historyHarmParam) {
LineDetailDataVO lineDetailData = lineFeignClient.getLineDetailData(historyHarmParam.getLineId()).getData();
List<HarmData> historyData;
InfluxQueryWrapper influxQueryWrapper = new InfluxQueryWrapper(DataHarmRateV.class, HarmData.class);
influxQueryWrapper
.select(DataHarmRateV::getTime)
.max("v_" + historyHarmParam.getTime(), "value")
.between(DataHarmRateV::getTime, historyHarmParam.getSearchBeginTime().concat(" 00:00:00"), historyHarmParam.getSearchEndTime().concat(" 23:59:59"))
.eq(DataHarmRateV::getLineId, historyHarmParam.getLineId())
.or(DataHarmRateV::getPhaseType, Stream.of(InfluxDBTableConstant.PHASE_TYPE_A, InfluxDBTableConstant.PHASE_TYPE_B, InfluxDBTableConstant.PHASE_TYPE_C).collect(Collectors.toList()))
.groupBy("time(" + lineDetailData.getTimeInterval() + "m)")
.timeAsc();
historyData = dataHarmRateVMapper.getHarmRateVHistoryData(influxQueryWrapper);
if (CollectionUtils.isEmpty(historyData)) {
//如果数据为空,则提示给用户暂无数据
throw new BusinessException(HarmonicResponseEnum.NO_DATA);
}
//最新两条数据的间隔与监测点查出的间隔做对比,返回一个合理的间隔
int lineInterval = getInterval(lineDetailData.getTimeInterval(), PubUtils.instantToDate(historyData.get(historyData.size() - 1).getTime()), PubUtils.instantToDate(historyData.get(historyData.size() - 2).getTime()));
historyData = dealHistoryData(historyData, lineInterval);
if (CollectionUtils.isEmpty(historyData)) {
//如果数据为空,则提示给用户暂无数据
throw new BusinessException(HarmonicResponseEnum.NO_DATA);
}
//根据查询天数,计算理论上应有多少条谐波数据
List<String> dateStr = PubUtils.getTimes(DateUtil.beginOfDay(DateUtil.parse(historyHarmParam.getSearchBeginTime())), DateUtil.endOfDay(DateUtil.parse(historyHarmParam.getSearchEndTime())));
int dueTimes = dateStr.size() * 1440 / lineInterval;
int realTimes = historyData.size();
if (dueTimes != realTimes) {
//期待值与实际值不等,则提示用户时间范围内谐波数据完整性不足
throw new BusinessException(HarmonicResponseEnum.INSUFFICIENCY_OF_INTEGRITY);
}
return historyData.stream().sorted(Comparator.comparing(HarmData::getTime)).collect(Collectors.toList());
}
/**
* 使用流对象的方法插入数据
*/
@@ -981,6 +1100,149 @@ public class HistoryResultServiceImpl implements HistoryResultService {
return list;
}
/**
* 根据库中查询的数据,进行数据补齐操作
*
* @param beforeDeal 库中实际的历史谐波数据
*/
private List<HarmData> dealHistoryData(List<HarmData> beforeDeal, int lineInterval) {
List<HarmData> result = new ArrayList<>();
try {
if (CollectionUtils.isEmpty(beforeDeal)) {
return result;
} else {
//先将查询数据按日进行收集
Map<String/*yyyy-MM-dd的时间格式*/, Map<Date, HarmData>/*当前天的所有谐波数据*/> dayHistoryDatas = new HashMap<>();
for (HarmData harmData : beforeDeal) {
Date time = PubUtils.instantToDate(harmData.getTime());
String date = DateUtil.format(time, DatePattern.NORM_DATE_PATTERN);
if (dayHistoryDatas.containsKey(date)) {
Map<Date, HarmData> harmDataMap = dayHistoryDatas.get(date);
harmDataMap.put(PubUtils.getSecondsAsZero(PubUtils.instantToDate(harmData.getTime())), harmData);
dayHistoryDatas.put(date, harmDataMap);
} else {
Map<Date, HarmData> harmDataMap = new HashMap<>();
harmDataMap.put(PubUtils.getSecondsAsZero(PubUtils.instantToDate(harmData.getTime())), harmData);
dayHistoryDatas.put(date, harmDataMap);
}
}
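//Illustrative shape of the map built above (hypothetical data): "2023-07-18" -> { 2023-07-18 08:00:00 -> HarmData, ... };
//seconds are zeroed on the inner keys so that the Calendar-based lookups in the fill loop below hit them exactly.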
//将数据按日期处理后,开始进行完整性判断,满足完整性则进行补齐,否则返回空数据
Set<String> days = dayHistoryDatas.keySet();
for (String day : days) {
//获取出当天的历史谐波数据
Map<Date, HarmData> harmDataMap = dayHistoryDatas.get(day);
if (CollectionUtils.isEmpty(harmDataMap)) {
continue;
}
int dueTimes = 1440 / lineInterval;
int realTimes = harmDataMap.size();
double integrity = (double) realTimes / (double) dueTimes;
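//Worked example (hypothetical numbers): with lineInterval = 3, dueTimes = 1440 / 3 = 480; at realTimes = 450
//the integrity is 450 / 480 = 0.9375, so the day is back-filled below; at 430 / 480 ≈ 0.896 or at a full
//480 / 480 = 1.0 the raw points for that day are returned as-is.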
if (integrity < 0.9 || integrity >= 1.0) {
//完整性不足或数据已完整(无需补齐),直接返回原数据
Set<Date> dates = harmDataMap.keySet();
for (Date time : dates) {
result.add(harmDataMap.get(time));
}
} else if (integrity < 1.0) {
//进行数据补齐:按监测点测量间隔逐点遍历当天时间轴,缺失点用前后相邻有效值的均值(或单侧值)补上
List<HarmData> afterDeal = new ArrayList<>();
String timeTemp = day + " 00:00:00";
Date date = DateUtil.parse(timeTemp, DatePattern.NORM_DATETIME_PATTERN);
for (int i = 0; i < dueTimes; i++) {
Calendar calendar = Calendar.getInstance();
calendar.setTime(date);
calendar.add(Calendar.MINUTE, lineInterval * i);
HarmData temp = harmDataMap.get(calendar.getTime());
if (temp != null && temp.getValue() != null) {
afterDeal.add(temp);
} else {
//递归找到前面的值
Float preValue = getPreHarmValue(date, calendar.getTime(), harmDataMap, lineInterval);
//递归找到后面的值
Float appendValue = getAppendHarmValue(date, calendar.getTime(), harmDataMap, lineInterval);
HarmData harmData = new HarmData();
harmData.setTime(PubUtils.dateToInstant(calendar.getTime()));
//还需要判断前值和后值为空的情况
if (null == preValue && null == appendValue) {
harmData.setValue(0.0f);
} else if (null == preValue) {
harmData.setValue(appendValue);
} else if (null == appendValue) {
harmData.setValue(preValue);
} else {
harmData.setValue((preValue + appendValue) / 2);
}
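//Illustrative fill (hypothetical values, 3-minute interval): if the 08:03 point is missing, preValue comes from
//08:00 and appendValue from 08:06; with preValue = 2.1f and appendValue = 2.5f the gap is filled with (2.1 + 2.5) / 2 = 2.3.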
afterDeal.add(harmData);
}
}
result.addAll(afterDeal);
}
}
}
} catch (Exception e) {
log.error("开始处理历史电压谐波数据失败,失败原因:{}", e.toString());
throw new BusinessException(HarmonicResponseEnum.INSUFFICIENCY_OF_INTEGRITY);
}
return result;
}
/**
* 递归找前值 谐波数据
*
* @param date 起始时间
* @param time 当前时间
* @param beforeDeal 处理前的数据
*/
private Float getPreHarmValue(Date date, Date time, Map<Date, HarmData> beforeDeal, int interval) {
Float result;
if (date.getTime() >= time.getTime()) {
return null;
} else {
Calendar calendar = Calendar.getInstance();
calendar.setTime(time);
interval = -interval;
calendar.add(Calendar.MINUTE, interval);
HarmData temp = beforeDeal.get(calendar.getTime());
if (temp == null || temp.getValue() == null) {
result = getPreHarmValue(date, calendar.getTime(), beforeDeal, Math.abs(interval));
} else {
result = temp.getValue();
}
}
return result;
}
/**
* 递归找后值 谐波数据
*
* @param date 起始时间(当日零点)
* @param time 当前时间
* @param beforeDeal 处理前的数据
*/
private Float getAppendHarmValue(Date date, Date time, Map<Date, HarmData> beforeDeal, int interval) {
Float result;
Calendar calendar = Calendar.getInstance();
calendar.setTime(date);
calendar.add(Calendar.DAY_OF_MONTH, 1);
calendar.add(Calendar.MINUTE, -interval);
if (calendar.getTimeInMillis() <= time.getTime()) {
return null;
} else {
Calendar calendar1 = Calendar.getInstance();
calendar1.setTime(time);
calendar1.add(Calendar.MINUTE, interval);
HarmData temp = beforeDeal.get(calendar1.getTime());
if (temp == null || temp.getValue() == null) {
result = getAppendHarmValue(date, calendar1.getTime(), beforeDeal, interval);
} else {
result = temp.getValue();
}
}
return result;
}
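//Boundary behaviour of the two recursive helpers above (hypothetical 3-minute interval): a missing 00:00 point has
//no earlier slot, so getPreHarmValue returns null; a missing 23:57 point is the day's last slot, so getAppendHarmValue's
//guard (day start + 1 day - interval <= time) returns null, and the caller falls back to the other side's value.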
// /**
// * 获取季度时间段
// * @param date
@@ -1037,4 +1299,26 @@ public class HistoryResultServiceImpl implements HistoryResultService {
// return result;
// }
/**
* 获取合理的测量间隔
*/
private int getInterval(int lineInterval, Date lastOne, Date lastTwo) {
int interval;
long oneTime = lastOne.getTime();
long twoTime = lastTwo.getTime();
long intvalTime = oneTime - twoTime;
long databaseInterval = lineInterval * 60 * 1000L;
if (oneTime < twoTime || intvalTime >= databaseInterval) {
//数据乱序或实际间隔不小于台账配置间隔时,使用台账配置的间隔
interval = lineInterval;
} else {
//否则使用最新两条数据的实际间隔
interval = (int) (intvalTime / (1000 * 60));
}
return interval;
}
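//Worked example (hypothetical timestamps): with lineInterval = 5 and the last two points at 10:00 and 10:03,
//intvalTime = 3 min < 5 min, so the observed 3-minute gap is returned; a 5-minute-or-larger gap (or out-of-order
//points) falls back to the configured lineInterval.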
}

View File

@@ -21,7 +21,6 @@ import com.njcn.harmonic.pojo.dto.PublicDTO;
import com.njcn.harmonic.pojo.vo.IntegrityIconVO;
import com.njcn.harmonic.pojo.vo.IntegrityVO;
import com.njcn.harmonic.service.IntegrityService;
import com.njcn.influxdb.utils.InfluxDbUtils;
import com.njcn.poi.excel.ExcelUtil;
import com.njcn.poi.pojo.bo.BaseLineExcelBody;
import com.njcn.poi.pojo.bo.BaseLineProExcelBody;
@@ -54,8 +53,6 @@ public class IntegrityServiceImpl implements IntegrityService {
private final IntegrityMapper integrityMapper;
private final InfluxDbUtils influxDbUtils;
private final GeneralDeviceInfoClient generalDeviceInfoClient;
private final GeneralInfo generalInfo;

View File

@@ -8,16 +8,13 @@ import com.njcn.device.pq.api.GeneralDeviceInfoClient;
import com.njcn.device.pq.pojo.dto.GeneralDeviceDTO;
import com.njcn.device.pq.pojo.param.DeviceInfoParam;
import com.njcn.device.pq.pojo.param.OnlineRateParam;
import com.njcn.device.pq.pojo.po.OnlineRate;
import com.njcn.device.pq.pojo.vo.RStatOnlinerateVO;
import com.njcn.harmonic.mapper.OnlineRateDataMapper;
import com.njcn.harmonic.pojo.dto.PublicDTO;
import com.njcn.harmonic.pojo.vo.OnlineRateCensusVO;
import com.njcn.harmonic.pojo.vo.OnlineRateVO;
import com.njcn.harmonic.service.OnlineRateDataService;
import com.njcn.influxdb.utils.InfluxDbUtils;
import lombok.AllArgsConstructor;
import org.influxdb.dto.QueryResult;
import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils;
@@ -27,7 +24,6 @@ import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
import static com.njcn.influxdb.param.InfluxDBPublicParam.*;
/**
* 类的介绍:

View File

@@ -8,7 +8,6 @@ import com.njcn.device.pq.api.LineFeignClient;
import com.njcn.device.pq.pojo.dto.GeneralDeviceDTO;
import com.njcn.device.pq.pojo.dto.OverLimitLineDTO;
import com.njcn.device.pq.pojo.dto.PollutionParamDTO;
import com.njcn.device.pq.pojo.dto.WarningSubstationDTO;
import com.njcn.harmonic.constant.Param;
import com.njcn.harmonic.enums.HarmonicResponseEnum;
import com.njcn.harmonic.pojo.photoVO.WarningAreaVO;
@@ -18,7 +17,7 @@ import com.njcn.harmonic.pojo.po.LimitRate;
import com.njcn.harmonic.pojo.po.LimitTarget;
import com.njcn.harmonic.pojo.vo.OverAreaVO;
import com.njcn.harmonic.service.PhotovoltaicService;
import com.njcn.influxdb.utils.InfluxDbUtils;
import com.njcn.influx.imapper.CommonMapper;
import lombok.AllArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.influxdb.dto.QueryResult;
@@ -45,10 +44,10 @@ public class PhotovoltaicServiceImpl implements PhotovoltaicService {
private final GeneralDeviceInfoClient generalDeviceInfoClient;
private final InfluxDbUtils influxDbUtils;
private final LineFeignClient lineFeignClient;
private final CommonMapper commonMapper;
@Override
@SuppressWarnings("unchecked")
public Page<WarningAreaVO> areaWarningManage(OverAreaVO param) {
@@ -366,6 +365,7 @@ public class PhotovoltaicServiceImpl implements PhotovoltaicService {
/**
* 功能描述: 处理区域在线监测点数、超标监测点数
*
* @param list 集合
* @return
* @author xy
@@ -408,14 +408,12 @@ public class PhotovoltaicServiceImpl implements PhotovoltaicService {
}
}
String sql = "select " + stringBuilder1 + " from " + databaseName + " where " + stringBuilder;
QueryResult sqlResult = influxDbUtils.query(sql);
InfluxDBResultMapper resultMapper = new InfluxDBResultMapper();
switch (databaseName) {
case Param.LIMIT_TARGET:
result = resultMapper.toPOJO(sqlResult, LimitTarget.class);
result = commonMapper.selectLimitTargetBySql(sql);
break;
case Param.LIMIT_RATE:
result = resultMapper.toPOJO(sqlResult,LimitRate.class);
result = commonMapper.selectLimitTargetBySql(sql);
break;
default:
break;
@@ -435,9 +433,9 @@ public class PhotovoltaicServiceImpl implements PhotovoltaicService {
}
stringBuilder.append(" group by line_id tz('Asia/Shanghai')");
String sql = "select * from pqs_eventdetail where " + stringBuilder;
QueryResult sqlResult = influxDbUtils.query(sql);
// QueryResult sqlResult = influxDbUtils.query(sql);
InfluxDBResultMapper resultMapper = new InfluxDBResultMapper();
return resultMapper.toPOJO(sqlResult, EventDetail.class);
return null;
}
}

View File

@@ -1,261 +0,0 @@
package com.njcn.harmonic.service.impl;
import cn.hutool.core.bean.BeanUtil;
import cn.hutool.core.collection.CollectionUtil;
import com.njcn.common.config.GeneralInfo;
import com.njcn.common.pojo.dto.SimpleDTO;
import com.njcn.device.pms.api.MonitorClient;
import com.njcn.device.pms.api.PmsGeneralDeviceInfoClient;
import com.njcn.device.pms.api.PmsGeneratrixClient;
import com.njcn.device.pms.api.StatationStatClient;
import com.njcn.device.pms.pojo.dto.PmsGeneralDeviceDTO;
import com.njcn.device.pms.pojo.dto.PmsStatationStatInfoDTO;
import com.njcn.device.pms.pojo.param.PmsDeviceInfoParam;
import com.njcn.device.pms.pojo.param.PmsStatationStatInfoParam;
import com.njcn.device.pms.pojo.po.Monitor;
import com.njcn.device.pq.api.GeneralDeviceInfoClient;
import com.njcn.device.pq.api.LineFeignClient;
import com.njcn.device.pq.pojo.dto.GeneralDeviceDTO;
import com.njcn.device.pq.pojo.dto.PollutionLineDTO;
import com.njcn.device.pq.pojo.dto.PollutionParamDTO;
import com.njcn.device.pq.pojo.dto.PollutionSubstationDTO;
import com.njcn.harmonic.constant.Param;
import com.njcn.harmonic.pojo.dto.PublicDTO;
import com.njcn.harmonic.pojo.param.HarmonicPublicParam;
import com.njcn.harmonic.pojo.vo.PollMonitorVO;
import com.njcn.harmonic.pojo.vo.PollutionVO;
import com.njcn.harmonic.service.IPollutionService;
import com.njcn.influxdb.utils.InfluxDbUtils;
import com.njcn.system.enums.DicDataEnum;
import com.njcn.web.utils.RequestUtil;
import lombok.AllArgsConstructor;
import org.apache.commons.lang3.StringUtils;
import org.influxdb.dto.QueryResult;
import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils;
import java.math.BigDecimal;
import java.util.*;
import java.util.stream.Collectors;
/**
* 类的介绍:
*
* @author xuyang
* @version 1.0.0
* @createTime 2022/2/21 16:43
*/
@Service
@AllArgsConstructor
public class PollutionServiceImpl implements IPollutionService {
private final GeneralDeviceInfoClient generalDeviceInfoClient;
private final PmsGeneralDeviceInfoClient pmsGeneralDeviceInfoClient;
private final StatationStatClient statationStatClient;
private final MonitorClient monitorClient;
private final InfluxDbUtils influxDbUtils = new InfluxDbUtils("admin", "njcnpqs", "http://192.168.1.18:8086", "pqsbase", ""); ;
private final GeneralInfo generalInfo;
private final LineFeignClient lineFeignClient;
/* @Override
public List<PollutionVO> getDeptSubstationRelations(HarmonicPublicParam harmonicPublicParam) {
//用于区分是pms 还是 pq
List<PollutionVO> list = new ArrayList<>();
List<String> lineList = new ArrayList<>();
List<PollutionLineDTO> lineInfo = new ArrayList<>();
PollutionParamDTO paramDTO = new PollutionParamDTO();
harmonicPublicParam.setServerName(generalInfo.getMicroServiceName());
if (StringUtils.isBlank(RequestUtil.getDeptIndex())) {
return list;
}
List<GeneralDeviceDTO> deviceList = generalDeviceInfoClient.getPracticalRunDeviceInfo(harmonicPublicParam).getData();
deviceList.forEach(dept -> {
lineList.addAll(dept.getLineIndexes());
});
if (!CollectionUtils.isEmpty(lineList)) {
paramDTO.setLineList(lineList);
lineInfo = lineFeignClient.getLineInfo(paramDTO).getData();
List<PublicDTO> lineData = getLineDate(lineList, harmonicPublicParam.getSearchBeginTime(), harmonicPublicParam.getSearchEndTime(), harmonicPublicParam.getStatisticalType().getCode());
if (!CollectionUtils.isEmpty(lineData)) {
lineInfo.stream().map(list1 -> lineData.stream().filter(list2 -> Objects.equals(list1.getId(), list2.getId())).findAny().map(m -> {
list1.setData(m.getData());
return list1;
})).collect(Collectors.toList());
}
}
Map<String, List<PollutionLineDTO>> map = lineInfo.stream().collect(Collectors.groupingBy(PollutionLineDTO::getSubstationId));
deviceList.forEach(dept -> {
List<PollutionVO> childrenList = new ArrayList<>();
PollutionVO pollutionVO = new PollutionVO();
pollutionVO.setId(dept.getIndex());
pollutionVO.setName(dept.getName());
if (!CollectionUtils.isEmpty(dept.getSubIndexes())) {
dept.getSubIndexes().forEach(sub -> {
List<PollutionLineDTO> l1 = map.get(sub);
PollutionVO children = new PollutionVO();
children.setId(sub);
children.setPid(dept.getIndex());
children.setName(l1.get(0).getSubstation());
children.setData(l1.stream().max(Comparator.comparing(PollutionLineDTO::getData)).get().getData());
childrenList.add(children);
});
}
if (!CollectionUtils.isEmpty(childrenList)) {
pollutionVO.setChildren(childrenList.stream().sorted(Comparator.comparing(PollutionVO::getData).reversed()).collect(Collectors.toList()));
pollutionVO.setData(childrenList.stream().max(Comparator.comparing(PollutionVO::getData)).get().getData());
}
list.add(pollutionVO);
});
if (!CollectionUtils.isEmpty(list)) {
return list.stream().sorted(Comparator.comparing(PollutionVO::getData).reversed().thenComparing(PollutionVO::getName)).collect(Collectors.toList());
}
return list;
}
@Override
public List<PollutionSubstationDTO> getSubstationInfoById(HarmonicPublicParam deptParam) {
List<PollutionSubstationDTO> list = new ArrayList<>();
deptParam.setServerName(generalInfo.getMicroServiceName());
List<GeneralDeviceDTO> sub = generalDeviceInfoClient.getPracticalRunDeviceInfoAsSubstation(deptParam).getData();
sub.forEach(item -> {
PollutionSubstationDTO pollutionSubstationDTO = lineFeignClient.getSubstationInfo(item.getIndex()).getData();
if (!CollectionUtils.isEmpty(item.getLineIndexes())) {
List<PublicDTO> lineData = getLineDate(item.getLineIndexes(), deptParam.getSearchBeginTime(), deptParam.getSearchEndTime(), deptParam.getStatisticalType().getCode());
if (!CollectionUtils.isEmpty(lineData)) {
pollutionSubstationDTO.setData(lineData.stream().max(Comparator.comparing(PublicDTO::getData)).get().getData());
}
}
list.add(pollutionSubstationDTO);
});
if (!CollectionUtils.isEmpty(list)) {
return list.stream().sorted(Comparator.comparing(PollutionSubstationDTO::getData).reversed().thenComparing(PollutionSubstationDTO::getName)).collect(Collectors.toList());
}
return list;
}
@Override
public List<PollutionLineDTO> getLineInfoById(HarmonicPublicParam harmonicPublicParam) {
harmonicPublicParam.setServerName(generalInfo.getMicroServiceName());
List<PollutionLineDTO> list = new ArrayList<>();
List<String> line = new ArrayList<>();
PollutionParamDTO paramDTO = new PollutionParamDTO();
if (StringUtils.isBlank(RequestUtil.getDeptIndex())) {
return list;
}
List<GeneralDeviceDTO> sub = generalDeviceInfoClient.getPracticalRunDeviceInfoAsSubstation(harmonicPublicParam).getData();
sub.forEach(item -> {
if (Objects.equals(harmonicPublicParam.getId(), item.getIndex())) {
if (!CollectionUtils.isEmpty(item.getLineIndexes())) {
line.addAll(item.getLineIndexes());
}
}
});
if (!CollectionUtils.isEmpty(line)) {
paramDTO.setLineList(line);
list = lineFeignClient.getLineInfo(paramDTO).getData();
List<PublicDTO> lineData = getLineDate(line, harmonicPublicParam.getSearchBeginTime(), harmonicPublicParam.getSearchEndTime(), harmonicPublicParam.getStatisticalType().getCode());
if (!CollectionUtils.isEmpty(lineData)) {
list.stream().map(list1 -> lineData.stream().filter(list2 -> Objects.equals(list1.getId(), list2.getId())).findAny().map(m -> {
list1.setData(m.getData());
return list1;
})).collect(Collectors.toList());
}
} else {
return list;
}
return list.stream().sorted(Comparator.comparing(PollutionLineDTO::getData).reversed().thenComparing(PollutionLineDTO::getName)).collect(Collectors.toList());
}
*/
/* @Override
public List<PollutionLineDTO> getLineRank(HarmonicPublicParam harmonicPublicParam) {
harmonicPublicParam.setServerName(generalInfo.getMicroServiceName());
List<PollutionLineDTO> list = new ArrayList<>();
List<String> lineList = new ArrayList<>();
PollutionParamDTO paramDTO = new PollutionParamDTO();
if (StringUtils.isBlank(RequestUtil.getDeptIndex())) {
return list;
}
List<GeneralDeviceDTO> deviceList = generalDeviceInfoClient.getPracticalRunDeviceInfo(harmonicPublicParam).getData();
deviceList.forEach(item -> {
if (!CollectionUtils.isEmpty(item.getLineIndexes())) {
lineList.addAll(item.getLineIndexes());
}
});
if (!CollectionUtils.isEmpty(lineList)) {
paramDTO.setLineList(lineList);
list = lineFeignClient.getLineInfo(paramDTO).getData();
List<PublicDTO> result = getLineDate(lineList, harmonicPublicParam.getSearchBeginTime(), harmonicPublicParam.getSearchEndTime(), harmonicPublicParam.getStatisticalType().getCode());
if (!CollectionUtils.isEmpty(result)) {
list.stream().map(list1 -> result.stream().filter(list2 -> Objects.equals(list1.getId(), list2.getId())).findAny().map(m -> {
list1.setData(m.getData());
return list1;
})).collect(Collectors.toList());
}
}
list.sort((item1, item2) -> item2.getData().compareTo(item1.getData()));
if (list.size() > Param.UP_LIMIT) {
return list.subList(Param.DOWN_LIMIT, Param.UP_LIMIT);
}
return list;
}*/
/**
* 功能描述:根据监测点id查询influxDB污染指数
*
* @param line 监测点集合
* startTime 开始时间
* endTime 结束时间
* type 指标参数
* @return
* @author xy
* @date 2022/2/21 20:08
*/
private List<PublicDTO> getLineDate(List<String> line, String startTime, String endTime, String type) {
List<PublicDTO> result = new ArrayList<>();
String quota = "";
if (Objects.equals(type, DicDataEnum.XBDY_ENUM.getCode())) {
quota = "harmonic_v";
} else if (Objects.equals(type, DicDataEnum.XBDL_ENUM.getCode())) {
quota = "harmonic_i";
}
StringBuilder stringBuilder = new StringBuilder();
stringBuilder.append(Param.TIME).append(" >= '").append(startTime).append(Param.START_TIME).append("' and ").append(Param.TIME).append(" <= '").append(endTime).append(Param.END_TIME).append("' and (");
for (int i = 0; i < line.size(); i++) {
if (line.size() - i != 1) {
stringBuilder.append(Param.LINE_ID).append("='").append(line.get(i)).append("' or ");
} else {
stringBuilder.append(Param.LINE_ID).append("='").append(line.get(i)).append("')");
}
}
stringBuilder.append(" group by line_id order by time desc limit 1 tz('Asia/Shanghai')");
String sql = "SELECT line_id," + quota + " FROM harmonic_pollution where " + stringBuilder;
QueryResult sqlResult = influxDbUtils.query(sql);
List<QueryResult.Series> list = sqlResult.getResults().get(0).getSeries();
if (!CollectionUtils.isEmpty(list)) {
list.forEach(po -> {
String index = po.getTags().get(Param.LINE_ID);
List<List<Object>> valueList = po.getValues();
if (!CollectionUtils.isEmpty(valueList)) {
for (List<Object> value : valueList) {
PublicDTO publicDTO = new PublicDTO();
Double data = value.get(2) == null ? 0.0 : BigDecimal.valueOf(Double.parseDouble(value.get(2).toString())).setScale(2, BigDecimal.ROUND_HALF_UP).doubleValue();
publicDTO.setId(index);
publicDTO.setData(data);
result.add(publicDTO);
}
}
});
}
return result;
}
}

View File

@@ -30,15 +30,12 @@ import com.njcn.harmonic.pojo.po.*;
import com.njcn.harmonic.pojo.vo.PollutionSubstationVO;
import com.njcn.harmonic.pojo.vo.PollutionVO;
import com.njcn.harmonic.service.PollutionSubstationService;
import com.njcn.influxdb.utils.InfluxDbUtils;
import com.njcn.system.enums.DicDataEnum;
import com.njcn.user.api.DeptFeignClient;
import com.njcn.user.pojo.po.Dept;
import com.njcn.web.utils.RequestUtil;
import lombok.AllArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.influxdb.dto.QueryResult;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils;
@@ -91,7 +88,6 @@ public class PollutionSubstationServiceImpl extends ServiceImpl<RStatPollutionSu
private final RMpPollutionDPOMapper rMpPollutionDPOMapper;
private final InfluxDbUtils influxDbUtils = new InfluxDbUtils("admin", "njcnpqs", "http://192.168.1.18:8086", "pqsbase", ""); ;
/**
* @param pollutionSubstationQuryParam
* @Description: getPollutionSubstationData
@@ -578,99 +574,6 @@ public class PollutionSubstationServiceImpl extends ServiceImpl<RStatPollutionSu
return list.stream().sorted(Comparator.comparing(PollutionLineDTO::getData).reversed().thenComparing(PollutionLineDTO::getName)).collect(Collectors.toList());
}
@Override
public List<PollutionLineDTO> getLineRank(HarmonicPublicParam param) {
List<PollutionLineDTO> list= new ArrayList<>();
String pollutionType = param.getStatisticalType ( ).getId ();
String searchBeginTime = "";
if(Objects.equals (param.getReportFlag (),BizParamConstant.STAT_BIZ_YEAR) ){
searchBeginTime = param.getSearchBeginTime ( ).substring (0, 4)+"%";
}else if(Objects.equals (param.getReportFlag (),BizParamConstant.STAT_BIZ_MONTH)){
searchBeginTime = param.getSearchBeginTime ( ).substring (0, 7)+"%";
}else if(Objects.equals (param.getReportFlag (),BizParamConstant.STAT_BIZ_DAY)) {
searchBeginTime = param.getSearchBeginTime ( ).substring (0, 10)+"%";
}
if(param.getType()==0) {
param.setServerName(generalInfo.getMicroServiceName());
List<String> lineList = new ArrayList<>();
PollutionParamDTO paramDTO = new PollutionParamDTO();
if (StringUtils.isBlank(RequestUtil.getDeptIndex())) {
return list;
}
List<GeneralDeviceDTO> deviceList = generalDeviceInfoClient.getPracticalRunDeviceInfo(param).getData();
deviceList.forEach(item -> {
if (!CollectionUtils.isEmpty(item.getLineIndexes())) {
lineList.addAll(item.getLineIndexes());
}
});
if (!CollectionUtils.isEmpty(lineList)) {
paramDTO.setLineList(lineList);
list = lineFeignClient.getLineInfo(paramDTO).getData();
List<PublicDTO> result = getLineDate(lineList, param.getSearchBeginTime(), param.getSearchEndTime(), param.getStatisticalType().getCode());
if (!CollectionUtils.isEmpty(result)) {
list.stream().map(list1 -> result.stream().filter(list2 -> Objects.equals(list1.getId(), list2.getId())).findAny().map(m -> {
list1.setData(m.getData());
return list1;
})).collect(Collectors.toList());
}
}
list.sort((item1, item2) -> item2.getData().compareTo(item1.getData()));
if (list.size() > Param.UP_LIMIT) {
return list.subList(Param.DOWN_LIMIT, Param.UP_LIMIT);
}
return list;
}
PmsDeviceInfoParam pmsDeviceInfoParam = new PmsDeviceInfoParam();
pmsDeviceInfoParam.setDeptIndex(param.getDeptIndex());
//获取统计类型
pmsDeviceInfoParam.setStatisticalType(param.getStatisticalType());
//获取主网台账信息
List<PmsGeneralDeviceDTO> deviceList = pmsGeneralDeviceInfoClient.getPmsDeviceInfoWithInOrg(pmsDeviceInfoParam).getData();
List<String> monitorIdList=new ArrayList<>();
deviceList.forEach(dept -> {
monitorIdList.addAll(dept.getMonitorIdList());
});
if(CollectionUtil.isNotEmpty(monitorIdList)){
//获取监测点数据
List<RMpPollutionDPO> lineData = rMpPollutionDPOMapper.selectMaxList ( monitorIdList,pollutionType,searchBeginTime);
if(CollectionUtil.isEmpty(lineData)){
return list;
}
//获取主网监测点信息
List<Monitor> data = monitorClient.getMonitorList(monitorIdList).getData();
Map<String, Monitor> monitorMap = data.stream().collect(Collectors.toMap(Monitor::getId, Function.identity(), (key1, kye2) -> key1));
lineData.sort((item1, item2) -> item2.getValue().compareTo(item1.getValue()));
if (lineData.size() > Param.UP_LIMIT) {
lineData = lineData.subList(Param.DOWN_LIMIT, Param.UP_LIMIT);
}
PollutionLineDTO dto;
for (RMpPollutionDPO lineDatum : lineData) {
if(monitorMap.containsKey(lineDatum.getLineId())){
Monitor monitor = monitorMap.get(lineDatum.getLineId());
dto=new PollutionLineDTO();
dto.setId(monitor.getId());
dto.setName(monitor.getName());
dto.setPowerCompany(monitor.getOrgName());
dto.setSubstation(monitor.getPowerrId());
dto.setSubstationId(monitor.getPowerrName());
dto.setBusBar(monitor.getLineName());
dto.setData(lineDatum.getValue());
list.add(dto);
}
}
}
return list;
}
@Override
public List<PollutionLineDTO> getLineRankTop10 (HarmonicPublicParam param) {
@@ -752,53 +655,4 @@ public class PollutionSubstationServiceImpl extends ServiceImpl<RStatPollutionSu
return list;
}
/**
* 功能描述:根据监测点id查询influxDB污染指数
*
* @param line 监测点集合
* startTime 开始时间
* endTime 结束时间
* type 指标参数
* @return
* @author xy
* @date 2022/2/21 20:08
*/
private List<PublicDTO> getLineDate(List<String> line, String startTime, String endTime, String type) {
List<PublicDTO> result = new ArrayList<>();
String quota = "";
if (Objects.equals(type, DicDataEnum.V_HARMONIC.getCode())) {
quota = ",harmonic_v";
} else if (Objects.equals(type, DicDataEnum.I_ALL.getCode())) {
quota = ",harmonic_i";
}
StringBuilder stringBuilder = new StringBuilder();
stringBuilder.append(Param.TIME).append(" >= '").append(startTime).append(Param.START_TIME).append("' and ").append(Param.TIME).append(" <= '").append(endTime).append(Param.END_TIME).append("' and (");
for (int i = 0; i < line.size(); i++) {
if (line.size() - i != 1) {
stringBuilder.append(Param.LINE_ID).append("='").append(line.get(i)).append("' or ");
} else {
stringBuilder.append(Param.LINE_ID).append("='").append(line.get(i)).append("')");
}
}
stringBuilder.append(" group by line_id order by time desc limit 1 tz('Asia/Shanghai')");
String sql = "SELECT line_id" + quota + " FROM harmonic_pollution where " + stringBuilder;
QueryResult sqlResult = influxDbUtils.query(sql);
List<QueryResult.Series> list = sqlResult.getResults().get(0).getSeries();
if (!CollectionUtils.isEmpty(list)) {
list.forEach(po -> {
String index = po.getTags().get(Param.LINE_ID);
List<List<Object>> valueList = po.getValues();
if (!CollectionUtils.isEmpty(valueList)) {
for (List<Object> value : valueList) {
PublicDTO publicDTO = new PublicDTO();
Double data = value.get(2) == null ? 0.0 : BigDecimal.valueOf(Double.parseDouble(value.get(2).toString())).setScale(2, BigDecimal.ROUND_HALF_UP).doubleValue();
publicDTO.setId(index);
publicDTO.setData(data);
result.add(publicDTO);
}
}
});
}
return result;
}
}

View File

@@ -8,14 +8,10 @@ import com.njcn.harmonic.mapper.RStatLimitRateDMapper;
import com.njcn.harmonic.pojo.vo.RStatLimitRateDVO;
import com.njcn.harmonic.pojo.vo.SteadyInfoData;
import com.njcn.harmonic.service.SteadyDataService;
import com.njcn.influxdb.param.InfluxDBPublicParam;
import com.njcn.influxdb.utils.InfluxDbUtils;
import com.njcn.web.pojo.vo.SteadyDataVO;
import lombok.AllArgsConstructor;
import org.apache.commons.lang.StringUtils;
import org.influxdb.dto.QueryResult;
import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils;
import java.math.BigDecimal;
import java.text.DecimalFormat;
@@ -32,7 +28,6 @@ import java.util.List;
@AllArgsConstructor
public class SteadyDataServiceImpl implements SteadyDataService {
private final InfluxDbUtils influxDbUtils;
private final RStatLimitRateDMapper rateDMapper;
@Override

View File

@@ -23,8 +23,7 @@ import com.njcn.harmonic.pojo.vo.SteadyExceedRateVO;
import com.njcn.harmonic.service.IRStatLimitRateDService;
import com.njcn.harmonic.service.SteadyExceedRateService;
import com.njcn.harmonic.utils.PubUtils;
import com.njcn.influxdb.param.InfluxDBPublicParam;
import com.njcn.influxdb.utils.InfluxDbUtils;
import com.njcn.influx.pojo.constant.InfluxDBTableConstant;
import lombok.AllArgsConstructor;
import lombok.SneakyThrows;
import org.springframework.stereotype.Service;
@@ -47,8 +46,6 @@ public class SteadyExceedRateServiceImpl implements SteadyExceedRateService {
private final SteadyExceedRateMapper steadyExceedRateMapper;
private final InfluxDbUtils influxDbUtils;
private final IRStatLimitRateDService rateDService;
private final RStatLimitRateDMapper rateDMapper;
@@ -472,7 +469,7 @@ public class SteadyExceedRateServiceImpl implements SteadyExceedRateService {
List<LimitRatePO> limitRatePOS = new ArrayList<>();
List<RStatLimitRateDPO> limitRates = rateDService.list(new LambdaQueryWrapper<RStatLimitRateDPO>()
.in(RStatLimitRateDPO::getLineId, lineIndexes)
.eq(RStatLimitRateDPO::getPhasicType,InfluxDBPublicParam.PHASIC_TYPET)
.eq(RStatLimitRateDPO::getPhasicType, InfluxDBTableConstant.PHASE_TYPE_T)
.ge(StrUtil.isNotBlank(startTime), RStatLimitRateDPO::getTime, DateUtil.beginOfDay(DateUtil.parse(startTime)))
.le(StrUtil.isNotBlank(endTime), RStatLimitRateDPO::getTime, DateUtil.endOfDay(DateUtil.parse(endTime)))
);

View File

@@ -3,7 +3,6 @@ package com.njcn.harmonic.service.impl;
import cn.hutool.core.collection.CollUtil;
import cn.hutool.core.date.DateUtil;
import cn.hutool.core.util.StrUtil;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.njcn.common.config.GeneralInfo;
import com.njcn.common.pojo.dto.SimpleDTO;
@@ -14,19 +13,16 @@ import com.njcn.device.pq.enums.LineBaseEnum;
import com.njcn.device.pq.pojo.dto.GeneralDeviceDTO;
import com.njcn.device.pq.pojo.param.DeviceInfoParam;
import com.njcn.device.pq.pojo.vo.LineDetailDataVO;
import com.njcn.harmonic.constant.Param;
import com.njcn.harmonic.mapper.RMpVThdMapper;
import com.njcn.harmonic.mapper.THDistortionMapper;
import com.njcn.harmonic.pojo.dto.PublicDTO;
import com.njcn.harmonic.pojo.po.RMpVThd;
import com.njcn.harmonic.pojo.po.RStatDataVD;
import com.njcn.harmonic.pojo.vo.RMpVThdVO;
import com.njcn.harmonic.pojo.vo.THDistortionCensusVO;
import com.njcn.harmonic.pojo.vo.THDistortionVO;
import com.njcn.harmonic.service.IRStatDataVDService;
import com.njcn.harmonic.service.THDistortionService;
import com.njcn.harmonic.utils.PubUtils;
import com.njcn.influxdb.utils.InfluxDbUtils;
import lombok.AllArgsConstructor;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;

View File

@@ -14,7 +14,6 @@ import com.njcn.harmonic.pojo.vo.TerminalCensusVO;
import com.njcn.harmonic.pojo.vo.TerminalVO;
import com.njcn.harmonic.service.TerminalService;
import com.njcn.harmonic.utils.PubUtils;
import com.njcn.influxdb.utils.InfluxDbUtils;
import lombok.AllArgsConstructor;
import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils;
@@ -41,8 +40,6 @@ public class TerminalServiceImpl implements TerminalService {
private final TerminalDataMapper terminalDataMapper;
private final InfluxDbUtils influxDbUtils;
@Override
public List<TerminalVO> getTerminalData(DeviceInfoParam.BusinessParam terminalParam) {
List<TerminalVO> terminalList = new ArrayList<>();

View File

@@ -38,6 +38,8 @@ spring:
multipart:
max-file-size: 100MB
max-request-size: 100MB
jackson:
time-zone: GMT+8
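# Note (annotation only, assumption about intent): jackson.time-zone makes the service format Date fields
# in UTC+8 (Asia/Shanghai) instead of Jackson's default UTC when serializing responses.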
#项目日志的配置

View File

@@ -1,6 +1,9 @@
package com.njcn.influx.imapper;
import com.njcn.influx.ano.Param;
import com.njcn.influx.ano.Select;
import com.njcn.influx.base.InfluxDbBaseMapper;
import com.njcn.influx.pojo.bo.HarmonicHistoryData;
import com.njcn.influx.pojo.dto.StatisticalDataDTO;
import com.njcn.influx.pojo.po.PowerQualityData;
import com.njcn.influx.query.InfluxQueryWrapper;
@@ -17,8 +20,6 @@ import java.util.List;
public interface CommonMapper extends InfluxDbBaseMapper<PowerQualityData> {
List<StatisticalDataDTO> getStatistical(InfluxQueryWrapper influxQueryWrapper);
StatisticalDataDTO getLineRtData(InfluxQueryWrapper influxQueryWrapper);
List<StatisticalDataDTO> getDeviceRtData(InfluxQueryWrapper influxQueryWrapper);
@@ -26,4 +27,13 @@ public interface CommonMapper extends InfluxDbBaseMapper<PowerQualityData> {
List<StatisticalDataDTO> getDeviceRtDataByTime(InfluxQueryWrapper influxQueryWrapper);
StatisticalDataDTO getLineHistoryData(InfluxQueryWrapper influxQueryWrapper);
@Select(value = "#{sql}",resultType = StatisticalDataDTO.class)
StatisticalDataDTO selectBySql(@Param("sql") StringBuilder sql);
@Select(value = "#{sql}",resultType = StatisticalDataDTO.class)
List<?> selectLimitTargetBySql(@Param("sql")String sql);
@Select(value = "#{sql}",resultType = HarmonicHistoryData.class)
List<HarmonicHistoryData> getHistoryResult(@Param("sql")String sql);
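//Illustrative usage (see HistoryResultServiceImpl; the SQL below is a hypothetical shape with placeholder values):
//List<HarmonicHistoryData> rows = commonMapper.getHistoryResult(
//        "SELECT time as time, v_5 as aValue ,phasic_type FROM data_harmrate_v WHERE time >= '2023-07-01 00:00:00'"
//        + " and (line_id='xxx') and (phasic_type ='A' or phasic_type ='B' or phasic_type ='C')"
//        + " group by phasic_type order by time asc tz('Asia/Shanghai');");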
}

View File

@@ -0,0 +1,20 @@
package com.njcn.influx.imapper;
import com.njcn.influx.base.InfluxDbBaseMapper;
import com.njcn.influx.pojo.dto.HarmData;
import com.njcn.influx.pojo.po.DataHarmRateV;
import com.njcn.influx.query.InfluxQueryWrapper;
import java.util.List;
/**
* @author hongawen
* @version 1.0.0
 * @date 2023-07-17 11:03
*/
public interface DataHarmRateVMapper extends InfluxDbBaseMapper<DataHarmRateV> {
DataHarmRateV getMeanAllTimesData(InfluxQueryWrapper influxQueryWrapper);
List<HarmData> getHarmRateVHistoryData(InfluxQueryWrapper influxQueryWrapper);
}
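
A possible caller of getMeanAllTimesData, sketched from the wrapper calls (constructor, eq, between) that appear elsewhere in this diff; the phase literal, the table-name string, and whether additional select clauses are required are assumptions:

import com.njcn.influx.imapper.DataHarmRateVMapper;
import com.njcn.influx.pojo.po.DataHarmRateV;
import com.njcn.influx.query.InfluxQueryWrapper;

public class HarmRateVMeanSketch {

    private final DataHarmRateVMapper dataHarmRateVMapper;

    public HarmRateVMeanSketch(DataHarmRateVMapper dataHarmRateVMapper) {
        this.dataHarmRateVMapper = dataHarmRateVMapper;
    }

    public DataHarmRateV meanForRange(String lineId, String startTime, String endTime) {
        // Filter a single monitoring point over a time window; the averaging itself
        // is applied inside getMeanAllTimesData (going by the method name)
        InfluxQueryWrapper wrapper = new InfluxQueryWrapper("data_harmrate_v", DataHarmRateV.class);
        wrapper.eq("line_id", lineId)
               .eq("phasic_type", "A")
               .between("time", startTime, endTime);
        return dataHarmRateVMapper.getMeanAllTimesData(wrapper);
    }
}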

View File

@@ -0,0 +1,20 @@
package com.njcn.influx.imapper;
import com.njcn.influx.base.InfluxDbBaseMapper;
import com.njcn.influx.pojo.dto.HarmData;
import com.njcn.influx.pojo.po.DataI;
import com.njcn.influx.query.InfluxQueryWrapper;
import java.util.List;
/**
* @author hongawen
* @version 1.0.0
 * @date 2023-07-17 11:05
*/
public interface IDataIMapper extends InfluxDbBaseMapper<DataI> {
DataI getMeanAllTimesData(InfluxQueryWrapper influxQueryWrapper);
List<HarmData> getIHistoryData(InfluxQueryWrapper influxQueryWrapper);
}

View File

@@ -0,0 +1,31 @@
package com.njcn.influx.pojo.bo;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.njcn.common.utils.serializer.InstantDateSerializer;
import lombok.Data;
import org.influxdb.annotation.Column;
import java.io.Serializable;
import java.time.Instant;
/**
* @author hongawen
* @version 1.0.0
 * @date 2023-07-17 16:25
*/
@Data
public class HarmonicHistoryData implements Serializable {
@JsonSerialize(using = InstantDateSerializer.class)
private Instant time;
@Column(name = "phasic_type")
private String phasicType;
private Float aValue;
private Float bValue;
private Float cValue;
}

View File

@@ -0,0 +1,154 @@
package com.njcn.influx.pojo.constant;
/**
 * Constants for InfluxDB measurement (table) and field names
 * @author hongawen
 * @version 1.0.0
 * @date 2022-10-14 14:02
*/
public interface InfluxDBTableConstant {
/**
 * Voltage fluctuation and flicker table
 */
String DATA_FLICKER = "data_flicker";
/**
 * Voltage fluctuation table
 */
String DATA_FLUC = "data_fluc";
/**
 * Harmonic current phase angle table
 */
String DATA_HARM_PHASIC_I = "data_harmphasic_i";
/**
 * Harmonic voltage phase angle table
 */
String DATA_HARM_PHASIC_V = "data_harmphasic_v";
/**
 * Active power table
 */
String DATA_HARM_POWER_P = "data_harmpower_p";
/**
 * Reactive power table
 */
String DATA_HARM_POWER_Q = "data_harmpower_q";
/**
 * Apparent power table
 */
String DATA_HARM_POWER_S = "data_harmpower_s";
/**
 * Harmonic current content ratio table
 */
String DATA_HARM_RATE_I = "data_harmrate_i";
/**
 * Harmonic voltage content ratio table
 */
String DATA_HARM_RATE_V = "data_harmrate_v";
/**
 * Current table
 */
String DATA_I = "data_i";
/**
 * Interharmonic current amplitude table
 */
String DATA_IN_HARM_I = "data_inharm_i";
/**
 * Interharmonic voltage amplitude table
 */
String DATA_IN_HARM_V = "data_inharm_v";
/**
 * Long-term flicker table
 */
String DATA_PLT = "data_plt";
/**
 * Voltage table
 */
String DATA_V = "data_v";
/**
 * Time
 */
String TIME = "time";
/**
 * Line ID of the data
 */
String LINE_ID = "line_id";
/**
 * Device ID
 */
String DEV_ID = "dev_id";
/**
 * Phase type
 */
String PHASIC_TYPE = "phasic_type";
/**
 * Indicator (value) type
 */
String VALUE_TYPE = "value_type";
/**
 * Statistic result value
 */
String VALUE = "value";
/**
 * Maximum of the statistic result
 */
String MAX_VALUE = "maxValue";
/**
 * Minimum of the statistic result
 */
String MIN_VALUE = "minValue";
/**
 * Data quality flag
 */
String QUALITY_FLAG = "quality_flag";
String CP95 = "CP95";
/**
 * Phase identifiers
 */
String NO_PHASE = "M";
String PHASE_TYPE_A = "A";
String PHASE_TYPE_AB = "AB";
String PHASE_TYPE_B = "B";
String PHASE_TYPE_BC = "BC";
String PHASE_TYPE_C = "C";
String PHASE_TYPE_C0 = "C0";
String PHASE_TYPE_C1 = "C1";
String PHASE_TYPE_C2 = "C2";
String PHASE_TYPE_CA = "CA";
String PHASE_TYPE_T = "T";
/**
 * Start-of-day time suffix
 */
String START_TIME = " 00:00:00";
/**
 * End-of-day time suffix
 */
String END_TIME = " 23:59:59";
}
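
START_TIME and END_TIME are whole-day suffixes meant to be appended to a yyyy-MM-dd date string. A small sketch of the intended pattern, assumed from the constant values; the measurement and result type used here are placeholders:

import com.njcn.influx.pojo.constant.InfluxDBTableConstant;
import com.njcn.influx.pojo.dto.StatisticalDataDTO;
import com.njcn.influx.query.InfluxQueryWrapper;

public class DayRangeSketch {

    public InfluxQueryWrapper wholeDay(String day, String lineId) {
        // day is expected as "yyyy-MM-dd"; the suffixes expand it to a full-day window
        String start = day + InfluxDBTableConstant.START_TIME;  // e.g. "2023-07-17 00:00:00"
        String end = day + InfluxDBTableConstant.END_TIME;      // e.g. "2023-07-17 23:59:59"

        InfluxQueryWrapper wrapper =
                new InfluxQueryWrapper(InfluxDBTableConstant.DATA_V, StatisticalDataDTO.class);
        wrapper.eq(InfluxDBTableConstant.LINE_ID, lineId)
               .between(InfluxDBTableConstant.TIME, start, end);
        return wrapper;
    }
}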

View File

@@ -0,0 +1,32 @@
package com.njcn.influx.pojo.dto;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.njcn.common.utils.serializer.InstantDateDeserializer;
import com.njcn.common.utils.serializer.InstantDateSerializer;
import com.njcn.influx.ano.IgnoreData;
import lombok.Data;
import org.influxdb.annotation.Column;
import java.io.Serializable;
import java.time.Instant;
/**
* @author hongawen
* @version 1.0.0
 * @date 2023-07-19 09:43
*/
@Data
public class HarmData implements Serializable{
@Column(name = "time")
@JsonSerialize(using = InstantDateSerializer.class)
@JsonDeserialize(using = InstantDateDeserializer.class)
private Instant time;
@IgnoreData(value = true)
private Float value;
}

View File

@@ -0,0 +1,24 @@
package com.njcn.influx.pojo.dto;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
/**
* @author hongawen
* @version 1.0.0
 * @date 2023-07-19 09:40
*/
@Data
@NoArgsConstructor
@AllArgsConstructor
public class HarmHistoryDataDTO implements Serializable {
private List<HarmData> historyData = new ArrayList<>();
private float overLimit;
}

View File

@@ -12,10 +12,10 @@ import java.util.List;
* @version V1.0.0
*/
public interface CommonService {
List<StatisticalDataDTO> commonquery(String lineId, String tableName, String columnName);
/**
 * Get monitoring point data by the given conditions
 *
 * @param lineId monitoring point ID
 * @param tableName table name
 * @param columnName column name
@@ -27,16 +27,16 @@ public interface CommonService {
/**
 * Get monitoring point data by the given conditions
 *
 * @param lineIds monitoring point IDs
 * @param tableName table name
 * @param columnName column name
 * @param phasic phase
 * @param dataType data type
* @return
*/
List<StatisticalDataDTO> getDeviceRtData(List<String> lineIds, String tableName, String columnName, String phasic, String dataType);
/**
* @Description: getDeviceRtDataByTime
 * @param lineIds monitoring point IDs
 * @param tableName table name
 * @param columnName column name
@@ -44,6 +44,7 @@ public interface CommonService {
 * @param dataType data type
* @param startTime start time
* @param endTime end time
* @Description: getDeviceRtDataByTime
* @return: java.util.List<com.njcn.influx.pojo.dto.StatisticalDataDTO>
* @Author: clam
* @Date: 2023/6/13
@@ -52,14 +53,19 @@ public interface CommonService {
/**
 * Get the maximum and minimum values for a monitoring point within a time range
 *
 * @param lineId monitoring point ID
 * @param tableName table name
 * @param columnName column name
 * @param startTime start time
 * @param endTime end time
* @return
*/
StatisticalDataDTO getLineHistoryData(String lineId, String tableName, String columnName, String startTime, String endTime);
/***
 * Used when the table name, columns, and aggregation are not fixed ahead of time; the caller assembles the SQL in code
 * @author hongawen
 * @date 2023/7/14 15:23
 * @param sql the InfluxDB SQL statement
* @return StatisticalDataDTO
*/
StatisticalDataDTO selectBySql(StringBuilder sql);
}
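
Per the Javadoc above, selectBySql is the escape hatch for queries whose table, column, and aggregation are only known at runtime. A hedged sketch of such a caller; the result alias, the aggregate names, and the WHERE clause layout are assumptions, not part of this commit:

import com.njcn.influx.pojo.dto.StatisticalDataDTO;
import com.njcn.influx.service.CommonService;

public class DynamicSqlSketch {

    private final CommonService commonService;

    public DynamicSqlSketch(CommonService commonService) {
        this.commonService = commonService;
    }

    public StatisticalDataDTO statistic(String aggregate, String column, String table,
                                        String lineId, String start, String end) {
        // aggregate might be "MAX", "MEAN" or "LAST", decided at runtime;
        // column and table would typically come from InfluxDBTableConstant
        StringBuilder sql = new StringBuilder("SELECT ")
                .append(aggregate).append("(").append(column).append(") AS value")
                .append(" FROM ").append(table)
                .append(" WHERE line_id = '").append(lineId).append("'")
                .append(" AND time >= '").append(start).append("'")
                .append(" AND time <= '").append(end).append("'");
        return commonService.selectBySql(sql);
    }
}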

View File

@@ -0,0 +1,13 @@
package com.njcn.influx.service;
import com.njcn.influx.pojo.po.DataHarmRateV;
import com.njcn.influx.query.InfluxQueryWrapper;
/**
* @author hongawen
* @version 1.0.0
 * @date 2023-07-17 11:01
*/
public interface DataHarmRateVService {
DataHarmRateV getMeanAllTimesData(InfluxQueryWrapper influxQueryWrapper);
}

View File

@@ -0,0 +1,14 @@
package com.njcn.influx.service;
import com.njcn.influx.pojo.po.DataI;
import com.njcn.influx.query.InfluxQueryWrapper;
/**
* @author hongawen
* @version 1.0.0
 * @date 2023-07-17 11:04
*/
public interface IDataIService {
DataI getMeanAllTimesData(InfluxQueryWrapper influxQueryWrapper);
}

View File

@@ -1,15 +1,8 @@
package com.njcn.influx.service.impl;
import com.njcn.common.pojo.constant.PatternRegex;
import com.njcn.common.pojo.enums.response.CommonResponseEnum;
import com.njcn.common.pojo.exception.BusinessException;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.common.utils.HttpResultUtil;
import com.njcn.common.utils.PubUtils;
import com.njcn.influx.ano.Select;
import com.njcn.influx.imapper.CommonMapper;
import com.njcn.influx.pojo.constant.InfluxDBTableConstant;
import com.njcn.influx.pojo.dto.StatisticalDataDTO;
import com.njcn.influx.pojo.po.HarmonicRatioData;
import com.njcn.influx.query.InfluxQueryWrapper;
import com.njcn.influx.service.CommonService;
import com.njcn.influx.utils.ReflectUitl;
@@ -29,23 +22,9 @@ import java.util.List;
@Service
@RequiredArgsConstructor
public class CommonServiceImpl implements CommonService {
private final CommonMapper commonMapper;
@Override
public List<StatisticalDataDTO> commonquery(String lineId ,String tableName, String columnName) {
HashMap<String, Class<?>> entityClassesByAnnotation = ReflectUitl.getEntityClassesByAnnotation();
Class<?> aClass = entityClassesByAnnotation.get(tableName);
InfluxQueryWrapper influxQueryWrapper = new InfluxQueryWrapper(aClass,StatisticalDataDTO.class);
influxQueryWrapper.eq("LineId",lineId).
last(columnName,"statisticalData").
groupBy(StatisticalDataDTO::getLineId, StatisticalDataDTO::getValueType, StatisticalDataDTO::getPhaseType);
List<StatisticalDataDTO> statistical = commonMapper.getStatistical(influxQueryWrapper);
return statistical;
}
@Override
public StatisticalDataDTO getLineRtData(String lineId, String tableName, String columnName, String phasic, String dataType) {
InfluxQueryWrapper influxQueryWrapper = new InfluxQueryWrapper(tableName,StatisticalDataDTO.class);
@@ -53,9 +32,9 @@ public class CommonServiceImpl implements CommonService {
.select(StatisticalDataDTO::getPhaseType)
.select(StatisticalDataDTO::getValueType)
.last(columnName)
.eq("line_id",lineId)
.eq("phasic_type",phasic)
.eq("value_type",dataType);
.eq(InfluxDBTableConstant.LINE_ID,lineId)
.eq(InfluxDBTableConstant.PHASIC_TYPE,phasic)
.eq(InfluxDBTableConstant.VALUE_TYPE,dataType);
return commonMapper.getLineRtData(influxQueryWrapper);
}
@@ -66,9 +45,9 @@ public class CommonServiceImpl implements CommonService {
.select(StatisticalDataDTO::getPhaseType)
.select(StatisticalDataDTO::getValueType)
.last(columnName)
.or("line_id",lineIds)
.eq("phasic_type",phasic)
.eq("value_type",dataType).groupBy("line_id");
.or(InfluxDBTableConstant.LINE_ID,lineIds)
.eq(InfluxDBTableConstant.PHASIC_TYPE,phasic)
.eq(InfluxDBTableConstant.VALUE_TYPE,dataType).groupBy(InfluxDBTableConstant.LINE_ID);
return commonMapper.getDeviceRtData(influxQueryWrapper);
}
@@ -78,21 +57,26 @@ public class CommonServiceImpl implements CommonService {
influxQueryWrapper.select(StatisticalDataDTO::getLineId)
.select(StatisticalDataDTO::getPhaseType)
.select(StatisticalDataDTO::getValueType)
.select(columnName,"value")
.or("line_id",lineIds)
.eq("phasic_type",phasic)
.between("time", startTime, endTime)
.eq("value_type",dataType);
.select(columnName,InfluxDBTableConstant.VALUE)
.or(InfluxDBTableConstant.LINE_ID,lineIds)
.eq(InfluxDBTableConstant.PHASIC_TYPE,phasic)
.between(InfluxDBTableConstant.TIME, startTime, endTime)
.eq(InfluxDBTableConstant.VALUE_TYPE,dataType);
return commonMapper.getDeviceRtDataByTime(influxQueryWrapper);
}
@Override
public StatisticalDataDTO getLineHistoryData(String lineId, String tableName, String columnName, String startTime, String endTime) {
InfluxQueryWrapper influxQueryWrapper = new InfluxQueryWrapper(tableName,StatisticalDataDTO.class);
influxQueryWrapper.max(columnName,"maxValue")
.min(columnName,"minValue")
.eq("line_id",lineId)
.between("time", startTime, endTime);
influxQueryWrapper.max(columnName,InfluxDBTableConstant.MAX_VALUE)
.min(columnName,InfluxDBTableConstant.MIN_VALUE)
.eq(InfluxDBTableConstant.LINE_ID,lineId)
.between(InfluxDBTableConstant.TIME, startTime, endTime);
return commonMapper.getLineHistoryData(influxQueryWrapper);
}
@Override
public StatisticalDataDTO selectBySql(StringBuilder sql) {
return commonMapper.selectBySql(sql);
}
}

Some files were not shown because too many files have changed in this diff.