@@ -3,7 +3,7 @@ package com.njcn.executor.handler;
 import com.njcn.common.pojo.constant.PatternRegex;
 import com.njcn.device.api.LineFeignClient;
 import com.njcn.executor.pojo.vo.*;
-import com.njcn.harmonic.constant.Param;
+import com.njcn.influxdb.param.InfluxDBPublicParam;
 import com.njcn.influxdb.utils.InfluxDbUtils;
 import com.xxl.job.core.context.XxlJobHelper;
 import com.xxl.job.core.handler.annotation.XxlJob;
@@ -36,8 +36,6 @@ import java.util.regex.Pattern;
 @AllArgsConstructor
 public class DayJob {
 
-private final String DATABASE = "pqsbase";
-
 private final InfluxDbUtils influxDbUtils;
 
 private final LineFeignClient lineFeignClient;
@@ -194,8 +192,8 @@ public class DayJob {
 stringBuilder1.append("min(v_").append(i).append(") AS v_").append(i).append(",");
 }
 }
-stringBuilder2.append(Param.TIME + " >= '").append(startTime).append("' and ").append(Param.TIME).append(" <= '").append(endTime).append("' and value_type = 'MIN' group by line_id,phasic_type,value_type,quality_flag tz('Asia/Shanghai')");
-String sql1 = "select "+stringBuilder1+" from data_v where "+stringBuilder+stringBuilder2;
+stringBuilder2.append(InfluxDBPublicParam.TIME + " >= '").append(startTime).append("' and ").append(InfluxDBPublicParam.TIME).append(" <= '").append(endTime).append("' and value_type = 'MIN' group by line_id,phasic_type,value_type,quality_flag ").append(InfluxDBPublicParam.TIME_ZONE);
+String sql1 = "select "+stringBuilder1+" from "+ InfluxDBPublicParam.DATA_V+" where "+stringBuilder+stringBuilder2;
 QueryResult sqlResult1 = influxDbUtils.query(sql1);
 InfluxDBResultMapper resultMapper1 = new InfluxDBResultMapper();
 List<DataV> list1 = resultMapper1.toPOJO(sqlResult1, DataV.class);
@@ -210,8 +208,8 @@ public class DayJob {
 stringBuilder3.append("max(v_").append(i).append(") AS v_").append(i).append(",");
 }
 }
-stringBuilder4.append(Param.TIME + " >= '").append(startTime).append("' and ").append(Param.TIME).append(" <= '").append(endTime).append("' and value_type = 'MAX' group by line_id,phasic_type,value_type,quality_flag tz('Asia/Shanghai')");
-String sql2 = "select "+stringBuilder3+" from data_v where "+stringBuilder+stringBuilder4;
+stringBuilder4.append(InfluxDBPublicParam.TIME + " >= '").append(startTime).append("' and ").append(InfluxDBPublicParam.TIME).append(" <= '").append(endTime).append("' and value_type = 'MAX' group by line_id,phasic_type,value_type,quality_flag ").append(InfluxDBPublicParam.TIME_ZONE);
+String sql2 = "select "+stringBuilder3+" from "+ InfluxDBPublicParam.DATA_V+" where "+stringBuilder+stringBuilder4;
 QueryResult sqlResult2 = influxDbUtils.query(sql2);
 InfluxDBResultMapper resultMapper2 = new InfluxDBResultMapper();
 List<DataV> list2 = resultMapper2.toPOJO(sqlResult2, DataV.class);
@@ -226,8 +224,8 @@ public class DayJob {
 stringBuilder5.append("mean(v_").append(i).append(") AS v_").append(i).append(",");
 }
 }
-stringBuilder6.append(Param.TIME + " >= '").append(startTime).append("' and ").append(Param.TIME).append(" <= '").append(endTime).append("' and value_type = 'AVG' group by line_id,phasic_type,value_type,quality_flag tz('Asia/Shanghai')");
-String sql3 = "select "+stringBuilder5+" from data_v where "+stringBuilder+stringBuilder6;
+stringBuilder6.append(InfluxDBPublicParam.TIME + " >= '").append(startTime).append("' and ").append(InfluxDBPublicParam.TIME).append(" <= '").append(endTime).append("' and value_type = 'AVG' group by line_id,phasic_type,value_type,quality_flag ").append(InfluxDBPublicParam.TIME_ZONE);
+String sql3 = "select "+stringBuilder5+" from "+ InfluxDBPublicParam.DATA_V+" where "+stringBuilder+stringBuilder6;
 QueryResult sqlResult3 = influxDbUtils.query(sql3);
 InfluxDBResultMapper resultMapper3 = new InfluxDBResultMapper();
 List<DataV> list3 = resultMapper3.toPOJO(sqlResult3, DataV.class);
@@ -242,8 +240,8 @@ public class DayJob {
 stringBuilder7.append("percentile(v_").append(i).append(",95) AS v_").append(i).append(",");
 }
 }
-stringBuilder8.append(Param.TIME + " >= '").append(startTime).append("' and ").append(Param.TIME).append(" <= '").append(endTime).append("' and value_type = 'AVG' group by line_id,phasic_type,value_type,quality_flag tz('Asia/Shanghai')");
-String sql4 = "select "+stringBuilder7+" from data_v where "+stringBuilder+stringBuilder8;
+stringBuilder8.append(InfluxDBPublicParam.TIME + " >= '").append(startTime).append("' and ").append(InfluxDBPublicParam.TIME).append(" <= '").append(endTime).append("' and value_type = 'AVG' group by line_id,phasic_type,value_type,quality_flag ").append(InfluxDBPublicParam.TIME_ZONE);
+String sql4 = "select "+stringBuilder7+" from "+ InfluxDBPublicParam.DATA_V+" where "+stringBuilder+stringBuilder8;
 QueryResult sqlResult4 = influxDbUtils.query(sql4);
 InfluxDBResultMapper resultMapper4 = new InfluxDBResultMapper();
 List<DataV> list4 = resultMapper4.toPOJO(sqlResult4, DataV.class);
@@ -334,12 +332,12 @@ public class DayJob {
 fields.put("v_48",item.getV48());
 fields.put("v_49",item.getV49());
 fields.put("v_50",item.getV50());
-Point point = influxDbUtils.pointBuilder("day_v", time, TimeUnit.MILLISECONDS, tags, fields);
-BatchPoints batchPoints = BatchPoints.database(DATABASE).tag("line_id", item.getLineId()).tag("phasic_type",item.getPhaseType()).tag("quality_flag",item.getQualityFlag()).tag("value_type",item.getValueType()).retentionPolicy("").consistency(InfluxDB.ConsistencyLevel.ALL).build();
+Point point = influxDbUtils.pointBuilder(InfluxDBPublicParam.DAY_V, time, TimeUnit.MILLISECONDS, tags, fields);
+BatchPoints batchPoints = BatchPoints.database(InfluxDBPublicParam.DATABASE).tag(InfluxDBPublicParam.LINE_ID, item.getLineId()).tag(InfluxDBPublicParam.PHASIC_TYPE,item.getPhaseType()).tag(InfluxDBPublicParam.QUALITY_FLAG,item.getQualityFlag()).tag(InfluxDBPublicParam.VALUE_TYPE,item.getValueType()).retentionPolicy("").consistency(InfluxDB.ConsistencyLevel.ALL).build();
 batchPoints.point(point);
 records.add(batchPoints.lineProtocol());
 });
-influxDbUtils.batchInsert(DATABASE,"", InfluxDB.ConsistencyLevel.ALL, records);
+influxDbUtils.batchInsert(InfluxDBPublicParam.DATABASE,"", InfluxDB.ConsistencyLevel.ALL, records);
 }
@@ -366,8 +364,8 @@ public class DayJob {
 stringBuilder1.append("min(i_").append(i).append(") AS i_").append(i).append(",");
 }
 }
-stringBuilder2.append(Param.TIME + " >= '").append(startTime).append("' and ").append(Param.TIME).append(" <= '").append(endTime).append("' and value_type = 'MIN' group by line_id,phasic_type,value_type,quality_flag tz('Asia/Shanghai')");
-String sql1 = "select "+stringBuilder1+" from data_i where "+stringBuilder+stringBuilder2;
+stringBuilder2.append(InfluxDBPublicParam.TIME + " >= '").append(startTime).append("' and ").append(InfluxDBPublicParam.TIME).append(" <= '").append(endTime).append("' and value_type = 'MIN' group by line_id,phasic_type,value_type,quality_flag ").append(InfluxDBPublicParam.TIME_ZONE);
+String sql1 = "select "+stringBuilder1+" from "+ InfluxDBPublicParam.DATA_I+" where "+stringBuilder+stringBuilder2;
 QueryResult sqlResult1 = influxDbUtils.query(sql1);
 InfluxDBResultMapper resultMapper1 = new InfluxDBResultMapper();
 List<DataI> list1 = resultMapper1.toPOJO(sqlResult1, DataI.class);
@@ -382,8 +380,8 @@ public class DayJob {
 stringBuilder3.append("max(i_").append(i).append(") AS i_").append(i).append(",");
 }
 }
-stringBuilder4.append(Param.TIME + " >= '").append(startTime).append("' and ").append(Param.TIME).append(" <= '").append(endTime).append("' and value_type = 'MAX' group by line_id,phasic_type,value_type,quality_flag tz('Asia/Shanghai')");
-String sql2 = "select "+stringBuilder3+" from data_i where "+stringBuilder+stringBuilder4;
+stringBuilder4.append(InfluxDBPublicParam.TIME + " >= '").append(startTime).append("' and ").append(InfluxDBPublicParam.TIME).append(" <= '").append(endTime).append("' and value_type = 'MAX' group by line_id,phasic_type,value_type,quality_flag ").append(InfluxDBPublicParam.TIME_ZONE);
+String sql2 = "select "+stringBuilder3+" from "+ InfluxDBPublicParam.DATA_I+" where "+stringBuilder+stringBuilder4;
 QueryResult sqlResult2 = influxDbUtils.query(sql2);
 InfluxDBResultMapper resultMapper2 = new InfluxDBResultMapper();
 List<DataI> list2 = resultMapper2.toPOJO(sqlResult2, DataI.class);
@@ -398,8 +396,8 @@ public class DayJob {
 stringBuilder5.append("mean(i_").append(i).append(") AS i_").append(i).append(",");
 }
 }
-stringBuilder6.append(Param.TIME + " >= '").append(startTime).append("' and ").append(Param.TIME).append(" <= '").append(endTime).append("' and value_type = 'AVG' group by line_id,phasic_type,value_type,quality_flag tz('Asia/Shanghai')");
-String sql3 = "select "+stringBuilder5+" from data_i where "+stringBuilder+stringBuilder6;
+stringBuilder6.append(InfluxDBPublicParam.TIME + " >= '").append(startTime).append("' and ").append(InfluxDBPublicParam.TIME).append(" <= '").append(endTime).append("' and value_type = 'AVG' group by line_id,phasic_type,value_type,quality_flag ").append(InfluxDBPublicParam.TIME_ZONE);
+String sql3 = "select "+stringBuilder5+" from "+ InfluxDBPublicParam.DATA_I+" where "+stringBuilder+stringBuilder6;
 QueryResult sqlResult3 = influxDbUtils.query(sql3);
 InfluxDBResultMapper resultMapper3 = new InfluxDBResultMapper();
 List<DataI> list3 = resultMapper3.toPOJO(sqlResult3, DataI.class);
@@ -414,8 +412,8 @@ public class DayJob {
 stringBuilder7.append("percentile(i_").append(i).append(",95) AS i_").append(i).append(",");
 }
 }
-stringBuilder8.append(Param.TIME + " >= '").append(startTime).append("' and ").append(Param.TIME).append(" <= '").append(endTime).append("' and value_type = 'AVG' group by line_id,phasic_type,value_type,quality_flag tz('Asia/Shanghai')");
-String sql4 = "select "+stringBuilder7+" from data_i where "+stringBuilder+stringBuilder8;
+stringBuilder8.append(InfluxDBPublicParam.TIME + " >= '").append(startTime).append("' and ").append(InfluxDBPublicParam.TIME).append(" <= '").append(endTime).append("' and value_type = 'AVG' group by line_id,phasic_type,value_type,quality_flag ").append(InfluxDBPublicParam.TIME_ZONE);
+String sql4 = "select "+stringBuilder7+" from "+ InfluxDBPublicParam.DATA_I+" where "+stringBuilder+stringBuilder8;
 QueryResult sqlResult4 = influxDbUtils.query(sql4);
 InfluxDBResultMapper resultMapper4 = new InfluxDBResultMapper();
 List<DataI> list4 = resultMapper4.toPOJO(sqlResult4, DataI.class);
@@ -501,12 +499,12 @@ public class DayJob {
 fields.put("i_48",item.getI48());
 fields.put("i_49",item.getI49());
 fields.put("i_50",item.getI50());
-Point point = influxDbUtils.pointBuilder("day_i", time, TimeUnit.MILLISECONDS, tags, fields);
-BatchPoints batchPoints = BatchPoints.database(DATABASE).tag("line_id", item.getLineId()).tag("phasic_type",item.getPhaseType()).tag("quality_flag",item.getQualityFlag()).tag("value_type",item.getValueType()).retentionPolicy("").consistency(InfluxDB.ConsistencyLevel.ALL).build();
+Point point = influxDbUtils.pointBuilder(InfluxDBPublicParam.DAY_I, time, TimeUnit.MILLISECONDS, tags, fields);
+BatchPoints batchPoints = BatchPoints.database(InfluxDBPublicParam.DATABASE).tag(InfluxDBPublicParam.LINE_ID, item.getLineId()).tag(InfluxDBPublicParam.PHASIC_TYPE,item.getPhaseType()).tag(InfluxDBPublicParam.QUALITY_FLAG,item.getQualityFlag()).tag(InfluxDBPublicParam.VALUE_TYPE,item.getValueType()).retentionPolicy("").consistency(InfluxDB.ConsistencyLevel.ALL).build();
 batchPoints.point(point);
 records.add(batchPoints.lineProtocol());
 });
-influxDbUtils.batchInsert(DATABASE,"", InfluxDB.ConsistencyLevel.ALL, records);
+influxDbUtils.batchInsert(InfluxDBPublicParam.DATABASE,"", InfluxDB.ConsistencyLevel.ALL, records);
 }
@@ -522,7 +520,7 @@ public class DayJob {
 List<DataFlicker> result = new ArrayList<>();
 StringBuilder stringBuilder = lineStringBuilder(lineList);
 // minimum value
-String sql1 = "select min(fluc) AS fluc,min(plt) AS plt,min(pst) AS pst from data_flicker where "+stringBuilder+" time >= '" + startTime + "' and time <= '" + endTime + "' group by line_id,phasic_type,quality_flag tz('Asia/Shanghai')";
+String sql1 = "select min(fluc) AS fluc,min(plt) AS plt,min(pst) AS pst from "+ InfluxDBPublicParam.DATA_FLICKER+" where "+stringBuilder+" time >= '" + startTime + "' and time <= '" + endTime + "' group by line_id,phasic_type,quality_flag "+ InfluxDBPublicParam.TIME_ZONE;
 QueryResult sqlResult1 = influxDbUtils.query(sql1);
 InfluxDBResultMapper resultMapper1 = new InfluxDBResultMapper();
 List<DataFlicker> list1 = resultMapper1.toPOJO(sqlResult1, DataFlicker.class);
@@ -530,7 +528,7 @@ public class DayJob {
 item.setValueType("MIN");
 });
 // maximum value
-String sql2 = "select max(fluc) AS fluc,max(plt) AS plt,max(pst) AS pst from data_flicker where "+stringBuilder+" time >= '" + startTime + "' and time <= '" + endTime + "' group by line_id,phasic_type,quality_flag tz('Asia/Shanghai')";
+String sql2 = "select max(fluc) AS fluc,max(plt) AS plt,max(pst) AS pst from "+ InfluxDBPublicParam.DATA_FLICKER+" where "+stringBuilder+" time >= '" + startTime + "' and time <= '" + endTime + "' group by line_id,phasic_type,quality_flag "+ InfluxDBPublicParam.TIME_ZONE;
 QueryResult sqlResult2 = influxDbUtils.query(sql2);
 InfluxDBResultMapper resultMapper2 = new InfluxDBResultMapper();
 List<DataFlicker> list2 = resultMapper2.toPOJO(sqlResult2, DataFlicker.class);
@@ -538,7 +536,7 @@ public class DayJob {
 item.setValueType("MAX");
 });
 // average value
-String sql3 = "select mean(fluc) AS fluc,mean(plt) AS plt,mean(pst) AS pst from data_flicker where "+stringBuilder+" time >= '" + startTime + "' and time <= '" + endTime + "' group by line_id,phasic_type,quality_flag tz('Asia/Shanghai')";
+String sql3 = "select mean(fluc) AS fluc,mean(plt) AS plt,mean(pst) AS pst from "+ InfluxDBPublicParam.DATA_FLICKER+" where "+stringBuilder+" time >= '" + startTime + "' and time <= '" + endTime + "' group by line_id,phasic_type,quality_flag "+ InfluxDBPublicParam.TIME_ZONE;
 QueryResult sqlResult3 = influxDbUtils.query(sql3);
 InfluxDBResultMapper resultMapper3= new InfluxDBResultMapper();
 List<DataFlicker> list3 = resultMapper3.toPOJO(sqlResult3, DataFlicker.class);
@@ -546,7 +544,7 @@ public class DayJob {
 item.setValueType("AVG");
 });
 // CP95 value
-String sql4 = "select percentile(fluc,95) AS fluc,percentile(plt,95) AS plt,percentile(pst,95) AS pst from data_flicker where "+stringBuilder+" time >= '" + startTime + "' and time <= '" + endTime + "' group by line_id,phasic_type,quality_flag tz('Asia/Shanghai')";
+String sql4 = "select percentile(fluc,95) AS fluc,percentile(plt,95) AS plt,percentile(pst,95) AS pst from "+ InfluxDBPublicParam.DATA_FLICKER+" where "+stringBuilder+" time >= '" + startTime + "' and time <= '" + endTime + "' group by line_id,phasic_type,quality_flag "+ InfluxDBPublicParam.TIME_ZONE;;
 QueryResult sqlResult4 = influxDbUtils.query(sql4);
 InfluxDBResultMapper resultMapper4= new InfluxDBResultMapper();
 List<DataFlicker> list4 = resultMapper4.toPOJO(sqlResult4, DataFlicker.class);
@@ -579,12 +577,12 @@ public class DayJob {
 fields.put("fluc",item.getFluc());
 fields.put("plt",item.getPlt());
 fields.put("pst",item.getPst());
-Point point = influxDbUtils.pointBuilder("day_flicker", time, TimeUnit.MILLISECONDS, tags, fields);
-BatchPoints batchPoints = BatchPoints.database(DATABASE).tag("line_id", item.getLineId()).tag("phasic_type",item.getPhaseType()).tag("quality_flag",item.getQualityFlag()).tag("value_type",item.getValueType()).retentionPolicy("").consistency(InfluxDB.ConsistencyLevel.ALL).build();
+Point point = influxDbUtils.pointBuilder(InfluxDBPublicParam.DAY_FLICKER, time, TimeUnit.MILLISECONDS, tags, fields);
+BatchPoints batchPoints = BatchPoints.database(InfluxDBPublicParam.DATABASE).tag(InfluxDBPublicParam.LINE_ID, item.getLineId()).tag(InfluxDBPublicParam.PHASIC_TYPE,item.getPhaseType()).tag(InfluxDBPublicParam.QUALITY_FLAG,item.getQualityFlag()).tag(InfluxDBPublicParam.VALUE_TYPE,item.getValueType()).retentionPolicy("").consistency(InfluxDB.ConsistencyLevel.ALL).build();
 batchPoints.point(point);
 records.add(batchPoints.lineProtocol());
 });
-influxDbUtils.batchInsert(DATABASE,"", InfluxDB.ConsistencyLevel.ALL, records);
+influxDbUtils.batchInsert(InfluxDBPublicParam.DATABASE,"", InfluxDB.ConsistencyLevel.ALL, records);
 }
@@ -600,7 +598,7 @@ public class DayJob {
 List<DataFluc> result = new ArrayList<>();
 StringBuilder stringBuilder = lineStringBuilder(lineList);
 // minimum value
-String sql1 = "select min(fluc) AS fluc,min(fluccf) AS fluccf from data_fluc where "+stringBuilder+" time >= '" + startTime + "' and time <= '" + endTime + "' group by line_id,phasic_type,quality_flag tz('Asia/Shanghai')";
+String sql1 = "select min(fluc) AS fluc,min(fluccf) AS fluccf from "+ InfluxDBPublicParam.DATA_FLUC+" where "+stringBuilder+" time >= '" + startTime + "' and time <= '" + endTime + "' group by line_id,phasic_type,quality_flag "+ InfluxDBPublicParam.TIME_ZONE;
 QueryResult sqlResult1 = influxDbUtils.query(sql1);
 InfluxDBResultMapper resultMapper1 = new InfluxDBResultMapper();
 List<DataFluc> list1 = resultMapper1.toPOJO(sqlResult1, DataFluc.class);
@@ -608,7 +606,7 @@ public class DayJob {
 item.setValueType("MIN");
 });
 // maximum value
-String sql2 = "select max(fluc) AS fluc,max(fluccf) AS fluccf from data_fluc where "+stringBuilder+" time >= '" + startTime + "' and time <= '" + endTime + "' group by line_id,phasic_type,quality_flag tz('Asia/Shanghai')";
+String sql2 = "select max(fluc) AS fluc,max(fluccf) AS fluccf from "+ InfluxDBPublicParam.DATA_FLUC+" where "+stringBuilder+" time >= '" + startTime + "' and time <= '" + endTime + "' group by line_id,phasic_type,quality_flag "+ InfluxDBPublicParam.TIME_ZONE;
 QueryResult sqlResult2 = influxDbUtils.query(sql2);
 InfluxDBResultMapper resultMapper2 = new InfluxDBResultMapper();
 List<DataFluc> list2 = resultMapper2.toPOJO(sqlResult2, DataFluc.class);
@@ -616,7 +614,7 @@ public class DayJob {
 item.setValueType("MAX");
 });
 // average value
-String sql3 = "select mean(fluc) AS fluc,mean(fluccf) AS fluccf from data_fluc where "+stringBuilder+" time >= '" + startTime + "' and time <= '" + endTime + "' group by line_id,phasic_type,quality_flag tz('Asia/Shanghai')";
+String sql3 = "select mean(fluc) AS fluc,mean(fluccf) AS fluccf from "+ InfluxDBPublicParam.DATA_FLUC+" where "+stringBuilder+" time >= '" + startTime + "' and time <= '" + endTime + "' group by line_id,phasic_type,quality_flag "+ InfluxDBPublicParam.TIME_ZONE;
 QueryResult sqlResult3 = influxDbUtils.query(sql3);
 InfluxDBResultMapper resultMapper3= new InfluxDBResultMapper();
 List<DataFluc> list3 = resultMapper3.toPOJO(sqlResult3, DataFluc.class);
@@ -624,7 +622,7 @@ public class DayJob {
 item.setValueType("AVG");
 });
 // CP95 value
-String sql4 = "select percentile(fluc,95) AS fluc,percentile(fluccf,95) AS fluccf from data_fluc where "+stringBuilder+" time >= '" + startTime + "' and time <= '" + endTime + "' group by line_id,phasic_type,quality_flag tz('Asia/Shanghai')";
+String sql4 = "select percentile(fluc,95) AS fluc,percentile(fluccf,95) AS fluccf from "+ InfluxDBPublicParam.DATA_FLUC+" where "+stringBuilder+" time >= '" + startTime + "' and time <= '" + endTime + "' group by line_id,phasic_type,quality_flag "+ InfluxDBPublicParam.TIME_ZONE;
 QueryResult sqlResult4 = influxDbUtils.query(sql4);
 InfluxDBResultMapper resultMapper4= new InfluxDBResultMapper();
 List<DataFluc> list4 = resultMapper4.toPOJO(sqlResult4, DataFluc.class);
@@ -656,12 +654,12 @@ public class DayJob {
 tags.put("value_type",item.getValueType());
 fields.put("fluc",item.getFluc());
 fields.put("fluccf",item.getFluccf());
-Point point = influxDbUtils.pointBuilder("day_fluc", time, TimeUnit.MILLISECONDS, tags, fields);
-BatchPoints batchPoints = BatchPoints.database(DATABASE).tag("line_id", item.getLineId()).tag("phasic_type",item.getPhaseType()).tag("quality_flag",item.getQualityFlag()).tag("value_type",item.getValueType()).retentionPolicy("").consistency(InfluxDB.ConsistencyLevel.ALL).build();
+Point point = influxDbUtils.pointBuilder(InfluxDBPublicParam.DAY_FLUC, time, TimeUnit.MILLISECONDS, tags, fields);
+BatchPoints batchPoints = BatchPoints.database(InfluxDBPublicParam.DATABASE).tag(InfluxDBPublicParam.LINE_ID, item.getLineId()).tag(InfluxDBPublicParam.PHASIC_TYPE,item.getPhaseType()).tag(InfluxDBPublicParam.QUALITY_FLAG,item.getQualityFlag()).tag(InfluxDBPublicParam.VALUE_TYPE,item.getValueType()).retentionPolicy("").consistency(InfluxDB.ConsistencyLevel.ALL).build();
 batchPoints.point(point);
 records.add(batchPoints.lineProtocol());
 });
-influxDbUtils.batchInsert(DATABASE,"", InfluxDB.ConsistencyLevel.ALL, records);
+influxDbUtils.batchInsert(InfluxDBPublicParam.DATABASE,"", InfluxDB.ConsistencyLevel.ALL, records);
 }
@@ -686,8 +684,8 @@ public class DayJob {
 stringBuilder1.append("min(i_").append(i).append(") AS i_").append(i).append(",");
 }
 }
-stringBuilder2.append(Param.TIME + " >= '").append(startTime).append("' and ").append(Param.TIME).append(" <= '").append(endTime).append("' and value_type = 'MIN' group by line_id,phasic_type,value_type,quality_flag tz('Asia/Shanghai')");
-String sql1 = "select "+stringBuilder1+" from data_harmphasic_i where "+stringBuilder+stringBuilder2;
+stringBuilder2.append(InfluxDBPublicParam.TIME + " >= '").append(startTime).append("' and ").append(InfluxDBPublicParam.TIME).append(" <= '").append(endTime).append("' and value_type = 'MIN' group by line_id,phasic_type,value_type,quality_flag ").append(InfluxDBPublicParam.TIME_ZONE);
+String sql1 = "select "+stringBuilder1+" from "+ InfluxDBPublicParam.DATA_HARM_PHASIC_I+" where "+stringBuilder+stringBuilder2;
 QueryResult sqlResult1 = influxDbUtils.query(sql1);
 InfluxDBResultMapper resultMapper1 = new InfluxDBResultMapper();
 List<DataHarmPhasicI> list1 = resultMapper1.toPOJO(sqlResult1, DataHarmPhasicI.class);
@@ -701,8 +699,8 @@ public class DayJob {
 stringBuilder3.append("max(i_").append(i).append(") AS i_").append(i).append(",");
 }
 }
-stringBuilder4.append(Param.TIME + " >= '").append(startTime).append("' and ").append(Param.TIME).append(" <= '").append(endTime).append("' and value_type = 'MAX' group by line_id,phasic_type,value_type,quality_flag tz('Asia/Shanghai')");
-String sql2 = "select "+stringBuilder3+" from data_harmphasic_i where "+stringBuilder+stringBuilder4;
+stringBuilder4.append(InfluxDBPublicParam.TIME + " >= '").append(startTime).append("' and ").append(InfluxDBPublicParam.TIME).append(" <= '").append(endTime).append("' and value_type = 'MAX' group by line_id,phasic_type,value_type,quality_flag ").append(InfluxDBPublicParam.TIME_ZONE);
+String sql2 = "select "+stringBuilder3+" from "+ InfluxDBPublicParam.DATA_HARM_PHASIC_I+" where "+stringBuilder+stringBuilder4;
 QueryResult sqlResult2 = influxDbUtils.query(sql2);
 InfluxDBResultMapper resultMapper2 = new InfluxDBResultMapper();
 List<DataHarmPhasicI> list2 = resultMapper2.toPOJO(sqlResult2, DataHarmPhasicI.class);
@@ -716,8 +714,8 @@ public class DayJob {
 stringBuilder5.append("mean(i_").append(i).append(") AS i_").append(i).append(",");
 }
 }
-stringBuilder6.append(Param.TIME + " >= '").append(startTime).append("' and ").append(Param.TIME).append(" <= '").append(endTime).append("' and value_type = 'AVG' group by line_id,phasic_type,value_type,quality_flag tz('Asia/Shanghai')");
-String sql3 = "select "+stringBuilder5+" from data_harmphasic_i where "+stringBuilder+stringBuilder6;
+stringBuilder6.append(InfluxDBPublicParam.TIME + " >= '").append(startTime).append("' and ").append(InfluxDBPublicParam.TIME).append(" <= '").append(endTime).append("' and value_type = 'AVG' group by line_id,phasic_type,value_type,quality_flag ").append(InfluxDBPublicParam.TIME_ZONE);
+String sql3 = "select "+stringBuilder5+" from "+ InfluxDBPublicParam.DATA_HARM_PHASIC_I+" where "+stringBuilder+stringBuilder6;
 QueryResult sqlResult3 = influxDbUtils.query(sql3);
 InfluxDBResultMapper resultMapper3 = new InfluxDBResultMapper();
 List<DataHarmPhasicI> list3 = resultMapper3.toPOJO(sqlResult3, DataHarmPhasicI.class);
@@ -731,8 +729,8 @@ public class DayJob {
 stringBuilder7.append("percentile(i_").append(i).append(",95) AS i_").append(i).append(",");
 }
 }
-stringBuilder8.append(Param.TIME + " >= '").append(startTime).append("' and ").append(Param.TIME).append(" <= '").append(endTime).append("' and value_type = 'AVG' group by line_id,phasic_type,value_type,quality_flag tz('Asia/Shanghai')");
-String sql4 = "select "+stringBuilder7+" from data_harmphasic_i where "+stringBuilder+stringBuilder8;
+stringBuilder8.append(InfluxDBPublicParam.TIME + " >= '").append(startTime).append("' and ").append(InfluxDBPublicParam.TIME).append(" <= '").append(endTime).append("' and value_type = 'AVG' group by line_id,phasic_type,value_type,quality_flag ").append(InfluxDBPublicParam.TIME_ZONE);
+String sql4 = "select "+stringBuilder7+" from "+ InfluxDBPublicParam.DATA_HARM_PHASIC_I+" where "+stringBuilder+stringBuilder8;
 QueryResult sqlResult4 = influxDbUtils.query(sql4);
 InfluxDBResultMapper resultMapper4 = new InfluxDBResultMapper();
 List<DataHarmPhasicI> list4 = resultMapper4.toPOJO(sqlResult4, DataHarmPhasicI.class);
@@ -812,12 +810,12 @@ public class DayJob {
 fields.put("i_48",item.getI48());
 fields.put("i_49",item.getI49());
 fields.put("i_50",item.getI50());
-Point point = influxDbUtils.pointBuilder("day_harmphasic_i", time, TimeUnit.MILLISECONDS, tags, fields);
-BatchPoints batchPoints = BatchPoints.database(DATABASE).tag("line_id", item.getLineId()).tag("phasic_type",item.getPhaseType()).tag("quality_flag",item.getQualityFlag()).tag("value_type",item.getValueType()).retentionPolicy("").consistency(InfluxDB.ConsistencyLevel.ALL).build();
+Point point = influxDbUtils.pointBuilder(InfluxDBPublicParam.DAY_HARM_PHASIC_I, time, TimeUnit.MILLISECONDS, tags, fields);
+BatchPoints batchPoints = BatchPoints.database(InfluxDBPublicParam.DATABASE).tag(InfluxDBPublicParam.LINE_ID, item.getLineId()).tag(InfluxDBPublicParam.PHASIC_TYPE,item.getPhaseType()).tag(InfluxDBPublicParam.QUALITY_FLAG,item.getQualityFlag()).tag(InfluxDBPublicParam.VALUE_TYPE,item.getValueType()).retentionPolicy("").consistency(InfluxDB.ConsistencyLevel.ALL).build();
 batchPoints.point(point);
 records.add(batchPoints.lineProtocol());
 });
-influxDbUtils.batchInsert(DATABASE,"", InfluxDB.ConsistencyLevel.ALL, records);
+influxDbUtils.batchInsert(InfluxDBPublicParam.DATABASE,"", InfluxDB.ConsistencyLevel.ALL, records);
 }
 
 /**
@@ -841,8 +839,8 @@ public class DayJob {
 stringBuilder1.append("min(v_").append(i).append(") AS v_").append(i).append(",");
 }
 }
-stringBuilder2.append(Param.TIME + " >= '").append(startTime).append("' and ").append(Param.TIME).append(" <= '").append(endTime).append("' and value_type = 'MIN' group by line_id,phasic_type,value_type,quality_flag tz('Asia/Shanghai')");
-String sql1 = "select "+stringBuilder1+" from data_harmphasic_v where "+stringBuilder+stringBuilder2;
+stringBuilder2.append(InfluxDBPublicParam.TIME + " >= '").append(startTime).append("' and ").append(InfluxDBPublicParam.TIME).append(" <= '").append(endTime).append("' and value_type = 'MIN' group by line_id,phasic_type,value_type,quality_flag ").append(InfluxDBPublicParam.TIME_ZONE);
+String sql1 = "select "+stringBuilder1+" from "+ InfluxDBPublicParam.DATA_HARM_PHASIC_V+" where "+stringBuilder+stringBuilder2;
 QueryResult sqlResult1 = influxDbUtils.query(sql1);
 InfluxDBResultMapper resultMapper1 = new InfluxDBResultMapper();
 List<DataHarmPhasicV> list1 = resultMapper1.toPOJO(sqlResult1, DataHarmPhasicV.class);
@@ -856,8 +854,8 @@ public class DayJob {
 stringBuilder3.append("max(v_").append(i).append(") AS v_").append(i).append(",");
 }
 }
-stringBuilder4.append(Param.TIME + " >= '").append(startTime).append("' and ").append(Param.TIME).append(" <= '").append(endTime).append("' and value_type = 'MAX' group by line_id,phasic_type,value_type,quality_flag tz('Asia/Shanghai')");
-String sql2 = "select "+stringBuilder3+" from data_harmphasic_v where "+stringBuilder+stringBuilder4;
+stringBuilder4.append(InfluxDBPublicParam.TIME + " >= '").append(startTime).append("' and ").append(InfluxDBPublicParam.TIME).append(" <= '").append(endTime).append("' and value_type = 'MAX' group by line_id,phasic_type,value_type,quality_flag ").append(InfluxDBPublicParam.TIME_ZONE);
+String sql2 = "select "+stringBuilder3+" from "+ InfluxDBPublicParam.DATA_HARM_PHASIC_V+" where "+stringBuilder+stringBuilder4;
 QueryResult sqlResult2 = influxDbUtils.query(sql2);
 InfluxDBResultMapper resultMapper2 = new InfluxDBResultMapper();
 List<DataHarmPhasicV> list2 = resultMapper2.toPOJO(sqlResult2, DataHarmPhasicV.class);
@@ -871,8 +869,8 @@ public class DayJob {
 stringBuilder5.append("mean(v_").append(i).append(") AS v_").append(i).append(",");
 }
 }
-stringBuilder6.append(Param.TIME + " >= '").append(startTime).append("' and ").append(Param.TIME).append(" <= '").append(endTime).append("' and value_type = 'AVG' group by line_id,phasic_type,value_type,quality_flag tz('Asia/Shanghai')");
-String sql3 = "select "+stringBuilder5+" from data_harmphasic_v where "+stringBuilder+stringBuilder6;
+stringBuilder6.append(InfluxDBPublicParam.TIME + " >= '").append(startTime).append("' and ").append(InfluxDBPublicParam.TIME).append(" <= '").append(endTime).append("' and value_type = 'AVG' group by line_id,phasic_type,value_type,quality_flag ").append(InfluxDBPublicParam.TIME_ZONE);
+String sql3 = "select "+stringBuilder5+" from "+ InfluxDBPublicParam.DATA_HARM_PHASIC_V+" where "+stringBuilder+stringBuilder6;
 QueryResult sqlResult3 = influxDbUtils.query(sql3);
 InfluxDBResultMapper resultMapper3 = new InfluxDBResultMapper();
 List<DataHarmPhasicV> list3 = resultMapper3.toPOJO(sqlResult3, DataHarmPhasicV.class);
@@ -886,8 +884,8 @@ public class DayJob {
 stringBuilder7.append("percentile(v_").append(i).append(",95) AS v_").append(i).append(",");
 }
 }
-stringBuilder8.append(Param.TIME + " >= '").append(startTime).append("' and ").append(Param.TIME).append(" <= '").append(endTime).append("' and value_type = 'AVG' group by line_id,phasic_type,value_type,quality_flag tz('Asia/Shanghai')");
-String sql4 = "select "+stringBuilder7+" from data_harmphasic_v where "+stringBuilder+stringBuilder8;
+stringBuilder8.append(InfluxDBPublicParam.TIME + " >= '").append(startTime).append("' and ").append(InfluxDBPublicParam.TIME).append(" <= '").append(endTime).append("' and value_type = 'AVG' group by line_id,phasic_type,value_type,quality_flag ").append(InfluxDBPublicParam.TIME_ZONE);
+String sql4 = "select "+stringBuilder7+" from "+ InfluxDBPublicParam.DATA_HARM_PHASIC_V+" where "+stringBuilder+stringBuilder8;
 QueryResult sqlResult4 = influxDbUtils.query(sql4);
 InfluxDBResultMapper resultMapper4 = new InfluxDBResultMapper();
 List<DataHarmPhasicV> list4 = resultMapper4.toPOJO(sqlResult4, DataHarmPhasicV.class);
@@ -967,12 +965,12 @@ public class DayJob {
 fields.put("v_48",item.getV48());
 fields.put("v_49",item.getV49());
 fields.put("v_50",item.getV50());
-Point point = influxDbUtils.pointBuilder("day_harmphasic_v", time, TimeUnit.MILLISECONDS, tags, fields);
-BatchPoints batchPoints = BatchPoints.database(DATABASE).tag("line_id", item.getLineId()).tag("phasic_type",item.getPhaseType()).tag("quality_flag",item.getQualityFlag()).tag("value_type",item.getValueType()).retentionPolicy("").consistency(InfluxDB.ConsistencyLevel.ALL).build();
+Point point = influxDbUtils.pointBuilder(InfluxDBPublicParam.DAY_HARM_PHASIC_V, time, TimeUnit.MILLISECONDS, tags, fields);
+BatchPoints batchPoints = BatchPoints.database(InfluxDBPublicParam.DATABASE).tag(InfluxDBPublicParam.LINE_ID, item.getLineId()).tag(InfluxDBPublicParam.PHASIC_TYPE,item.getPhaseType()).tag(InfluxDBPublicParam.QUALITY_FLAG,item.getQualityFlag()).tag(InfluxDBPublicParam.VALUE_TYPE,item.getValueType()).retentionPolicy("").consistency(InfluxDB.ConsistencyLevel.ALL).build();
 batchPoints.point(point);
 records.add(batchPoints.lineProtocol());
 });
-influxDbUtils.batchInsert(DATABASE,"", InfluxDB.ConsistencyLevel.ALL, records);
+influxDbUtils.batchInsert(InfluxDBPublicParam.DATABASE,"", InfluxDB.ConsistencyLevel.ALL, records);
 }
 
 /**
@@ -997,8 +995,8 @@ public class DayJob {
 stringBuilder1.append("min(p_").append(i).append(") AS p_").append(i).append(",");
 }
 }
-stringBuilder2.append(Param.TIME + " >= '").append(startTime).append("' and ").append(Param.TIME).append(" <= '").append(endTime).append("' and value_type = 'MIN' group by line_id,phasic_type,value_type,quality_flag tz('Asia/Shanghai')");
-String sql1 = "select "+stringBuilder1+" from data_harmpower_p where "+stringBuilder+stringBuilder2;
+stringBuilder2.append(InfluxDBPublicParam.TIME + " >= '").append(startTime).append("' and ").append(InfluxDBPublicParam.TIME).append(" <= '").append(endTime).append("' and value_type = 'MIN' group by line_id,phasic_type,value_type,quality_flag ").append(InfluxDBPublicParam.TIME_ZONE);
+String sql1 = "select "+stringBuilder1+" from "+ InfluxDBPublicParam.DATA_HARM_POWER_P+" where "+stringBuilder+stringBuilder2;
 QueryResult sqlResult1 = influxDbUtils.query(sql1);
 InfluxDBResultMapper resultMapper1 = new InfluxDBResultMapper();
 List<DataHarmPowerP> list1 = resultMapper1.toPOJO(sqlResult1, DataHarmPowerP.class);
@@ -1013,8 +1011,8 @@ public class DayJob {
 stringBuilder3.append("max(p_").append(i).append(") AS p_").append(i).append(",");
 }
 }
-stringBuilder4.append(Param.TIME + " >= '").append(startTime).append("' and ").append(Param.TIME).append(" <= '").append(endTime).append("' and value_type = 'MAX' group by line_id,phasic_type,value_type,quality_flag tz('Asia/Shanghai')");
-String sql2 = "select "+stringBuilder3+" from data_harmpower_p where "+stringBuilder+stringBuilder4;
+stringBuilder4.append(InfluxDBPublicParam.TIME + " >= '").append(startTime).append("' and ").append(InfluxDBPublicParam.TIME).append(" <= '").append(endTime).append("' and value_type = 'MAX' group by line_id,phasic_type,value_type,quality_flag ").append(InfluxDBPublicParam.TIME_ZONE);
+String sql2 = "select "+stringBuilder3+" from "+ InfluxDBPublicParam.DATA_HARM_POWER_P+" where "+stringBuilder+stringBuilder4;
 QueryResult sqlResult2 = influxDbUtils.query(sql2);
 InfluxDBResultMapper resultMapper2 = new InfluxDBResultMapper();
 List<DataHarmPowerP> list2 = resultMapper2.toPOJO(sqlResult2, DataHarmPowerP.class);
@@ -1029,8 +1027,8 @@ public class DayJob {
 stringBuilder5.append("mean(p_").append(i).append(") AS p_").append(i).append(",");
 }
 }
-stringBuilder6.append(Param.TIME + " >= '").append(startTime).append("' and ").append(Param.TIME).append(" <= '").append(endTime).append("' and value_type = 'AVG' group by line_id,phasic_type,value_type,quality_flag tz('Asia/Shanghai')");
-String sql3 = "select "+stringBuilder5+" from data_harmpower_p where "+stringBuilder+stringBuilder6;
+stringBuilder6.append(InfluxDBPublicParam.TIME + " >= '").append(startTime).append("' and ").append(InfluxDBPublicParam.TIME).append(" <= '").append(endTime).append("' and value_type = 'AVG' group by line_id,phasic_type,value_type,quality_flag ").append(InfluxDBPublicParam.TIME_ZONE);
+String sql3 = "select "+stringBuilder5+" from "+ InfluxDBPublicParam.DATA_HARM_POWER_P+" where "+stringBuilder+stringBuilder6;
 QueryResult sqlResult3 = influxDbUtils.query(sql3);
 InfluxDBResultMapper resultMapper3 = new InfluxDBResultMapper();
 List<DataHarmPowerP> list3 = resultMapper3.toPOJO(sqlResult3, DataHarmPowerP.class);
@@ -1045,8 +1043,8 @@ public class DayJob {
 stringBuilder7.append("percentile(p_").append(i).append(",95) AS p_").append(i).append(",");
 }
 }
-stringBuilder8.append(Param.TIME + " >= '").append(startTime).append("' and ").append(Param.TIME).append(" <= '").append(endTime).append("' and value_type = 'AVG' group by line_id,phasic_type,value_type,quality_flag tz('Asia/Shanghai')");
-String sql4 = "select "+stringBuilder7+" from data_harmpower_p where "+stringBuilder+stringBuilder8;
+stringBuilder8.append(InfluxDBPublicParam.TIME + " >= '").append(startTime).append("' and ").append(InfluxDBPublicParam.TIME).append(" <= '").append(endTime).append("' and value_type = 'AVG' group by line_id,phasic_type,value_type,quality_flag ").append(InfluxDBPublicParam.TIME_ZONE);
+String sql4 = "select "+stringBuilder7+" from "+ InfluxDBPublicParam.DATA_HARM_POWER_P+" where "+stringBuilder+stringBuilder8;
 QueryResult sqlResult4 = influxDbUtils.query(sql4);
 InfluxDBResultMapper resultMapper4 = new InfluxDBResultMapper();
 List<DataHarmPowerP> list4 = resultMapper4.toPOJO(sqlResult4, DataHarmPowerP.class);
@@ -1129,12 +1127,12 @@ public class DayJob {
 fields.put("p_48",item.getP48());
 fields.put("p_49",item.getP49());
 fields.put("p_50",item.getP50());
-Point point = influxDbUtils.pointBuilder("day_harmpower_p", time, TimeUnit.MILLISECONDS, tags, fields);
-BatchPoints batchPoints = BatchPoints.database(DATABASE).tag("line_id", item.getLineId()).tag("phasic_type",item.getPhaseType()).tag("quality_flag",item.getQualityFlag()).tag("value_type",item.getValueType()).retentionPolicy("").consistency(InfluxDB.ConsistencyLevel.ALL).build();
+Point point = influxDbUtils.pointBuilder(InfluxDBPublicParam.DAY_HARM_POWER_P, time, TimeUnit.MILLISECONDS, tags, fields);
+BatchPoints batchPoints = BatchPoints.database(InfluxDBPublicParam.DATABASE).tag(InfluxDBPublicParam.LINE_ID, item.getLineId()).tag(InfluxDBPublicParam.PHASIC_TYPE,item.getPhaseType()).tag(InfluxDBPublicParam.QUALITY_FLAG,item.getQualityFlag()).tag(InfluxDBPublicParam.VALUE_TYPE,item.getValueType()).retentionPolicy("").consistency(InfluxDB.ConsistencyLevel.ALL).build();
 batchPoints.point(point);
 records.add(batchPoints.lineProtocol());
 });
-influxDbUtils.batchInsert(DATABASE,"", InfluxDB.ConsistencyLevel.ALL, records);
+influxDbUtils.batchInsert(InfluxDBPublicParam.DATABASE,"", InfluxDB.ConsistencyLevel.ALL, records);
 }
@@ -1160,8 +1158,8 @@ public class DayJob {
 stringBuilder1.append("min(q_").append(i).append(") AS q_").append(i).append(",");
 }
 }
-stringBuilder2.append(Param.TIME + " >= '").append(startTime).append("' and ").append(Param.TIME).append(" <= '").append(endTime).append("' and value_type = 'MIN' group by line_id,phasic_type,value_type,quality_flag tz('Asia/Shanghai')");
-String sql1 = "select "+stringBuilder1+" from data_harmpower_q where "+stringBuilder+stringBuilder2;
+stringBuilder2.append(InfluxDBPublicParam.TIME + " >= '").append(startTime).append("' and ").append(InfluxDBPublicParam.TIME).append(" <= '").append(endTime).append("' and value_type = 'MIN' group by line_id,phasic_type,value_type,quality_flag ").append(InfluxDBPublicParam.TIME_ZONE);
+String sql1 = "select "+stringBuilder1+" from "+ InfluxDBPublicParam.DATA_HARM_POWER_Q+" where "+stringBuilder+stringBuilder2;
 QueryResult sqlResult1 = influxDbUtils.query(sql1);
 InfluxDBResultMapper resultMapper1 = new InfluxDBResultMapper();
 List<DataHarmPowerQ> list1 = resultMapper1.toPOJO(sqlResult1, DataHarmPowerQ.class);
@@ -1176,8 +1174,8 @@ public class DayJob {
 stringBuilder3.append("max(q_").append(i).append(") AS q_").append(i).append(",");
 }
 }
-stringBuilder4.append(Param.TIME + " >= '").append(startTime).append("' and ").append(Param.TIME).append(" <= '").append(endTime).append("' and value_type = 'MAX' group by line_id,phasic_type,value_type,quality_flag tz('Asia/Shanghai')");
-String sql2 = "select "+stringBuilder3+" from data_harmpower_q where "+stringBuilder+stringBuilder4;
+stringBuilder4.append(InfluxDBPublicParam.TIME + " >= '").append(startTime).append("' and ").append(InfluxDBPublicParam.TIME).append(" <= '").append(endTime).append("' and value_type = 'MAX' group by line_id,phasic_type,value_type,quality_flag ").append(InfluxDBPublicParam.TIME_ZONE);
+String sql2 = "select "+stringBuilder3+" from "+ InfluxDBPublicParam.DATA_HARM_POWER_Q+" where "+stringBuilder+stringBuilder4;
 QueryResult sqlResult2 = influxDbUtils.query(sql2);
 InfluxDBResultMapper resultMapper2 = new InfluxDBResultMapper();
 List<DataHarmPowerQ> list2 = resultMapper2.toPOJO(sqlResult2, DataHarmPowerQ.class);
@@ -1192,8 +1190,8 @@ public class DayJob {
 stringBuilder5.append("mean(q_").append(i).append(") AS q_").append(i).append(",");
 }
 }
-stringBuilder6.append(Param.TIME + " >= '").append(startTime).append("' and ").append(Param.TIME).append(" <= '").append(endTime).append("' and value_type = 'AVG' group by line_id,phasic_type,value_type,quality_flag tz('Asia/Shanghai')");
-String sql3 = "select "+stringBuilder5+" from data_harmpower_q where "+stringBuilder+stringBuilder6;
+stringBuilder6.append(InfluxDBPublicParam.TIME + " >= '").append(startTime).append("' and ").append(InfluxDBPublicParam.TIME).append(" <= '").append(endTime).append("' and value_type = 'AVG' group by line_id,phasic_type,value_type,quality_flag ").append(InfluxDBPublicParam.TIME_ZONE);
+String sql3 = "select "+stringBuilder5+" from "+ InfluxDBPublicParam.DATA_HARM_POWER_Q+" where "+stringBuilder+stringBuilder6;
 QueryResult sqlResult3 = influxDbUtils.query(sql3);
 InfluxDBResultMapper resultMapper3 = new InfluxDBResultMapper();
 List<DataHarmPowerQ> list3 = resultMapper3.toPOJO(sqlResult3, DataHarmPowerQ.class);
@@ -1208,8 +1206,8 @@ public class DayJob {
 stringBuilder7.append("percentile(q_").append(i).append(",95) AS q_").append(i).append(",");
 }
 }
-stringBuilder8.append(Param.TIME + " >= '").append(startTime).append("' and ").append(Param.TIME).append(" <= '").append(endTime).append("' and value_type = 'AVG' group by line_id,phasic_type,value_type,quality_flag tz('Asia/Shanghai')");
-String sql4 = "select "+stringBuilder7+" from data_harmpower_q where "+stringBuilder+stringBuilder8;
+stringBuilder8.append(InfluxDBPublicParam.TIME + " >= '").append(startTime).append("' and ").append(InfluxDBPublicParam.TIME).append(" <= '").append(endTime).append("' and value_type = 'AVG' group by line_id,phasic_type,value_type,quality_flag ").append(InfluxDBPublicParam.TIME_ZONE);
+String sql4 = "select "+stringBuilder7+" from "+ InfluxDBPublicParam.DATA_HARM_POWER_Q+" where "+stringBuilder+stringBuilder8;
 QueryResult sqlResult4 = influxDbUtils.query(sql4);
 InfluxDBResultMapper resultMapper4 = new InfluxDBResultMapper();
 List<DataHarmPowerQ> list4 = resultMapper4.toPOJO(sqlResult4, DataHarmPowerQ.class);
@@ -1290,12 +1288,12 @@ public class DayJob {
|
|
|
|
fields.put("q_48",item.getQ48());
|
|
|
|
fields.put("q_48",item.getQ48());
|
|
|
|
fields.put("q_49",item.getQ49());
|
|
|
|
fields.put("q_49",item.getQ49());
|
|
|
|
fields.put("q_50",item.getQ50());
|
|
|
|
fields.put("q_50",item.getQ50());
|
|
|
|
Point point = influxDbUtils.pointBuilder("day_harmpower_q", time, TimeUnit.MILLISECONDS, tags, fields);
|
|
|
|
Point point = influxDbUtils.pointBuilder(InfluxDBPublicParam.DAY_HARM_POWER_Q, time, TimeUnit.MILLISECONDS, tags, fields);
|
|
|
|
BatchPoints batchPoints = BatchPoints.database(DATABASE).tag("line_id", item.getLineId()).tag("phasic_type",item.getPhaseType()).tag("quality_flag",item.getQualityFlag()).tag("value_type",item.getValueType()).retentionPolicy("").consistency(InfluxDB.ConsistencyLevel.ALL).build();
|
|
|
|
BatchPoints batchPoints = BatchPoints.database(InfluxDBPublicParam.DATABASE).tag(InfluxDBPublicParam.LINE_ID, item.getLineId()).tag(InfluxDBPublicParam.PHASIC_TYPE,item.getPhaseType()).tag(InfluxDBPublicParam.QUALITY_FLAG,item.getQualityFlag()).tag(InfluxDBPublicParam.VALUE_TYPE,item.getValueType()).retentionPolicy("").consistency(InfluxDB.ConsistencyLevel.ALL).build();
|
|
|
|
batchPoints.point(point);
|
|
|
|
batchPoints.point(point);
|
|
|
|
records.add(batchPoints.lineProtocol());
|
|
|
|
records.add(batchPoints.lineProtocol());
|
|
|
|
});
|
|
|
|
});
|
|
|
|
influxDbUtils.batchInsert(DATABASE,"", InfluxDB.ConsistencyLevel.ALL, records);
|
|
|
|
influxDbUtils.batchInsert(InfluxDBPublicParam.DATABASE,"", InfluxDB.ConsistencyLevel.ALL, records);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
/**
|
|
|
|
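Note: the InfluxDBPublicParam constants used on the "+" side of these hunks are not defined anywhere in this diff. Purely as a reading aid, the sketch below shows one plausible shape for that class, inferred only from the string literals the old lines used in the same positions; the real class lives in com.njcn.influxdb.param and its actual names and values are authoritative.

// Hypothetical sketch only -- values inferred from the literals this refactor replaces.
public final class InfluxDBPublicParam {

    private InfluxDBPublicParam() {
        // constants holder, not instantiable
    }

    // Tag keys / time column that the old code wrote as bare strings.
    public static final String TIME = "time";
    public static final String LINE_ID = "line_id";
    public static final String PHASIC_TYPE = "phasic_type";
    public static final String QUALITY_FLAG = "quality_flag";
    public static final String VALUE_TYPE = "value_type";

    // Trailing time-zone clause that was previously inlined in every query.
    public static final String TIME_ZONE = "tz('Asia/Shanghai')";

    // Example measurement names from this refactor (the full class presumably
    // defines one constant per raw and per daily-aggregate measurement).
    public static final String DATA_HARM_POWER_Q = "data_harmpower_q";
    public static final String DAY_HARM_POWER_Q = "day_harmpower_q";

    // Target database name; its value is defined in the real class and is not
    // shown in these hunks, so a placeholder is used here.
    public static final String DATABASE = "...";
}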
@@ -1320,8 +1318,8 @@ public class DayJob {
stringBuilder1.append("min(s_").append(i).append(") AS s_").append(i).append(",");
}
}
-stringBuilder2.append(Param.TIME + " >= '").append(startTime).append("' and ").append(Param.TIME).append(" <= '").append(endTime).append("' and value_type = 'MIN' group by line_id,phasic_type,value_type,quality_flag tz('Asia/Shanghai')");
+stringBuilder2.append(InfluxDBPublicParam.TIME + " >= '").append(startTime).append("' and ").append(InfluxDBPublicParam.TIME).append(" <= '").append(endTime).append("' and value_type = 'MIN' group by line_id,phasic_type,value_type,quality_flag ").append(InfluxDBPublicParam.TIME_ZONE);
-String sql1 = "select "+stringBuilder1+" from data_harmpower_s where "+stringBuilder+stringBuilder2;
+String sql1 = "select "+stringBuilder1+" from "+ InfluxDBPublicParam.DATA_HARM_POWER_S+" where "+stringBuilder+stringBuilder2;
QueryResult sqlResult1 = influxDbUtils.query(sql1);
InfluxDBResultMapper resultMapper1 = new InfluxDBResultMapper();
List<DataHarmPowerS> list1 = resultMapper1.toPOJO(sqlResult1, DataHarmPowerS.class);
@@ -1336,8 +1334,8 @@ public class DayJob {
stringBuilder3.append("max(s_").append(i).append(") AS s_").append(i).append(",");
}
}
-stringBuilder4.append(Param.TIME + " >= '").append(startTime).append("' and ").append(Param.TIME).append(" <= '").append(endTime).append("' and value_type = 'MAX' group by line_id,phasic_type,value_type,quality_flag tz('Asia/Shanghai')");
+stringBuilder4.append(InfluxDBPublicParam.TIME + " >= '").append(startTime).append("' and ").append(InfluxDBPublicParam.TIME).append(" <= '").append(endTime).append("' and value_type = 'MAX' group by line_id,phasic_type,value_type,quality_flag ").append(InfluxDBPublicParam.TIME_ZONE);
-String sql2 = "select "+stringBuilder3+" from data_harmpower_s where "+stringBuilder+stringBuilder4;
+String sql2 = "select "+stringBuilder3+" from "+ InfluxDBPublicParam.DATA_HARM_POWER_S+" where "+stringBuilder+stringBuilder4;
QueryResult sqlResult2 = influxDbUtils.query(sql2);
InfluxDBResultMapper resultMapper2 = new InfluxDBResultMapper();
List<DataHarmPowerS> list2 = resultMapper2.toPOJO(sqlResult2, DataHarmPowerS.class);
@@ -1352,8 +1350,8 @@ public class DayJob {
stringBuilder5.append("mean(s_").append(i).append(") AS s_").append(i).append(",");
}
}
-stringBuilder6.append(Param.TIME + " >= '").append(startTime).append("' and ").append(Param.TIME).append(" <= '").append(endTime).append("' and value_type = 'AVG' group by line_id,phasic_type,value_type,quality_flag tz('Asia/Shanghai')");
+stringBuilder6.append(InfluxDBPublicParam.TIME + " >= '").append(startTime).append("' and ").append(InfluxDBPublicParam.TIME).append(" <= '").append(endTime).append("' and value_type = 'AVG' group by line_id,phasic_type,value_type,quality_flag ").append(InfluxDBPublicParam.TIME_ZONE);
-String sql3 = "select "+stringBuilder5+" from data_harmpower_s where "+stringBuilder+stringBuilder6;
+String sql3 = "select "+stringBuilder5+" from "+ InfluxDBPublicParam.DATA_HARM_POWER_S+" where "+stringBuilder+stringBuilder6;
QueryResult sqlResult3 = influxDbUtils.query(sql3);
InfluxDBResultMapper resultMapper3 = new InfluxDBResultMapper();
List<DataHarmPowerS> list3 = resultMapper3.toPOJO(sqlResult3, DataHarmPowerS.class);
@@ -1368,8 +1366,8 @@ public class DayJob {
stringBuilder7.append("percentile(s_").append(i).append(",95) AS s_").append(i).append(",");
}
}
-stringBuilder8.append(Param.TIME + " >= '").append(startTime).append("' and ").append(Param.TIME).append(" <= '").append(endTime).append("' and value_type = 'AVG' group by line_id,phasic_type,value_type,quality_flag tz('Asia/Shanghai')");
+stringBuilder8.append(InfluxDBPublicParam.TIME + " >= '").append(startTime).append("' and ").append(InfluxDBPublicParam.TIME).append(" <= '").append(endTime).append("' and value_type = 'AVG' group by line_id,phasic_type,value_type,quality_flag ").append(InfluxDBPublicParam.TIME_ZONE);
-String sql4 = "select "+stringBuilder7+" from data_harmpower_s where "+stringBuilder+stringBuilder8;
+String sql4 = "select "+stringBuilder7+" from "+ InfluxDBPublicParam.DATA_HARM_POWER_S+" where "+stringBuilder+stringBuilder8;
QueryResult sqlResult4 = influxDbUtils.query(sql4);
InfluxDBResultMapper resultMapper4 = new InfluxDBResultMapper();
List<DataHarmPowerS> list4 = resultMapper4.toPOJO(sqlResult4, DataHarmPowerS.class);
@@ -1450,12 +1448,12 @@ public class DayJob {
fields.put("s_48",item.getS48());
fields.put("s_49",item.getS49());
fields.put("s_50",item.getS50());
-Point point = influxDbUtils.pointBuilder("day_harmpower_s", time, TimeUnit.MILLISECONDS, tags, fields);
+Point point = influxDbUtils.pointBuilder(InfluxDBPublicParam.DAY_HARM_POWER_S, time, TimeUnit.MILLISECONDS, tags, fields);
-BatchPoints batchPoints = BatchPoints.database(DATABASE).tag("line_id", item.getLineId()).tag("phasic_type",item.getPhaseType()).tag("quality_flag",item.getQualityFlag()).tag("value_type",item.getValueType()).retentionPolicy("").consistency(InfluxDB.ConsistencyLevel.ALL).build();
+BatchPoints batchPoints = BatchPoints.database(InfluxDBPublicParam.DATABASE).tag(InfluxDBPublicParam.LINE_ID, item.getLineId()).tag(InfluxDBPublicParam.PHASIC_TYPE,item.getPhaseType()).tag(InfluxDBPublicParam.QUALITY_FLAG,item.getQualityFlag()).tag(InfluxDBPublicParam.VALUE_TYPE,item.getValueType()).retentionPolicy("").consistency(InfluxDB.ConsistencyLevel.ALL).build();
batchPoints.point(point);
records.add(batchPoints.lineProtocol());
});
-influxDbUtils.batchInsert(DATABASE,"", InfluxDB.ConsistencyLevel.ALL, records);
+influxDbUtils.batchInsert(InfluxDBPublicParam.DATABASE,"", InfluxDB.ConsistencyLevel.ALL, records);
}

/**
@@ -1479,8 +1477,8 @@ public class DayJob {
stringBuilder1.append("min(i_").append(i).append(") AS i_").append(i).append(",");
}
}
-stringBuilder2.append(Param.TIME + " >= '").append(startTime).append("' and ").append(Param.TIME).append(" <= '").append(endTime).append("' and value_type = 'MIN' group by line_id,phasic_type,value_type,quality_flag tz('Asia/Shanghai')");
+stringBuilder2.append(InfluxDBPublicParam.TIME + " >= '").append(startTime).append("' and ").append(InfluxDBPublicParam.TIME).append(" <= '").append(endTime).append("' and value_type = 'MIN' group by line_id,phasic_type,value_type,quality_flag ").append(InfluxDBPublicParam.TIME_ZONE);
-String sql1 = "select "+stringBuilder1+" from data_harmrate_i where "+stringBuilder+stringBuilder2;
+String sql1 = "select "+stringBuilder1+" from "+ InfluxDBPublicParam.DATA_HARM_RATE_I+" where "+stringBuilder+stringBuilder2;
QueryResult sqlResult1 = influxDbUtils.query(sql1);
InfluxDBResultMapper resultMapper1 = new InfluxDBResultMapper();
List<DataHarmRateI> list1 = resultMapper1.toPOJO(sqlResult1, DataHarmRateI.class);
@@ -1494,8 +1492,8 @@ public class DayJob {
stringBuilder3.append("max(i_").append(i).append(") AS i_").append(i).append(",");
}
}
-stringBuilder4.append(Param.TIME + " >= '").append(startTime).append("' and ").append(Param.TIME).append(" <= '").append(endTime).append("' and value_type = 'MAX' group by line_id,phasic_type,value_type,quality_flag tz('Asia/Shanghai')");
+stringBuilder4.append(InfluxDBPublicParam.TIME + " >= '").append(startTime).append("' and ").append(InfluxDBPublicParam.TIME).append(" <= '").append(endTime).append("' and value_type = 'MAX' group by line_id,phasic_type,value_type,quality_flag ").append(InfluxDBPublicParam.TIME_ZONE);
-String sql2 = "select "+stringBuilder3+" from data_harmrate_i where "+stringBuilder+stringBuilder4;
+String sql2 = "select "+stringBuilder3+" from "+ InfluxDBPublicParam.DATA_HARM_RATE_I+" where "+stringBuilder+stringBuilder4;
QueryResult sqlResult2 = influxDbUtils.query(sql2);
InfluxDBResultMapper resultMapper2 = new InfluxDBResultMapper();
List<DataHarmRateI> list2 = resultMapper2.toPOJO(sqlResult2, DataHarmRateI.class);
@@ -1509,8 +1507,8 @@ public class DayJob {
stringBuilder5.append("mean(i_").append(i).append(") AS i_").append(i).append(",");
}
}
-stringBuilder6.append(Param.TIME + " >= '").append(startTime).append("' and ").append(Param.TIME).append(" <= '").append(endTime).append("' and value_type = 'AVG' group by line_id,phasic_type,value_type,quality_flag tz('Asia/Shanghai')");
+stringBuilder6.append(InfluxDBPublicParam.TIME + " >= '").append(startTime).append("' and ").append(InfluxDBPublicParam.TIME).append(" <= '").append(endTime).append("' and value_type = 'AVG' group by line_id,phasic_type,value_type,quality_flag ").append(InfluxDBPublicParam.TIME_ZONE);
-String sql3 = "select "+stringBuilder5+" from data_harmrate_i where "+stringBuilder+stringBuilder6;
+String sql3 = "select "+stringBuilder5+" from "+ InfluxDBPublicParam.DATA_HARM_RATE_I+" where "+stringBuilder+stringBuilder6;
QueryResult sqlResult3 = influxDbUtils.query(sql3);
InfluxDBResultMapper resultMapper3 = new InfluxDBResultMapper();
List<DataHarmRateI> list3 = resultMapper3.toPOJO(sqlResult3, DataHarmRateI.class);
@@ -1524,8 +1522,8 @@ public class DayJob {
stringBuilder7.append("percentile(i_").append(i).append(",95) AS i_").append(i).append(",");
}
}
-stringBuilder8.append(Param.TIME + " >= '").append(startTime).append("' and ").append(Param.TIME).append(" <= '").append(endTime).append("' and value_type = 'AVG' group by line_id,phasic_type,value_type,quality_flag tz('Asia/Shanghai')");
+stringBuilder8.append(InfluxDBPublicParam.TIME + " >= '").append(startTime).append("' and ").append(InfluxDBPublicParam.TIME).append(" <= '").append(endTime).append("' and value_type = 'AVG' group by line_id,phasic_type,value_type,quality_flag ").append(InfluxDBPublicParam.TIME_ZONE);
-String sql4 = "select "+stringBuilder7+" from data_harmrate_i where "+stringBuilder+stringBuilder8;
+String sql4 = "select "+stringBuilder7+" from "+ InfluxDBPublicParam.DATA_HARM_RATE_I+" where "+stringBuilder+stringBuilder8;
QueryResult sqlResult4 = influxDbUtils.query(sql4);
InfluxDBResultMapper resultMapper4 = new InfluxDBResultMapper();
List<DataHarmRateI> list4 = resultMapper4.toPOJO(sqlResult4, DataHarmRateI.class);
@@ -1605,12 +1603,12 @@ public class DayJob {
fields.put("i_48",item.getI48());
fields.put("i_49",item.getI49());
fields.put("i_50",item.getI50());
-Point point = influxDbUtils.pointBuilder("day_harmrate_i", time, TimeUnit.MILLISECONDS, tags, fields);
+Point point = influxDbUtils.pointBuilder(InfluxDBPublicParam.DAY_HARM_RATE_I, time, TimeUnit.MILLISECONDS, tags, fields);
-BatchPoints batchPoints = BatchPoints.database(DATABASE).tag("line_id", item.getLineId()).tag("phasic_type",item.getPhaseType()).tag("quality_flag",item.getQualityFlag()).tag("value_type",item.getValueType()).retentionPolicy("").consistency(InfluxDB.ConsistencyLevel.ALL).build();
+BatchPoints batchPoints = BatchPoints.database(InfluxDBPublicParam.DATABASE).tag(InfluxDBPublicParam.LINE_ID, item.getLineId()).tag(InfluxDBPublicParam.PHASIC_TYPE,item.getPhaseType()).tag(InfluxDBPublicParam.QUALITY_FLAG,item.getQualityFlag()).tag(InfluxDBPublicParam.VALUE_TYPE,item.getValueType()).retentionPolicy("").consistency(InfluxDB.ConsistencyLevel.ALL).build();
batchPoints.point(point);
records.add(batchPoints.lineProtocol());
});
-influxDbUtils.batchInsert(DATABASE,"", InfluxDB.ConsistencyLevel.ALL, records);
+influxDbUtils.batchInsert(InfluxDBPublicParam.DATABASE,"", InfluxDB.ConsistencyLevel.ALL, records);
}

/**
@@ -1634,8 +1632,8 @@ public class DayJob {
stringBuilder1.append("min(v_").append(i).append(") AS v_").append(i).append(",");
}
}
-stringBuilder2.append(Param.TIME + " >= '").append(startTime).append("' and ").append(Param.TIME).append(" <= '").append(endTime).append("' and value_type = 'MIN' group by line_id,phasic_type,value_type,quality_flag tz('Asia/Shanghai')");
+stringBuilder2.append(InfluxDBPublicParam.TIME + " >= '").append(startTime).append("' and ").append(InfluxDBPublicParam.TIME).append(" <= '").append(endTime).append("' and value_type = 'MIN' group by line_id,phasic_type,value_type,quality_flag ").append(InfluxDBPublicParam.TIME_ZONE);
-String sql1 = "select "+stringBuilder1+" from data_harmrate_v where "+stringBuilder+stringBuilder2;
+String sql1 = "select "+stringBuilder1+" from "+ InfluxDBPublicParam.DATA_HARM_RATE_V+" where "+stringBuilder+stringBuilder2;
QueryResult sqlResult1 = influxDbUtils.query(sql1);
InfluxDBResultMapper resultMapper1 = new InfluxDBResultMapper();
List<DataHarmRateV> list1 = resultMapper1.toPOJO(sqlResult1, DataHarmRateV.class);
@@ -1649,8 +1647,8 @@ public class DayJob {
stringBuilder3.append("max(v_").append(i).append(") AS v_").append(i).append(",");
}
}
-stringBuilder4.append(Param.TIME + " >= '").append(startTime).append("' and ").append(Param.TIME).append(" <= '").append(endTime).append("' and value_type = 'MAX' group by line_id,phasic_type,value_type,quality_flag tz('Asia/Shanghai')");
+stringBuilder4.append(InfluxDBPublicParam.TIME + " >= '").append(startTime).append("' and ").append(InfluxDBPublicParam.TIME).append(" <= '").append(endTime).append("' and value_type = 'MAX' group by line_id,phasic_type,value_type,quality_flag ").append(InfluxDBPublicParam.TIME_ZONE);
-String sql2 = "select "+stringBuilder3+" from data_harmrate_v where "+stringBuilder+stringBuilder4;
+String sql2 = "select "+stringBuilder3+" from "+ InfluxDBPublicParam.DATA_HARM_RATE_V+" where "+stringBuilder+stringBuilder4;
QueryResult sqlResult2 = influxDbUtils.query(sql2);
InfluxDBResultMapper resultMapper2 = new InfluxDBResultMapper();
List<DataHarmRateV> list2 = resultMapper2.toPOJO(sqlResult2, DataHarmRateV.class);
@@ -1664,8 +1662,8 @@ public class DayJob {
stringBuilder5.append("mean(v_").append(i).append(") AS v_").append(i).append(",");
}
}
-stringBuilder6.append(Param.TIME + " >= '").append(startTime).append("' and ").append(Param.TIME).append(" <= '").append(endTime).append("' and value_type = 'AVG' group by line_id,phasic_type,value_type,quality_flag tz('Asia/Shanghai')");
+stringBuilder6.append(InfluxDBPublicParam.TIME + " >= '").append(startTime).append("' and ").append(InfluxDBPublicParam.TIME).append(" <= '").append(endTime).append("' and value_type = 'AVG' group by line_id,phasic_type,value_type,quality_flag ").append(InfluxDBPublicParam.TIME_ZONE);
-String sql3 = "select "+stringBuilder5+" from data_harmrate_v where "+stringBuilder+stringBuilder6;
+String sql3 = "select "+stringBuilder5+" from "+ InfluxDBPublicParam.DATA_HARM_RATE_V+" where "+stringBuilder+stringBuilder6;
QueryResult sqlResult3 = influxDbUtils.query(sql3);
InfluxDBResultMapper resultMapper3 = new InfluxDBResultMapper();
List<DataHarmRateV> list3 = resultMapper3.toPOJO(sqlResult3, DataHarmRateV.class);
@@ -1679,8 +1677,8 @@ public class DayJob {
stringBuilder7.append("percentile(v_").append(i).append(",95) AS v_").append(i).append(",");
}
}
-stringBuilder8.append(Param.TIME + " >= '").append(startTime).append("' and ").append(Param.TIME).append(" <= '").append(endTime).append("' and value_type = 'AVG' group by line_id,phasic_type,value_type,quality_flag tz('Asia/Shanghai')");
+stringBuilder8.append(InfluxDBPublicParam.TIME + " >= '").append(startTime).append("' and ").append(InfluxDBPublicParam.TIME).append(" <= '").append(endTime).append("' and value_type = 'AVG' group by line_id,phasic_type,value_type,quality_flag ").append(InfluxDBPublicParam.TIME_ZONE);
-String sql4 = "select "+stringBuilder7+" from data_harmrate_v where "+stringBuilder+stringBuilder8;
+String sql4 = "select "+stringBuilder7+" from "+ InfluxDBPublicParam.DATA_HARM_RATE_V+" where "+stringBuilder+stringBuilder8;
QueryResult sqlResult4 = influxDbUtils.query(sql4);
InfluxDBResultMapper resultMapper4 = new InfluxDBResultMapper();
List<DataHarmRateV> list4 = resultMapper4.toPOJO(sqlResult4, DataHarmRateV.class);
@@ -1760,12 +1758,12 @@ public class DayJob {
fields.put("v_48",item.getV48());
fields.put("v_49",item.getV49());
fields.put("v_50",item.getV50());
-Point point = influxDbUtils.pointBuilder("day_harmrate_v", time, TimeUnit.MILLISECONDS, tags, fields);
+Point point = influxDbUtils.pointBuilder(InfluxDBPublicParam.DAY_HARM_RATE_V, time, TimeUnit.MILLISECONDS, tags, fields);
-BatchPoints batchPoints = BatchPoints.database(DATABASE).tag("line_id", item.getLineId()).tag("phasic_type",item.getPhaseType()).tag("quality_flag",item.getQualityFlag()).tag("value_type",item.getValueType()).retentionPolicy("").consistency(InfluxDB.ConsistencyLevel.ALL).build();
+BatchPoints batchPoints = BatchPoints.database(InfluxDBPublicParam.DATABASE).tag(InfluxDBPublicParam.LINE_ID, item.getLineId()).tag(InfluxDBPublicParam.PHASIC_TYPE,item.getPhaseType()).tag(InfluxDBPublicParam.QUALITY_FLAG,item.getQualityFlag()).tag(InfluxDBPublicParam.VALUE_TYPE,item.getValueType()).retentionPolicy("").consistency(InfluxDB.ConsistencyLevel.ALL).build();
batchPoints.point(point);
records.add(batchPoints.lineProtocol());
});
-influxDbUtils.batchInsert(DATABASE,"", InfluxDB.ConsistencyLevel.ALL, records);
+influxDbUtils.batchInsert(InfluxDBPublicParam.DATABASE,"", InfluxDB.ConsistencyLevel.ALL, records);
}

/**
@@ -1789,8 +1787,8 @@ public class DayJob {
stringBuilder1.append("min(i_").append(i).append(") AS i_").append(i).append(",");
}
}
-stringBuilder2.append(Param.TIME + " >= '").append(startTime).append("' and ").append(Param.TIME).append(" <= '").append(endTime).append("' and value_type = 'MIN' group by line_id,phasic_type,value_type,quality_flag tz('Asia/Shanghai')");
+stringBuilder2.append(InfluxDBPublicParam.TIME + " >= '").append(startTime).append("' and ").append(InfluxDBPublicParam.TIME).append(" <= '").append(endTime).append("' and value_type = 'MIN' group by line_id,phasic_type,value_type,quality_flag ").append(InfluxDBPublicParam.TIME_ZONE);
-String sql1 = "select "+stringBuilder1+" from data_inharm_i where "+stringBuilder+stringBuilder2;
+String sql1 = "select "+stringBuilder1+" from "+ InfluxDBPublicParam.DATA_IN_HARM_I+" where "+stringBuilder+stringBuilder2;
QueryResult sqlResult1 = influxDbUtils.query(sql1);
InfluxDBResultMapper resultMapper1 = new InfluxDBResultMapper();
List<DataInHarmI> list1 = resultMapper1.toPOJO(sqlResult1, DataInHarmI.class);
@@ -1804,8 +1802,8 @@ public class DayJob {
stringBuilder3.append("max(i_").append(i).append(") AS i_").append(i).append(",");
}
}
-stringBuilder4.append(Param.TIME + " >= '").append(startTime).append("' and ").append(Param.TIME).append(" <= '").append(endTime).append("' and value_type = 'MAX' group by line_id,phasic_type,value_type,quality_flag tz('Asia/Shanghai')");
+stringBuilder4.append(InfluxDBPublicParam.TIME + " >= '").append(startTime).append("' and ").append(InfluxDBPublicParam.TIME).append(" <= '").append(endTime).append("' and value_type = 'MAX' group by line_id,phasic_type,value_type,quality_flag ").append(InfluxDBPublicParam.TIME_ZONE);
-String sql2 = "select "+stringBuilder3+" from data_inharm_i where "+stringBuilder+stringBuilder4;
+String sql2 = "select "+stringBuilder3+" from "+ InfluxDBPublicParam.DATA_IN_HARM_I+" where "+stringBuilder+stringBuilder4;
QueryResult sqlResult2 = influxDbUtils.query(sql2);
InfluxDBResultMapper resultMapper2 = new InfluxDBResultMapper();
List<DataInHarmI> list2 = resultMapper2.toPOJO(sqlResult2, DataInHarmI.class);
@@ -1819,8 +1817,8 @@ public class DayJob {
stringBuilder5.append("mean(i_").append(i).append(") AS i_").append(i).append(",");
}
}
-stringBuilder6.append(Param.TIME + " >= '").append(startTime).append("' and ").append(Param.TIME).append(" <= '").append(endTime).append("' and value_type = 'AVG' group by line_id,phasic_type,value_type,quality_flag tz('Asia/Shanghai')");
+stringBuilder6.append(InfluxDBPublicParam.TIME + " >= '").append(startTime).append("' and ").append(InfluxDBPublicParam.TIME).append(" <= '").append(endTime).append("' and value_type = 'AVG' group by line_id,phasic_type,value_type,quality_flag ").append(InfluxDBPublicParam.TIME_ZONE);
-String sql3 = "select "+stringBuilder5+" from data_inharm_i where "+stringBuilder+stringBuilder6;
+String sql3 = "select "+stringBuilder5+" from "+ InfluxDBPublicParam.DATA_IN_HARM_I+" where "+stringBuilder+stringBuilder6;
QueryResult sqlResult3 = influxDbUtils.query(sql3);
InfluxDBResultMapper resultMapper3 = new InfluxDBResultMapper();
List<DataInHarmI> list3 = resultMapper3.toPOJO(sqlResult3, DataInHarmI.class);
@@ -1834,8 +1832,8 @@ public class DayJob {
stringBuilder7.append("percentile(i_").append(i).append(",95) AS i_").append(i).append(",");
}
}
-stringBuilder8.append(Param.TIME + " >= '").append(startTime).append("' and ").append(Param.TIME).append(" <= '").append(endTime).append("' and value_type = 'AVG' group by line_id,phasic_type,value_type,quality_flag tz('Asia/Shanghai')");
+stringBuilder8.append(InfluxDBPublicParam.TIME + " >= '").append(startTime).append("' and ").append(InfluxDBPublicParam.TIME).append(" <= '").append(endTime).append("' and value_type = 'AVG' group by line_id,phasic_type,value_type,quality_flag ").append(InfluxDBPublicParam.TIME_ZONE);
-String sql4 = "select "+stringBuilder7+" from data_inharm_i where "+stringBuilder+stringBuilder8;
+String sql4 = "select "+stringBuilder7+" from "+ InfluxDBPublicParam.DATA_IN_HARM_I+" where "+stringBuilder+stringBuilder8;
QueryResult sqlResult4 = influxDbUtils.query(sql4);
InfluxDBResultMapper resultMapper4 = new InfluxDBResultMapper();
List<DataInHarmI> list4 = resultMapper4.toPOJO(sqlResult4, DataInHarmI.class);
@@ -1915,12 +1913,12 @@ public class DayJob {
fields.put("i_48",item.getI48());
fields.put("i_49",item.getI49());
fields.put("i_50",item.getI50());
-Point point = influxDbUtils.pointBuilder("day_inharm_i", time, TimeUnit.MILLISECONDS, tags, fields);
+Point point = influxDbUtils.pointBuilder(InfluxDBPublicParam.DAY_IN_HARM_I, time, TimeUnit.MILLISECONDS, tags, fields);
-BatchPoints batchPoints = BatchPoints.database(DATABASE).tag("line_id", item.getLineId()).tag("phasic_type",item.getPhaseType()).tag("quality_flag",item.getQualityFlag()).tag("value_type",item.getValueType()).retentionPolicy("").consistency(InfluxDB.ConsistencyLevel.ALL).build();
+BatchPoints batchPoints = BatchPoints.database(InfluxDBPublicParam.DATABASE).tag(InfluxDBPublicParam.LINE_ID, item.getLineId()).tag(InfluxDBPublicParam.PHASIC_TYPE,item.getPhaseType()).tag(InfluxDBPublicParam.QUALITY_FLAG,item.getQualityFlag()).tag(InfluxDBPublicParam.VALUE_TYPE,item.getValueType()).retentionPolicy("").consistency(InfluxDB.ConsistencyLevel.ALL).build();
batchPoints.point(point);
records.add(batchPoints.lineProtocol());
});
-influxDbUtils.batchInsert(DATABASE,"", InfluxDB.ConsistencyLevel.ALL, records);
+influxDbUtils.batchInsert(InfluxDBPublicParam.DATABASE,"", InfluxDB.ConsistencyLevel.ALL, records);
}

/**
@@ -1944,8 +1942,8 @@ public class DayJob {
stringBuilder1.append("min(v_").append(i).append(") AS v_").append(i).append(",");
}
}
-stringBuilder2.append(Param.TIME + " >= '").append(startTime).append("' and ").append(Param.TIME).append(" <= '").append(endTime).append("' and value_type = 'MIN' group by line_id,phasic_type,value_type,quality_flag tz('Asia/Shanghai')");
+stringBuilder2.append(InfluxDBPublicParam.TIME + " >= '").append(startTime).append("' and ").append(InfluxDBPublicParam.TIME).append(" <= '").append(endTime).append("' and value_type = 'MIN' group by line_id,phasic_type,value_type,quality_flag ").append(InfluxDBPublicParam.TIME_ZONE);
-String sql1 = "select "+stringBuilder1+" from data_inharm_v where "+stringBuilder+stringBuilder2;
+String sql1 = "select "+stringBuilder1+" from "+ InfluxDBPublicParam.DATA_IN_HARM_V+" where "+stringBuilder+stringBuilder2;
QueryResult sqlResult1 = influxDbUtils.query(sql1);
InfluxDBResultMapper resultMapper1 = new InfluxDBResultMapper();
List<DataInHarmV> list1 = resultMapper1.toPOJO(sqlResult1, DataInHarmV.class);
@@ -1959,8 +1957,8 @@ public class DayJob {
stringBuilder3.append("max(v_").append(i).append(") AS v_").append(i).append(",");
}
}
-stringBuilder4.append(Param.TIME + " >= '").append(startTime).append("' and ").append(Param.TIME).append(" <= '").append(endTime).append("' and value_type = 'MAX' group by line_id,phasic_type,value_type,quality_flag tz('Asia/Shanghai')");
+stringBuilder4.append(InfluxDBPublicParam.TIME + " >= '").append(startTime).append("' and ").append(InfluxDBPublicParam.TIME).append(" <= '").append(endTime).append("' and value_type = 'MAX' group by line_id,phasic_type,value_type,quality_flag ").append(InfluxDBPublicParam.TIME_ZONE);
-String sql2 = "select "+stringBuilder3+" from data_inharm_v where "+stringBuilder+stringBuilder4;
+String sql2 = "select "+stringBuilder3+" from "+ InfluxDBPublicParam.DATA_IN_HARM_V+" where "+stringBuilder+stringBuilder4;
QueryResult sqlResult2 = influxDbUtils.query(sql2);
InfluxDBResultMapper resultMapper2 = new InfluxDBResultMapper();
List<DataInHarmV> list2 = resultMapper2.toPOJO(sqlResult2, DataInHarmV.class);
@@ -1974,8 +1972,8 @@ public class DayJob {
stringBuilder5.append("mean(v_").append(i).append(") AS v_").append(i).append(",");
}
}
-stringBuilder6.append(Param.TIME + " >= '").append(startTime).append("' and ").append(Param.TIME).append(" <= '").append(endTime).append("' and value_type = 'AVG' group by line_id,phasic_type,value_type,quality_flag tz('Asia/Shanghai')");
+stringBuilder6.append(InfluxDBPublicParam.TIME + " >= '").append(startTime).append("' and ").append(InfluxDBPublicParam.TIME).append(" <= '").append(endTime).append("' and value_type = 'AVG' group by line_id,phasic_type,value_type,quality_flag ").append(InfluxDBPublicParam.TIME_ZONE);
-String sql3 = "select "+stringBuilder5+" from data_inharm_v where "+stringBuilder+stringBuilder6;
+String sql3 = "select "+stringBuilder5+" from "+ InfluxDBPublicParam.DATA_IN_HARM_V+" where "+stringBuilder+stringBuilder6;
QueryResult sqlResult3 = influxDbUtils.query(sql3);
InfluxDBResultMapper resultMapper3 = new InfluxDBResultMapper();
List<DataInHarmV> list3 = resultMapper3.toPOJO(sqlResult3, DataInHarmV.class);
@@ -1989,8 +1987,8 @@ public class DayJob {
stringBuilder7.append("percentile(v_").append(i).append(",95) AS v_").append(i).append(",");
}
}
-stringBuilder8.append(Param.TIME + " >= '").append(startTime).append("' and ").append(Param.TIME).append(" <= '").append(endTime).append("' and value_type = 'AVG' group by line_id,phasic_type,value_type,quality_flag tz('Asia/Shanghai')");
+stringBuilder8.append(InfluxDBPublicParam.TIME + " >= '").append(startTime).append("' and ").append(InfluxDBPublicParam.TIME).append(" <= '").append(endTime).append("' and value_type = 'AVG' group by line_id,phasic_type,value_type,quality_flag ").append(InfluxDBPublicParam.TIME_ZONE);
-String sql4 = "select "+stringBuilder7+" from data_inharm_v where "+stringBuilder+stringBuilder8;
+String sql4 = "select "+stringBuilder7+" from "+ InfluxDBPublicParam.DATA_IN_HARM_V+" where "+stringBuilder+stringBuilder8;
QueryResult sqlResult4 = influxDbUtils.query(sql4);
InfluxDBResultMapper resultMapper4 = new InfluxDBResultMapper();
List<DataInHarmV> list4 = resultMapper4.toPOJO(sqlResult4, DataInHarmV.class);
@@ -2070,12 +2068,12 @@ public class DayJob {
fields.put("v_48",item.getV48());
fields.put("v_49",item.getV49());
fields.put("v_50",item.getV50());
-Point point = influxDbUtils.pointBuilder("day_inharm_v", time, TimeUnit.MILLISECONDS, tags, fields);
+Point point = influxDbUtils.pointBuilder(InfluxDBPublicParam.DAY_IN_HARM_V, time, TimeUnit.MILLISECONDS, tags, fields);
-BatchPoints batchPoints = BatchPoints.database(DATABASE).tag("line_id", item.getLineId()).tag("phasic_type",item.getPhaseType()).tag("quality_flag",item.getQualityFlag()).tag("value_type",item.getValueType()).retentionPolicy("").consistency(InfluxDB.ConsistencyLevel.ALL).build();
+BatchPoints batchPoints = BatchPoints.database(InfluxDBPublicParam.DATABASE).tag(InfluxDBPublicParam.LINE_ID, item.getLineId()).tag(InfluxDBPublicParam.PHASIC_TYPE,item.getPhaseType()).tag(InfluxDBPublicParam.QUALITY_FLAG,item.getQualityFlag()).tag(InfluxDBPublicParam.VALUE_TYPE,item.getValueType()).retentionPolicy("").consistency(InfluxDB.ConsistencyLevel.ALL).build();
batchPoints.point(point);
records.add(batchPoints.lineProtocol());
});
-influxDbUtils.batchInsert(DATABASE,"", InfluxDB.ConsistencyLevel.ALL, records);
+influxDbUtils.batchInsert(InfluxDBPublicParam.DATABASE,"", InfluxDB.ConsistencyLevel.ALL, records);
}

@@ -2100,8 +2098,8 @@ public class DayJob {
stringBuilder1.append("min(i_").append(i).append(") AS i_").append(i).append(",");
}
}
-stringBuilder2.append(Param.TIME + " >= '").append(startTime).append("' and ").append(Param.TIME).append(" <= '").append(endTime).append("' and value_type = 'MIN' group by line_id,phasic_type,value_type,quality_flag tz('Asia/Shanghai')");
+stringBuilder2.append(InfluxDBPublicParam.TIME + " >= '").append(startTime).append("' and ").append(InfluxDBPublicParam.TIME).append(" <= '").append(endTime).append("' and value_type = 'MIN' group by line_id,phasic_type,value_type,quality_flag ").append(InfluxDBPublicParam.TIME_ZONE);
-String sql1 = "select "+stringBuilder1+" from data_inharmrate_i where "+stringBuilder+stringBuilder2;
+String sql1 = "select "+stringBuilder1+" from "+ InfluxDBPublicParam.DATA_IN_HARM_I+" where "+stringBuilder+stringBuilder2;
QueryResult sqlResult1 = influxDbUtils.query(sql1);
InfluxDBResultMapper resultMapper1 = new InfluxDBResultMapper();
List<DataInHarmRateI> list1 = resultMapper1.toPOJO(sqlResult1, DataInHarmRateI.class);
@@ -2115,8 +2113,8 @@ public class DayJob {
stringBuilder3.append("max(i_").append(i).append(") AS i_").append(i).append(",");
}
}
-stringBuilder4.append(Param.TIME + " >= '").append(startTime).append("' and ").append(Param.TIME).append(" <= '").append(endTime).append("' and value_type = 'MAX' group by line_id,phasic_type,value_type,quality_flag tz('Asia/Shanghai')");
+stringBuilder4.append(InfluxDBPublicParam.TIME + " >= '").append(startTime).append("' and ").append(InfluxDBPublicParam.TIME).append(" <= '").append(endTime).append("' and value_type = 'MAX' group by line_id,phasic_type,value_type,quality_flag ").append(InfluxDBPublicParam.TIME_ZONE);
-String sql2 = "select "+stringBuilder3+" from data_inharmrate_i where "+stringBuilder+stringBuilder4;
+String sql2 = "select "+stringBuilder3+" from "+ InfluxDBPublicParam.DATA_IN_HARM_I+" where "+stringBuilder+stringBuilder4;
QueryResult sqlResult2 = influxDbUtils.query(sql2);
InfluxDBResultMapper resultMapper2 = new InfluxDBResultMapper();
List<DataInHarmRateI> list2 = resultMapper2.toPOJO(sqlResult2, DataInHarmRateI.class);
@@ -2130,8 +2128,8 @@ public class DayJob {
stringBuilder5.append("mean(i_").append(i).append(") AS i_").append(i).append(",");
}
}
-stringBuilder6.append(Param.TIME + " >= '").append(startTime).append("' and ").append(Param.TIME).append(" <= '").append(endTime).append("' and value_type = 'AVG' group by line_id,phasic_type,value_type,quality_flag tz('Asia/Shanghai')");
+stringBuilder6.append(InfluxDBPublicParam.TIME + " >= '").append(startTime).append("' and ").append(InfluxDBPublicParam.TIME).append(" <= '").append(endTime).append("' and value_type = 'AVG' group by line_id,phasic_type,value_type,quality_flag ").append(InfluxDBPublicParam.TIME_ZONE);
-String sql3 = "select "+stringBuilder5+" from data_inharmrate_i where "+stringBuilder+stringBuilder6;
+String sql3 = "select "+stringBuilder5+" from "+ InfluxDBPublicParam.DATA_IN_HARM_I+" where "+stringBuilder+stringBuilder6;
QueryResult sqlResult3 = influxDbUtils.query(sql3);
InfluxDBResultMapper resultMapper3 = new InfluxDBResultMapper();
List<DataInHarmRateI> list3 = resultMapper3.toPOJO(sqlResult3, DataInHarmRateI.class);
@@ -2145,8 +2143,8 @@ public class DayJob {
stringBuilder7.append("percentile(i_").append(i).append(",95) AS i_").append(i).append(",");
}
}
-stringBuilder8.append(Param.TIME + " >= '").append(startTime).append("' and ").append(Param.TIME).append(" <= '").append(endTime).append("' and value_type = 'AVG' group by line_id,phasic_type,value_type,quality_flag tz('Asia/Shanghai')");
+stringBuilder8.append(InfluxDBPublicParam.TIME + " >= '").append(startTime).append("' and ").append(InfluxDBPublicParam.TIME).append(" <= '").append(endTime).append("' and value_type = 'AVG' group by line_id,phasic_type,value_type,quality_flag ").append(InfluxDBPublicParam.TIME_ZONE);
-String sql4 = "select "+stringBuilder7+" from data_inharmrate_i where "+stringBuilder+stringBuilder8;
+String sql4 = "select "+stringBuilder7+" from "+ InfluxDBPublicParam.DATA_IN_HARM_I+" where "+stringBuilder+stringBuilder8;
QueryResult sqlResult4 = influxDbUtils.query(sql4);
InfluxDBResultMapper resultMapper4 = new InfluxDBResultMapper();
List<DataInHarmRateI> list4 = resultMapper4.toPOJO(sqlResult4, DataInHarmRateI.class);
@@ -2226,12 +2224,12 @@ public class DayJob {
fields.put("i_48",item.getI48());
fields.put("i_49",item.getI49());
fields.put("i_50",item.getI50());
-Point point = influxDbUtils.pointBuilder("day_inharmrate_i", time, TimeUnit.MILLISECONDS, tags, fields);
+Point point = influxDbUtils.pointBuilder(InfluxDBPublicParam.DAY_IN_HARM_I, time, TimeUnit.MILLISECONDS, tags, fields);
-BatchPoints batchPoints = BatchPoints.database(DATABASE).tag("line_id", item.getLineId()).tag("phasic_type",item.getPhaseType()).tag("quality_flag",item.getQualityFlag()).tag("value_type",item.getValueType()).retentionPolicy("").consistency(InfluxDB.ConsistencyLevel.ALL).build();
+BatchPoints batchPoints = BatchPoints.database(InfluxDBPublicParam.DATABASE).tag(InfluxDBPublicParam.LINE_ID, item.getLineId()).tag(InfluxDBPublicParam.PHASIC_TYPE,item.getPhaseType()).tag(InfluxDBPublicParam.QUALITY_FLAG,item.getQualityFlag()).tag(InfluxDBPublicParam.VALUE_TYPE,item.getValueType()).retentionPolicy("").consistency(InfluxDB.ConsistencyLevel.ALL).build();
batchPoints.point(point);
records.add(batchPoints.lineProtocol());
});
-influxDbUtils.batchInsert(DATABASE,"", InfluxDB.ConsistencyLevel.ALL, records);
+influxDbUtils.batchInsert(InfluxDBPublicParam.DATABASE,"", InfluxDB.ConsistencyLevel.ALL, records);
}

/**
@@ -2255,8 +2253,8 @@ public class DayJob {
stringBuilder1.append("min(v_").append(i).append(") AS v_").append(i).append(",");
}
}
-stringBuilder2.append(Param.TIME + " >= '").append(startTime).append("' and ").append(Param.TIME).append(" <= '").append(endTime).append("' and value_type = 'MIN' group by line_id,phasic_type,value_type,quality_flag tz('Asia/Shanghai')");
+stringBuilder2.append(InfluxDBPublicParam.TIME + " >= '").append(startTime).append("' and ").append(InfluxDBPublicParam.TIME).append(" <= '").append(endTime).append("' and value_type = 'MIN' group by line_id,phasic_type,value_type,quality_flag ").append(InfluxDBPublicParam.TIME_ZONE);
-String sql1 = "select "+stringBuilder1+" from data_inharmrate_v where "+stringBuilder+stringBuilder2;
+String sql1 = "select "+stringBuilder1+" from "+ InfluxDBPublicParam.DATA_IN_HARM_V+" where "+stringBuilder+stringBuilder2;
QueryResult sqlResult1 = influxDbUtils.query(sql1);
InfluxDBResultMapper resultMapper1 = new InfluxDBResultMapper();
List<DataInHarmRateV> list1 = resultMapper1.toPOJO(sqlResult1, DataInHarmRateV.class);
@@ -2270,8 +2268,8 @@ public class DayJob {
stringBuilder3.append("max(v_").append(i).append(") AS v_").append(i).append(",");
}
}
-stringBuilder4.append(Param.TIME + " >= '").append(startTime).append("' and ").append(Param.TIME).append(" <= '").append(endTime).append("' and value_type = 'MAX' group by line_id,phasic_type,value_type,quality_flag tz('Asia/Shanghai')");
+stringBuilder4.append(InfluxDBPublicParam.TIME + " >= '").append(startTime).append("' and ").append(InfluxDBPublicParam.TIME).append(" <= '").append(endTime).append("' and value_type = 'MAX' group by line_id,phasic_type,value_type,quality_flag ").append(InfluxDBPublicParam.TIME_ZONE);
-String sql2 = "select "+stringBuilder3+" from data_inharmrate_v where "+stringBuilder+stringBuilder4;
+String sql2 = "select "+stringBuilder3+" from "+ InfluxDBPublicParam.DATA_IN_HARM_V+" where "+stringBuilder+stringBuilder4;
QueryResult sqlResult2 = influxDbUtils.query(sql2);
InfluxDBResultMapper resultMapper2 = new InfluxDBResultMapper();
List<DataInHarmRateV> list2 = resultMapper2.toPOJO(sqlResult2, DataInHarmRateV.class);
@@ -2285,8 +2283,8 @@ public class DayJob {
stringBuilder5.append("mean(v_").append(i).append(") AS v_").append(i).append(",");
}
}
-stringBuilder6.append(Param.TIME + " >= '").append(startTime).append("' and ").append(Param.TIME).append(" <= '").append(endTime).append("' and value_type = 'AVG' group by line_id,phasic_type,value_type,quality_flag tz('Asia/Shanghai')");
+stringBuilder6.append(InfluxDBPublicParam.TIME + " >= '").append(startTime).append("' and ").append(InfluxDBPublicParam.TIME).append(" <= '").append(endTime).append("' and value_type = 'AVG' group by line_id,phasic_type,value_type,quality_flag ").append(InfluxDBPublicParam.TIME_ZONE);
-String sql3 = "select "+stringBuilder5+" from data_inharmrate_v where "+stringBuilder+stringBuilder6;
+String sql3 = "select "+stringBuilder5+" from "+ InfluxDBPublicParam.DATA_IN_HARM_V+" where "+stringBuilder+stringBuilder6;
QueryResult sqlResult3 = influxDbUtils.query(sql3);
InfluxDBResultMapper resultMapper3 = new InfluxDBResultMapper();
List<DataInHarmRateV> list3 = resultMapper3.toPOJO(sqlResult3, DataInHarmRateV.class);
@@ -2300,8 +2298,8 @@ public class DayJob {
stringBuilder7.append("percentile(v_").append(i).append(",95) AS v_").append(i).append(",");
}
}
-stringBuilder8.append(Param.TIME + " >= '").append(startTime).append("' and ").append(Param.TIME).append(" <= '").append(endTime).append("' and value_type = 'AVG' group by line_id,phasic_type,value_type,quality_flag tz('Asia/Shanghai')");
+stringBuilder8.append(InfluxDBPublicParam.TIME + " >= '").append(startTime).append("' and ").append(InfluxDBPublicParam.TIME).append(" <= '").append(endTime).append("' and value_type = 'AVG' group by line_id,phasic_type,value_type,quality_flag ").append(InfluxDBPublicParam.TIME_ZONE);
-String sql4 = "select "+stringBuilder7+" from data_inharmrate_v where "+stringBuilder+stringBuilder8;
+String sql4 = "select "+stringBuilder7+" from "+ InfluxDBPublicParam.DATA_IN_HARM_V+" where "+stringBuilder+stringBuilder8;
QueryResult sqlResult4 = influxDbUtils.query(sql4);
InfluxDBResultMapper resultMapper4 = new InfluxDBResultMapper();
List<DataInHarmRateV> list4 = resultMapper4.toPOJO(sqlResult4, DataInHarmRateV.class);
@@ -2381,12 +2379,12 @@ public class DayJob {
fields.put("v_48",item.getV48());
fields.put("v_49",item.getV49());
fields.put("v_50",item.getV50());
-Point point = influxDbUtils.pointBuilder("day_inharmrate_v", time, TimeUnit.MILLISECONDS, tags, fields);
+Point point = influxDbUtils.pointBuilder(InfluxDBPublicParam.DATA_IN_HARM_V, time, TimeUnit.MILLISECONDS, tags, fields);
-BatchPoints batchPoints = BatchPoints.database(DATABASE).tag("line_id", item.getLineId()).tag("phasic_type",item.getPhaseType()).tag("quality_flag",item.getQualityFlag()).tag("value_type",item.getValueType()).retentionPolicy("").consistency(InfluxDB.ConsistencyLevel.ALL).build();
+BatchPoints batchPoints = BatchPoints.database(InfluxDBPublicParam.DATABASE).tag(InfluxDBPublicParam.LINE_ID, item.getLineId()).tag(InfluxDBPublicParam.PHASIC_TYPE,item.getPhaseType()).tag(InfluxDBPublicParam.QUALITY_FLAG,item.getQualityFlag()).tag(InfluxDBPublicParam.VALUE_TYPE,item.getValueType()).retentionPolicy("").consistency(InfluxDB.ConsistencyLevel.ALL).build();
batchPoints.point(point);
records.add(batchPoints.lineProtocol());
});
-influxDbUtils.batchInsert(DATABASE,"", InfluxDB.ConsistencyLevel.ALL, records);
+influxDbUtils.batchInsert(InfluxDBPublicParam.DATABASE,"", InfluxDB.ConsistencyLevel.ALL, records);
}

/**
@@ -2401,7 +2399,7 @@ public class DayJob {
List<DataPlt> result = new ArrayList<>();
StringBuilder stringBuilder = lineStringBuilder(lineList);
//最小值 (minimum)
-String sql1 = "select min(plt) AS plt from data_plt where "+stringBuilder+" time >= '" + startTime + "' and time <= '" + endTime + "' group by line_id,phasic_type,quality_flag tz('Asia/Shanghai')";
+String sql1 = "select min(plt) AS plt from "+ InfluxDBPublicParam.DATA_PLT+" where "+stringBuilder+" time >= '" + startTime + "' and time <= '" + endTime + "' group by line_id,phasic_type,quality_flag "+ InfluxDBPublicParam.TIME_ZONE;
QueryResult sqlResult1 = influxDbUtils.query(sql1);
InfluxDBResultMapper resultMapper1 = new InfluxDBResultMapper();
List<DataPlt> list1 = resultMapper1.toPOJO(sqlResult1, DataPlt.class);
@@ -2409,7 +2407,7 @@ public class DayJob {
item.setValueType("MIN");
});
//最大值 (maximum)
-String sql2 = "select max(plt) AS plt from data_plt where "+stringBuilder+" time >= '" + startTime + "' and time <= '" + endTime + "' group by line_id,phasic_type,quality_flag tz('Asia/Shanghai')";
+String sql2 = "select max(plt) AS plt from "+ InfluxDBPublicParam.DATA_PLT+" where "+stringBuilder+" time >= '" + startTime + "' and time <= '" + endTime + "' group by line_id,phasic_type,quality_flag "+ InfluxDBPublicParam.TIME_ZONE;
QueryResult sqlResult2 = influxDbUtils.query(sql2);
InfluxDBResultMapper resultMapper2 = new InfluxDBResultMapper();
List<DataPlt> list2 = resultMapper2.toPOJO(sqlResult2, DataPlt.class);
@@ -2417,7 +2415,7 @@ public class DayJob {
item.setValueType("MAX");
});
//平均值 (mean)
-String sql3 = "select mean(plt) AS plt from data_plt where "+stringBuilder+" time >= '" + startTime + "' and time <= '" + endTime + "' group by line_id,phasic_type,quality_flag tz('Asia/Shanghai')";
+String sql3 = "select mean(plt) AS plt from "+ InfluxDBPublicParam.DATA_PLT+" where "+stringBuilder+" time >= '" + startTime + "' and time <= '" + endTime + "' group by line_id,phasic_type,quality_flag "+ InfluxDBPublicParam.TIME_ZONE;
QueryResult sqlResult3 = influxDbUtils.query(sql3);
InfluxDBResultMapper resultMapper3= new InfluxDBResultMapper();
List<DataPlt> list3 = resultMapper3.toPOJO(sqlResult3, DataPlt.class);
@@ -2425,7 +2423,7 @@ public class DayJob {
item.setValueType("AVG");
});
//CP95值 (CP95 value)
-String sql4 = "select percentile(plt,95) AS plt from data_plt where "+stringBuilder+" time >= '" + startTime + "' and time <= '" + endTime + "' group by line_id,phasic_type,quality_flag tz('Asia/Shanghai')";
|
|
|
|
String sql4 = "select percentile(plt,95) AS plt from "+ InfluxDBPublicParam.DATA_PLT+" where "+stringBuilder+" time >= '" + startTime + "' and time <= '" + endTime + "' group by line_id,phasic_type,quality_flag "+ InfluxDBPublicParam.TIME_ZONE;
|
|
|
|
QueryResult sqlResult4 = influxDbUtils.query(sql4);
|
|
|
|
QueryResult sqlResult4 = influxDbUtils.query(sql4);
|
|
|
|
InfluxDBResultMapper resultMapper4= new InfluxDBResultMapper();
|
|
|
|
InfluxDBResultMapper resultMapper4= new InfluxDBResultMapper();
|
|
|
|
List<DataPlt> list4 = resultMapper4.toPOJO(sqlResult4, DataPlt.class);
|
|
|
|
List<DataPlt> list4 = resultMapper4.toPOJO(sqlResult4, DataPlt.class);
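The four Plt statements above share one InfluxQL shape and differ only in the aggregate (min, max, mean, percentile(plt,95)). The sketch below shows roughly what the CP95 variant expands to; the WHERE fragment stands in for the unshown output of lineStringBuilder, and the time window and constant values (data_plt, tz('Asia/Shanghai')) are assumptions.

public class PltCp95QuerySketch {
    public static void main(String[] args) {
        // lineFilter mimics what lineStringBuilder is assumed to produce: a line_id filter ending in "and".
        String lineFilter = "(line_id = 'L-0001' or line_id = 'L-0002') and";
        String startTime = "2024-05-01T00:00:00Z";   // illustrative aggregation window
        String endTime = "2024-05-01T23:59:59Z";
        String cp95Sql = "select percentile(plt,95) AS plt from data_plt where " + lineFilter
                + " time >= '" + startTime + "' and time <= '" + endTime
                + "' group by line_id,phasic_type,quality_flag tz('Asia/Shanghai')";
        // GROUP BY line_id,phasic_type,quality_flag yields one series per line/phase/flag,
        // which InfluxDBResultMapper then maps back onto the DataPlt tag fields.
        System.out.println(cp95Sql);
    }
}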
@@ -2456,12 +2454,12 @@ public class DayJob {
 tags.put("quality_flag",item.getQualityFlag());
 tags.put("value_type",item.getValueType());
 fields.put("plt",item.getPlt());
-Point point = influxDbUtils.pointBuilder("day_plt", time, TimeUnit.MILLISECONDS, tags, fields);
-BatchPoints batchPoints = BatchPoints.database(DATABASE).tag("line_id", item.getLineId()).tag("phasic_type",item.getPhaseType()).tag("quality_flag",item.getQualityFlag()).tag("value_type",item.getValueType()).retentionPolicy("").consistency(InfluxDB.ConsistencyLevel.ALL).build();
+Point point = influxDbUtils.pointBuilder(InfluxDBPublicParam.DAY_PLT, time, TimeUnit.MILLISECONDS, tags, fields);
+BatchPoints batchPoints = BatchPoints.database(InfluxDBPublicParam.DATABASE).tag(InfluxDBPublicParam.LINE_ID, item.getLineId()).tag(InfluxDBPublicParam.PHASIC_TYPE,item.getPhaseType()).tag(InfluxDBPublicParam.QUALITY_FLAG,item.getQualityFlag()).tag(InfluxDBPublicParam.VALUE_TYPE,item.getValueType()).retentionPolicy("").consistency(InfluxDB.ConsistencyLevel.ALL).build();
 batchPoints.point(point);
 records.add(batchPoints.lineProtocol());
 });
-influxDbUtils.batchInsert(DATABASE,"", InfluxDB.ConsistencyLevel.ALL, records);
+influxDbUtils.batchInsert(InfluxDBPublicParam.DATABASE,"", InfluxDB.ConsistencyLevel.ALL, records);
 }
 
 public StringBuilder lineStringBuilder(List<String> list) {
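The write path in the hunk above is Point, then a single-point BatchPoints, then batchPoints.lineProtocol() collected into records and flushed via influxDbUtils.batchInsert(...). Below is a minimal, self-contained sketch of the equivalent flow using the stock influxdb-java client; the connection settings, tag values, and the "day_plt"/"pqs_db" names are illustrative assumptions, and the project's InfluxDbUtils helpers are replaced by direct client calls.

import org.influxdb.InfluxDB;
import org.influxdb.InfluxDBFactory;
import org.influxdb.dto.BatchPoints;
import org.influxdb.dto.Point;

import java.util.concurrent.TimeUnit;

public class DayPltWriteSketch {
    public static void main(String[] args) {
        // Placeholder connection settings; the job obtains its client through InfluxDbUtils instead.
        InfluxDB influxDB = InfluxDBFactory.connect("http://localhost:8086", "user", "pass");

        // One aggregated Plt point, mirroring the tags and field assembled in the lambda above.
        Point point = Point.measurement("day_plt")               // assumed value of InfluxDBPublicParam.DAY_PLT
                .time(System.currentTimeMillis(), TimeUnit.MILLISECONDS)
                .tag("line_id", "L-0001")                        // illustrative tag values
                .tag("phasic_type", "A")
                .tag("quality_flag", "0")
                .tag("value_type", "CP95")
                .addField("plt", 0.42)
                .build();

        // The job builds one BatchPoints per POJO, keeps batchPoints.lineProtocol() in a list,
        // and flushes the list through influxDbUtils.batchInsert(...). Writing the batch
        // directly, as below, sends the same line-protocol payload.
        BatchPoints batchPoints = BatchPoints.database("pqs_db") // placeholder database name
                .consistency(InfluxDB.ConsistencyLevel.ALL)
                .build();
        batchPoints.point(point);
        influxDB.write(batchPoints);

        influxDB.close();
    }
}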