// pqs/pqs-common/common-influxdb/src/test/java/test.java
import com.njcn.influxdb.utils.InfluxDbUtils;
import org.influxdb.InfluxDB.ConsistencyLevel;
import org.influxdb.dto.BatchPoints;
import org.influxdb.dto.Point;
import org.influxdb.dto.QueryResult;
import java.util.*;
import java.util.concurrent.TimeUnit;
/**
 * Manual test examples for InfluxDbUtils: querying, handling result sets,
 * and single/batch inserts against InfluxDB.
 *
 * @author xuyang
 * @version 1.0.0
 * @createTime 2021/11/16 11:07
 */
public class test {
    // Query: fetch all data_V points for phase A and print the elapsed time
    public static QueryResult select(InfluxDbUtils influxDBUtil) {
        long startTime = System.currentTimeMillis();
        QueryResult result = influxDBUtil.query("select * from data_V where phasic_type='A'");
        long endTime = System.currentTimeMillis();
        System.out.println("Query took " + (endTime - startTime) + " ms");
        return result;
    }
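    // A minimal sketch of a parameterized query as an alternative to string concatenation,
    // using influxdb-java's BoundParameterQuery. It assumes direct access to the underlying
    // org.influxdb.InfluxDB client (the InfluxDbUtils wrapper above only takes a raw string);
    // the method name is illustrative.
    public static QueryResult selectBound(org.influxdb.InfluxDB influxDB) {
        org.influxdb.dto.Query query = org.influxdb.dto.BoundParameterQuery.QueryBuilder
                .newQuery("select * from data_V where phasic_type = $type")
                .forDatabase("pqsbase")
                .bind("type", "A")
                .create();
        return influxDB.query(query);
    }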
    // Process the result set: take the first series and read each row positionally
    public static void chanelResult(QueryResult result) {
        QueryResult.Result result1 = result.getResults().get(0);
        if (result1.getSeries() != null) {
            List<List<Object>> valueList = result1.getSeries().get(0).getValues();
            if (valueList != null && !valueList.isEmpty()) {
                for (List<Object> value : valueList) {
                    // Value of field 1 (column 0) in the database
                    String field1 = value.get(0) == null ? null : value.get(0).toString();
                    System.out.println(field1);
                    // Value of field 2 (column 1) in the database
                    String field2 = value.get(1) == null ? null : value.get(1).toString();
                    System.out.println(field2);
                    // TODO apply your own business logic to the extracted fields...
                }
            }
        }
    }
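    // A minimal sketch of reading rows by column name instead of position, using
    // QueryResult.Series#getColumns(), so callers don't depend on column order.
    // The method name and the printed column names are illustrative assumptions.
    public static void chanelResultByColumn(QueryResult result) {
        QueryResult.Result res = result.getResults().get(0);
        if (res.getSeries() == null) {
            return;
        }
        QueryResult.Series series = res.getSeries().get(0);
        List<String> columns = series.getColumns();
        for (List<Object> row : series.getValues()) {
            // Build a column-name -> value map for this row
            Map<String, Object> rowMap = new HashMap<>();
            for (int i = 0; i < columns.size(); i++) {
                rowMap.put(columns.get(i), row.get(i));
            }
            // "time" is always present in InfluxDB results; "phasic_type" matches the query above
            System.out.println(rowMap.get("time") + " -> " + rowMap.get("phasic_type"));
        }
    }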
    public static void main(String[] args) {
        InfluxDbUtils influxDBUtil = new InfluxDbUtils("root", "123456789", "http://192.168.1.16:8086", "pqsbase", "");
        insert(influxDBUtil);
    }
    // Insert a single point
    public static void insert(InfluxDbUtils influxDBUtil) {
        // Tag set: the monitored line's id
        Map<String, String> tags = new HashMap<>();
        tags.put("lineid", "1e3b8531483b2a8cbee6747f1f641cf9");
        // Field set: measured power-quality values
        Map<String, Object> fields = new HashMap<>();
        fields.put("phasic_type", "T");
        fields.put("value_type", "MAX");
        fields.put("Freq_Dev", 48.6);
        fields.put("Voltage_Dev", 8.3);
        fields.put("UBalance", 7.6);
        fields.put("Flicker", 4.6);
        fields.put("UAberrance", 6.5);
        fields.put("I_Neg", 5.6);
        fields.put("UHarm_2", 6.8);
        fields.put("UHarm_3", 5.5);
        fields.put("UHarm_4", 5.4);
        fields.put("UHarm_5", 7.8);
        fields.put("UHarm_6", 6.2);
        fields.put("UHarm_7", 2.5);
        fields.put("UHarm_8", 8.3);
        fields.put("UHarm_9", 6.2);
        fields.put("UHarm_10", 12.8);
        fields.put("UHarm_11", 2.8);
        fields.put("UHarm_12", 8.4);
        fields.put("UHarm_13", 5.6);
        fields.put("UHarm_14", 5.2);
        fields.put("UHarm_15", 9.5);
        fields.put("UHarm_16", 8.3);
        fields.put("UHarm_17", 7.8);
        fields.put("UHarm_18", 6.2);
        fields.put("UHarm_19", 2.5);
        fields.put("UHarm_20", 4.5);
        fields.put("UHarm_21", 4.5);
        fields.put("UHarm_22", 6.5);
        fields.put("UHarm_23", 5.9);
        fields.put("UHarm_24", 9.2);
        fields.put("UHarm_25", 5.8);
        fields.put("IHarm_2", 12.8);
        fields.put("IHarm_3", 5.4);
        fields.put("IHarm_4", 6.2);
        fields.put("IHarm_5", 3.2);
        fields.put("IHarm_6", 5.2);
        fields.put("IHarm_7", 5.2);
        fields.put("IHarm_8", 5.5);
        fields.put("IHarm_9", 4.8);
        fields.put("IHarm_10", 8.2);
        fields.put("IHarm_11", 2.5);
        fields.put("IHarm_12", 8.6);
        fields.put("IHarm_13", 5.8);
        fields.put("IHarm_14", 3.5);
        fields.put("IHarm_15", 2.4);
        fields.put("IHarm_16", 5.2);
        fields.put("IHarm_17", 2.5);
        fields.put("IHarm_18", 9.2);
        fields.put("IHarm_19", 8.5);
        fields.put("IHarm_20", 8.5);
        fields.put("IHarm_21", 6.2);
        fields.put("IHarm_22", 5.2);
        fields.put("IHarm_23", 8.5);
        fields.put("IHarm_24", 5.2);
        fields.put("IHarm_25", 8.4);
        fields.put("InUHARM_1", 8.2);
        fields.put("InUHARM_2", 5.2);
        fields.put("InUHARM_3", 6.2);
        fields.put("InUHARM_4", 4.2);
        fields.put("InUHARM_5", 2.3);
        fields.put("InUHARM_6", 6.2);
        fields.put("InUHARM_7", 5.2);
        fields.put("InUHARM_8", 10.2);
        fields.put("InUHARM_9", 2.3);
        fields.put("InUHARM_10", 4.2);
        fields.put("InUHARM_11", 3.5);
        fields.put("InUHARM_12", 3.6);
        fields.put("InUHARM_13", 2.3);
        fields.put("InUHARM_14", 7.2);
        fields.put("InUHARM_15", 5.6);
        fields.put("InUHARM_16", 5.6);
        influxDBUtil.insert("PQS_AbnormalData", tags, fields, System.currentTimeMillis(), TimeUnit.MILLISECONDS);
    }
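    // A minimal sketch of what a wrapper insert like the one above presumably builds
    // underneath, using the stock influxdb-java Point builder; how InfluxDbUtils actually
    // maps its arguments is an assumption here. As noted further down, high-volume writes
    // should use the measurement's own timestamp rather than System.currentTimeMillis(),
    // since points with duplicate timestamps overwrite each other.
    public static Point buildPoint(Map<String, String> tags, Map<String, Object> fields) {
        return Point.measurement("PQS_AbnormalData")
                .time(System.currentTimeMillis(), TimeUnit.MILLISECONDS)
                .tag(tags)      // Point.Builder accepts a whole tag map
                .fields(fields) // and a whole field map
                .build();
    }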
    // Write to the database in a loop: each point goes out in its own BatchPoints
    public static void batchInsertOne(InfluxDbUtils influxDBUtil) {
        Map<String, String> tags1 = new HashMap<>();
        tags1.put("LineID", "8");
        tags1.put("Phasic_Type", "A");
        Map<String, Object> fields1 = new HashMap<>();
        fields1.put("RMS", 2);
        fields1.put("RMS_AB", 2);
        fields1.put("RMS_BC", 2);
        fields1.put("RMS_CA", 2);
        Map<String, String> tags2 = new HashMap<>();
        tags2.put("LineID", "9");
        tags2.put("Phasic_Type", "A");
        Map<String, Object> fields2 = new HashMap<>();
        fields2.put("RMS", 4);
        fields2.put("RMS_AB", 4);
        fields2.put("RMS_BC", 4);
        fields2.put("RMS_CA", 4);
        // One record (point) each
        Point point1 = influxDBUtil.pointBuilder("test", System.currentTimeMillis(), TimeUnit.MILLISECONDS, tags1, fields1);
        Point point2 = influxDBUtil.pointBuilder("test", System.currentTimeMillis(), TimeUnit.MILLISECONDS, tags2, fields2);
        // Wrap each record in its own BatchPoints
        BatchPoints batchPoints1 = BatchPoints.database("test").tag("LineID", "8").tag("Phasic_Type", "A").retentionPolicy("")
                .consistency(ConsistencyLevel.ALL).build();
        BatchPoints batchPoints2 = BatchPoints.database("test").tag("LineID", "9").tag("Phasic_Type", "A").retentionPolicy("")
                .consistency(ConsistencyLevel.ALL).build();
        batchPoints1.point(point1);
        batchPoints2.point(point2);
        // Write the two points to the database, one call per BatchPoints
        influxDBUtil.batchInsert(batchPoints1, TimeUnit.MILLISECONDS);
        influxDBUtil.batchInsert(batchPoints2, TimeUnit.MILLISECONDS);
    }
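    // A minimal sketch of true batching as an alternative to the loop above: both points
    // share one BatchPoints, so only a single write hits the database. The method name is
    // an illustrative assumption; it reuses only calls already used in this file.
    public static void batchInsertTogether(InfluxDbUtils influxDBUtil, Point point1, Point point2) {
        BatchPoints batch = BatchPoints.database("test")
                .retentionPolicy("")
                .consistency(ConsistencyLevel.ALL)
                .build();
        batch.point(point1); // the same instance collects every point
        batch.point(point2);
        influxDBUtil.batchInsert(batch, TimeUnit.MILLISECONDS); // one network round trip
    }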
    // Batch-insert data
    public static void batchInsert(InfluxDbUtils influxDBUtil) {
        Map<String, String> tags1 = new HashMap<>();
        tags1.put("LineID", "4");
        tags1.put("Phasic_Type", "A");
        Map<String, Object> fields1 = new HashMap<>();
        fields1.put("RMS", 4.1111);
        fields1.put("RMS_AB", 4.1111);
        fields1.put("RMS_BC", 4.1111);
        fields1.put("RMS_CA", 4.1111);
        Map<String, String> tags2 = new HashMap<>();
        tags2.put("LineID", "5");
        tags2.put("Phasic_Type", "A");
        Map<String, Object> fields2 = new HashMap<>();
        fields2.put("RMS", 5.1111);
        fields2.put("RMS_AB", 5.1111);
        fields2.put("RMS_BC", 5.1111);
        fields2.put("RMS_CA", 5.1111);
        // One record each. NOTE: do not use System.currentTimeMillis() in production. At high
        // volume it produces duplicate timestamps, and duplicate points overwrite each other
        // (data loss); use the data's own timestamp. It is used here only for demonstration.
        Point point1 = influxDBUtil.pointBuilder("Data_v", System.currentTimeMillis(), TimeUnit.MILLISECONDS, tags1, fields1);
        Point point2 = influxDBUtil.pointBuilder("Data_v", System.currentTimeMillis(), TimeUnit.MILLISECONDS, tags2, fields2);
        // BatchPoints batchPoints1 = BatchPoints.database("Data_v").tag("LineID", "4").tag("Phasic_Type", "A").retentionPolicy("").consistency(ConsistencyLevel.ALL).precision(TimeUnit.MILLISECONDS).build();
        BatchPoints batchPoints1 = BatchPoints.database("test").tag("LineID", "4").tag("Phasic_Type", "A").retentionPolicy("")
                .consistency(ConsistencyLevel.ALL).build();
        batchPoints1.point(point1);
        BatchPoints batchPoints2 = BatchPoints.database("test").tag("LineID", "5").tag("Phasic_Type", "A").retentionPolicy("")
                .consistency(ConsistencyLevel.ALL).build();
        // Add the second record to its own BatchPoints
        batchPoints2.point(point2);
        // Serialize the separate BatchPoints to line protocol and write them in one call for throughput
        List<String> records = new ArrayList<>();
        records.add(batchPoints1.lineProtocol());
        records.add(batchPoints2.lineProtocol());
        // Batch-insert both records into the database in a single write
        influxDBUtil.batchInsert("test", "", ConsistencyLevel.ALL, TimeUnit.MILLISECONDS, records);
    }
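    // A minimal sketch of letting the client batch for you instead of hand-building line
    // protocol: influxdb-java can buffer writes and flush them in the background via
    // enableBatch(). This bypasses InfluxDbUtils and talks to the raw client directly;
    // the connection values are the same placeholders used in main().
    public static void clientManagedBatch(Point point1, Point point2) {
        org.influxdb.InfluxDB influxDB =
                org.influxdb.InfluxDBFactory.connect("http://192.168.1.16:8086", "root", "123456789");
        influxDB.setDatabase("test");
        // Flush after 100 buffered points or every 200 ms, whichever comes first
        influxDB.enableBatch(100, 200, TimeUnit.MILLISECONDS);
        influxDB.write(point1); // buffered, not written immediately
        influxDB.write(point2);
        influxDB.close(); // flushes any pending points and stops the batch worker
    }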
}