Add the code for generating InfluxDB test data

2024-02-02 15:08:07 +08:00
parent c44194c93e
commit f4e731d142
22 changed files with 1478 additions and 15 deletions

View File

@@ -135,7 +135,6 @@ public class OracleToInfluxDBServiceImpl implements OracleToInfluxDBService {
e.printStackTrace();
}
}
// Held via a weak reference, so the object is cleared once GC is manually triggered later

View File

@@ -0,0 +1,12 @@
FROM eclipse-temurin:8-jdk-centos7
MAINTAINER hongawen_13914774158@163.com
ENV JAVA_OPTS="-Xms2048m -Xmx8192m"
# Directory mounted for timezone data
VOLUME /usr/share/zoneinfo
# Set the timezone to Shanghai
ENV TZ=Asia/Shanghai
# Write the timezone configuration
RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
ADD target/oracle_influx.jar oracle_influx.jar
ENTRYPOINT java ${JAVA_OPTS} -Djava.security.egd=file:/dev/./urandom -jar /oracle_influx.jar
EXPOSE 8090

View File

@@ -15,6 +15,15 @@
<maven.compiler.source>8</maven.compiler.source>
<maven.compiler.target>8</maven.compiler.target>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<!-- Docker registry host -->
<docker.server.url>192.168.1.24</docker.server.url>
<!-- URL of the remote Docker daemon -->
<docker.url>http://${docker.server.url}:2375</docker.url>
<!-- Remote Harbor registry URL -->
<docker.repostory>${docker.server.url}:8090</docker.repostory>
<!-- Project repository name in the remote registry -->
<docker.registry.name>njcn</docker.registry.name>
<docker.operate>install</docker.operate>
</properties>
<dependencies>
@@ -29,6 +38,80 @@
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
</dependency>
</dependencies>
<build>
<finalName>oracle_influx</finalName>
<plugins>
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>repackage</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.8.1</version>
<configuration>
<source>1.8</source>
<target>1.8</target>
<skip>true</skip>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<version>2.2.1</version>
<executions>
<execution>
<id>attach-sources</id>
<goals>
<goal>jar-no-fork</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>com.spotify</groupId>
<artifactId>docker-maven-plugin</artifactId>
<version>1.0.0</version>
<executions>
<execution>
<id>build-image</id>
<phase>${docker.operate}</phase>
<goals>
<goal>build</goal>
</goals>
</execution>
</executions>
<configuration>
<!--<serverId>36dockerHarbor</serverId>-->
<registryUrl>http://${docker.repostory}</registryUrl>
<!-- Image name -->
<imageName>${docker.repostory}/${docker.registry.name}/${project.artifactId}</imageName>
<imageTags>
<imageTag>latest</imageTag>
</imageTags>
<dockerHost>${docker.url}</dockerHost>
<dockerDirectory>${basedir}/</dockerDirectory>
<resources>
<resource>
<targetPath>/ROOT</targetPath>
<directory>${project.build.directory}</directory>
<include>${project.build.finalName}.jar</include>
</resource>
</resources>
</configuration>
</plugin>
</plugins>
</build>
</project>

View File

@@ -7,10 +7,14 @@ server:
spring:
# InfluxDB connection settings
influx:
url: http://25.36.232.36:8086
user: admin
password: admin
database: pqsbase_hbcs
# url: http://192.168.1.81:18086
# user: admin
# password: 123456
# database: pqsbase
mapper-location: com.njcn.influx.imapper
application:
name: oracle-influx
@@ -67,14 +71,20 @@ spring:
strict: false
datasource:
master:
url: jdbc:oracle:thin:@10.122.32.73:11521/dwxb
username: pqsadmin
password: pqsadmin_123
# url: jdbc:oracle:thin:@192.168.1.51:1521:pqsbase
# username: pqsadmin_hn
# password: pqsadmin
driver-class-name: oracle.jdbc.driver.OracleDriver
target:
url: jdbc:mysql://25.36.232.37:13306/pmsinfo?useUnicode=true&characterEncoding=utf-8&useSSL=true&serverTimezone=CTT
username: root
password: Huawei12#
# url: jdbc:mysql://192.168.1.24:13306/pqsinfo_pq?useUnicode=true&characterEncoding=utf-8&useSSL=true&serverTimezone=CTT
# username: root
# password: njcnpqs
driver-class-name: com.mysql.cj.jdbc.Driver
# MyBatis configuration
mybatis-plus:

View File

@@ -0,0 +1,209 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>com.njcn</groupId>
<artifactId>data-migration</artifactId>
<version>1.0.0</version>
</parent>
<artifactId>manufact_influx_data</artifactId>
<description>Data fabrication for InfluxDB</description>
<properties>
<maven.compiler.source>8</maven.compiler.source>
<maven.compiler.target>8</maven.compiler.target>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
</properties>
<dependencies>
<!-- Lombok: simplifies Java development -->
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
</dependency>
<!-- Hutool Java utility toolkit -->
<dependency>
<groupId>cn.hutool</groupId>
<artifactId>hutool-all</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-json</artifactId>
<exclusions>
<exclusion>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-to-slf4j</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- Actuator: exposes application health, JVM and other info so it can be visualized on the front end -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-actuator</artifactId>
</dependency>
<!-- Keeps IDEA from flagging the back-end configuration classes as errors -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-configuration-processor</artifactId>
<optional>true</optional>
</dependency>
<!-- Test dependencies -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
</dependency>
<!-- Apache Commons utilities -->
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
</dependency>
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
</dependency>
<dependency>
<groupId>commons-codec</groupId>
<artifactId>commons-codec</artifactId>
</dependency>
<!-- API documentation (Knife4j) -->
<dependency>
<groupId>com.github.xiaoymin</groupId>
<artifactId>knife4j-spring-boot-starter</artifactId>
</dependency>
<dependency>
<groupId>com.github.xiaoymin</groupId>
<artifactId>knife4j-spring-ui</artifactId>
</dependency>
<!-- Jackson dependencies -->
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
</dependency>
<!-- Note: the jackson-core/jackson-annotations versions pulled in by jackson-databind are too old and cause exceptions, so they are declared explicitly -->
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-annotations</artifactId>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.datatype</groupId>
<artifactId>jackson-datatype-jsr310</artifactId>
</dependency>
<dependency>
<groupId>com.github.jeffreyning</groupId>
<artifactId>mybatisplus-plus</artifactId>
</dependency>
<!-- Database-related dependencies ******** start -->
<dependency>
<groupId>org.mybatis.spring.boot</groupId>
<artifactId>mybatis-spring-boot-starter</artifactId>
<exclusions>
<!-- Exclude the default HikariCP data source -->
<exclusion>
<groupId>com.zaxxer</groupId>
<artifactId>HikariCP</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- Druid connection pool -->
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>druid-spring-boot-starter</artifactId>
</dependency>
<!-- Oracle JDBC driver -->
<dependency>
<groupId>com.oracle.database.jdbc</groupId>
<artifactId>ojdbc8</artifactId>
</dependency>
<!-- Fixes "unsupported character set (add orai18n.jar to the class path): ZHS16GBK" -->
<dependency>
<groupId>com.oracle.database.nls</groupId>
<artifactId>orai18n</artifactId>
</dependency>
<!-- MyBatis enhancement toolkit -->
<dependency>
<groupId>com.baomidou</groupId>
<artifactId>mybatis-plus-boot-starter</artifactId>
</dependency>
<!-- Database-related dependencies ******** end -->
<!-- Dynamic multi-datasource switching; required when one of the data sources is Oracle -->
<dependency>
<groupId>com.baomidou</groupId>
<artifactId>dynamic-datasource-spring-boot-starter</artifactId>
</dependency>
<dependency>
<groupId>com.njcn</groupId>
<artifactId>influxdb-springboot-starter</artifactId>
<version>1.0.0</version>
</dependency>
<dependency>
<groupId>com.njcn</groupId>
<artifactId>pqs-influx</artifactId>
<version>1.0.0</version>
</dependency>
<!-- MySQL JDBC driver -->
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-collections4</artifactId>
<version>4.4</version>
<scope>compile</scope>
</dependency>
</dependencies>
<build>
<finalName>manufact-influx-data</finalName>
<plugins>
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>repackage</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.8.1</version>
<configuration>
<source>1.8</source>
<target>1.8</target>
<skip>true</skip>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<version>2.2.1</version>
<executions>
<execution>
<id>attach-sources</id>
<goals>
<goal>jar-no-fork</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>

View File

@@ -0,0 +1,29 @@
package com.njcn.manufactinflux;
import lombok.extern.slf4j.Slf4j;
import org.mybatis.spring.annotation.MapperScan;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.security.servlet.SecurityAutoConfiguration;
import org.springframework.boot.autoconfigure.security.servlet.SecurityFilterAutoConfiguration;
import org.springframework.context.annotation.DependsOn;
import org.springframework.scheduling.annotation.EnableAsync;
/**
* pqs
*
* @author cdf
* @date 2022/11/10
*/
@Slf4j
@EnableAsync
@DependsOn("proxyMapperRegister")
@MapperScan("com.njcn.**.mapper")
@SpringBootApplication(scanBasePackages = "com.njcn", exclude = {SecurityAutoConfiguration.class, SecurityFilterAutoConfiguration.class})
public class ManufactInfluxApplication {
public static void main(String[] args) {
SpringApplication.run(ManufactInfluxApplication.class, args);
}
}

View File

@@ -0,0 +1,33 @@
package com.njcn.manufactinflux.controller;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.njcn.manufactinflux.param.JobQueryParam;
import com.njcn.manufactinflux.response.HttpResult;
import com.njcn.manufactinflux.service.IInfluxManufactService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiOperation;
import lombok.AllArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
@Slf4j
@RestController
@RequestMapping("/manInflux")
@Api(tags = "制造influx数据")
@AllArgsConstructor
public class ManufactInfluxController {
private final IInfluxManufactService influxManufactService;
@PostMapping("/jobQuery")
@ApiOperation("任务查询")
@ApiImplicitParam(name = "jobQueryParam", value = "按需求制造influx假数据", required = true)
public HttpResult<Object> jobQuery(@RequestBody JobQueryParam jobQueryParam){
IPage<Object> jobDetails = influxManufactService.manufactInfluxData(jobQueryParam);
return new HttpResult<>();
}
}

View File

@@ -0,0 +1,22 @@
package com.njcn.manufactinflux.mapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.njcn.manufactinflux.po.Monitor;
import org.apache.ibatis.annotations.Select;
import java.util.List;
/**
* <p>
* Mapper interface
* </p>
*
* @author hongawen
* @since 2022-10-14
*/
public interface MonitorMapper extends BaseMapper<Monitor> {
@Select("SELECT id FROM pms_monitor")
List<String> queryIdList();
}

View File

@@ -0,0 +1,29 @@
package com.njcn.manufactinflux.param;
import com.fasterxml.jackson.annotation.JsonFormat;
import lombok.Data;
import java.time.LocalDate;
import java.util.List;
@Data
public class DataAsynParam {
// Start time
@JsonFormat(shape = JsonFormat.Shape.STRING, pattern="yyyy-MM-dd")
private LocalDate startTime;
// End time
@JsonFormat(shape = JsonFormat.Shape.STRING, pattern="yyyy-MM-dd")
private LocalDate endTime;
// Metric names, i.e. the entity names the tables are mapped to
private List<String> tableNames;
// Sync execution type: 1 = triggered from the web page, 2 = batch job
private Integer excuteType=1;
// Data source: 0 = use the configured data source, 1 = read from a file
private Integer sourceType=0;
}

View File

@@ -0,0 +1,29 @@
package com.njcn.manufactinflux.param;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import javax.validation.constraints.Min;
import javax.validation.constraints.NotNull;
import java.util.List;
/**
* Description:
* Date: 2024/1/9 9:21 [requirement ID]
*
* @author clam
* @version V1.0.0
*/
@Data
public class JobQueryParam extends DataAsynParam{
private List<String> states;
@NotNull(message="当前页不能为空!")
@Min(value = 1, message = "当前页不能为0")
@ApiModelProperty(value = "当前页",name = "currentPage",dataType ="Integer",required = true)
private Integer currentPage;
/** Number of records per page */
@NotNull(message="显示条数不能为空!")
@ApiModelProperty(value = "显示条数",name = "pageSize",dataType ="Integer",required = true)
private Integer pageSize;
}

View File

@@ -0,0 +1,305 @@
package com.njcn.manufactinflux.po;
import com.baomidou.mybatisplus.annotation.FieldFill;
import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import com.fasterxml.jackson.annotation.JsonFormat;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.fasterxml.jackson.datatype.jsr310.deser.LocalDateTimeDeserializer;
import com.fasterxml.jackson.datatype.jsr310.ser.LocalDateTimeSerializer;
import lombok.Data;
import java.time.LocalDateTime;
/**
* <p>
*
* </p>
*
* @author hongawen
* @since 2022-10-14
*/
@Data
@TableName("pms_monitor")
public class Monitor {
private static final long serialVersionUID = 1L;
/**
* Monitoring point ID
*/
@TableId
private String id;
private String monitorId;
/**
* Monitoring point name
*/
private String name;
/**
* Organization name
*/
private String orgName;
/**
* Organization ID (foreign key)
*/
private String orgId;
/**
* O&M unit name
*/
private String operationName;
/**
* O&M unit ID (foreign key)
*/
private String operationId;
/**
* Substation name
*/
private String powerrName;
/**
* Power station ID (foreign key)
*/
private String powerrId;
/**
* Monitored line name
*/
private String lineName;
/**
* Monitored line ID
*/
private String lineId;
/**
* Middle-platform bus ID
*/
@TableField(exist = false)
private String busId;
/**
* Voltage level (dictionary)
*/
private String voltageLevel;
/**
* Monitoring point state (dictionary)
*/
private String monitorState;
/**
* Monitoring point type (dictionary)
*/
private String monitorType;
/**
* Minimum short-circuit capacity
*/
private Float minShortCircuitCapacity;
/**
* Power supply equipment capacity
*/
private Float powerSupplyEqCapacity;
/**
* Contracted user capacity
*/
private Float userAgreementCapacity;
/**
* Reference short-circuit capacity
*/
@TableField(exist = false)
private Float standShortCapacity;
/**
* Voltage deviation limit (upper)
*/
private Float voltageDeviationUpperLimit;
/**
* Voltage deviation limit (lower)
*/
private Float voltageDeviationLowerLimit;
/**
* Potential transformer type (dictionary)
*/
private String potentialTransFormerType;
/**
* Neutral grounding mode (dictionary)
*/
private String neutralGroundingMode;
/**
* Whether this is a dedicated user supply line (dictionary)
*/
private Integer isSpecialSupplyElectricity;
/**
* Monitoring point tag
*/
private String monitorTag;
/**
* Level-3 monitored object type
*/
private String objType;
/**
* 101 = grid side, 102 = user side
*/
private String powerFlag;
/**
* Monitored object ID
*/
private String monitorObjectId;
/**
* Monitored object name
*/
private String monitorObjectName;
/**
* Statistics interval
*/
private Integer statisticalInterval;
/**
* Associated monitoring terminal ID (foreign key)
*/
private String terminalId;
/**
* Associated monitoring terminal homologous ID (foreign key)
*/
@TableField(exist = false)
private String terminalCode;
/**
* Monitoring terminal wiring method (dictionary)
*/
private String terminalWiringMethod;
/**
* Whether the point is reported to the State Grid: 0 = no, 1 = yes
*/
private Integer isUpToGrid;
/**
* Data status: 0 = deleted, 1 = normal
*/
private Integer status;
/**
* Whether this is a special-analysis monitoring point: 0 = no, 1 = yes
*/
private Integer isSpecialMonitor;
/**
* PT ratio
*/
private Float pt1;
/**
* PT ratio
*/
private Float pt2;
/**
* CT ratio
*/
private Float ct1;
/**
* CT ratio
*/
private Float ct2;
/**
* Wiring phase type: 0 = single-phase, 1 = three-phase (default three-phase)
*/
private String ptPhaseType;
/**
* Whether this is a power-generating user: 0 = no, 1 = yes
*/
private Integer ifPowerUser;
/**
* Station type attribute
*/
private String fieldStation;
/**
* Industry type
*/
private String tradeCode;
/**
* 0 = not reported, 1 = reported, 2 = reporting cancelled, 3 = pending re-report (typical load)
*/
private Integer isUploadHead;
/**
* 0 = not reported, 1 = reported, 2 = reporting cancelled, 3 = pending re-report (main-grid monitoring points)
*/
private Integer monitorUploadStatus;
/**
* 0 = not reported, 1 = reported, 2 = reporting cancelled, 3 = pending re-report (limit values)
*/
private Integer monitorOverlimitStatus;
/**
* Traction substation ID
*/
private String tractionId;
/**
* Created by
*/
@TableField(fill = FieldFill.INSERT)
private String createBy;
/**
* Creation time
*/
@TableField(fill = FieldFill.INSERT)
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss")
@JsonDeserialize(using = LocalDateTimeDeserializer.class)
@JsonSerialize(using = LocalDateTimeSerializer.class)
private LocalDateTime createTime;
/**
* Updated by
*/
@TableField(fill = FieldFill.INSERT_UPDATE)
private String updateBy;
/**
* Update time
*/
@TableField(fill = FieldFill.INSERT_UPDATE)
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss")
@JsonDeserialize(using = LocalDateTimeDeserializer.class)
@JsonSerialize(using = LocalDateTimeSerializer.class)
private LocalDateTime updateTime;
}

View File

@@ -0,0 +1,39 @@
package com.njcn.manufactinflux.response;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.io.Serializable;
/**
* @author hongawen
* @version 1.0.0
* @date 2021-03-25 15:38
*/
@Data
@NoArgsConstructor
@AllArgsConstructor
public class HttpResult<T> implements Serializable {
/** Status code */
private String code;
/** Response message */
private String message;
/** Response data */
private T data;
public HttpResult(String code, String message) {
this.code = code;
this.message = message;
}
public HttpResult(Integer code, String message) {
this.code = String.valueOf(code);
this.message = message;
}
}

View File

@@ -0,0 +1,13 @@
package com.njcn.manufactinflux.service;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.njcn.manufactinflux.param.JobQueryParam;
public interface IInfluxManufactService {
/**
* Migrate data according to the given parameters
* @param jobQueryParam query parameters
*/
IPage<Object> manufactInfluxData(JobQueryParam jobQueryParam);
}

View File

@@ -0,0 +1,19 @@
package com.njcn.manufactinflux.service;
import com.baomidou.mybatisplus.extension.service.IService;
import com.njcn.manufactinflux.po.Monitor;
import java.util.List;
/**
* <p>
* Service interface
* </p>
*
* @author hongawen
* @since 2022-10-14
*/
public interface IMonitorService extends IService<Monitor> {
List<String> queryIdList();
}

View File

@@ -0,0 +1,313 @@
package com.njcn.manufactinflux.service.impl;
import cn.hutool.core.date.DatePattern;
import cn.hutool.core.date.LocalDateTimeUtil;
import cn.hutool.core.text.StrPool;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.njcn.influx.imapper.*;
import com.njcn.influx.pojo.po.*;
import com.njcn.influx.query.InfluxQueryWrapper;
import com.njcn.influx.utils.InfluxDbUtils;
import com.njcn.manufactinflux.param.JobQueryParam;
import com.njcn.manufactinflux.service.IInfluxManufactService;
import com.njcn.manufactinflux.service.IMonitorService;
import lombok.RequiredArgsConstructor;
import org.apache.commons.collections4.ListUtils;
import org.influxdb.dto.QueryResult;
import org.influxdb.impl.InfluxDBResultMapper;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import javax.annotation.Resource;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.temporal.ChronoUnit;
import java.time.temporal.TemporalUnit;
import java.util.*;
import java.util.stream.Collectors;
@Service
@RequiredArgsConstructor
public class InfluxManufactServiceImpl implements IInfluxManufactService {
static InfluxDBResultMapper resultMapper = new InfluxDBResultMapper();
@Resource
private IMonitorService monitorService;
private final DataVMapper dataVMapper;
private final IDataIMapper dataIMapper;
private final DataInHarmVMapper dataInHarmVMapper;
private final DataHarmRateVMapper dataHarmRateVMapper;
private final DataFlickerMapper dataFlickerMapper;
private final DataPltMapper dataPltMapper;
@Value("${spring.influx.target.database}")
private String database;
@Value("${spring.influx.target.url}")
private String url;
@Value("${spring.influx.target.user}")
private String user;
@Value("${spring.influx.target.password}")
private String password;
/**
* Approach:
* 1. A data source is specified.
* Data source: with a specified source, query 1000 template rows per table keyed by the composite index (e.g. phasicType + valueType + qualityFlag);
* afterwards every row with the same composite index gets its values randomly picked from those templates.
* Data loading: query all monitor ids that need processing from the target MySQL database and loop over them.
* <p>
* 2. No data source specified.
* todo... the difference is that the underlying template data is prepared in advance.
*
* @param jobQueryParam query parameters
*/
@Override
public IPage<Object> manufactInfluxData(JobQueryParam jobQueryParam) {
System.out.println("执行开始当前时间:" + System.currentTimeMillis());
// Query the monitor ids to simulate; todo... this method currently assumes one record per minute
List<String> pmsIds = monitorService.queryIdList();
// Template (sample) data
List<DataV> dataVList = new ArrayList<>();
List<DataI> dataIList = new ArrayList<>();
List<DataInHarmV> dataInHarmVList = new ArrayList<>();
List<DataHarmRateV> dataHarmRateVList = new ArrayList<>();
List<DataPlt> dataPltList = new ArrayList<>();
List<DataFlicker> dataFlickerList = new ArrayList<>();
if (jobQueryParam.getSourceType() == 0) {
// Read template data from the source InfluxDB
List<String> lineList = Arrays.asList("e5807e73521aa58dec3cb1cf74805281"
, "a78e16cd28a428b1c1ee3af25880281d", "ff2d9674c1f1ecce7f33a5bf17fc4f2d"
, "f7c58f82fd8ba1f307ef5beec4d462be", "b65ef14ed9e25f682840061b1abb464f"
, "44ea885554d88eef661c2671f738125a", "1aacf83cd01dfa3581438cbecf06977f"
, "23b8eb64453fbb0e23d9ff1e913c5843", "07ee83be55ec8fa8c8af50cb8a6925d6"
, "fbd1201ac204b5f40583a53456ed2de1", "692f8201ba6f50a31f1ab99051ed6985"
, "b7f94884e69deb54a26414dcb015b88d");
String startTime = "2023-09-24 00:00:00";
String endTime = "2023-09-24 02:00:00";
// Prepare the data
InfluxDbUtils influxDbUtils = new InfluxDbUtils(user, password, url, database, "");
// todo... only these 6 tables for now; make this dynamic later
// Voltage
InfluxQueryWrapper influxQueryWrapper = new InfluxQueryWrapper(DataV.class);
influxQueryWrapper.between(DataV::getTime, startTime, endTime).regular(DataV::getLineId, lineList);
QueryResult dataVResult = influxDbUtils.query(influxQueryWrapper.generateSql());
dataVList = resultMapper.toPOJO(dataVResult, DataV.class);
// Current
influxQueryWrapper = new InfluxQueryWrapper(DataI.class);
influxQueryWrapper.between(DataI::getTime, startTime, endTime).regular(DataI::getLineId, lineList);
QueryResult dataIResult = influxDbUtils.query(influxQueryWrapper.generateSql());
dataIList = resultMapper.toPOJO(dataIResult, DataI.class);
// Voltage interharmonic data
influxQueryWrapper = new InfluxQueryWrapper(DataInHarmV.class);
influxQueryWrapper.between(DataInHarmV::getTime, startTime, endTime).regular(DataInHarmV::getLineId, lineList);
QueryResult dataInHarmRateVResult = influxDbUtils.query(influxQueryWrapper.generateSql());
dataInHarmVList = resultMapper.toPOJO(dataInHarmRateVResult, DataInHarmV.class);
// Harmonic voltage ratio table
influxQueryWrapper = new InfluxQueryWrapper(DataHarmRateV.class);
influxQueryWrapper.between(DataHarmRateV::getTime, startTime, endTime).regular(DataHarmRateV::getLineId, lineList);
QueryResult dataHarmRateVResult = influxDbUtils.query(influxQueryWrapper.generateSql());
dataHarmRateVList = resultMapper.toPOJO(dataHarmRateVResult, DataHarmRateV.class);
// Long-term flicker (Plt) table
influxQueryWrapper = new InfluxQueryWrapper(DataPlt.class);
influxQueryWrapper.between(DataPlt::getTime, startTime, endTime).regular(DataPlt::getLineId, lineList);
QueryResult dataPltResult = influxDbUtils.query(influxQueryWrapper.generateSql());
dataPltList = resultMapper.toPOJO(dataPltResult, DataPlt.class);
// Voltage flicker table
influxQueryWrapper = new InfluxQueryWrapper(DataFlicker.class);
influxQueryWrapper.between(DataFlicker::getTime, startTime, endTime).regular(DataFlicker::getLineId, lineList);
QueryResult dataFlickerResult = influxDbUtils.query(influxQueryWrapper.generateSql());
dataFlickerList = resultMapper.toPOJO(dataFlickerResult, DataFlicker.class);
}
// todo... initialize template data from a file
// Assemble the rows to insert based on the template data
Random random = new Random();
String start = "2024-01-30 00:00:00";
String end = "2024-01-30 23:59:59";
LocalDateTime startTime = LocalDateTimeUtil.parse(start, DatePattern.NORM_DATETIME_PATTERN);
LocalDateTime endTime = LocalDateTimeUtil.parse(end, DatePattern.NORM_DATETIME_PATTERN);
/**
* Generate the data one monitoring point at a time?
*/
for (String pmsId : pmsIds) {
List<DataV> dataVTempList = new ArrayList<>();
List<DataI> dataITempList = new ArrayList<>();
List<DataInHarmV> dataInHarmVTempList = new ArrayList<>();
List<DataHarmRateV> dataHarmRateVTempList = new ArrayList<>();
List<DataPlt> dataPltTempList = new ArrayList<>();
List<DataFlicker> dataFlickerTempList = new ArrayList<>();
while (startTime.isBefore(endTime)) {
// Voltage
Map<String, List<DataV>> stringListMapDataV = dataVList.stream().collect(Collectors.groupingBy(obj -> obj.getPhasicType().concat(StrPool.UNDERLINE).concat(obj.getValueType())));
for (String string : stringListMapDataV.keySet()) {
DataV tempEle = new DataV();
List<DataV> dataVS = stringListMapDataV.get(string);
int randomIndex = random.nextInt(dataVS.size());
// Pick the element at the random index
DataV randomElement = dataVS.get(randomIndex);
BeanUtils.copyProperties(randomElement, tempEle);
tempEle.setTime(startTime.atZone(ZoneId.systemDefault()).toInstant());
tempEle.setLineId(pmsId);
dataVTempList.add(tempEle);
}
// Current
Map<String, List<DataI>> stringListMapDataI = dataIList.stream().collect(Collectors.groupingBy(obj -> obj.getPhaseType().concat(StrPool.UNDERLINE).concat(obj.getValueType())));
for (String string : stringListMapDataI.keySet()) {
DataI tempEle = new DataI();
List<DataI> dataIS = stringListMapDataI.get(string);
int randomIndex = random.nextInt(dataIS.size());
// Pick the element at the random index
DataI randomElement = dataIS.get(randomIndex);
BeanUtils.copyProperties(randomElement, tempEle);
tempEle.setTime(startTime.atZone(ZoneId.systemDefault()).toInstant());
tempEle.setLineId(pmsId);
dataITempList.add(tempEle);
}
// Voltage interharmonic data
Map<String, List<DataInHarmV>> stringListMapDataInHarmRateV = dataInHarmVList.stream().collect(Collectors.groupingBy(obj -> obj.getPhaseType().concat(StrPool.UNDERLINE).concat(obj.getValueType())));
for (String string : stringListMapDataInHarmRateV.keySet()) {
DataInHarmV tempEle = new DataInHarmV();
List<DataInHarmV> dataInHarmRateVS = stringListMapDataInHarmRateV.get(string);
int randomIndex = random.nextInt(dataInHarmRateVS.size());
// Pick the element at the random index
DataInHarmV randomElement = dataInHarmRateVS.get(randomIndex);
BeanUtils.copyProperties(randomElement, tempEle);
tempEle.setTime(startTime.atZone(ZoneId.systemDefault()).toInstant());
tempEle.setLineId(pmsId);
dataInHarmVTempList.add(tempEle);
}
// Harmonic voltage ratio table
Map<String, List<DataHarmRateV>> stringListMapDataHarmRateV = dataHarmRateVList.stream().collect(Collectors.groupingBy(obj -> obj.getPhaseType().concat(StrPool.UNDERLINE).concat(obj.getValueType())));
for (String string : stringListMapDataHarmRateV.keySet()) {
DataHarmRateV tempEle = new DataHarmRateV();
List<DataHarmRateV> DataHarmRateVS = stringListMapDataHarmRateV.get(string);
int randomIndex = random.nextInt(DataHarmRateVS.size());
// Pick the element at the random index
DataHarmRateV randomElement = DataHarmRateVS.get(randomIndex);
BeanUtils.copyProperties(randomElement, tempEle);
tempEle.setTime(startTime.atZone(ZoneId.systemDefault()).toInstant());
tempEle.setLineId(pmsId);
dataHarmRateVTempList.add(tempEle);
}
// Long-term flicker table: one record every 2 hours
Map<String, List<DataPlt>> stringListMapDataPlt = dataPltList.stream().collect(Collectors.groupingBy(DataPlt::getPhaseType));
if (startTime.getHour() % 2 == 0 && startTime.getMinute() == 0 && startTime.getSecond() == 0) {
for (String string : stringListMapDataPlt.keySet()) {
DataPlt tempEle = new DataPlt();
List<DataPlt> DataPltS = stringListMapDataPlt.get(string);
int randomIndex = random.nextInt(DataPltS.size());
// Pick the element at the random index
DataPlt randomElement = DataPltS.get(randomIndex);
BeanUtils.copyProperties(randomElement, tempEle);
tempEle.setTime(startTime.atZone(ZoneId.systemDefault()).toInstant());
tempEle.setLineId(pmsId);
dataPltTempList.add(tempEle);
}
}
// Voltage flicker table: one record every 10 minutes
Map<String, List<DataFlicker>> stringListMapDataFlicker = dataFlickerList.stream().collect(Collectors.groupingBy(DataFlicker::getPhaseType));
if (startTime.getMinute() % 10 == 0 && startTime.getSecond() == 0) {
for (String string : stringListMapDataFlicker.keySet()) {
DataFlicker tempEle = new DataFlicker();
List<DataFlicker> DataFlickerS = stringListMapDataFlicker.get(string);
int randomIndex = random.nextInt(DataFlickerS.size());
// Pick the element at the random index
DataFlicker randomElement = DataFlickerS.get(randomIndex);
BeanUtils.copyProperties(randomElement, tempEle);
tempEle.setTime(startTime.atZone(ZoneId.systemDefault()).toInstant());
tempEle.setLineId(pmsId);
dataFlickerTempList.add(tempEle);
}
}
startTime = LocalDateTimeUtil.offset(startTime, 1, ChronoUnit.MINUTES);
}
// Perform the batch inserts
System.out.println("插入dataV" + dataVTempList.size() + "");
List<List<DataV>> partition = ListUtils.partition(dataVTempList, 10000);
for (List<DataV> sliceList : partition) {
List<DataV> tempList = new ArrayList<>(sliceList);
dataVMapper.insertBatch(tempList);
}
System.out.println("插入dataI" + dataITempList.size() + "");
List<List<DataI>> partition1 = ListUtils.partition(dataITempList, 10000);
for (List<DataI> sliceList : partition1) {
List<DataI> tempList = new ArrayList<>(sliceList);
dataIMapper.insertBatch(tempList);
}
System.out.println("插入dataInHarmV" + dataInHarmVTempList.size() + "");
List<List<DataInHarmV>> partition2 = ListUtils.partition(dataInHarmVTempList, 10000);
for (List<DataInHarmV> sliceList : partition2) {
List<DataInHarmV> tempList = new ArrayList<>(sliceList);
dataInHarmVMapper.insertBatch(tempList);
}
System.out.println("插入dataHarmRateV" + dataHarmRateVTempList.size() + "");
List<List<DataHarmRateV>> partition3 = ListUtils.partition(dataHarmRateVTempList, 10000);
for (List<DataHarmRateV> sliceList : partition3) {
List<DataHarmRateV> tempList = new ArrayList<>(sliceList);
dataHarmRateVMapper.insertBatch(tempList);
}
System.out.println("插入DataPlt" + dataPltTempList.size() + "");
dataPltMapper.insertBatch(dataPltTempList);
System.out.println("插入DataFlicker" + dataFlickerTempList.size() + "");
dataFlickerMapper.insertBatch(dataFlickerTempList);
// Reset the start time
startTime = LocalDateTimeUtil.parse(start, DatePattern.NORM_DATETIME_PATTERN);
// Clear all the per-table lists
dataVTempList.clear();
dataITempList.clear();
dataInHarmVTempList.clear();
dataHarmRateVTempList.clear();
dataPltTempList.clear();
dataFlickerTempList.clear();
System.gc();
}
System.out.println("执行结束当前时间:" + System.currentTimeMillis());
return null;
}
public static void main(String[] args) {
LocalDateTime startTime = LocalDateTimeUtil.parse("2024-01-23 15:00:00", DatePattern.NORM_DATETIME_PATTERN);
LocalDateTime endTime = LocalDateTimeUtil.parse("2024-01-23 16:59:59", DatePattern.NORM_DATETIME_PATTERN);
System.out.println(startTime.getHour() % 2);
System.out.println(endTime.getHour() % 2);
}
}

View File

@@ -0,0 +1,34 @@
package com.njcn.manufactinflux.service.impl;
import com.baomidou.dynamic.datasource.annotation.DS;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.njcn.manufactinflux.mapper.MonitorMapper;
import com.njcn.manufactinflux.po.Monitor;
import com.njcn.manufactinflux.service.IMonitorService;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Service;
import java.util.List;
/**
* <p>
* Service implementation
* </p>
*
* @author cdf
* @since 2022-10-14
*/
@DS("target")
@Slf4j
@Service
@RequiredArgsConstructor
public class MonitorServiceImpl extends ServiceImpl<MonitorMapper, Monitor> implements IMonitorService {
@Override
public List<String> queryIdList() {
return this.baseMapper.queryIdList();
}
}

View File

@@ -0,0 +1,110 @@
# File location configuration
business:
# Number of slices; must be a divisor of 24: 1 2 3 4 6 8 12 24
slice: 4
server:
port: 8092
spring:
# InfluxDB connection settings
influx:
# url: http://25.36.232.36:8086
# user: admin
# password: admin
# database: pqsbase_hbcs
url: http://192.168.1.26:8086
user: admin
password: 123456
database: pqsbase
mapper-location: com.njcn.influx.imapper
target:
url: http://192.168.1.102:8086
user: admin
password: 123456
database: pqsbase_sjzx
application:
name: manufact-influx
autoconfigure:
exclude: com.alibaba.druid.spring.boot.autoconfigure.DruidDataSourceAutoConfigure
datasource:
dynamic:
druid:
initial-size: 10
# Connection pool sizing: initial, minimum, maximum
min-idle: 20
maxActive: 500
# Maximum wait time when acquiring a connection
maxWait: 60000
# Interval between checks for idle connections that should be closed, in milliseconds
timeBetweenEvictionRunsMillis: 60000
# Minimum time a connection stays idle in the pool, in milliseconds
minEvictableIdleTimeMillis: 300000
testWhileIdle: true
testOnBorrow: true
validation-query: SELECT 1 from dual
testOnReturn: false
# Enable PSCache and set its size per connection
poolPreparedStatements: true
maxPoolPreparedStatementPerConnectionSize: 20
filters: stat,wall
filter:
wall:
config:
multi-statement-allow: true
none-base-statement-allow: true
enabled: true
# DruidStatFilter configuration
web-stat-filter:
enabled: true
url-pattern: "/*"
exclusions: "*.js,*.gif,*.jpg,*.bmp,*.png,*.css,*.ico,/druid/*"
# DruidStatViewServlet configuration
stat-view-servlet:
enabled: true
url-pattern: "/druid/*"
# IP allowlist (all access is allowed when unset or empty)
allow: #127.0.0.1,192.168.163.1
# IP denylist (deny takes precedence over allow when both are present)
deny: #192.168.1.73
# Disable the "Reset All" function on the HTML page
reset-enable: false
# Login username
login-username: admin
# Login password
login-password: njcnpqs
query-timeout: 36000
primary: master
strict: false
datasource:
master:
# url: jdbc:oracle:thin:@10.122.32.73:11521/dwxb
# username: pqsadmin
# password: pqsadmin_123
url: jdbc:oracle:thin:@192.168.1.51:1521:pqsbase
username: pqsadmin_hn
password: pqsadmin
driver-class-name: oracle.jdbc.driver.OracleDriver
target:
# url: jdbc:mysql://25.36.232.37:13306/pmsinfo?useUnicode=true&characterEncoding=utf-8&useSSL=true&serverTimezone=CTT
# username: root
# password: Huawei12#
url: jdbc:mysql://192.168.1.24:13306/pqsinfo_pmscs?useUnicode=true&characterEncoding=utf-8&useSSL=true&serverTimezone=CTT
username: root
password: njcnpqs
driver-class-name: com.mysql.cj.jdbc.Driver
# MyBatis configuration
mybatis-plus:
# Type alias scanning
type-aliases-package: com.njcn.oracle.bo
mapper-locations: classpath*:com/njcn/**/mapping/*.xml
configuration:
# Map underscored column names to camelCase
map-underscore-to-camel-case: true
# SQL log output to stdout
# log-impl: org.apache.ibatis.logging.stdout.StdOutImpl
# SQL log output disabled
log-impl: org.apache.ibatis.logging.nologging.NoLoggingImpl
global-config:
db-config:
# Primary key generation strategy
id-type: assign_uuid

View File

@@ -0,0 +1,18 @@
package com.njcn;
import com.njcn.manufactinflux.ManufactInfluxApplication;
import org.junit.runner.RunWith;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.context.web.WebAppConfiguration;
/**
* @author hongawen
* @version 1.0.0
* @date 2021-12-10 15:05
*/
@RunWith(SpringRunner.class)
@WebAppConfiguration
@SpringBootTest(classes = ManufactInfluxApplication.class)
public class BaseJunitTest {
}

View File

@@ -0,0 +1,105 @@
package com.njcn;
import cn.hutool.core.text.StrPool;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.njcn.influx.pojo.po.DataFlicker;
import com.njcn.influx.pojo.po.DataInHarmRateV;
import com.njcn.influx.pojo.po.DataV;
import com.njcn.influx.pojo.po.PqsCommunicate;
import com.njcn.influx.query.InfluxQueryWrapper;
import com.njcn.influx.utils.InfluxDbUtils;
import com.njcn.manufactinflux.param.JobQueryParam;
import com.njcn.manufactinflux.po.Monitor;
import com.njcn.manufactinflux.service.IInfluxManufactService;
import com.njcn.manufactinflux.service.IMonitorService;
import lombok.SneakyThrows;
import org.influxdb.dto.QueryResult;
import org.influxdb.impl.InfluxDBResultMapper;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
* @author hongawen
* @version 1.0.0
* @date 2021-12-14 12:55
*/
public class DataTest extends BaseJunitTest {
static InfluxDBResultMapper resultMapper = new InfluxDBResultMapper();
@Value("${spring.influx.target.database}")
private String database;
@Value("${spring.influx.target.url}")
private String url;
@Value("${spring.influx.target.user}")
private String user;
@Value("${spring.influx.target.password}")
private String password;
@Autowired
private IMonitorService monitorService;
@Autowired
private IInfluxManufactService iInfluxManufactService;
@SneakyThrows
@Test
public void test() {
List<String> lineList = Arrays.asList("e5807e73521aa58dec3cb1cf74805281"
,"a78e16cd28a428b1c1ee3af25880281d","ff2d9674c1f1ecce7f33a5bf17fc4f2d"
,"f7c58f82fd8ba1f307ef5beec4d462be","b65ef14ed9e25f682840061b1abb464f"
,"44ea885554d88eef661c2671f738125a","1aacf83cd01dfa3581438cbecf06977f"
,"23b8eb64453fbb0e23d9ff1e913c5843","07ee83be55ec8fa8c8af50cb8a6925d6"
,"fbd1201ac204b5f40583a53456ed2de1","692f8201ba6f50a31f1ab99051ed6985"
,"b7f94884e69deb54a26414dcb015b88d");
String startTime = "2023-09-23 00:00:00";
String endTime ="2023-09-23 01:00:00";
InfluxQueryWrapper influxQueryWrapper = new InfluxQueryWrapper(DataV.class);
influxQueryWrapper.between(DataV::getTime, startTime,endTime).regular(DataV::getLineId,lineList);
System.out.println(influxQueryWrapper.generateSql());
InfluxDbUtils influxDbUtils = new InfluxDbUtils(user, password, url, database, "");
QueryResult sqlResult = influxDbUtils.query(influxQueryWrapper.generateSql());
System.out.println(2222);
System.out.println(2222);
System.out.println(2222);
System.out.println(2222);
System.out.println(2222);
List<DataV> list = resultMapper.toPOJO(sqlResult, DataV.class);
Map<String, List<DataV>> stringListMap = list.stream().collect(Collectors.groupingBy(obj -> obj.getPhasicType().concat(StrPool.UNDERLINE).concat(obj.getValueType())));
influxQueryWrapper = new InfluxQueryWrapper(DataInHarmRateV.class);
influxQueryWrapper.between(DataInHarmRateV::getTime, startTime, endTime).regular(DataInHarmRateV::getLineId, lineList);
QueryResult dataInHarmRateVResult = influxDbUtils.query(influxQueryWrapper.generateSql());
System.out.println(influxQueryWrapper.generateSql());
List<DataInHarmRateV> dataInHarmRateVList = resultMapper.toPOJO(dataInHarmRateVResult, DataInHarmRateV.class);
System.out.println(11);
System.out.println(11);
System.out.println(11);
// List<DataV> collect = list.stream().filter(dataV -> dataV.getLineId().equalsIgnoreCase("07ee83be55ec8fa8c8af50cb8a6925d6")).collect(Collectors.toList());
System.out.println(11);
}
@SneakyThrows
@Test
public void test1() {
JobQueryParam jobQueryParam = new JobQueryParam();
jobQueryParam.setSourceType(0);
iInfluxManufactService.manufactInfluxData(jobQueryParam);
}
}

View File

@@ -14,7 +14,9 @@
<maven.compiler.source>8</maven.compiler.source>
<maven.compiler.target>8</maven.compiler.target>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<!-- <failOnMissingWebXml>false</failOnMissingWebXml>-->
</properties>
<!-- <packaging>war</packaging>-->
<dependencies>
<!-- Oracle data source -->
@@ -30,4 +32,46 @@
</dependency>
</dependencies>
<build>
<finalName>oracle</finalName>
<plugins>
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>repackage</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.8.1</version>
<configuration>
<source>1.8</source>
<target>1.8</target>
<skip>true</skip>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<version>2.2.1</version>
<executions>
<execution>
<id>attach-sources</id>
<goals>
<goal>jar-no-fork</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>

View File

@@ -56,14 +56,20 @@ spring:
strict: false
datasource:
master:
url: jdbc:oracle:thin:@10.118.135.139:1521:pqsbase
# url: jdbc:oracle:thin:@192.168.1.51:1521:pqsbase
username: pqsbase_xcs
password: pqsadmin
# username: pqsadmin_hn
# password: pqsadmin
driver-class-name: oracle.jdbc.driver.OracleDriver
target:
url: jdbc:oracle:thin:@10.118.135.129:1521:pqsbase
# url: jdbc:oracle:thin:@192.168.1.51:1521:pqsbase
username: pqsadmin
password: Pqsadmin123
# username: pqsadmin
# password: Pqsadmin123
driver-class-name: oracle.jdbc.driver.OracleDriver
# MyBatis configuration
mybatis-plus:
@@ -84,11 +90,12 @@ mybatis-plus:
# Number of slices; must be a divisor of 24: 1 2 3 4 6 8 12 24
business:
-slice: 4
+slice: 12
# Job parameters; startime > endtime: migration walks from the most recent data back toward historical data
job:
startime: 2024-01-18
-endtime: 2023-01-01
+endtime: 2022-01-01
# Amount of data processed per run, in days (greater than 1)
slice: 2

View File

@@ -10,6 +10,7 @@
<module>oracle-data</module>
<module>mysql-data</module>
<module>influx-data</module>
<module>manufact_influx_data</module>
</modules>
<packaging>pom</packaging>
<name>灿能数据迁移系统</name>