Merge remote-tracking branch 'origin/master'

hekaiyu 2025-10-23 10:20:41 +08:00
commit 3dcf5db691
43 changed files with 1822 additions and 1017 deletions

View File

@@ -1,21 +1,19 @@
package org.jeecg.common.constant.enums;
import java.time.Instant;
/**
* Prometheus range query condition enum
*/
public enum PrometheusQueryTypeEnum {
public enum PrometheusDBQueryTypeEnum {
FIVE_MIN("Last 5 minutes",(5*60),"15s"),
THIRTY_MIN("Last 30 minutes",(30*60),"15s"),
ONE_HOUR("Last 1 hour",(60*60),"15s"),
THREE_HOURS("Last 3 hours",(3*60*60),"15s"),
SIX_HOURS("Last 6 hours",(6*60*60),"15s"),
TWELVE_HOURS("Last 12 hours",(12*60*60),"1m0s"),
TWENTY_FOUR_HOURS("Last 24 hours",(24*60*60),"2m0s"),
TWO_DAYS("Last 2 days",(2*24*60*60),"5m0s"),
SEVEN_DAYS("Last 7 days",(7*24*60*60),"15m0s");
FIVE_MIN("Last 5 minutes",(5*60),"30s"),
THIRTY_MIN("Last 30 minutes",(30*60),"30s"),
ONE_HOUR("Last 1 hour",(60*60),"30s"),
THREE_HOURS("Last 3 hours",(3*60*60),"40s"),
SIX_HOURS("Last 6 hours",(6*60*60),"2m0s"),
TWELVE_HOURS("Last 12 hours",(12*60*60),"4m0s"),
TWENTY_FOUR_HOURS("Last 24 hours",(24*60*60),"4m0s"),
TWO_DAYS("Last 2 days",(2*24*60*60),"10m0s"),
SEVEN_DAYS("Last 7 days",(7*24*60*60),"40m0s");
/**
* Prometheus range query condition
@@ -30,7 +28,7 @@ public enum PrometheusQueryTypeEnum {
*/
private String step;
PrometheusQueryTypeEnum(String conditions, Integer number, String step) {
PrometheusDBQueryTypeEnum(String conditions, Integer number, String step) {
this.conditions = conditions;
this.lastSecond = number;
this.step = step;
@@ -53,8 +51,8 @@ public enum PrometheusQueryTypeEnum {
* @param conditions
* @return
*/
public static PrometheusQueryTypeEnum getQueryTypeEnum(String conditions) {
for (PrometheusQueryTypeEnum queryTypeEnum : PrometheusQueryTypeEnum.values()) {
public static PrometheusDBQueryTypeEnum getQueryTypeEnum(String conditions) {
for (PrometheusDBQueryTypeEnum queryTypeEnum : PrometheusDBQueryTypeEnum.values()) {
if (queryTypeEnum.getConditions().equals(conditions)) {
return queryTypeEnum;
}

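Each constant above bundles a display label (conditions), the number of seconds to subtract from "now" (lastSecond), and a resolution (step) sized so that longer windows return proportionally fewer points. A minimal sketch of how a caller might turn one of these constants into a Prometheus /api/v1/query_range request; the buildRangeQueryUrl helper, the base URL, and the assumption that the enum exposes the usual getters for its fields are all illustrative, not code from this commit:

import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import java.time.Instant;

public class PrometheusRangeQueryDemo {
    // Hypothetical helper: builds a Prometheus range-query URL from an enum constant.
    static String buildRangeQueryUrl(String baseUrl, String promql, PrometheusDBQueryTypeEnum type) {
        long end = Instant.now().getEpochSecond();
        long start = end - type.getLastSecond(); // range start = now - lastSecond
        return baseUrl + "/api/v1/query_range"
                + "?query=" + URLEncoder.encode(promql, StandardCharsets.UTF_8)
                + "&start=" + start
                + "&end=" + end
                + "&step=" + type.getStep(); // e.g. "30s", "4m0s"
    }

    public static void main(String[] args) {
        PrometheusDBQueryTypeEnum type = PrometheusDBQueryTypeEnum.getQueryTypeEnum("Last 1 hour");
        System.out.println(buildRangeQueryUrl("http://localhost:9090", "up", type));
    }
}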
View File

@@ -0,0 +1,72 @@
package org.jeecg.common.constant.enums;
/**
* Prometheus range query condition enum
*/
public enum PrometheusHostQueryTypeEnum {
FIVE_MIN("Last 5 minutes",(5*60),"15s","1m0s"),
THIRTY_MIN("Last 30 minutes",(30*60),"15s","1m0s"),
ONE_HOUR("Last 1 hour",(60*60),"15s","1m0s"),
THREE_HOURS("Last 3 hours",(3*60*60),"15s","1m0s"),
SIX_HOURS("Last 6 hours",(6*60*60),"15s","1m0s"),
TWELVE_HOURS("Last 12 hours",(12*60*60),"1m0s","1m15s"),
TWENTY_FOUR_HOURS("Last 24 hours",(24*60*60),"2m0s","2m15s"),
TWO_DAYS("Last 2 days",(2*24*60*60),"5m0s","2m15s"),
SEVEN_DAYS("Last 7 days",(7*24*60*60),"15m0s","15m15s");
/**
* Prometheus range query condition
*/
private String conditions;
/**
* Seconds subtracted from now to get the range start time
*/
private long lastSecond;
/**
* Step interval
*/
private String step;
/**
* Time window of the range vector
*/
private String exprTime;
PrometheusHostQueryTypeEnum(String conditions, Integer number, String step,String exprTime) {
this.conditions = conditions;
this.lastSecond = number;
this.step = step;
this.exprTime = exprTime;
}
public String getConditions() {
return conditions;
}
public long getLastSecond() {
return lastSecond;
}
public String getStep() {
return step;
}
public String getExprTime() {
return exprTime;
}
/**
* Returns the enum constant matching the given conditions
* @param conditions
* @return
*/
public static PrometheusHostQueryTypeEnum getQueryTypeEnum(String conditions) {
for (PrometheusHostQueryTypeEnum queryTypeEnum : PrometheusHostQueryTypeEnum.values()) {
if (queryTypeEnum.getConditions().equals(conditions)) {
return queryTypeEnum;
}
}
return null;
}
}

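The exprTime field is what separates this enum from the DB variant above: it sizes the range-vector window used inside expressions such as rate(metric[window]), and is kept slightly larger than step (for example a 2m0s step with a 2m15s window) so adjacent evaluation points overlap rather than leaving gaps. A hedged sketch of how exprTime might be consumed; the node_cpu_seconds_total metric and this helper are illustrative only and could sit next to the enum:

// Illustrative only: the range-vector window comes from exprTime,
// while step (not used here) drives the query_range resolution.
static String cpuRateExpr(PrometheusHostQueryTypeEnum type) {
    return "rate(node_cpu_seconds_total[" + type.getExprTime() + "])";
}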
View File

@@ -23,4 +23,34 @@ public class PrometheusServerProperties {
* Name of the network interface card being monitored
*/
private String networkCardName;
/**
* PostgreSQL database host
*/
private String pgHost;
/**
* PostgreSQL database port
*/
private Integer pgDBPort;
/**
* PostgreSQL administrator username
*/
private String pgAdmin;
/**
* PostgreSQL administrator password
*/
private String pgAdminPwd;
/**
* Databases whose active connection counts are monitored
*/
private String monitorDBNames;
/**
* Database instance address
*/
private String dbInstance;
}

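A sketch of how the new PostgreSQL fields might be consumed, assuming Lombok generates the usual getters and reading the monitorDBNames comment as "count active connections per listed database"; the pg_stat_activity query and this class are assumptions, not code from the commit:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;

public class PgActiveConnectionProbe {
    // Assumed usage: count active connections for one monitored database.
    static int countActive(PrometheusServerProperties props, String dbName) throws Exception {
        String url = "jdbc:postgresql://" + props.getPgHost() + ":" + props.getPgDBPort() + "/postgres";
        try (Connection conn = DriverManager.getConnection(url, props.getPgAdmin(), props.getPgAdminPwd());
             PreparedStatement ps = conn.prepareStatement(
                     "SELECT count(*) FROM pg_stat_activity WHERE datname = ? AND state = 'active'")) {
            ps.setString(1, dbName);
            try (ResultSet rs = ps.executeQuery()) {
                return rs.next() ? rs.getInt(1) : 0;
            }
        }
    }
}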
View File

@@ -31,14 +31,13 @@ public class SourceRebuildMonitoringData implements Serializable {
/**
* Task ID
*/
@Excel(name = "任务主键", width = 10,height = 20,orderNum="0")
@TableField(value = "task_id")
private Integer taskId;
/**
* Station name
*/
@Excel(name = "台站名称", width = 20,height = 20,orderNum="1")
@Excel(name = "台站名称", width = 20,height = 20,orderNum="0")
@NotBlank(message = "台站名称不能为空", groups = {InsertGroup.class, UpdateGroup.class})
@TableField(value = "station")
private String station;
@@ -46,7 +45,7 @@ public class SourceRebuildMonitoringData implements Serializable {
/**
* Nuclide name
*/
@Excel(name = "核素名称", width = 20,height = 20,orderNum="2")
@Excel(name = "核素名称", width = 20,height = 20,orderNum="1")
@NotBlank(message = "核素名称不能为空", groups = {InsertGroup.class, UpdateGroup.class})
@TableField(value = "nuclide")
private String nuclide;
@@ -54,7 +53,7 @@ public class SourceRebuildMonitoringData implements Serializable {
/**
* Measurement stop time
*/
@Excel(name = "测量停止时间", width = 25,height = 20,format = "yyyy-MM-dd HH:mm:ss",orderNum="3")
@Excel(name = "测量停止时间", width = 25,height = 20,format = "yyyy-MM-dd HH:mm:ss",orderNum="2")
@NotNull(message = "测量停止时间不能为空", groups = {InsertGroup.class, UpdateGroup.class})
@TableField(value = "collect_stop")
@JsonFormat(timezone = "GMT+8", pattern = "yyyy-MM-dd HH:mm:ss")
@@ -63,7 +62,7 @@ public class SourceRebuildMonitoringData implements Serializable {
/**
* Activity concentration
*/
@Excel(name = "活度浓度", width = 25,height = 20,orderNum="4")
@Excel(name = "活度浓度", width = 25,height = 20,orderNum="3")
@NotBlank(message = "活度浓度不能为空", groups = {InsertGroup.class, UpdateGroup.class})
@TableField(value = "activity")
private String activity;
@@ -71,7 +70,7 @@ public class SourceRebuildMonitoringData implements Serializable {
/**
* Uncertainty
*/
@Excel(name = "不确定度", width = 25,height = 20,orderNum="5")
@Excel(name = "不确定度", width = 25,height = 20,orderNum="4")
@NotBlank(message = "不确定度不能为空", groups = {InsertGroup.class, UpdateGroup.class})
@TableField(value = "uncertainty")
private String uncertainty;
@@ -79,7 +78,7 @@ public class SourceRebuildMonitoringData implements Serializable {
/**
* MDC
*/
@Excel(name = "MDC", width = 25,height = 20,orderNum="6")
@Excel(name = "MDC", width = 25,height = 20,orderNum="5")
@NotBlank(message = "mdc不能为空", groups = {InsertGroup.class, UpdateGroup.class})
@TableField(value = "mdc")
private String mdc;

View File

@@ -5,6 +5,8 @@ import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.jeecg.common.api.vo.Result;
import org.jeecg.common.util.DateUtils;
import org.jeecg.entity.GardsStations;
import org.jeecg.entity.SysDefaultNuclide;
import org.jeecg.service.ISampleStatAnalysisService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.format.annotation.DateTimeFormat;
@@ -14,7 +16,9 @@ import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import java.time.LocalDate;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Objects;
@RestController
@@ -127,4 +131,33 @@ public class DataAnalysisController {
}
@GetMapping("/findStationList")
public Result findStationList(String systemType) {
Result result = new Result();
try {
List<GardsStations> gardsStations = sampleStatAnalysisService.findStationListByMenuName(systemType);
result.setCode(200);
result.setSuccess(true);
result.setResult(gardsStations);
} catch (Exception e) {
result.setCode(500);
result.setSuccess(false);
result.setMessage(e.getMessage());
}
return result;
}
@GetMapping("/findNuclideList")
public Result findNuclideList(String systemType) {
Result result = new Result();
try {
List<SysDefaultNuclide> defaultNuclides = sampleStatAnalysisService.findNuclideList(systemType);
result.setCode(200);
result.setSuccess(true);
result.setResult(defaultNuclides);
} catch (Exception e) {
result.setCode(500);
result.setSuccess(false);
result.setMessage(e.getMessage());
}
return result;
}
}

View File

@@ -0,0 +1,103 @@
package org.jeecg.entity;
import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import com.fasterxml.jackson.annotation.JsonFormat;
import lombok.Data;
import org.springframework.format.annotation.DateTimeFormat;
import java.io.Serializable;
import java.util.Date;
@Data
@TableName(value = "CONFIGURATION.GARDS_STATIONS")
public class GardsStations implements Serializable {
/**
* Station ID
*/
@TableId(type = IdType.INPUT)
private Integer stationId;
/**
* Station code
*/
@TableField(value = "STATION_CODE")
private String stationCode;
/**
* Country code
*/
@TableField(value = "COUNTRY_CODE")
private String countryCode;
/**
* Station type
*/
@TableField(value = "TYPE")
private String type;
/**
* Longitude
*/
@TableField(value = "LON")
private Double lon;
/**
* Latitude
*/
@TableField(value = "LAT")
private Double lat;
/**
* Elevation
*/
@TableField(value = "ELEVATION")
private Double elevation;
/**
* Description
*/
@TableField(value = "DESCRIPTION")
private String description;
/**
* Operation start date
*/
@TableField(value = "DATE_BEGIN")
@DateTimeFormat(pattern = "yyyy-MM-dd")
@JsonFormat(pattern = "yyyy-MM-dd", timezone = "GMT+8")
private Date dateBegin;
/**
* Operation end date
*/
@TableField(value = "DATE_END")
@DateTimeFormat(pattern = "yyyy-MM-dd")
@JsonFormat(pattern = "yyyy-MM-dd", timezone = "GMT+8")
private Date dateEnd;
/**
* Operating status
*/
@TableField(value = "STATUS")
private String status;
/**
* Modification time
*/
@TableField(value = "MODDATE")
@DateTimeFormat(pattern = "yyyy-MM-dd HH:mm:ss")
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
private Date moddate;
@TableField(value = "CATEGORY")
private Integer category;
/**
* Efficiency calculation type
*/
@TableField(value = "EFFIC_CALCUL_TYPE")
private String efficCalculType;
}

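GardsStations maps CONFIGURATION.GARDS_STATIONS through MyBatis-Plus annotations. This commit actually reads the table through the XML mapper shown later (findStationListByMenuName); the snippet below is only a hedged illustration of querying it through a generic BaseMapper, which this diff does not define:

import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import java.util.List;

class GardsStationsQueryDemo {
    // Hypothetical: assumes some BaseMapper<GardsStations> bean exists;
    // the "Operating" status value is illustrative, not taken from this commit.
    static List<GardsStations> operatingStations(BaseMapper<GardsStations> mapper) {
        return mapper.selectList(new LambdaQueryWrapper<GardsStations>()
                .eq(GardsStations::getStatus, "Operating"));
    }
}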
View File

@@ -13,6 +13,10 @@ public class NuclideActConcIntvl {
*
*/
private Integer sampleId;
/**
* Category
*/
private Integer category;
/** Sample type
* System type P : particulate; B : gas with 3-D β-γ coincidence detection; G : all other gas systems (high-resolution

View File

@@ -0,0 +1,61 @@
package org.jeecg.entity;
import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableId;
import com.fasterxml.jackson.annotation.JsonFormat;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.experimental.Accessors;
import org.jeecgframework.poi.excel.annotation.Excel;
import org.springframework.format.annotation.DateTimeFormat;
import java.io.Serializable;
import java.util.Date;
@Data
@EqualsAndHashCode(callSuper = false)
@Accessors(chain = true)
public class SysDefaultNuclide implements Serializable {
private static final long serialVersionUID = 1L;
/**
* id
*/
@TableId(type = IdType.ASSIGN_ID)
private String id;
/**
* Nuclide name
*/
@Excel(name = "核素名称", width = 15)
private String nuclideName;
/**
* Nuclide usage
*/
@Excel(name = "核素用途", width = 15)
private Integer useType;
/**
* Nuclide type
*/
@Excel(name = "核素类型", width = 8)
private String nuclideType;
/**
* Creation time
*/
@JsonFormat(timezone = "GMT+8", pattern = "yyyy-MM-dd HH:mm:ss")
@DateTimeFormat(pattern = "yyyy-MM-dd HH:mm:ss")
private Date createTime;
/**
* Created by
*/
private String createBy;
}

View File

@@ -0,0 +1,27 @@
package org.jeecg.entity;
public enum SystemType {
/**
* Particulate
*/
PARTICULATE("P"),
/**
* β-γ
*/
BETA("B"),
/**
* γ
*/
GAMMA("G");
private String type;
SystemType(String type) {
this.type = type;
}
public String getType(){
return this.type;
}
}

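SystemType exposes its single-letter code through getType() but offers no reverse lookup. A small hypothetical helper (not part of this commit) that could be added inside the enum to resolve the codes used by the mappers back to a constant, mirroring the null-on-miss convention of the query-type enums above:

// Hypothetical companion method for SystemType (not in this commit).
public static SystemType fromCode(String code) {
    for (SystemType t : SystemType.values()) {
        if (t.getType().equals(code)) {
            return t;
        }
    }
    return null; // same null-on-miss convention as getQueryTypeEnum above
}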
View File

@@ -55,7 +55,7 @@ public interface GardsSampleStatAnalysisMapper extends BaseMapper<GardsSampleDat
* @param endTime end time
* @return List<NuclideActConcIntvl>
*/
List<NuclideActConcIntvl> getRnautoPNuclideActConcIntvl(String sampleType,String station,String nuclideName, @Param("startTime") String startTime, @Param("endTime") String endTime);
List<NuclideActConcIntvl> getRnautoPNuclideActConcIntvl(String sampleType, String station, String nuclideName, @Param("startTime") String startTime, @Param("endTime") String endTime);
/*** Gets the activity concentration and MDC info for nuclides in samples
* Queries activity concentration and MDC info from RNAUTO.GARDS_XE_RESULTS
@@ -64,7 +64,7 @@ public interface GardsSampleStatAnalysisMapper extends BaseMapper<GardsSampleDat
* @param endTime
* @return
*/
List<NuclideActConcIntvl> getRnautoNuclideActConcIntvl(String sampleType,String station,String nuclideName, @Param("startTime") String startTime, @Param("endTime") String endTime);
List<NuclideActConcIntvl> getRnautoNuclideActConcIntvl(String sampleType, String station, String nuclideName, @Param("startTime") String startTime, @Param("endTime") String endTime);
//endregion
@@ -78,7 +78,7 @@ public interface GardsSampleStatAnalysisMapper extends BaseMapper<GardsSampleDat
* @param endTime end time
* @return List<NuclideActConcIntvl>
*/
List<NuclideActConcIntvl> getRnmanPNuclideActConcIntvl(String sampleType,String station,String nuclideName, @Param("startTime") String startTime, @Param("endTime") String endTime);
List<NuclideActConcIntvl> getRnmanPNuclideActConcIntvl(String sampleType, String station, String nuclideName, @Param("startTime") String startTime, @Param("endTime") String endTime);
/*** Gets the activity concentration and MDC info for nuclides in samples
* Queries activity concentration and MDC info from RNMAN.GARDS_XE_RESULTS
@@ -87,7 +87,7 @@ public interface GardsSampleStatAnalysisMapper extends BaseMapper<GardsSampleDat
* @param endTime
* @return
*/
List<NuclideActConcIntvl> getRnmanNuclideActConcIntvl(String sampleType,String station,String nuclideName, @Param("startTime") String startTime, @Param("endTime") String endTime);
List<NuclideActConcIntvl> getRnmanNuclideActConcIntvl(String sampleType, String station, String nuclideName, @Param("startTime") String startTime, @Param("endTime") String endTime);
//endregion
@@ -119,10 +119,10 @@ public interface GardsSampleStatAnalysisMapper extends BaseMapper<GardsSampleDat
List<SampleLevelData> getRnManNuclideTimeSeriesAnalysis(String station, @Param("startTime") String startTime, @Param("endTime") String endTime);
/**
* Exact query for a single station + a single nuclide
* @param stationId station ID (required)
*
* @param stationId station ID (required)
* @param nuclideName nuclide name (required)
* @return list of matching records
*/
@@ -136,7 +136,8 @@ public interface GardsSampleStatAnalysisMapper extends BaseMapper<GardsSampleDat
/**
* Multi-station + multi-nuclide query
* @param stationIds collection of station IDs (non-empty)
*
* @param stationIds collection of station IDs (non-empty)
* @param nuclideNames collection of nuclide names (non-empty)
* @return list of matching records
*/
@@ -150,7 +151,8 @@ public interface GardsSampleStatAnalysisMapper extends BaseMapper<GardsSampleDat
/**
* Multi-station + single-nuclide query
* @param stationIds collection of station IDs (non-empty)
*
* @param stationIds collection of station IDs (non-empty)
* @param nuclideName single nuclide name (required)
* @return list of matching records
*/
@@ -164,7 +166,8 @@ public interface GardsSampleStatAnalysisMapper extends BaseMapper<GardsSampleDat
/**
* Single-station + multi-nuclide query
* @param stationId single station ID (required)
*
* @param stationId single station ID (required)
* @param nuclideNames collection of nuclide names (non-empty)
* @return list of matching records
*/
@@ -179,10 +182,11 @@ public interface GardsSampleStatAnalysisMapper extends BaseMapper<GardsSampleDat
/**
* Dynamic conditional query; every parameter may be null
* @param stationIds collection of station IDs (optional)
*
* @param stationIds collection of station IDs (optional)
* @param nuclideNames collection of nuclide names (optional)
* @param startTime start time (optional)
* @param endTime end time (optional)
* @param startTime start time (optional)
* @param endTime end time (optional)
* @return list of matching records
*/
List<GardsThresholdResultHis> selectByCondition(
@@ -194,32 +198,24 @@ public interface GardsSampleStatAnalysisMapper extends BaseMapper<GardsSampleDat
);
//endregion
//region Nuclide activity concentration comparison analysis
List<NuclideActConcIntvl> getRnAutoAnalyzeNuclideActivityConc(String sampleType, String nuclideName, @Param("stationIds") Integer[] stationIds, @Param("startTime") String startTime, @Param("endTime") String endTime);
List<NuclideActConcIntvl> getRnAutoAnalyzeNuclideActivityConc(@Param("sampleType")String sampleType, @Param("nuclideName")String nuclideName, @Param("stationIds") Integer[] stationIds, @Param("startTime") String startTime, @Param("endTime") String endTime);
List<NuclideActConcIntvl> getRnManAnalyzeNuclideActivityConc(String sampleType, String nuclideName, @Param("stationIds") Integer[] stationIds, @Param("startTime") String startTime, @Param("endTime") String endTime);
List<NuclideActConcIntvl> getRnManAnalyzeNuclideActivityConc(@Param("sampleType")String sampleType, @Param("nuclideName")String nuclideName, @Param("stationIds") Integer[] stationIds, @Param("startTime") String startTime, @Param("endTime") String endTime);
//endregion
//region Sample monitoring results
List<StationInfoData> getRnAutoSampleResult(String sampleType, @Param("startTime") String startTime, @Param("endTime") String endTime);
List<StationInfoData> getRnManSampleResult(String sampleType, @Param("startTime") String startTime, @Param("endTime") String endTime);
//endregion
//region Query station information
List<GardsStations> findStationListByMenuName(@Param("systemType")String systemType);
//endregion
}

View File

@@ -0,0 +1,9 @@
package org.jeecg.mapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import org.apache.ibatis.annotations.Mapper;
import org.jeecg.entity.SysDefaultNuclide;
@Mapper
public interface SysDefaultNuclideMapper extends BaseMapper<SysDefaultNuclide> {
}

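The service below autowires this mapper, but the body of findNuclideList falls outside the visible hunks. A plausible sketch of its shape using MyBatis-Plus; filtering on nuclideType by systemType is an inference from the SysDefaultNuclide fields, not confirmed by the diff:

import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import java.util.List;
import org.jeecg.entity.SysDefaultNuclide;
import org.jeecg.mapper.SysDefaultNuclideMapper;

class DefaultNuclideLookupDemo {
    private final SysDefaultNuclideMapper defaultNuclideMapper;

    DefaultNuclideLookupDemo(SysDefaultNuclideMapper defaultNuclideMapper) {
        this.defaultNuclideMapper = defaultNuclideMapper;
    }

    // Assumed shape of the service method backing /findNuclideList;
    // the nuclideType filter is hypothetical (P/B/G codes assumed).
    List<SysDefaultNuclide> findNuclideList(String systemType) {
        LambdaQueryWrapper<SysDefaultNuclide> wrapper = new LambdaQueryWrapper<>();
        wrapper.eq(SysDefaultNuclide::getNuclideType, systemType);
        return defaultNuclideMapper.selectList(wrapper);
    }
}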
View File

@@ -2,19 +2,15 @@
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="org.jeecg.mapper.GardsSampleStatAnalysisMapper">
<select id="getSampleStatAnalysis" resultType="org.jeecg.modules.base.entity.original.GardsSampleData">
SELECT
SAMPLE_ID,
STATION_ID,
ACQUISITION_START,
GRADING
FROM
ORIGINAL.GARDS_SAMPLE_DATA
WHERE
STATION_CODE = #{ stationCode }
AND ACQUISITION_START BETWEEN TO_DATE(#{ startTime }, 'YYYY-MM-DD HH24:MI:SS')
AND TO_DATE(#{ endTime }, 'YYYY-MM-DD HH24:MI:SS')
ORDER BY
ACQUISITION_START
SELECT SAMPLE_ID,
STATION_ID,
ACQUISITION_START,
GRADING
FROM ORIGINAL.GARDS_SAMPLE_DATA
WHERE STATION_CODE = #{ stationCode }
AND ACQUISITION_START BETWEEN TO_DATE(#{ startTime }, 'YYYY-MM-DD HH24:MI:SS')
AND TO_DATE(#{ endTime }, 'YYYY-MM-DD HH24:MI:SS')
ORDER BY ACQUISITION_START
</select>
<select id="selectByStationIds" resultType="org.jeecg.entity.ThresholdMetric">
SELECT
@@ -36,38 +32,32 @@
</select>
<select id="getRnAutoSampleGradeAnalysis" resultType="org.jeecg.entity.SampleLevelData">
SELECT
t1.SAMPLE_ID,
t1.COLLECT_STOP,
t1.STATION_ID,
t2.CATEGORY
FROM
ORIGINAL.GARDS_SAMPLE_DATA t1
LEFT JOIN RNAUTO.GARDS_ANALYSES t2
ON t1.SAMPLE_ID = t2.SAMPLE_ID
WHERE
t1.STATION_ID=#{station}
AND t1.SAMPLE_TYPE=#{sampleType}
AND t1.COLLECT_STOP BETWEEN TO_DATE(#{startTime}, 'YYYY-MM-DD hh24:mi:ss')
AND TO_DATE(#{endTime}, 'YYYY-MM-DD hh24:mi:ss')
SELECT t1.SAMPLE_ID,
t1.COLLECT_STOP,
t1.STATION_ID,
t2.CATEGORY
FROM ORIGINAL.GARDS_SAMPLE_DATA t1
LEFT JOIN RNAUTO.GARDS_ANALYSES t2
ON t1.SAMPLE_ID = t2.SAMPLE_ID
WHERE t1.STATION_ID = #{station}
AND t1.SAMPLE_TYPE = #{sampleType}
AND t1.COLLECT_STOP BETWEEN TO_DATE(#{startTime}, 'YYYY-MM-DD hh24:mi:ss')
AND TO_DATE(#{endTime}, 'YYYY-MM-DD hh24:mi:ss')
</select>
<select id="getRnManSampleGradeAnalysis" resultType="org.jeecg.entity.SampleLevelData">
SELECT
t1.SAMPLE_ID,
t1.COLLECT_STOP,
t1.STATION_ID,
t2.CATEGORY
FROM
ORIGINAL.GARDS_SAMPLE_DATA t1
LEFT JOIN RNMAN.GARDS_ANALYSES t2
ON t1.SAMPLE_ID = t2.SAMPLE_ID
WHERE
t1.STATION_ID=#{station}
AND t1.SAMPLE_TYPE=#{sampleType}
AND t1.COLLECT_STOP BETWEEN TO_DATE(#{startTime}, 'YYYY-MM-DD hh24:mi:ss')
AND TO_DATE(#{endTime}, 'YYYY-MM-DD hh24:mi:ss')
SELECT t1.SAMPLE_ID,
t1.COLLECT_STOP,
t1.STATION_ID,
t2.CATEGORY
FROM ORIGINAL.GARDS_SAMPLE_DATA t1
LEFT JOIN RNMAN.GARDS_ANALYSES t2
ON t1.SAMPLE_ID = t2.SAMPLE_ID
WHERE t1.STATION_ID = #{station}
AND t1.SAMPLE_TYPE = #{sampleType}
AND t1.COLLECT_STOP BETWEEN TO_DATE(#{startTime}, 'YYYY-MM-DD hh24:mi:ss')
AND TO_DATE(#{endTime}, 'YYYY-MM-DD hh24:mi:ss')
</select>
@@ -77,131 +67,121 @@
<select id="getRnAutoSampleLevel" resultType="org.jeecg.entity.SampleLevelData">
select a.SAMPLE_ID,
a.COLLECT_STOP,
b.CATEGORY
from
ORIGINAL.GARDS_SAMPLE_DATA a,
RNAUTO.GARDS_ANALYSES b
WHERE a.SAMPLE_ID=b.SAMPLE_ID
AND a.STATION_ID=#{station}
AND a.COLLECT_STOP BETWEEN TO_DATE(#{startTime}, 'YYYY-MM-DD hh24:mi:ss')
AND TO_DATE(#{endTime}, 'YYYY-MM-DD hh24:mi:ss')
a.COLLECT_STOP,
b.CATEGORY
from ORIGINAL.GARDS_SAMPLE_DATA a,
RNAUTO.GARDS_ANALYSES b
WHERE a.SAMPLE_ID = b.SAMPLE_ID
AND a.STATION_ID = #{station}
AND a.COLLECT_STOP BETWEEN TO_DATE(#{startTime}, 'YYYY-MM-DD hh24:mi:ss')
AND TO_DATE(#{endTime}, 'YYYY-MM-DD hh24:mi:ss')
</select>
<select id="getRnManSampleLevel" resultType="org.jeecg.entity.SampleLevelData">
select a.SAMPLE_ID,
a.COLLECT_STOP,
b.CATEGORY
from
ORIGINAL.GARDS_SAMPLE_DATA a,
RNMAN.GARDS_ANALYSES b
WHERE a.SAMPLE_ID=b.SAMPLE_ID
AND a.STATION_ID=#{station}
AND a.COLLECT_STOP BETWEEN TO_DATE(#{startTime}, 'YYYY-MM-DD hh24:mi:ss')
AND TO_DATE(#{endTime}, 'YYYY-MM-DD hh24:mi:ss')
a.COLLECT_STOP,
b.CATEGORY
from ORIGINAL.GARDS_SAMPLE_DATA a,
RNMAN.GARDS_ANALYSES b
WHERE a.SAMPLE_ID = b.SAMPLE_ID
AND a.STATION_ID = #{station}
AND a.COLLECT_STOP BETWEEN TO_DATE(#{startTime}, 'YYYY-MM-DD hh24:mi:ss')
AND TO_DATE(#{endTime}, 'YYYY-MM-DD hh24:mi:ss')
</select>
<!-- Get the activity concentration and MDC info for nuclides in the sample -->
<select id="getRnautoPNuclideActConcIntvl" resultType="org.jeecg.entity.NuclideActConcIntvl">
SELECT
a.SAMPLE_ID,
a.COLLECT_STOP,
a.SAMPLE_TYPE,
a.STATION_ID,
a.STATUS,
b.NUCLIDENAME NUCLIDE_NAME,
b.CONCENTRATION AS conc,
b.MDC,
b.ACTIV_KEY_ERR AS concErr
FROM
ORIGINAL.GARDS_SAMPLE_DATA a
INNER JOIN
RNAUTO.GARDS_NUCL_IDED b
ON a.SAMPLE_ID = b.SAMPLE_ID
WHERE
a.SAMPLE_TYPE = #{sampleType}
AND a.STATION_ID=#{station}
AND b.NUCLIDENAME=#{nuclideName}
AND a.COLLECT_START BETWEEN TO_DATE(#{startTime}, 'YYYY-MM-DD hh24:mi:ss')
AND TO_DATE(#{endTime}, 'YYYY-MM-DD hh24:mi:ss')
SELECT a.SAMPLE_ID,
a.COLLECT_STOP,
a.SAMPLE_TYPE,
a.STATION_ID,
a.STATUS,
c.CATEGORY,
b.NUCLIDENAME NUCLIDE_NAME,
b.CONCENTRATION AS conc,
b.MDC,
b.ACTIV_KEY_ERR AS concErr
FROM ORIGINAL.GARDS_SAMPLE_DATA a
INNER JOIN
RNAUTO.GARDS_ANALYSES c
ON a.SAMPLE_ID = c.SAMPLE_ID
INNER JOIN
RNAUTO.GARDS_NUCL_IDED b
ON a.SAMPLE_ID = b.SAMPLE_ID
WHERE a.SAMPLE_TYPE = #{sampleType}
AND a.STATION_ID = #{station}
AND b.NUCLIDENAME = #{nuclideName}
AND a.COLLECT_START BETWEEN TO_DATE(#{startTime}, 'YYYY-MM-DD hh24:mi:ss')
AND TO_DATE(#{endTime}, 'YYYY-MM-DD hh24:mi:ss')
</select>
<!-- Get the activity concentration and MDC info for gas-type samples -->
<select id="getRnautoNuclideActConcIntvl" resultType="org.jeecg.entity.NuclideActConcIntvl">
SELECT
a.SAMPLE_ID,
a.COLLECT_STOP,
a.SAMPLE_TYPE,
a.STATION_ID,
a.STATUS,
b.NUCLIDE_NAME,
b.CONC,
b.MDC,
b.CONC_ERR
FROM
ORIGINAL.GARDS_SAMPLE_DATA a
INNER JOIN
RNAUTO.GARDS_XE_RESULTS b
ON a.SAMPLE_ID = b.SAMPLE_ID
WHERE
a.SAMPLE_TYPE = #{sampleType}
AND a.STATION_ID=#{station}
AND b.NUCLIDE_NAME=#{nuclideName}
AND a.COLLECT_START BETWEEN TO_DATE(#{startTime}, 'YYYY-MM-DD hh24:mi:ss')
AND TO_DATE(#{endTime}, 'YYYY-MM-DD hh24:mi:ss')
SELECT a.SAMPLE_ID,
a.COLLECT_STOP,
a.SAMPLE_TYPE,
a.STATION_ID,
a.STATUS,
b.NUCLIDE_NAME,
b.CONC,
b.MDC,
b.CONC_ERR
FROM ORIGINAL.GARDS_SAMPLE_DATA a
INNER JOIN
RNAUTO.GARDS_XE_RESULTS b
ON a.SAMPLE_ID = b.SAMPLE_ID
WHERE a.SAMPLE_TYPE = #{sampleType}
AND a.STATION_ID = #{station}
AND b.NUCLIDE_NAME = #{nuclideName}
AND a.COLLECT_START BETWEEN TO_DATE(#{startTime}, 'YYYY-MM-DD hh24:mi:ss')
AND TO_DATE(#{endTime}, 'YYYY-MM-DD hh24:mi:ss')
</select>
<!-- Data source: RNMAN -->
<!-- Get the activity concentration and MDC info for nuclides in the sample -->
<select id="getRnmanPNuclideActConcIntvl" resultType="org.jeecg.entity.NuclideActConcIntvl">
SELECT
a.SAMPLE_ID,
a.COLLECT_STOP,
a.SAMPLE_TYPE,
a.STATION_ID,
a.STATUS,
b.NUCLIDENAME NUCLIDE_NAME,
b.CONCENTRATION AS conc,
b.MDC,
b.ACTIV_KEY_ERR AS concErr
FROM
ORIGINAL.GARDS_SAMPLE_DATA a
INNER JOIN
RNMAN.GARDS_NUCL_IDED b
ON a.SAMPLE_ID = b.SAMPLE_ID
WHERE
a.SAMPLE_TYPE = #{sampleType}
AND a.STATION_ID=#{station}
AND b.NUCLIDENAME=#{nuclideName}
AND a.COLLECT_START BETWEEN TO_DATE(#{startTime}, 'YYYY-MM-DD hh24:mi:ss')
AND TO_DATE(#{endTime}, 'YYYY-MM-DD hh24:mi:ss')
SELECT a.SAMPLE_ID,
a.COLLECT_STOP,
a.SAMPLE_TYPE,
a.STATION_ID,
a.STATUS,
b.NUCLIDENAME NUCLIDE_NAME,
b.CONCENTRATION AS conc,
b.MDC,
b.ACTIV_KEY_ERR AS concErr
FROM ORIGINAL.GARDS_SAMPLE_DATA a
INNER JOIN
RNMAN.GARDS_NUCL_IDED b
ON a.SAMPLE_ID = b.SAMPLE_ID
WHERE a.SAMPLE_TYPE = #{sampleType}
AND a.STATION_ID = #{station}
AND b.NUCLIDENAME = #{nuclideName}
AND a.COLLECT_START BETWEEN TO_DATE(#{startTime}, 'YYYY-MM-DD hh24:mi:ss')
AND TO_DATE(#{endTime}, 'YYYY-MM-DD hh24:mi:ss')
</select>
<!-- Get the activity concentration and MDC info for gas-type samples -->
<select id="getRnmanNuclideActConcIntvl" resultType="org.jeecg.entity.NuclideActConcIntvl">
SELECT
a.SAMPLE_ID,
a.COLLECT_STOP,
a.SAMPLE_TYPE,
a.STATION_ID,
a.STATUS,
b.NUCLIDE_NAME,
b.CONC,
b.MDC,
b.CONC_ERR
FROM
ORIGINAL.GARDS_SAMPLE_DATA a
INNER JOIN
RNMAN.GARDS_XE_RESULTS b
ON a.SAMPLE_ID = b.SAMPLE_ID
WHERE
a.SAMPLE_TYPE = #{sampleType}
AND a.STATION_ID=#{station}
AND b.NUCLIDE_NAME=#{nuclideName}
AND a.COLLECT_START BETWEEN TO_DATE(#{startTime}, 'YYYY-MM-DD hh24:mi:ss')
AND TO_DATE(#{endTime}, 'YYYY-MM-DD hh24:mi:ss')
SELECT a.SAMPLE_ID,
a.COLLECT_STOP,
a.SAMPLE_TYPE,
a.STATION_ID,
a.STATUS,
b.NUCLIDE_NAME,
b.CONC,
b.MDC,
b.CONC_ERR
FROM ORIGINAL.GARDS_SAMPLE_DATA a
INNER JOIN
RNMAN.GARDS_XE_RESULTS b
ON a.SAMPLE_ID = b.SAMPLE_ID
WHERE a.SAMPLE_TYPE = #{sampleType}
AND a.STATION_ID = #{station}
AND b.NUCLIDE_NAME = #{nuclideName}
AND a.COLLECT_START BETWEEN TO_DATE(#{startTime}, 'YYYY-MM-DD hh24:mi:ss')
AND TO_DATE(#{endTime}, 'YYYY-MM-DD hh24:mi:ss')
</select>
@@ -241,77 +221,72 @@
AND t1.SAMPLE_ID = t3.SAMPLE_ID
WHERE t1.STATION_ID = #{station}
AND t1.STATUS != 'F'
AND t1.COLLECT_STOP BETWEEN TO_DATE(#{startTime}, 'YYYY-MM-DD hh24:mi:ss')
AND TO_DATE(#{endTime}, 'YYYY-MM-DD hh24:mi:ss')
</select>
<!-- RnAuto nuclide category time-series analysis -->
<select id="getRnAutoNuclideTimeSeriesAnalysis" resultType="org.jeecg.entity.SampleLevelData">
SELECT
A.SAMPLE_ID AS "sampleId",
A.COLLECT_STOP AS "collectStop",
B.CATEGORY AS "grading"
FROM
ORIGINAL.GARDS_SAMPLE_DATA A
INNER JOIN
RNAUTO.GARDS_ANALYSES B
ON A.SAMPLE_ID = B.SAMPLE_ID
WHERE
A.STATION_ID = #{station}
AND A.COLLECT_STOP BETWEEN TO_DATE(#{startTime}, 'YYYY-MM-DD hh24:mi:ss')
AND TO_DATE(#{endTime}, 'YYYY-MM-DD hh24:mi:ss')
SELECT A.SAMPLE_ID AS "sampleId",
A.COLLECT_STOP AS "collectStop",
B.CATEGORY AS "grading"
FROM ORIGINAL.GARDS_SAMPLE_DATA A
INNER JOIN
RNAUTO.GARDS_ANALYSES B
ON A.SAMPLE_ID = B.SAMPLE_ID
WHERE A.STATION_ID = #{station}
AND A.COLLECT_STOP BETWEEN TO_DATE(#{startTime}, 'YYYY-MM-DD hh24:mi:ss')
AND TO_DATE(#{endTime}, 'YYYY-MM-DD hh24:mi:ss')
</select>
<!-- RnMan: get the set of nuclides identified in the samples -->
<select id="getRnManIdentifiedNuclides" resultType="org.jeecg.entity.NuclideActConcIntvl">
SELECT
t1.SAMPLE_ID AS sampleId,
t1.COLLECT_STOP AS collectStop,
t1.SAMPLE_TYPE AS sampleType,
t1.STATION_ID AS stationId,
t1.STATUS AS status,
CASE
WHEN t1.SAMPLE_TYPE = 'P' THEN
t2.NUCLIDENAME
WHEN t1.SAMPLE_TYPE = 'B' THEN
t3.NUCLIDE_NAME
END AS NUCLIDE_NAME,
CASE
WHEN t1.SAMPLE_TYPE = 'P' THEN
TO_NUMBER(REGEXP_REPLACE(t2.CONCENTRATION, '[^0-9.Ee-]', ''))
WHEN t1.SAMPLE_TYPE = 'B' THEN
t3.CONC
END AS conc
FROM
ORIGINAL.GARDS_SAMPLE_DATA t1
LEFT JOIN RNMAN.GARDS_NUCL_IDED t2 ON t1.SAMPLE_TYPE = 'P'
AND t1.SAMPLE_ID = t2.SAMPLE_ID
LEFT JOIN RNMAN.GARDS_XE_RESULTS t3 ON t1.SAMPLE_TYPE = 'B'
AND t1.SAMPLE_ID = t3.SAMPLE_ID
WHERE
t1.STATION_ID = #{station}
AND t1.STATUS != 'F'
AND t1.COLLECT_STOP BETWEEN TO_DATE(#{startTime}, 'YYYY-MM-DD hh24:mi:ss')
AND TO_DATE(#{endTime}, 'YYYY-MM-DD hh24:mi:ss')
SELECT t1.SAMPLE_ID AS sampleId,
t1.COLLECT_STOP AS collectStop,
t1.SAMPLE_TYPE AS sampleType,
t1.STATION_ID AS stationId,
t1.STATUS AS status,
CASE
WHEN t1.SAMPLE_TYPE = 'P' THEN
t2.NUCLIDENAME
WHEN t1.SAMPLE_TYPE = 'B' THEN
t3.NUCLIDE_NAME
END AS NUCLIDE_NAME,
CASE
WHEN t1.SAMPLE_TYPE = 'P' THEN
TO_NUMBER(REGEXP_REPLACE(t2.CONCENTRATION, '[^0-9.Ee-]', ''))
WHEN t1.SAMPLE_TYPE = 'B' THEN
t3.CONC
END AS conc
FROM ORIGINAL.GARDS_SAMPLE_DATA t1
LEFT JOIN RNMAN.GARDS_NUCL_IDED t2 ON t1.SAMPLE_TYPE = 'P'
AND t1.SAMPLE_ID = t2.SAMPLE_ID
LEFT JOIN RNMAN.GARDS_XE_RESULTS t3 ON t1.SAMPLE_TYPE = 'B'
AND t1.SAMPLE_ID = t3.SAMPLE_ID
WHERE t1.STATION_ID = #{station}
AND t1.STATUS != 'F'
AND t1.COLLECT_STOP BETWEEN TO_DATE(#{startTime}, 'YYYY-MM-DD hh24:mi:ss')
AND TO_DATE(#{endTime}, 'YYYY-MM-DD hh24:mi:ss')
</select>
<!-- RnMan nuclide category time-series analysis -->
<select id="getRnManNuclideTimeSeriesAnalysis" resultType="org.jeecg.entity.SampleLevelData">
SELECT
A.SAMPLE_ID AS "sampleId",
A.COLLECT_STOP AS "collectStop",
B.CATEGORY AS "grading"
FROM
ORIGINAL.GARDS_SAMPLE_DATA A
INNER JOIN
RNMAN.GARDS_ANALYSES B
ON A.SAMPLE_ID = B.SAMPLE_ID
WHERE
A.STATION_ID = #{station}
AND A.COLLECT_STOP BETWEEN TO_DATE(#{startTime}, 'YYYY-MM-DD hh24:mi:ss')
AND TO_DATE(#{endTime}, 'YYYY-MM-DD hh24:mi:ss')
SELECT A.SAMPLE_ID AS "sampleId",
A.COLLECT_STOP AS "collectStop",
B.CATEGORY AS "grading"
FROM ORIGINAL.GARDS_SAMPLE_DATA A
INNER JOIN
RNMAN.GARDS_ANALYSES B
ON A.SAMPLE_ID = B.SAMPLE_ID
WHERE A.STATION_ID = #{station}
AND A.COLLECT_STOP BETWEEN TO_DATE(#{startTime}, 'YYYY-MM-DD hh24:mi:ss')
AND TO_DATE(#{endTime}, 'YYYY-MM-DD hh24:mi:ss')
</select>
<select id="selectByStationAndNuclide" resultType="org.jeecg.entity.GardsThresholdResultHis">
@@ -358,7 +333,8 @@
<foreach collection="nuclideNames" item="nuclideName" open="(" separator="," close=")">
#{nuclideName}
</foreach>
AND CALCULATION_TIME BETWEEN TO_DATE(#{ startTime }, 'YYYY-MM-DD HH24:MI:SS') AND TO_DATE(#{ endTime }, 'YYYY-MM-DD HH24:MI:SS')
ORDER BY CALCULATION_TIME DESC
</select>
@@ -388,147 +364,181 @@
</select>
<!-- Nuclide activity concentration comparison analysis -->
<select id="getRnAutoAnalyzeNuclideActivityConc" resultType="org.jeecg.entity.NuclideActConcIntvl">
<if test='sampleType == "P"'>
SELECT
t1.SAMPLE_ID AS sampleId,
t1.COLLECT_STOP AS collectStop,
t1.SAMPLE_TYPE AS sampleType,
t1.STATION_ID AS stationId,
t1.STATUS AS status,
TO_NUMBER(REGEXP_REPLACE(t2.MDC, '[^0-9.Ee-]', '')) AS MDC,
t2.NUCLIDENAME AS NUCLIDE_NAME,
TO_NUMBER(REGEXP_REPLACE(t2.CONCENTRATION, '[^0-9.Ee-]', '')) AS conc
FROM
ORIGINAL.GARDS_SAMPLE_DATA t1
LEFT JOIN RNAUTO.GARDS_NUCL_IDED t2 ON
t1.SAMPLE_ID = t2.SAMPLE_ID
<where>
t1.STATION_ID IN
<foreach item="stationId" collection="stationIds" open="(" close=")" separator=",">
#{stationId}
</foreach>
AND t1.SAMPLE_TYPE=#{sampleType}
AND t2.NUCLIDENAME = #{nuclideName}
AND t1.COLLECT_STOP BETWEEN TO_DATE(#{ startTime }, 'YYYY-MM-DD HH24:MI:SS')
AND TO_DATE(#{ endTime }, 'YYYY-MM-DD HH24:MI:SS')
</where>
</if>
<if test='sampleType == "B"'>
SELECT
t1.SAMPLE_ID AS sampleId,
t1.COLLECT_STOP AS collectStop,
t1.SAMPLE_TYPE AS sampleType,
t1.STATION_ID AS stationId,
t1.STATUS AS status,
t3.MDC AS MDC,
t3.NUCLIDE_NAME AS NUCLIDE_NAME,
t3.CONC AS conc
FROM
ORIGINAL.GARDS_SAMPLE_DATA t1
LEFT JOIN RNAUTO.GARDS_XE_RESULTS t3 ON
t1.SAMPLE_ID = t3.SAMPLE_ID
<where>
t1.STATION_ID IN
<foreach item="stationId" collection="stationIds" open="(" close=")" separator=",">
#{stationId}
</foreach>
AND t1.SAMPLE_TYPE=#{sampleType}
AND t3.NUCLIDE_NAME = #{nuclideName}
AND t1.COLLECT_STOP BETWEEN TO_DATE(#{ startTime }, 'YYYY-MM-DD HH24:MI:SS')
AND TO_DATE(#{ endTime }, 'YYYY-MM-DD HH24:MI:SS')
</where>
</if>
SELECT
t1.SAMPLE_ID AS sampleId,
t1.COLLECT_STOP AS collectStop,
t1.SAMPLE_TYPE AS sampleType,
t1.STATION_ID AS stationId,
t1.STATUS AS status,
CASE
WHEN t1.SAMPLE_TYPE = 'P' THEN
t2.NUCLIDENAME
WHEN t1.SAMPLE_TYPE = 'B' THEN
t3.NUCLIDE_NAME
END AS NUCLIDE_NAME,
CASE
WHEN t1.SAMPLE_TYPE = 'P' THEN
TO_NUMBER(REGEXP_REPLACE(t2.CONCENTRATION, '[^0-9.Ee-]', ''))
WHEN t1.SAMPLE_TYPE = 'B' THEN
t3.CONC
END AS conc
FROM
ORIGINAL.GARDS_SAMPLE_DATA t1
LEFT JOIN RNAUTO.GARDS_NUCL_IDED t2 ON t1.SAMPLE_TYPE = 'P'
AND t1.SAMPLE_ID = t2.SAMPLE_ID
LEFT JOIN RNAUTO.GARDS_XE_RESULTS t3 ON t1.SAMPLE_TYPE = 'B'
AND t1.SAMPLE_ID = t3.SAMPLE_ID
<where>
t1.STATION_ID IN
<foreach item="stationId" collection="stationIds" open="(" close=")" separator=",">
#{stationId}
</foreach>
AND t1.SAMPLE_TYPE=#{sampleType}
AND t1.COLLECT_STOP BETWEEN TO_DATE(#{ startTime }, 'YYYY-MM-DD HH24:MI:SS')
AND TO_DATE(#{ endTime }, 'YYYY-MM-DD HH24:MI:SS')
</where>
</select>
<select id="getRnManAnalyzeNuclideActivityConc" resultType="org.jeecg.entity.NuclideActConcIntvl">
SELECT
t1.SAMPLE_ID AS sampleId,
t1.COLLECT_STOP AS collectStop,
t1.SAMPLE_TYPE AS sampleType,
t1.STATION_ID AS stationId,
t1.STATUS AS status,
CASE
WHEN t1.SAMPLE_TYPE = 'P' THEN
t2.NUCLIDENAME
WHEN t1.SAMPLE_TYPE = 'B' THEN
t3.NUCLIDE_NAME
END AS NUCLIDE_NAME,
CASE
WHEN t1.SAMPLE_TYPE = 'P' THEN
TO_NUMBER(REGEXP_REPLACE(t2.CONCENTRATION, '[^0-9.Ee-]', ''))
WHEN t1.SAMPLE_TYPE = 'B' THEN
t3.CONC
END AS conc
FROM
ORIGINAL.GARDS_SAMPLE_DATA t1
LEFT JOIN RNMAN.GARDS_NUCL_IDED t2 ON t1.SAMPLE_TYPE = 'P'
AND t1.SAMPLE_ID = t2.SAMPLE_ID
LEFT JOIN RNMAN.GARDS_XE_RESULTS t3 ON t1.SAMPLE_TYPE = 'B'
AND t1.SAMPLE_ID = t3.SAMPLE_ID
<where>
t1.STATION_ID IN
<foreach item="stationId" collection="stationIds" open="(" close=")" separator=",">
#{stationId}
</foreach>
AND t1.SAMPLE_TYPE=#{sampleType}
AND t1.COLLECT_STOP BETWEEN TO_DATE(#{ startTime }, 'YYYY-MM-DD HH24:MI:SS')
AND TO_DATE(#{ endTime }, 'YYYY-MM-DD HH24:MI:SS')
</where>
<if test='sampleType == "P"'>
SELECT
t1.SAMPLE_ID AS sampleId,
t1.COLLECT_STOP AS collectStop,
t1.SAMPLE_TYPE AS sampleType,
t1.STATION_ID AS stationId,
t1.STATUS AS status,
TO_NUMBER(REGEXP_REPLACE(t2.MDC, '[^0-9.Ee-]', '')) AS MDC,
t2.NUCLIDENAME AS NUCLIDE_NAME,
TO_NUMBER(REGEXP_REPLACE(t2.CONCENTRATION, '[^0-9.Ee-]', '')) AS conc
FROM
ORIGINAL.GARDS_SAMPLE_DATA t1
LEFT JOIN RNMAN.GARDS_NUCL_IDED t2 ON
t1.SAMPLE_ID = t2.SAMPLE_ID
<where>
t1.STATION_ID IN
<foreach item="stationId" collection="stationIds" open="(" close=")" separator=",">
#{stationId}
</foreach>
AND t1.SAMPLE_TYPE=#{sampleType}
AND t2.NUCLIDENAME = #{nuclideName}
AND t1.COLLECT_STOP BETWEEN TO_DATE(#{ startTime }, 'YYYY-MM-DD HH24:MI:SS')
AND TO_DATE(#{ endTime }, 'YYYY-MM-DD HH24:MI:SS')
</where>
</if>
<if test='sampleType == "B"'>
SELECT
t1.SAMPLE_ID AS sampleId,
t1.COLLECT_STOP AS collectStop,
t1.SAMPLE_TYPE AS sampleType,
t1.STATION_ID AS stationId,
t1.STATUS AS status,
t3.MDC AS MDC,
t3.NUCLIDE_NAME AS NUCLIDE_NAME,
t3.CONC AS conc
FROM
ORIGINAL.GARDS_SAMPLE_DATA t1
LEFT JOIN RNMAN.GARDS_XE_RESULTS t3 ON
t1.SAMPLE_ID = t3.SAMPLE_ID
<where>
t1.STATION_ID IN
<foreach item="stationId" collection="stationIds" open="(" close=")" separator=",">
#{stationId}
</foreach>
AND t1.SAMPLE_TYPE=#{sampleType}
AND t3.NUCLIDE_NAME = #{nuclideName}
AND t1.COLLECT_STOP BETWEEN TO_DATE(#{ startTime }, 'YYYY-MM-DD HH24:MI:SS')
AND TO_DATE(#{ endTime }, 'YYYY-MM-DD HH24:MI:SS')
</where>
</if>
</select>
<!-- Sample monitoring results -->
<!-- First query the station information -->
<!-- Then use the station category information -->
<select id="getRnAutoSampleResult" resultType="org.jeecg.entity.StationInfoData">
SELECT
t1.SAMPLE_ID,
t1.COLLECT_STOP,
t1.STATION_ID,
c1.STATION_CODE,
c1.COUNTRY_CODE,
c1.TYPE,
c1.LON,
c1.LAT,
c1.STATUS,
c1.DESCRIPTION,
t2.CATEGORY
FROM
ORIGINAL.GARDS_SAMPLE_DATA t1
LEFT JOIN RNAUTO.GARDS_ANALYSES t2
ON t1.SAMPLE_ID = t2.SAMPLE_ID
LEFT JOIN CONFIGURATION.GARDS_STATIONS c1
ON t1.STATION_ID=c1.STATION_ID
WHERE
t1.SAMPLE_TYPE=#{sampleType}
AND t1.COLLECT_STOP BETWEEN TO_DATE(#{startTime}, 'YYYY-MM-DD hh24:mi:ss')
AND TO_DATE(#{endTime}, 'YYYY-MM-DD hh24:mi:ss')
ORDER BY
t1.COLLECT_STOP ASC
SELECT t1.SAMPLE_ID,
t1.COLLECT_STOP,
t1.STATION_ID,
c1.STATION_CODE,
c1.COUNTRY_CODE,
c1.TYPE,
c1.LON,
c1.LAT,
c1.STATUS,
c1.DESCRIPTION,
t2.CATEGORY
FROM ORIGINAL.GARDS_SAMPLE_DATA t1
LEFT JOIN RNAUTO.GARDS_ANALYSES t2
ON t1.SAMPLE_ID = t2.SAMPLE_ID
LEFT JOIN CONFIGURATION.GARDS_STATIONS c1
ON t1.STATION_ID = c1.STATION_ID
WHERE t1.SAMPLE_TYPE = #{sampleType}
AND t1.COLLECT_STOP BETWEEN TO_DATE(#{startTime}, 'YYYY-MM-DD hh24:mi:ss')
AND TO_DATE(#{endTime}, 'YYYY-MM-DD hh24:mi:ss')
ORDER BY t1.COLLECT_STOP ASC
</select>
<select id="getRnManSampleResult" resultType="org.jeecg.entity.StationInfoData">
SELECT
t1.SAMPLE_ID,
t1.COLLECT_STOP,
t1.STATION_ID,
c1.STATION_CODE,
c1.COUNTRY_CODE,
c1.TYPE,
c1.LON,
c1.LAT,
c1.STATUS,
c1.DESCRIPTION,
t2.CATEGORY
FROM
ORIGINAL.GARDS_SAMPLE_DATA t1
LEFT JOIN RNMAN.GARDS_ANALYSES t2
ON t1.SAMPLE_ID = t2.SAMPLE_ID
LEFT JOIN CONFIGURATION.GARDS_STATIONS c1
ON t1.STATION_ID=c1.STATION_ID
WHERE
t1.SAMPLE_TYPE=#{sampleType}
AND t1.COLLECT_STOP BETWEEN TO_DATE(#{startTime}, 'YYYY-MM-DD hh24:mi:ss')
AND TO_DATE(#{endTime}, 'YYYY-MM-DD hh24:mi:ss')
ORDER BY
t1.COLLECT_STOP ASC
SELECT t1.SAMPLE_ID,
t1.COLLECT_STOP,
t1.STATION_ID,
c1.STATION_CODE,
c1.COUNTRY_CODE,
c1.TYPE,
c1.LON,
c1.LAT,
c1.STATUS,
c1.DESCRIPTION,
t2.CATEGORY
FROM ORIGINAL.GARDS_SAMPLE_DATA t1
LEFT JOIN RNMAN.GARDS_ANALYSES t2
ON t1.SAMPLE_ID = t2.SAMPLE_ID
LEFT JOIN CONFIGURATION.GARDS_STATIONS c1
ON t1.STATION_ID = c1.STATION_ID
WHERE t1.SAMPLE_TYPE = #{sampleType}
AND t1.COLLECT_STOP BETWEEN TO_DATE(#{startTime}, 'YYYY-MM-DD hh24:mi:ss')
AND TO_DATE(#{endTime}, 'YYYY-MM-DD hh24:mi:ss')
ORDER BY t1.COLLECT_STOP ASC
</select>
<select id="findStationListByMenuName" resultType="org.jeecg.entity.GardsStations">
SELECT * FROM CONFIGURATION.GARDS_STATIONS
<where>
TYPE IN
<if test='systemType == "P"'>
('Manual', 'CINDER', 'RASA', 'LAB')
</if>
<if test='systemType == "B"'>
('SAUNA', 'ARIX-4', 'ARIX-2', 'SPALAX')
</if>
</where>
</select>
</mapper>

View File

@@ -2,6 +2,8 @@ package org.jeecg.service;
import com.baomidou.mybatisplus.extension.service.IService;
import org.jeecg.common.api.vo.Result;
import org.jeecg.entity.GardsStations;
import org.jeecg.entity.SysDefaultNuclide;
import org.jeecg.modules.base.entity.original.GardsSampleData;
import org.springframework.format.annotation.DateTimeFormat;
import org.springframework.web.bind.annotation.RequestParam;
@@ -24,4 +26,8 @@ public interface ISampleStatAnalysisService extends IService<GardsSampleData> {
Result getNuclideActivityConcAnalyze(String sampleType, Integer[] stationIds, String nuclideName, Integer dataSource, Date startDate, Date endDate);
List<GardsStations> findStationListByMenuName(String systemType);
List<SysDefaultNuclide> findNuclideList(String systemType);
}

View File

@@ -1,18 +1,20 @@
package org.jeecg.service.impl;
import com.baomidou.dynamic.datasource.annotation.DS;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.toolkit.StringUtils;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import org.apache.commons.collections.CollectionUtils;
import org.jeecg.common.api.vo.Result;
import org.jeecg.common.constant.CommonConstant;
import org.jeecg.common.util.DateUtils;
import org.jeecg.entity.GardsThresholdResultHis;
import org.jeecg.entity.*;
import org.jeecg.mapper.SysDefaultNuclideMapper;
import org.jeecg.modules.base.entity.original.GardsSampleData;
import org.jeecg.entity.NuclideActConcIntvl;
import org.jeecg.entity.SampleLevelData;
import org.jeecg.entity.StationInfoData;
import org.jeecg.mapper.GardsSampleStatAnalysisMapper;
import org.jeecg.service.ISampleStatAnalysisService;
import org.jeecg.util.DistributionAnalysisToolkit;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.time.ZoneId;
@@ -24,57 +26,59 @@ import java.util.stream.Collectors;
@Service
@DS("ora")
public class SampleStatAnalysisService extends ServiceImpl<GardsSampleStatAnalysisMapper, GardsSampleData> implements ISampleStatAnalysisService {
@Autowired
private SysDefaultNuclideMapper defaultNuclideMapper;
public Result getSampleMonitorResult(String sampleType, Integer dataSource, Date startDate, Date endDate) {
Result result = new Result();
//Declare the result map used for the response
Map<String, Object> resultMap = new HashMap<>();
List<StationInfoData> StationInfoDataList = new ArrayList<>();
try {
result.setCode(CommonConstant.SC_OK_200);
//Declare the result map used for the response
Map<String, Object> resultMap = new HashMap<>();
List<StationInfoData> StationInfoDataList = new ArrayList<>();
//region Local variables
if (StringUtils.isBlank(sampleType)) {
result.error500("SampleType Code cannot be null");
//region Local variables
if (StringUtils.isBlank(sampleType)) {
result.error500("SampleType Code cannot be null");
return result;
}
if (Objects.isNull(startDate)) {
result.error500("The start time cannot be empty");
return result;
}
String startTime = DateUtils.formatDate(startDate, "yyyy-MM-dd") + " 00:00:00";
if (Objects.isNull(endDate)) {
result.error500("The end time cannot be empty");
return result;
}
String endTime = DateUtils.formatDate(endDate, "yyyy-MM-dd") + " 23:59:59";
//endregion
switch (dataSource) {
case 1:
StationInfoDataList = this.baseMapper.getRnAutoSampleResult(sampleType, startTime, endTime);
break;
case 2:
StationInfoDataList = this.baseMapper.getRnManSampleResult(sampleType, startTime, endTime);
break;
}
//How many stations fall within the time period
Map<String, List<StationInfoData>> groupedByMonth = StationInfoDataList.stream()
.collect(Collectors.groupingBy(station ->
station.getCollectStop().toInstant()
.atZone(ZoneId.of("UTC"))
.format(DateTimeFormatter.ofPattern("yyyy-MM-dd"))
));
resultMap.put("SampleMonitorResultList", groupedByMonth);
result.setSuccess(true);
result.setResult(resultMap);
return result;
} catch (Exception e) {
result.error500("样品监测回放错误:" + e.getMessage());
return result;
}
if (Objects.isNull(startDate)) {
result.error500("The start time cannot be empty");
return result;
}
String startTime = DateUtils.formatDate(startDate, "yyyy-MM-dd") + " 00:00:00";
if (Objects.isNull(endDate)) {
result.error500("The end time cannot be empty");
return result;
}
String endTime = DateUtils.formatDate(endDate, "yyyy-MM-dd") + " 23:59:59";
//endregion
switch (dataSource) {
case 1:
StationInfoDataList = this.baseMapper.getRnAutoSampleResult(sampleType, startTime, endTime);
break;
case 2:
StationInfoDataList = this.baseMapper.getRnManSampleResult(sampleType, startTime, endTime);
break;
}
//How many stations fall within the time period
Map<String, List<StationInfoData>> groupedByMonth = StationInfoDataList.stream()
.collect(Collectors.groupingBy(station ->
station.getCollectStop().toInstant()
.atZone(ZoneId.of("UTC"))
.format(DateTimeFormatter.ofPattern("yyyy-MM-dd"))
));
resultMap.put("SampleMonitorResultList", groupedByMonth);
result.setSuccess(true);
result.setResult(resultMap);
return result;
}
@@ -96,55 +100,62 @@ public class SampleStatAnalysisService extends ServiceImpl<GardsSampleStatAnalys
String schemaName = dataSource == 1 ? "RNAUTO" : "RNMAN";
//region Local variables
Result result = new Result();
if (StringUtils.isBlank(stationCode)) {
result.error500("Station Code cannot be null");
try {
result.setCode(CommonConstant.SC_OK_200);
if (StringUtils.isBlank(stationCode)) {
result.error500("Station Code cannot be null");
return result;
}
if (Objects.isNull(startDate)) {
result.error500("The start time cannot be empty");
return result;
}
String startTime = DateUtils.formatDate(startDate, "yyyy-MM-dd") + " 00:00:00";
if (Objects.isNull(endDate)) {
result.error500("The end time cannot be empty");
return result;
}
String endTime = DateUtils.formatDate(endDate, "yyyy-MM-dd") + " 23:59:59";
//endregion
switch (dataSource) {
//RNAUTO
case 1:
//Get the set of nuclides identified in the samples
nuclideActConcIntvlList = this.baseMapper.getRnAutoIdentifiedNuclides(stationCode, startTime, endTime);
//Nuclide category time-series analysis
sampleLevelDataList = this.baseMapper.getRnAutoNuclideTimeSeriesAnalysis(stationCode, startTime, endTime);
break;
//RNMAN
case 2:
//Get the set of nuclides identified in the samples
nuclideActConcIntvlList = this.baseMapper.getRnManIdentifiedNuclides(stationCode, startTime, endTime);
//Nuclide category time-series analysis
sampleLevelDataList = this.baseMapper.getRnManNuclideTimeSeriesAnalysis(stationCode, startTime, endTime);
break;
}
//key = nuclide name, value = nuclides identified in the samples
Map<String, List<NuclideActConcIntvl>> groupedByNuclideName = nuclideActConcIntvlList.stream()
.filter(p -> p.getNuclideName() != null)
.collect(Collectors.groupingBy(NuclideActConcIntvl::getNuclideName));
//Query levels getSample
List<String> nuclideNames = new ArrayList<>(groupedByNuclideName.keySet());
if (CollectionUtils.isNotEmpty(nuclideNames)) {
//Look up thresholds by station ID and nuclide names
thresholdResultHisDataList = this.baseMapper.selectByStationAndNuclides(schemaName, Integer.valueOf(stationCode), nuclideNames, startTime, endTime);
}
resultMap.put("nuclideActConcIntvlList", groupedByNuclideName);
resultMap.put("sampleLevelDataList", sampleLevelDataList);
resultMap.put("thresholdResultHisDataList", thresholdResultHisDataList);
result.setSuccess(true);
result.setResult(resultMap);
return result;
} catch (Exception e) {
result.error500("样品统计分析错误:" + e.getMessage());
return result;
}
if (Objects.isNull(startDate)) {
result.error500("The start time cannot be empty");
return result;
}
String startTime = DateUtils.formatDate(startDate, "yyyy-MM-dd") + " 00:00:00";
if (Objects.isNull(endDate)) {
result.error500("The end time cannot be empty");
return result;
}
String endTime = DateUtils.formatDate(endDate, "yyyy-MM-dd") + " 23:59:59";
//endregion
switch (dataSource) {
//RNAUTO
case 1:
//Get the set of nuclides identified in the samples
nuclideActConcIntvlList = this.baseMapper.getRnAutoIdentifiedNuclides(stationCode, startTime, endTime);
//Nuclide category time-series analysis
sampleLevelDataList = this.baseMapper.getRnAutoNuclideTimeSeriesAnalysis(stationCode, startTime, endTime);
break;
//RNMAN
case 2:
//Get the set of nuclides identified in the samples
nuclideActConcIntvlList = this.baseMapper.getRnManIdentifiedNuclides(stationCode, startTime, endTime);
//Nuclide category time-series analysis
sampleLevelDataList = this.baseMapper.getRnManNuclideTimeSeriesAnalysis(stationCode, startTime, endTime);
break;
}
//key = nuclide name, value = nuclides identified in the samples
Map<String, List<NuclideActConcIntvl>> groupedByNuclideName = nuclideActConcIntvlList.stream()
.filter(p->p.getNuclideName()!=null)
.collect(Collectors.groupingBy(NuclideActConcIntvl::getNuclideName));
//Query levels getSample
List<String> nuclideNames = new ArrayList<>(groupedByNuclideName.keySet());
//Look up thresholds by station ID and nuclide names
thresholdResultHisDataList = this.baseMapper.selectByStationAndNuclides(schemaName, Integer.valueOf(stationCode), nuclideNames, startTime, endTime);
resultMap.put("nuclideActConcIntvlList", groupedByNuclideName);
resultMap.put("sampleLevelDataList", sampleLevelDataList);
resultMap.put("thresholdResultHisDataList", thresholdResultHisDataList);
result.setSuccess(true);
result.setResult(resultMap);
return result;
}
/**
@@ -160,42 +171,46 @@ public class SampleStatAnalysisService extends ServiceImpl<GardsSampleStatAnalys
List<SampleLevelData> sampleDataList = new ArrayList<>();
//region Local variables
Result result = new Result();
if (StringUtils.isBlank(sampleType)) {
result.error500("SampleType Code cannot be null");
try {
result.setCode(CommonConstant.SC_OK_200);
if (StringUtils.isBlank(sampleType)) {
result.error500("SampleType Code cannot be null");
return result;
}
if (StringUtils.isBlank(station)) {
result.error500("Station Code cannot be null");
return result;
}
if (Objects.isNull(startDate)) {
result.error500("The start time cannot be empty");
return result;
}
String startTime = DateUtils.formatDate(startDate, "yyyy-MM-dd") + " 00:00:00";
if (Objects.isNull(endDate)) {
result.error500("The end time cannot be empty");
return result;
}
String endTime = DateUtils.formatDate(endDate, "yyyy-MM-dd") + " 23:59:59";
//endregion
switch (dataSource) {
case 1:
sampleDataList = this.baseMapper.getRnAutoSampleGradeAnalysis(sampleType, station, startTime, endTime);
break;
case 2:
sampleDataList = this.baseMapper.getRnManSampleGradeAnalysis(sampleType, station, startTime, endTime);
break;
}
resultMap.put("sampleDataList", sampleDataList);
result.setSuccess(true);
result.setResult(resultMap);
return result;
} catch (Exception e) {
result.error500("样品统计分析错误:" + e.getMessage());
return result;
}
if (StringUtils.isBlank(station)) {
result.error500("Station Code cannot be null");
return result;
}
if (Objects.isNull(startDate)) {
result.error500("The start time cannot be empty");
return result;
}
String startTime = DateUtils.formatDate(startDate, "yyyy-MM-dd") + " 00:00:00";
if (Objects.isNull(endDate)) {
result.error500("The end time cannot be empty");
return result;
}
String endTime = DateUtils.formatDate(endDate, "yyyy-MM-dd") + " 23:59:59";
//endregion
switch (dataSource) {
case 1:
sampleDataList = this.baseMapper.getRnAutoSampleGradeAnalysis(sampleType, station, startTime, endTime);
break;
case 2:
sampleDataList = this.baseMapper.getRnManSampleGradeAnalysis(sampleType, station, startTime, endTime);
break;
}
resultMap.put("sampleDataList", sampleDataList);
result.setSuccess(true);
result.setResult(resultMap);
return result;
}
/*** Sample activity concentration interval frequency analysis
@@ -215,85 +230,92 @@ public class SampleStatAnalysisService extends ServiceImpl<GardsSampleStatAnalys
List<NuclideActConcIntvl> nuclideActConcIntvls = new ArrayList<>();
//region Local variables
Result result = new Result();
if (StringUtils.isBlank(sampleType)) {
result.error500("SampleType Code cannot be null");
try {
result.setCode(CommonConstant.SC_OK_200);
if (StringUtils.isBlank(sampleType)) {
result.error500("SampleType Code cannot be null");
return result;
}
if (StringUtils.isBlank(nuclideName)) {
result.error500("nuclideName Code cannot be null");
return result;
}
if (StringUtils.isBlank(station)) {
result.error500("Station Code cannot be null");
return result;
}
if (Objects.isNull(startDate)) {
result.error500("The start time cannot be empty");
return result;
}
String startTime = DateUtils.formatDate(startDate, "yyyy-MM-dd") + " 00:00:00";
if (Objects.isNull(endDate)) {
result.error500("The end time cannot be empty");
return result;
}
String endTime = DateUtils.formatDate(endDate, "yyyy-MM-dd") + " 23:59:59";
//endregion
//Query sample concentrations according to data source and sample type
switch (sampleType) {
case "P":
switch (dataSource) {
//RNAUTO
case 1:
nuclideActConcIntvls = this.baseMapper.getRnautoPNuclideActConcIntvl(sampleType, station, nuclideName, startTime, endTime);
break;
//RNMAN
case 2:
nuclideActConcIntvls = this.baseMapper.getRnmanPNuclideActConcIntvl(sampleType, station, nuclideName, startTime, endTime);
break;
}
break;
case "B":
switch (dataSource) {
case 1:
nuclideActConcIntvls = this.baseMapper.getRnautoNuclideActConcIntvl(sampleType, station, nuclideName, startTime, endTime);
break;
case 2:
nuclideActConcIntvls = this.baseMapper.getRnmanNuclideActConcIntvl(sampleType, station, nuclideName, startTime, endTime);
break;
}
break;
}
//Count the occurrences of each concentration
//Collect the concentration values
List<Double> data = DistributionAnalysisToolkit.convertConcToDoubleList(nuclideActConcIntvls);
// Interval parameters
double start = 0; // interval start value
double step = 200; // interval step width
// 1. Interval statistics
List<DistributionAnalysisToolkit.IntervalStat> stats = DistributionAnalysisToolkit.calculateIntervalStats(nuclideActConcIntvls, start, step);
// 3. Cumulative distribution function
List<DistributionAnalysisToolkit.CDFPoint> cdfPoints = DistributionAnalysisToolkit.calculateCDF(data);
// 4. Kernel density estimation
List<DistributionAnalysisToolkit.KDEPoint> kdePoints = DistributionAnalysisToolkit.autoKDE(data, DistributionAnalysisToolkit.GAUSSIAN_KERNEL);
//Cumulative sum of all concentrations
List<Double> cumulative = DistributionAnalysisToolkit.cumulativeSum(data);
//The 95% cumulative line
double percentile95 = DistributionAnalysisToolkit.calculate95thPercentile(data);
resultMap.put("stats", stats);
resultMap.put("cdfPoints", cdfPoints);
resultMap.put("kdePoints", kdePoints);
resultMap.put("cumulative", cumulative);
resultMap.put("percentile95", percentile95);
result.setSuccess(true);
result.setResult(resultMap);
return result;
} catch (Exception e) {
result.error500("样品活度浓度区间频率分析错误:" + e.getMessage());
return result;
}
if (StringUtils.isBlank(nuclideName)) {
result.error500("nuclideName Code cannot be null");
return result;
}
if (StringUtils.isBlank(station)) {
result.error500("Station Code cannot be null");
return result;
}
if (Objects.isNull(startDate)) {
result.error500("The start time cannot be empty");
return result;
}
String startTime = DateUtils.formatDate(startDate, "yyyy-MM-dd") + " 00:00:00";
if (Objects.isNull(endDate)) {
result.error500("The end time cannot be empty");
return result;
}
String endTime = DateUtils.formatDate(endDate, "yyyy-MM-dd") + " 23:59:59";
//endregion
//Query sample concentrations according to data source and sample type
switch (sampleType) {
case "P":
switch (dataSource) {
//RNAUTO
case 1:
nuclideActConcIntvls = this.baseMapper.getRnautoPNuclideActConcIntvl(sampleType, station, nuclideName, startTime, endTime);
break;
//RNMAN
case 2:
nuclideActConcIntvls = this.baseMapper.getRnmanPNuclideActConcIntvl(sampleType, station, nuclideName, startTime, endTime);
break;
}
break;
case "B":
switch (dataSource) {
case 1:
nuclideActConcIntvls = this.baseMapper.getRnautoNuclideActConcIntvl(sampleType, station, nuclideName, startTime, endTime);
break;
case 2:
nuclideActConcIntvls = this.baseMapper.getRnmanNuclideActConcIntvl(sampleType, station, nuclideName, startTime, endTime);
break;
}
break;
}
//Count the occurrences of each concentration
//Collect the concentration values
List<Double> data = DistributionAnalysisToolkit.convertConcToDoubleList(nuclideActConcIntvls);
// Interval parameters
double start = 0; // interval start value
double step = 200; // interval step width
// 1. Interval statistics
List<DistributionAnalysisToolkit.IntervalStat> stats = DistributionAnalysisToolkit.calculateIntervalStats(data, start, step);
// 3. Cumulative distribution function
List<DistributionAnalysisToolkit.CDFPoint> cdfPoints = DistributionAnalysisToolkit.calculateCDF(data);
// 4. Kernel density estimation
List<DistributionAnalysisToolkit.KDEPoint> kdePoints = DistributionAnalysisToolkit.autoKDE(data, DistributionAnalysisToolkit.GAUSSIAN_KERNEL);
//Cumulative sum of all concentrations
List<Double> cumulative = DistributionAnalysisToolkit.cumulativeSum(data);
//The 95% cumulative line
double percentile95 = DistributionAnalysisToolkit.calculate95thPercentile(data);
resultMap.put("stats", stats);
resultMap.put("cdfPoints", cdfPoints);
resultMap.put("kdePoints", kdePoints);
resultMap.put("cumulative", cumulative);
resultMap.put("percentile95", percentile95);
result.setSuccess(true);
result.setResult(resultMap);
return result;
}
/*** Nuclide activity concentration time-series analysis
@@ -311,72 +333,80 @@ public class SampleStatAnalysisService extends ServiceImpl<GardsSampleStatAnalys
Result result = new Result();
//Declare the result map used for the response
Map<String, Object> resultMap = new HashMap<>();
//region Local variables
try {
result.setCode(CommonConstant.SC_OK_200);
if (StringUtils.isBlank(sampleType)) {
result.error500("SampleType Code cannot be null");
return result;
}
if (StringUtils.isBlank(nuclideName)) {
result.error500("nuclideName Code cannot be null");
return result;
}
if (StringUtils.isBlank(station)) {
result.error500("Station Code cannot be null");
return result;
}
if (Objects.isNull(startDate)) {
result.error500("The start time cannot be empty");
return result;
}
String startTime = DateUtils.formatDate(startDate, "yyyy-MM-dd") + " 00:00:00";
if (Objects.isNull(endDate)) {
result.error500("The end time cannot be empty");
return result;
}
String endTime = DateUtils.formatDate(endDate, "yyyy-MM-dd") + " 23:59:59";
//endregion
//Get the sample threshold levels and thresholds
List<SampleLevelData> sampleDatas = new ArrayList<>();
//Nuclide thresholds
List<GardsThresholdResultHis> thresholdResultHisList = new ArrayList<>();
List<NuclideActConcIntvl> nuclideActConcIntvls = new ArrayList<>();
nuclideActConcIntvls = switch (sampleType) {
case "P" -> {
sampleDatas = this.baseMapper.getRnAutoSampleLevel(station, startTime, endTime);
yield switch (dataSource) {
//RNAUTO
case 1 ->
this.baseMapper.getRnautoPNuclideActConcIntvl(sampleType, station, nuclideName, startTime, endTime);
//RNMAN
case 2 ->
this.baseMapper.getRnmanPNuclideActConcIntvl(sampleType, station, nuclideName, startTime, endTime);
default -> nuclideActConcIntvls;
};
//region Local variables
if (StringUtils.isBlank(sampleType)) {
result.error500("SampleType Code cannot be null");
return result;
}
case "B" -> {
sampleDatas = this.baseMapper.getRnManSampleLevel(station, startTime, endTime);
yield switch (dataSource) {
case 1 ->
this.baseMapper.getRnautoNuclideActConcIntvl(sampleType, station, nuclideName, startTime, endTime);
case 2 ->
this.baseMapper.getRnmanNuclideActConcIntvl(sampleType, station, nuclideName, startTime, endTime);
default -> nuclideActConcIntvls;
};
if (StringUtils.isBlank(nuclideName)) {
result.error500("nuclideName Code cannot be null");
return result;
}
default -> nuclideActConcIntvls;
};
String schemaName = dataSource == 1 ? "RNAUTO" : "RNMAN";
if (StringUtils.isBlank(station)) {
result.error500("Station Code cannot be null");
return result;
}
if (Objects.isNull(startDate)) {
result.error500("The start time cannot be empty");
return result;
}
String startTime = DateUtils.formatDate(startDate, "yyyy-MM-dd") + " 00:00:00";
if (Objects.isNull(endDate)) {
result.error500("The end time cannot be empty");
return result;
}
String endTime = DateUtils.formatDate(endDate, "yyyy-MM-dd") + " 23:59:59";
//endregion
//Get the sample threshold levels and thresholds
List<SampleLevelData> sampleDatas = new ArrayList<>();
//Nuclide thresholds
List<GardsThresholdResultHis> thresholdResultHisList = new ArrayList<>();
List<NuclideActConcIntvl> nuclideActConcIntvls = new ArrayList<>();
nuclideActConcIntvls = switch (sampleType) {
case "P" -> {
sampleDatas = this.baseMapper.getRnAutoSampleLevel(station, startTime, endTime);
yield switch (dataSource) {
//RNAUTO
case 1 ->
this.baseMapper.getRnautoPNuclideActConcIntvl(sampleType, station, nuclideName, startTime, endTime);
//RNMAN
case 2 ->
this.baseMapper.getRnmanPNuclideActConcIntvl(sampleType, station, nuclideName, startTime, endTime);
default -> nuclideActConcIntvls;
};
}
case "B" -> {
sampleDatas = this.baseMapper.getRnManSampleLevel(station, startTime, endTime);
yield switch (dataSource) {
case 1 ->
this.baseMapper.getRnmanPNuclideActConcIntvl(sampleType, station, nuclideName, startTime, endTime);
case 2 ->
this.baseMapper.getRnmanNuclideActConcIntvl(sampleType, station, nuclideName, startTime, endTime);
default -> nuclideActConcIntvls;
};
}
default -> nuclideActConcIntvls;
};
String schemaName = dataSource == 1 ? "RNAUTO" : "RNMAN";
thresholdResultHisList = this.baseMapper.selectByCondition(schemaName, Arrays.asList(Integer.valueOf(station))
, Arrays.asList(nuclideName), startTime, endTime);
resultMap.put("sampleDataList", sampleDatas);
resultMap.put("nuclideInfoList", nuclideActConcIntvls);
resultMap.put("thresholdResultHisList", thresholdResultHisList);
thresholdResultHisList = this.baseMapper.selectByCondition(schemaName, Arrays.asList(Integer.valueOf(station))
, Arrays.asList(nuclideName), startTime, endTime);
resultMap.put("sampleDataList", sampleDatas);
resultMap.put("nuclideInfoList", nuclideActConcIntvls);
resultMap.put("thresholdResultHisList", thresholdResultHisList);
result.setSuccess(true);
result.setResult(resultMap);
return result;
} catch (Exception e) {
result.error500("核素活度浓度时序分析错误:" + e.getMessage());
return result;
}
result.setSuccess(true);
result.setResult(resultMap);
return result;
}
/*** Comparative analysis of nuclide activity concentration
@ -390,43 +420,72 @@ public class SampleStatAnalysisService extends ServiceImpl<GardsSampleStatAnalys
* @return
*/
public Result getNuclideActivityConcAnalyze(String sampleType, Integer[] stationIds, String nuclideName, Integer dataSource, Date startDate, Date endDate) {
Result result = new Result();
Map<String, Object> resultMap = new HashMap<>();
List<NuclideActConcIntvl> nuclideActConcIntvls = new ArrayList<>();
//region Local variables
if (Objects.isNull(startDate)) {
result.error500("The start time cannot be empty");
return result;
}
String startTime = DateUtils.formatDate(startDate, "yyyy-MM-dd") + " 00:00:00";
if (Objects.isNull(endDate)) {
result.error500("The end time cannot be empty");
return result;
}
String endTime = DateUtils.formatDate(endDate, "yyyy-MM-dd") + " 23:59:59";
if (Objects.isNull(stationIds)) {
Result result = new Result();
try {
result.setCode(CommonConstant.SC_OK_200);
//region Local variables
if (Objects.isNull(startDate)) {
result.error500("The start time cannot be empty");
return result;
}
String startTime = DateUtils.formatDate(startDate, "yyyy-MM-dd") + " 00:00:00";
if (Objects.isNull(endDate)) {
result.error500("The end time cannot be empty");
return result;
}
String endTime = DateUtils.formatDate(endDate, "yyyy-MM-dd") + " 23:59:59";
if (Objects.isNull(stationIds)) {
result.setSuccess(true);
result.setResult(Collections.emptyList());
return result;
}
//endregion
switch (dataSource) {
case 1:
nuclideActConcIntvls = this.baseMapper.getRnAutoAnalyzeNuclideActivityConc(sampleType, nuclideName, stationIds, startTime, endTime);
break;
case 2:
nuclideActConcIntvls = this.baseMapper.getRnManAnalyzeNuclideActivityConc(sampleType, nuclideName, stationIds, startTime, endTime);
break;
}
resultMap.put("nuclideInfoList", nuclideActConcIntvls);
result.setSuccess(true);
result.setResult(Collections.emptyList());
result.setResult(resultMap);
return result;
} catch (Exception e) {
result.error500("核素活度浓度对比分析错误:" + e.getMessage());
return result;
}
//endregion
switch (dataSource) {
case 1:
nuclideActConcIntvls = this.baseMapper.getRnAutoAnalyzeNuclideActivityConc(sampleType, nuclideName, stationIds, startTime, endTime);
break;
case 2:
nuclideActConcIntvls = this.baseMapper.getRnManAnalyzeNuclideActivityConc(sampleType, nuclideName, stationIds, startTime, endTime);
break;
}
resultMap.put("nuclideInfoList", nuclideActConcIntvls);
result.setSuccess(true);
result.setResult(resultMap);
return result;
}
@Override
public List<GardsStations> findStationListByMenuName(String systemType) {
//Fetch station information
return this.baseMapper.findStationListByMenuName(systemType);
}
@Override
@DS("master")
public List<SysDefaultNuclide> findNuclideList(String systemType) {
LambdaQueryWrapper<SysDefaultNuclide> queryWrapper = new LambdaQueryWrapper<>();
queryWrapper.eq(SysDefaultNuclide::getUseType, 4);
if (systemType.equals("B")) {
queryWrapper.eq(SysDefaultNuclide::getNuclideType, SystemType.BETA.getType());
} else if (systemType.equals("G")) {
queryWrapper.eq(SysDefaultNuclide::getNuclideType, SystemType.GAMMA.getType());
} else if (systemType.equals("P")) {
queryWrapper.eq(SysDefaultNuclide::getNuclideType, SystemType.PARTICULATE.getType());
}
return defaultNuclideMapper.selectList(queryWrapper);
}
}

View File

@ -1,5 +1,6 @@
package org.jeecg.util;
import lombok.Data;
import org.jeecg.entity.NuclideActConcIntvl;
import java.util.*;
@ -9,20 +10,24 @@ public class DistributionAnalysisToolkit {
/**
* Interval statistics result
*/
@Data
public static class IntervalStat {
private final String interval;
private final int count;
private final List<Double> values;
public IntervalStat(String interval, int count, List<Double> values) {
private final Map<Integer, Integer> levelDistribution;
public IntervalStat(String interval, List<NuclideActConcIntvl> nuclideData) {
values = new ArrayList<>();
levelDistribution = new TreeMap<>();
this.interval = interval;
this.count = count;
this.values = values;
this.count = nuclideData.size();
for (NuclideActConcIntvl nuclide : nuclideData) {
//Collect the concentration value
values.add(nuclide.getConc());
//Tally the category-level distribution
levelDistribution.merge(nuclide.getCategory(),1,Integer::sum);
}
}
public String getInterval() { return interval; }
public int getCount() { return count; }
public List<Double> getValues() { return values; }
}
/**
@ -59,22 +64,61 @@ public class DistributionAnalysisToolkit {
/**
* Interval statistics over the data
*
* @param data raw data
* @param nuclideData raw nuclide data
* @param start starting value
* @param step interval width
* @return list of interval statistics
*/
public static List<IntervalStat> calculateIntervalStats(List<Double> data, double start, double step) {
if (data == null || data.isEmpty()) {
//region
// public static List<IntervalStat> calculateIntervalStats(List<Double> data, double start, double step) {
// if (data == null || data.isEmpty()) {
// throw new IllegalArgumentException("数据不能为空");
// }
//
// // 计算结束边界
// double max = Collections.max(data);
// double end = Math.ceil(max / step) * step + step;
//
// // 初始化区间映射
// Map<String, List<Double>> intervalMap = new TreeMap<>();
// for (double lower = start; lower < end; lower += step) {
// double upper = lower + step;
// String key = String.format("[%.1f, %.1f)", lower, upper);
// intervalMap.put(key, new ArrayList<>());
// }
//
// // 分配数据到区间
// for (double value : data) {
// double lower = Math.floor(value / step) * step;
// String key = String.format("[%.1f, %.1f)", lower, lower + step);
// intervalMap.get(key).add(value);
// }
//
// // 转换为统计结果对象
// List<IntervalStat> stats = new ArrayList<>();
// for (Map.Entry<String, List<Double>> entry : intervalMap.entrySet()) {
// stats.add(new IntervalStat(entry.getKey(), entry.getValue().size(), entry.getValue()));
// }
//
// return stats;
// }
//
//endregion
public static List<IntervalStat> calculateIntervalStats(List<NuclideActConcIntvl> nuclideData, double start, double step) {
if (nuclideData == null || nuclideData.isEmpty()) {
throw new IllegalArgumentException("数据不能为空");
}
// Compute the end boundary
double max = Collections.max(data);
double end = Math.ceil(max / step) * step + step;
double maxConc = nuclideData.stream()
.mapToDouble(NuclideActConcIntvl::getConc)
.max()
.orElse(0.0);
double end = Math.ceil(maxConc / step) * step + step;
// Initialize the interval map
Map<String, List<Double>> intervalMap = new TreeMap<>();
Map<String, List<NuclideActConcIntvl>> intervalMap = new TreeMap<>();
for (double lower = start; lower < end; lower += step) {
double upper = lower + step;
String key = String.format("[%.1f, %.1f)", lower, upper);
@ -82,22 +126,27 @@ public class DistributionAnalysisToolkit {
}
// Assign data to intervals
for (double value : data) {
for (NuclideActConcIntvl nuclide : nuclideData) {
double value=nuclide.getConc();
double lower = Math.floor(value / step) * step;
String key = String.format("[%.1f, %.1f)", lower, lower + step);
intervalMap.get(key).add(value);
intervalMap.get(key).add(nuclide);
}
// Convert to statistic result objects
List<IntervalStat> stats = new ArrayList<>();
for (Map.Entry<String, List<Double>> entry : intervalMap.entrySet()) {
stats.add(new IntervalStat(entry.getKey(), entry.getValue().size(), entry.getValue()));
for (Map.Entry<String,List<NuclideActConcIntvl>> entry : intervalMap.entrySet()) {
stats.add(new IntervalStat(entry.getKey(), entry.getValue()));
}
return stats;
}
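//Usage sketch (hypothetical helper, not in the original toolkit; assumes all
//conc values are >= the start bound): buckets concentrations from 0 in steps
//of 0.5 and prints each interval's count and per-category level distribution.
public static void printIntervalSummary(List<NuclideActConcIntvl> nuclideData) {
    for (IntervalStat stat : calculateIntervalStats(nuclideData, 0.0, 0.5)) {
        System.out.println(stat.getInterval() + " count=" + stat.getCount()
                + " levels=" + stat.getLevelDistribution());
    }
}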
/**
* Compute the 95% cumulative line
*

View File

@ -83,8 +83,9 @@ public class SourceRebuildTaskController{
@AutoLog(value = "启动任务")
@Operation(summary = "启动任务")
@PutMapping("runTask")
public Result<?> runTask(@NotNull(message = "任务ID不能为空") Integer taskId){
sourceRebuildTaskService.runTask(taskId);
public Result<?> runTask(@RequestBody @Validated(value = UpdateGroup.class) SourceRebuildTask sourceRebuildTask){
sourceRebuildTaskService.update(sourceRebuildTask);
sourceRebuildTaskService.runTask(sourceRebuildTask.getId());
return Result.OK();
}
}

View File

@ -63,7 +63,8 @@ public class TaskMonitoringDataController {
@AutoLog(value = "导入源项重建监测数据")
@Operation(summary = "导入源项重建监测数据")
@PostMapping("importTaskMonitoringData")
public Result<?> importTaskMonitoringData(HttpServletRequest request, HttpServletResponse response){
public Result<?> importTaskMonitoringData(HttpServletRequest request){
Integer taskId = Integer.valueOf(request.getParameter("taskId"));
MultipartHttpServletRequest multipartRequest = (MultipartHttpServletRequest) request;
Map<String, MultipartFile> fileMap = multipartRequest.getFileMap();
for (Map.Entry<String, MultipartFile> entity : fileMap.entrySet()) {
@ -76,8 +77,7 @@ public class TaskMonitoringDataController {
try {
ExcelImportCheckUtil.check(file.getInputStream(), SourceRebuildMonitoringData.class, params);
List<SourceRebuildMonitoringData> list = ExcelImportUtil.importExcel(file.getInputStream(), SourceRebuildMonitoringData.class, params);
System.out.println(list);
monitoringDataService.cteate(list);
monitoringDataService.cteate(taskId,list);
return Result.ok("文件导入成功!");
} catch (Exception e) {
log.error(e.getMessage(), e);

View File

@ -12,8 +12,9 @@ public interface SourceRebuildMonitoringDataService extends IService<SourceRebui
/**
* Save monitoring data for a source-term reconstruction task
* @param monitoringDatas
* @param taskId
*/
void cteate(List<SourceRebuildMonitoringData> monitoringDatas);
void cteate(Integer taskId,List<SourceRebuildMonitoringData> monitoringDatas);
/**
* Delete monitoring data for a source-term reconstruction task

View File

@ -1,5 +1,6 @@
package org.jeecg.service.impl;
import cn.hutool.core.collection.CollUtil;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import lombok.RequiredArgsConstructor;
@ -19,12 +20,21 @@ public class SourceRebuildMonitoringDataServiceImpl extends ServiceImpl<SourceRe
/**
* Save monitoring data for a source-term reconstruction task
*
* @param monitoringDatas
* @param taskId
*/
@Transactional(rollbackFor = RuntimeException.class)
@Override
public void cteate(List<SourceRebuildMonitoringData> monitoringDatas) {
public void cteate(Integer taskId,List<SourceRebuildMonitoringData> monitoringDatas) {
//Delete the old monitoring data first, then save the new data
LambdaQueryWrapper<SourceRebuildMonitoringData> queryWrapper = new LambdaQueryWrapper<>();
queryWrapper.eq(SourceRebuildMonitoringData::getTaskId, taskId);
this.remove(queryWrapper);
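//Deleting by taskId before saveBatch keeps re-imports of the same task's spreadsheet idempotent.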
if(CollUtil.isNotEmpty(monitoringDatas)){
for(SourceRebuildMonitoringData monitoringData : monitoringDatas){
monitoringData.setTaskId(taskId);
}
}
this.saveBatch(monitoringDatas);
}

View File

@ -179,15 +179,15 @@ public class SourceRebuildTaskServiceImpl extends ServiceImpl<SourceRebuildTaskM
throw new RuntimeException("监测数据为空,请补充监测数据");
}
SourceRebuildTaskExec sourceRebuildTaskExec = new SourceRebuildTaskExec();
sourceRebuildTaskExec.init(
systemStorageProperties,
taskMonitoringDatas,
sourceRebuildTask,
sourceRebuildTaskLogService,
sourceRebuildParams,this);
sourceRebuildTaskExec.setName(sourceRebuildTask.getId()+"_任务执行线程");
sourceRebuildTaskExec.start();
// SourceRebuildTaskExec sourceRebuildTaskExec = new SourceRebuildTaskExec();
// sourceRebuildTaskExec.init(
// systemStorageProperties,
// taskMonitoringDatas,
// sourceRebuildTask,
// sourceRebuildTaskLogService,
// sourceRebuildParams,this);
// sourceRebuildTaskExec.setName(sourceRebuildTask.getId()+"_任务执行线程");
// sourceRebuildTaskExec.start();
}
/**

View File

@ -124,6 +124,16 @@ public class TaskResultDataServiceImpl implements TaskResultDataService {
if (mdcResult.isNumeric()) {
activityConcComparResult.setMdcValues(mdcResult.asDoubles());
}
String obsErrorRdsPath = task.getResultAddress()+"/obs_error.RDS";
REXP obsErrorResult = conn.eval("readRDS('" + obsErrorRdsPath + "')");
if (obsErrorResult.isNumeric()) {
activityConcComparResult.setObsErrorValues(obsErrorResult.asDoubles());
}
String modErrorRdsPath = task.getResultAddress()+"/mod_error.RDS";
REXP modErrorResult = conn.eval("readRDS('" + modErrorRdsPath + "')");
if (modErrorResult.isNumeric()) {
activityConcComparResult.setModErrorValues(modErrorResult.asDoubles());
}
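//readRDS() raises an R-level error (surfacing as an exception from conn.eval) when a result file is missing, so these reads assume the task produced its full output set.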
String stationsTxtPath = task.getResultAddress()+"/statnames.txt";
REXP stationsResult = conn.eval("readLines('" + stationsTxtPath + "')");
if (stationsResult.isString()) {

View File

@ -22,6 +22,17 @@ public class ActivityConcComparResult implements Serializable {
* MDC values
*/
private double[] mdcValues;
/**
* Observation uncertainty
*/
private double[] obsErrorValues;
/**
* Simulation uncertainty
*/
private double[] modErrorValues;
/**
* Station list
*/

View File

@ -1,54 +1,81 @@
//package org.jeecg.modules.monitor.controller;
//
//import lombok.RequiredArgsConstructor;
//import org.jeecg.common.api.vo.Result;
//import org.jeecg.common.aspect.annotation.AutoLog;
//import org.jeecg.modules.monitor.service.DatabaseMonitorService;
//import org.springframework.web.bind.annotation.GetMapping;
//import org.springframework.web.bind.annotation.RequestMapping;
//import org.springframework.web.bind.annotation.RestController;
//
//@RestController
//@RequestMapping("monitor")
//@RequiredArgsConstructor
//public class DatabaseMonitorController {
//
// private final DatabaseMonitorService databaseMonitorService;
//
// @AutoLog(value = "获取CPU信息")
// @GetMapping("getCpuInfo")
// public Result<?> getCpuInfo() {
// return Result.OK(databaseMonitorService.getCpuInfo());
// }
//
// @AutoLog(value = "获取内存监测数据")
// @GetMapping("getMemoryInfo")
// public Result<?> getMemoryInfo() {
// return Result.OK(databaseMonitorService.getMemoryInfo());
// }
//
// @AutoLog(value = "获取表空间信息")
// @GetMapping("getTableSpaceInfo")
// public Result<?> getNetworkInfo() {
// return Result.OK(databaseMonitorService.getTableSpaceInfo());
// }
//
// @AutoLog(value = "获取活跃连接数信息")
// @GetMapping("getActiveSessionInfo")
// public Result<?> getDiskInfo() {
// return Result.OK(databaseMonitorService.getActiveSessionInfo());
// }
//
// @AutoLog(value = "获取死锁信息")
// @GetMapping("getDeadlocksInfo")
// public Result<?> getDeadlocksInfo() {
// return Result.OK(databaseMonitorService.getDeadlocksInfo());
// }
//
// @AutoLog(value = "获取数据库占比信息")
// @GetMapping("getDatabaseProportionInfo")
// public Result<?> getDatabaseProportionInfo() {
// return Result.OK(databaseMonitorService.getDatabaseProportionInfo());
// }
//
//}
package org.jeecg.modules.monitor.controller;
import jakarta.validation.constraints.NotBlank;
import lombok.RequiredArgsConstructor;
import org.jeecg.common.api.vo.Result;
import org.jeecg.common.aspect.annotation.AutoLog;
import org.jeecg.modules.monitor.service.DatabaseMonitorService;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
@Validated
@RestController
@RequestMapping("monitor")
@RequiredArgsConstructor
public class DatabaseMonitorController {
private final DatabaseMonitorService databaseMonitorService;
@AutoLog(value = "查询当前时刻的CPU使用信息")
@GetMapping("getDBCpuInfo")
public Result<?> getDBCpuInfo() {
return Result.OK(databaseMonitorService.getCpuInfo());
}
@AutoLog(value = "查询过去指定时间范围内的CPU使用信息")
@GetMapping("getDBCpuInfoList")
public Result<?> getDBCpuInfoList(@NotBlank(message = "查询条件不能为空") String conditions) {
return Result.OK(databaseMonitorService.getCpuInfoList(conditions));
}
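//Example: GET /monitor/getDBCpuInfoList?conditions=Last%205%20minutes ("conditions" must match a PrometheusDBQueryTypeEnum condition label).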
@AutoLog(value = "查询当前时刻的内存使用信息")
@GetMapping("getDBMemoryInfo")
public Result<?> getDBMemoryInfo() {
return Result.OK(databaseMonitorService.getDBMemoryInfo());
}
@AutoLog(value = "查询过去指定时间范围内的内存使用信息")
@GetMapping("getDBMemoryInfoList")
public Result<?> getDBMemoryInfoList(@NotBlank(message = "查询条件不能为空") String conditions) {
return Result.OK(databaseMonitorService.getMemoryInfoList(conditions));
}
@AutoLog(value = "查询数据库表空间信息")
@GetMapping("getDBTableSpaceInfo")
public Result<?> getDBTableSpaceInfo() {
return Result.OK(databaseMonitorService.getTableSpaceInfo());
}
@AutoLog(value = "查询当前时刻的数据库活跃连接数使用信息")
@GetMapping("getDBActiveSessionInfo")
public Result<?> getDBActiveSessionInfo() {
return Result.OK(databaseMonitorService.getActiveSessionInfo());
}
@AutoLog(value = "查询过去指定时间范围内的数据库活跃连接数信息")
@GetMapping("getDBActiveSessionInfoList")
public Result<?> getDBActiveSessionInfoList(@NotBlank(message = "查询条件不能为空") String conditions) {
return Result.OK(databaseMonitorService.getActiveSessionInfoList(conditions));
}
@AutoLog(value = "查询当前时刻的数据库死锁信息")
@GetMapping("getDBDeadlocksInfo")
public Result<?> getDeadlocksInfo() {
return Result.OK(databaseMonitorService.getDeadlocksInfo());
}
@AutoLog(value = "查询过去指定时间范围内的数据库死锁信息")
@GetMapping("getDeadlocksInfoList")
public Result<?> getDeadlocksInfoList(@NotBlank(message = "查询条件不能为空") String conditions) {
return Result.OK(databaseMonitorService.getDeadlocksInfoList(conditions));
}
@AutoLog(value = "查询数据库占比信息")
@GetMapping("getDBProportionInfo")
public Result<?> getDatabaseProportionInfo() {
return Result.OK(databaseMonitorService.getDatabaseProportionInfo());
}
}

View File

@ -20,8 +20,8 @@ public class ServiceMonitorController {
@AutoLog(value = "查询当前时刻的CPU使用率")
@GetMapping("getCpuInfo")
public Result<?> getCpuInfo() {
return Result.OK(hostMonitorService.getCpuInfo());
public Result<?> getCpuInfo(@NotBlank(message = "查询条件不能为空") String conditions) {
return Result.OK(hostMonitorService.getCpuInfo(conditions));
}
@AutoLog(value = "查询过去指定时间范围内的CPU使用率")
@ -56,8 +56,8 @@ public class ServiceMonitorController {
@AutoLog(value = "获取网络带宽监测数据")
@GetMapping("getNetworkInfo")
public Result<?> getNetworkInfo() {
return Result.OK(hostMonitorService.getNetworkInfo());
public Result<?> getNetworkInfo(@NotBlank(message = "查询条件不能为空") String conditions) {
return Result.OK(hostMonitorService.getNetworkInfo(conditions));
}
@AutoLog(value = "获取网络带宽监测数据")

View File

@ -12,5 +12,5 @@ public interface DatabaseMonitorMapper extends BaseMapper {
"pg_database_size(datname) AS dataSize " +
"FROM pg_database " +
"WHERE datistemplate = false")
List<DatabaseProportionInfo> getActiveSessionInfo();
List<DatabaseProportionInfo> getDatabaseProportionInfo();
}

View File

@ -9,31 +9,54 @@ public interface DatabaseMonitorService {
/**
* Get CPU info
* @return
*/
Map<String,Object> getCpuInfo();
Map<String, Object> getCpuInfo();
/**
* Get CPU info list
*/
List<Map<String, Object>> getCpuInfoList(String conditions);
/**
* Get memory info
*/
Map<String,Object> getMemoryInfo();
Map<String, Object> getDBMemoryInfo();
/**
* Get memory info list
*/
List<Map<String, Object>> getMemoryInfoList(String conditions);
/**
* Get tablespace info
* @return
*/
Map<String,Object> getTableSpaceInfo();
List<Map<String, Object>> getTableSpaceInfo();
/**
* Get active session count info
* @return
*/
Map<String,Object> getActiveSessionInfo();
Map<String, Object> getActiveSessionInfo();
/**
* Get active session count info list
* @return
*/
List<Map<String,Object>> getActiveSessionInfoList(String conditions);
/**
* Get deadlock info
* @return
*/
Map<String,Object> getDeadlocksInfo();
Map<String, Object> getDeadlocksInfo();
/**
* Get deadlock info list
* @return
*/
List<Map<String,Object>> getDeadlocksInfoList(String conditions);
/**
* Get database size proportion info

View File

@ -9,7 +9,7 @@ public interface HostMonitorService {
/**
* Get CPU info
*/
Map<String,Object> getCpuInfo();
Map<String,Object> getCpuInfo(String conditions);
/**
* Get CPU info list
@ -40,7 +40,7 @@ public interface HostMonitorService {
/**
* Get network info
*/
Map<String,Object> getNetworkInfo();
Map<String,Object> getNetworkInfo(String conditions);
/**
* Get network info list

View File

@ -1,33 +1,200 @@
package org.jeecg.modules.monitor.service.impl;
import cn.hutool.core.collection.CollUtil;
import cn.hutool.core.date.DateUtil;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.jeecg.common.constant.enums.PrometheusDBQueryTypeEnum;
import org.jeecg.common.constant.enums.PrometheusHostQueryTypeEnum;
import org.jeecg.common.properties.PrometheusServerProperties;
import org.jeecg.modules.monitor.mapper.DatabaseMonitorMapper;
import org.jeecg.modules.monitor.service.DatabaseMonitorService;
import org.jeecg.modules.monitor.vo.DatabaseProportionInfo;
import org.jeecg.modules.monitor.vo.PrometheusResponse;
import org.springframework.stereotype.Service;
import java.util.List;
import java.util.Map;
import org.springframework.web.reactive.function.client.WebClient;
import org.springframework.web.util.UriComponentsBuilder;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.net.URI;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;
import java.time.Instant;
import java.util.*;
@Slf4j
@Service
@RequiredArgsConstructor
public class DatabaseMonitorServiceImpl implements DatabaseMonitorService {
private final WebClient webClient;
private final DatabaseMonitorMapper databaseMonitorMapper;
private final PrometheusServerProperties serverProperties;
/**
* Get CPU info
*
* @return
*/
@Override
public Map<String, Object> getCpuInfo() {
return Map.of();
Map<String,Object> result = new HashMap<>();
try {
//Prometheus server address
String url = serverProperties.getServerUrl();
//Target host instance: the node-exporter address
String instance = serverProperties.getDbInstance();
//Query CPU usage time
String cpuQuery = "avg(rate(process_cpu_seconds_total{release=\"\", instance=\""+instance+"\"}[5m]) * 1000)";
PrometheusResponse response = webClient.get()
.uri(buildUri(url,cpuQuery))
.retrieve()
.bodyToMono(PrometheusResponse.class)
.block();
if(Objects.nonNull(response) &&
Objects.nonNull(response.getData()) &&
CollUtil.isNotEmpty(response.getData().getResult())
) {
PrometheusResponse.Result cpuInfo = response.getData().getResult().get(0);
if(CollUtil.isNotEmpty(cpuInfo.getValue())) {
Date date = new Date(cpuInfo.getValue().get(0).longValue()*1000);
Double useRate = BigDecimal.valueOf(cpuInfo.getValue().get(1)).setScale(2, RoundingMode.HALF_UP).doubleValue();
result.put("date", DateUtil.format(date, "MM/dd HH:mm:ss"));
result.put("cpuTime", useRate);
}
}
}catch (Exception e){
log.error("获取数据库CPU使用时间信息错误请检查Prometheus服务是否正常启动或Java请求参数是否正确,详细堆栈错误为:{}",e.getMessage());
}
return result;
}
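//The instant query returns JSON shaped like {"data":{"result":[{"value":[<unix seconds>, "<sample>"]}]}},
//so value.get(0) above is the sample timestamp and value.get(1) the measured value.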
/**
* Get CPU info list
*/
@Override
public List<Map<String, Object>> getCpuInfoList(String conditions) {
List<Map<String, Object>> result = new ArrayList<>();
try {
//Prometheus server address
String url = serverProperties.getServerUrl();
//Target host instance: the node-exporter address
String instance = serverProperties.getDbInstance();
//Query database CPU usage
String cpuInfoQuery = "avg(rate(process_cpu_seconds_total{release=\"\", instance=\""+instance+"\"}[5m]) * 1000)";
PrometheusDBQueryTypeEnum queryTypeEnum = PrometheusDBQueryTypeEnum.getQueryTypeEnum(conditions);
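//getQueryTypeEnum returns null for an unknown condition label; the resulting NullPointerException is caught and logged by the catch block below.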
long end = Instant.now().getEpochSecond();
long start = end - queryTypeEnum.getLastSecond();
String step = queryTypeEnum.getStep();
PrometheusResponse response = webClient.get()
.uri(buildUri(url,cpuInfoQuery,start,end,step))
.retrieve()
.bodyToMono(PrometheusResponse.class)
.block();
if(Objects.nonNull(response) &&
Objects.nonNull(response.getData()) &&
CollUtil.isNotEmpty(response.getData().getResult())
) {
PrometheusResponse.Result cpuInfoSeries = response.getData().getResult().get(0);
if(CollUtil.isNotEmpty(cpuInfoSeries.getValues())) {
List<List<Double>> pointDatas = cpuInfoSeries.getValues();
for(List<Double> pointData : pointDatas) {
Map<String,Object> pointDataMap = new HashMap<>();
Date date = new Date(pointData.get(0).longValue()*1000);
Double cpuTime = BigDecimal.valueOf(pointData.get(1)).setScale(2, RoundingMode.HALF_UP).doubleValue();
pointDataMap.put("date", DateUtil.format(date, "MM/dd HH:mm:ss"));
pointDataMap.put("cpuTime", cpuTime);
result.add(pointDataMap);
}
}
}
}catch (Exception e){
log.error("获取数据库CPU使用时间信息错误请检查Prometheus服务是否正常启动或Java请求参数是否正确,详细堆栈错误为:{}",e.getMessage());
}
return result;
}
/**
* Get memory info
*/
@Override
public Map<String, Object> getMemoryInfo() {
return Map.of();
public Map<String, Object> getDBMemoryInfo() {
Map<String,Object> result = new HashMap<>();
try {
//Prometheus server address
String url = serverProperties.getServerUrl();
//Target host instance: the node-exporter address
String instance = serverProperties.getDbInstance();
//Query memory utilization
String memQuery = "avg(rate(process_resident_memory_bytes{release=\"\", instance=\""+instance+"\"}[5m]))";
PrometheusResponse response = webClient.get()
.uri(buildUri(url,memQuery))
.retrieve()
.bodyToMono(PrometheusResponse.class)
.block();
if(Objects.nonNull(response) &&
Objects.nonNull(response.getData()) &&
CollUtil.isNotEmpty(response.getData().getResult())
) {
PrometheusResponse.Result memInfo = response.getData().getResult().get(0);
if(CollUtil.isNotEmpty(memInfo.getValue())) {
Date date = new Date(memInfo.getValue().get(0).longValue()*1000);
Double useRate = BigDecimal.valueOf(memInfo.getValue().get(1)).setScale(2, RoundingMode.HALF_UP).doubleValue();
result.put("date", DateUtil.format(date, "MM/dd HH:mm:ss"));
result.put("memUsage", useRate);
}
}
}catch (Exception e){
log.error("获取数据库内存使用情况信息错误请检查Prometheus服务是否正常启动或Java请求参数是否正确,详细堆栈错误为:{}",e.getMessage());
}
return result;
}
/**
* Get memory info list
*/
@Override
public List<Map<String, Object>> getMemoryInfoList(String conditions) {
List<Map<String, Object>> result = new ArrayList<>();
try {
//Prometheus server address
String url = serverProperties.getServerUrl();
//Target host instance: the node-exporter address
String instance = serverProperties.getDbInstance();
//Query database memory usage
String memInfoQuery = "avg(rate(process_resident_memory_bytes{release=\"\", instance=\""+instance+"\"}[5m]))";
PrometheusDBQueryTypeEnum queryTypeEnum = PrometheusDBQueryTypeEnum.getQueryTypeEnum(conditions);
long end = Instant.now().getEpochSecond();
long start = end - queryTypeEnum.getLastSecond();
String step = queryTypeEnum.getStep();
PrometheusResponse response = webClient.get()
.uri(buildUri(url,memInfoQuery,start,end,step))
.retrieve()
.bodyToMono(PrometheusResponse.class)
.block();
if(Objects.nonNull(response) &&
Objects.nonNull(response.getData()) &&
CollUtil.isNotEmpty(response.getData().getResult())
) {
PrometheusResponse.Result memInfoSeries = response.getData().getResult().get(0);
if(CollUtil.isNotEmpty(memInfoSeries.getValues())) {
List<List<Double>> pointDatas = memInfoSeries.getValues();
for(List<Double> pointData : pointDatas) {
Map<String,Object> pointDataMap = new HashMap<>();
Date date = new Date(pointData.get(0).longValue()*1000);
Double memUsage = BigDecimal.valueOf(pointData.get(1)).setScale(2, RoundingMode.HALF_UP).doubleValue();
pointDataMap.put("date", DateUtil.format(date, "MM/dd HH:mm:ss"));
pointDataMap.put("memUsage", memUsage);
result.add(pointDataMap);
}
}
}
}catch (Exception e){
log.error("获取数据库内存使用情况信息错误请检查Prometheus服务是否正常启动或Java请求参数是否正确,详细堆栈错误为:{}",e.getMessage());
}
return result;
}
/**
@ -36,8 +203,31 @@ public class DatabaseMonitorServiceImpl implements DatabaseMonitorService {
* @return
*/
@Override
public Map<String, Object> getTableSpaceInfo() {
return Map.of();
public List<Map<String, Object>> getTableSpaceInfo() {
List<Map<String, Object>> results = new ArrayList<>();
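//Queries tablespace sizes over plain JDBC; assumes the database name matches the admin user (typically "postgres"), and ApplicationName below is a placeholder.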
String url = "jdbc:postgresql://"+serverProperties.getPgHost()+":"+serverProperties.getPgDBPort()+"/"+serverProperties.getPgAdmin()+"?stringtype=unspecified&tcpKeepAlive=true&ApplicationName=YourAppName";
String user = serverProperties.getPgAdmin();
String password = serverProperties.getPgAdminPwd();
String sql = "" +
"SELECT " +
" spcname AS tablespace_name," +
" pg_size_pretty(pg_tablespace_size(spcname)) AS size " +
"FROM pg_tablespace " +
"ORDER BY pg_tablespace_size(spcname) DESC";
try (Connection conn = DriverManager.getConnection(url, user, password);
Statement stmt = conn.createStatement();
ResultSet rs = stmt.executeQuery(sql)) {
while (rs.next()) {
Map<String, Object> row = new HashMap<>();
row.put("tablespaceName", rs.getString("tablespace_name"));
row.put("size", rs.getString("size"));
results.add(row);
}
} catch (Exception e) {
log.error("Failed to query PostgreSQL tablespace info: {}", e.getMessage());
}
return results;
}
/**
@ -47,7 +237,85 @@ public class DatabaseMonitorServiceImpl implements DatabaseMonitorService {
*/
@Override
public Map<String, Object> getActiveSessionInfo() {
return Map.of();
Map<String,Object> result = new HashMap<>();
try {
//Prometheus server address
String url = serverProperties.getServerUrl();
//Target host instance: the node-exporter address
String instance = serverProperties.getDbInstance();
//Databases being monitored
String monitorDBNames = serverProperties.getMonitorDBNames();
//Query active session count
String activeSessionQuery = "pg_stat_activity_count{datname=~\"("+monitorDBNames+")\", instance=~\""+instance+"\", state=\"active\"} !=0";
PrometheusResponse response = webClient.get()
.uri(buildUri(url,activeSessionQuery))
.retrieve()
.bodyToMono(PrometheusResponse.class)
.block();
if(Objects.nonNull(response) &&
Objects.nonNull(response.getData()) &&
CollUtil.isNotEmpty(response.getData().getResult())
) {
PrometheusResponse.Result activeSessionInfo = response.getData().getResult().get(0);
if(CollUtil.isNotEmpty(activeSessionInfo.getValue())) {
Date date = new Date(activeSessionInfo.getValue().get(0).longValue()*1000);
Double useRate = BigDecimal.valueOf(activeSessionInfo.getValue().get(1)).setScale(2, RoundingMode.HALF_UP).doubleValue();
result.put("date", DateUtil.format(date, "MM/dd HH:mm:ss"));
result.put("activeSessions", useRate);
}
}
}catch (Exception e){
log.error("获取数据库活跃连接数信息错误请检查Prometheus服务是否正常启动或Java请求参数是否正确,详细堆栈错误为:{}",e.getMessage());
}
return result;
}
/**
* Get active session count info list
* @return
*/
@Override
public List<Map<String,Object>> getActiveSessionInfoList(String conditions) {
List<Map<String, Object>> result = new ArrayList<>();
try {
//Prometheus server address
String url = serverProperties.getServerUrl();
//Target host instance: the node-exporter address
String instance = serverProperties.getDbInstance();
//Databases being monitored
String monitorDBNames = serverProperties.getMonitorDBNames();
//Query database active session count
String activeSessionQuery = "pg_stat_activity_count{datname=~\"("+monitorDBNames+")\", instance=~\""+instance+"\", state=\"active\"} !=0";
PrometheusDBQueryTypeEnum queryTypeEnum = PrometheusDBQueryTypeEnum.getQueryTypeEnum(conditions);
long end = Instant.now().getEpochSecond();
long start = end - queryTypeEnum.getLastSecond();
String step = queryTypeEnum.getStep();
PrometheusResponse response = webClient.get()
.uri(buildUri(url,activeSessionQuery,start,end,step))
.retrieve()
.bodyToMono(PrometheusResponse.class)
.block();
if(Objects.nonNull(response) &&
Objects.nonNull(response.getData()) &&
CollUtil.isNotEmpty(response.getData().getResult())
) {
PrometheusResponse.Result activeSessionInfoList = response.getData().getResult().get(0);
if(CollUtil.isNotEmpty(activeSessionInfoList.getValues())) {
List<List<Double>> pointDatas = activeSessionInfoList.getValues();
for(List<Double> pointData : pointDatas) {
Map<String,Object> pointDataMap = new HashMap<>();
Date date = new Date(pointData.get(0).longValue()*1000);
Double useRate = BigDecimal.valueOf(pointData.get(1)).setScale(2, RoundingMode.HALF_UP).doubleValue();
pointDataMap.put("date", DateUtil.format(date, "MM/dd HH:mm:ss"));
pointDataMap.put("activeSessions", useRate);
result.add(pointDataMap);
}
}
}
}catch (Exception e){
log.error("获取数据库活跃连接数信息错误请检查Prometheus服务是否正常启动或Java请求参数是否正确,详细堆栈错误为:{}",e.getMessage());
}
return result;
}
/**
@ -57,8 +325,83 @@ public class DatabaseMonitorServiceImpl implements DatabaseMonitorService {
*/
@Override
public Map<String, Object> getDeadlocksInfo() {
//irate(pg_stat_database_deadlocks{instance="192.168.186.143:9187", datname=~"postgres"}[5m])
return Map.of();
Map<String,Object> result = new HashMap<>();
try {
//Prometheus server address
String url = serverProperties.getServerUrl();
//Target host instance: the node-exporter address
String instance = serverProperties.getDbInstance();
//Query database deadlock info
String deadlocksQuery = "irate(pg_stat_database_deadlocks{instance=\""+instance+"\", datname=~\"postgres\"}[5m])";
PrometheusResponse response = webClient.get()
.uri(buildUri(url,deadlocksQuery))
.retrieve()
.bodyToMono(PrometheusResponse.class)
.block();
if(Objects.nonNull(response) &&
Objects.nonNull(response.getData()) &&
CollUtil.isNotEmpty(response.getData().getResult())
) {
PrometheusResponse.Result deadlocksInfo = response.getData().getResult().get(0);
if(CollUtil.isNotEmpty(deadlocksInfo.getValue())) {
Date date = new Date(deadlocksInfo.getValue().get(0).longValue()*1000);
Double useRate = BigDecimal.valueOf(deadlocksInfo.getValue().get(1)).setScale(2, RoundingMode.HALF_UP).doubleValue();
result.put("date", DateUtil.format(date, "MM/dd HH:mm:ss"));
result.put("count", useRate);
}
}
}catch (Exception e){
log.error("获取数据库死锁信息错误请检查Prometheus服务是否正常启动或Java请求参数是否正确,详细堆栈错误为:{}",e.getMessage());
}
return result;
}
/**
* Get deadlock info list
*
* @return
*/
@Override
public List<Map<String,Object>> getDeadlocksInfoList(String conditions) {
List<Map<String, Object>> result = new ArrayList<>();
try {
//Prometheus server address
String url = serverProperties.getServerUrl();
//Target host instance: the node-exporter address
String instance = serverProperties.getDbInstance();
//Query database deadlock info
String deadlocksQuery = "irate(pg_stat_database_deadlocks{instance=\""+instance+"\", datname=~\"postgres\"}[5m])";
PrometheusDBQueryTypeEnum queryTypeEnum = PrometheusDBQueryTypeEnum.getQueryTypeEnum(conditions);
long end = Instant.now().getEpochSecond();
long start = end - queryTypeEnum.getLastSecond();
String step = queryTypeEnum.getStep();
PrometheusResponse response = webClient.get()
.uri(buildUri(url,deadlocksQuery,start,end,step))
.retrieve()
.bodyToMono(PrometheusResponse.class)
.block();
if(Objects.nonNull(response) &&
Objects.nonNull(response.getData()) &&
CollUtil.isNotEmpty(response.getData().getResult())
) {
PrometheusResponse.Result deadlocksSeries = response.getData().getResult().get(0);
if(CollUtil.isNotEmpty(deadlocksSeries.getValues())) {
List<List<Double>> pointDatas = deadlocksSeries.getValues();
for(List<Double> pointData : pointDatas) {
Map<String,Object> pointDataMap = new HashMap<>();
Date date = new Date(pointData.get(0).longValue()*1000);
Double count = BigDecimal.valueOf(pointData.get(1)).setScale(2, RoundingMode.HALF_UP).doubleValue();
pointDataMap.put("date", DateUtil.format(date, "MM/dd HH:mm:ss"));
pointDataMap.put("count", count);
result.add(pointDataMap);
}
}
}
}catch (Exception e){
log.error("获取数据库死锁信息错误请检查Prometheus服务是否正常启动或Java请求参数是否正确,详细堆栈错误为:{}",e.getMessage());
}
return result;
}
/**
@ -67,6 +410,41 @@ public class DatabaseMonitorServiceImpl implements DatabaseMonitorService {
*/
@Override
public List<DatabaseProportionInfo> getDatabaseProportionInfo() {
return databaseMonitorMapper.getActiveSessionInfo();
return databaseMonitorMapper.getDatabaseProportionInfo();
}
/**
* Build an instant-query URI
* @param url
* @param query
* @return
*/
private URI buildUri(String url, String query){
URI uri = UriComponentsBuilder.fromHttpUrl(url + "/api/v1/query")
.queryParam("query", query)
.build()
.toUri();
return uri;
}
/**
* Build a range-query URI
* @param url
* @param query
* @return
*/
private URI buildUri(String url,String query,Long start,Long end,String step){
String uriAddr = String.format(
"%s/api/v1/query_range?query=%s&start=%d&end=%d&step=%s",
url,
query,
start,
end,
step
);
URI uri = UriComponentsBuilder.fromHttpUrl(uriAddr)
.build()
.toUri();
return uri;
}
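/**
* Hypothetical percent-encoding variant (sketch only, not called above): PromQL
* contains '{', '"' and spaces, and the String.format path leaves their escaping
* to UriComponentsBuilder/URI, so encoding the expression up front is more explicit.
*/
private URI buildEncodedRangeUri(String url, String query, Long start, Long end, String step) {
    String encoded = java.net.URLEncoder.encode(query, java.nio.charset.StandardCharsets.UTF_8);
    return URI.create(String.format(
            "%s/api/v1/query_range?query=%s&start=%d&end=%d&step=%s",
            url, encoded, start, end, step));
}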
}

View File

@ -4,7 +4,7 @@ import cn.hutool.core.collection.CollUtil;
import cn.hutool.core.date.DateUtil;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.jeecg.common.constant.enums.PrometheusQueryTypeEnum;
import org.jeecg.common.constant.enums.PrometheusHostQueryTypeEnum;
import org.jeecg.common.properties.PrometheusServerProperties;
import org.jeecg.modules.monitor.service.HostMonitorService;
import org.jeecg.modules.monitor.vo.PrometheusResponse;
@ -32,7 +32,7 @@ public class HostMonitorServiceImpl implements HostMonitorService {
* Get CPU info
*/
@Override
public Map<String,Object> getCpuInfo() {
public Map<String,Object> getCpuInfo(String conditions) {
Map<String,Object> result = new HashMap<>();
try {
//Prometheus server address
@ -40,7 +40,9 @@ public class HostMonitorServiceImpl implements HostMonitorService {
//Target host instance: the node-exporter address
String instance = serverProperties.getInstance();
//Query CPU utilization
String cpuQuery = "100 * (1 - avg(rate(node_cpu_seconds_total{mode=\"idle\", instance=\""+instance+"\"}[15s])))";
PrometheusHostQueryTypeEnum queryTypeEnum = PrometheusHostQueryTypeEnum.getQueryTypeEnum(conditions);
String exprTime = queryTypeEnum.getExprTime();
String cpuQuery = "100 * (1 - avg(rate(node_cpu_seconds_total{mode=\"idle\", instance=\""+instance+"\"}["+exprTime+"])))";
PrometheusResponse response = webClient.get()
.uri(buildUri(url,cpuQuery))
.retrieve()
@ -76,11 +78,12 @@ public class HostMonitorServiceImpl implements HostMonitorService {
//Target host instance: the node-exporter address
String instance = serverProperties.getInstance();
//Query CPU utilization
String cpuQuery = "100 * (1 - avg(rate(node_cpu_seconds_total{mode=\"idle\", instance=\""+instance+"\"}[15s])))";
PrometheusQueryTypeEnum queryTypeEnum = PrometheusQueryTypeEnum.getQueryTypeEnum(conditions);
PrometheusHostQueryTypeEnum queryTypeEnum = PrometheusHostQueryTypeEnum.getQueryTypeEnum(conditions);
long end = Instant.now().getEpochSecond();
long start = end - queryTypeEnum.getLastSecond();
String step = queryTypeEnum.getStep();
String exprTime = queryTypeEnum.getExprTime();
String cpuQuery = "100 * (1 - avg(rate(node_cpu_seconds_total{mode=\"idle\", instance=\""+instance+"\"}["+exprTime+"])))";
PrometheusResponse response = webClient.get()
.uri(buildUri(url,cpuQuery,start,end,step))
.retrieve()
@ -228,7 +231,7 @@ public class HostMonitorServiceImpl implements HostMonitorService {
String instance = serverProperties.getInstance();
//Usage rate
String usageRateQuery = "(1 - (node_memory_MemAvailable_bytes{instance=\""+instance+"\"} / node_memory_MemTotal_bytes{instance=\""+instance+"\"})) * 100";
PrometheusQueryTypeEnum queryTypeEnum = PrometheusQueryTypeEnum.getQueryTypeEnum(conditions);
PrometheusHostQueryTypeEnum queryTypeEnum = PrometheusHostQueryTypeEnum.getQueryTypeEnum(conditions);
long end = Instant.now().getEpochSecond();
long start = end - queryTypeEnum.getLastSecond();
String step = queryTypeEnum.getStep();
@ -296,15 +299,17 @@ public class HostMonitorServiceImpl implements HostMonitorService {
* Get network info
*/
@Override
public Map<String,Object> getNetworkInfo() {
public Map<String,Object> getNetworkInfo(String conditions) {
Map<String,Object> result = new HashMap<>();
try {
//Prometheus server address
String url = serverProperties.getServerUrl();
//Target host instance: the node-exporter address
String instance = serverProperties.getInstance();
PrometheusHostQueryTypeEnum queryTypeEnum = PrometheusHostQueryTypeEnum.getQueryTypeEnum(conditions);
String exprTime = queryTypeEnum.getExprTime();
//Receive bandwidth (Kbps)
String receiveKbpsQuery = "rate(node_network_receive_bytes_total{instance=\"" + instance + "\", device=\""+serverProperties.getNetworkCardName()+"\"}[15s]) * 8 / 1000";
String receiveKbpsQuery = "rate(node_network_receive_bytes_total{instance=\"" + instance + "\", device=\""+serverProperties.getNetworkCardName()+"\"}["+exprTime+"]) * 8 / 1000";
PrometheusResponse receiveKbpsResponse = webClient.get()
.uri(buildUri(url,receiveKbpsQuery))
.retrieve()
@ -324,7 +329,7 @@ public class HostMonitorServiceImpl implements HostMonitorService {
}
//Transmit bandwidth (Kbps)
String transmitKbpsQuery = "rate(node_network_transmit_bytes_total{instance=\"" + instance + "\", device=\""+serverProperties.getNetworkCardName()+"\"}[15s]) * 8 / 1000";
String transmitKbpsQuery = "rate(node_network_transmit_bytes_total{instance=\"" + instance + "\", device=\""+serverProperties.getNetworkCardName()+"\"}["+exprTime+"]) * 8 / 1000";
PrometheusResponse transmitKbpsResponse = webClient.get()
.uri(buildUri(url,transmitKbpsQuery))
.retrieve()
@ -362,12 +367,13 @@ public class HostMonitorServiceImpl implements HostMonitorService {
//Target host instance: the node-exporter address
String instance = serverProperties.getInstance();
//Build query parameters
PrometheusQueryTypeEnum queryTypeEnum = PrometheusQueryTypeEnum.getQueryTypeEnum(conditions);
PrometheusHostQueryTypeEnum queryTypeEnum = PrometheusHostQueryTypeEnum.getQueryTypeEnum(conditions);
long end = Instant.now().getEpochSecond();
long start = end - queryTypeEnum.getLastSecond();
String step = queryTypeEnum.getStep();
String exprTime = queryTypeEnum.getExprTime();
//Receive bandwidth (Kbps)
String receiveKbpsQuery = "rate(node_network_receive_bytes_total{instance=\"" + instance + "\", device=\""+serverProperties.getNetworkCardName()+"\"}[15s]) * 8 / 1000";
String receiveKbpsQuery = "rate(node_network_receive_bytes_total{instance=\"" + instance + "\", device=\""+serverProperties.getNetworkCardName()+"\"}["+exprTime+"]) * 8 / 1000";
PrometheusResponse receiveKbpsResponse = webClient.get()
.uri(buildUri(url,receiveKbpsQuery,start,end,step))
.retrieve()
@ -394,7 +400,7 @@ public class HostMonitorServiceImpl implements HostMonitorService {
}
//Transmit bandwidth (Kbps)
String transmitKbpsQuery = "rate(node_network_transmit_bytes_total{instance=\"" + instance + "\", device=\""+serverProperties.getNetworkCardName()+"\"}[15s]) * 8 / 1000";
String transmitKbpsQuery = "rate(node_network_transmit_bytes_total{instance=\"" + instance + "\", device=\""+serverProperties.getNetworkCardName()+"\"}["+exprTime+"]) * 8 / 1000";
PrometheusResponse transmitKbpsResponse = webClient.get()
.uri(buildUri(url,transmitKbpsQuery,start,end,step))
.retrieve()
@ -493,10 +499,4 @@ public class HostMonitorServiceImpl implements HostMonitorService {
.toUri();
return uri;
}
//&start=%d&end=%d&step=%s
public static void main(String[] args) {
Date date = new Date(1758868629*1000L);
System.out.println(DateUtil.format(date, "yyyy-MM-dd HH:mm:ss"));
}
}

View File

@ -65,8 +65,7 @@ public class SysBaseApiImpl implements ISysBaseAPI {
if (oConvertUtils.isEmpty(username)) {
return null;
}
LoginUser user = authUserService.getEncodeUserInfo(username);
return user;
return authUserService.getEncodeUserInfo(username);
}
@Override

View File

@ -43,7 +43,7 @@ public class WeatherDataController {
@AutoLog(value = "分页查询气象文件数据")
@Operation(summary = "分页查询气象文件数据")
@GetMapping("page")
public Result<?> page(PageRequest pageRequest, String fileExt, Integer dataSource,
public Result<?> page(PageRequest pageRequest, String fileExt, String dataSource,
@DateTimeFormat(iso = DateTimeFormat.ISO.DATE) LocalDate startDate,
@DateTimeFormat(iso = DateTimeFormat.ISO.DATE) LocalDate endDate) {
IPage<WeatherData> page = weatherDataService.page(pageRequest,fileExt,dataSource,startDate,endDate);
@ -67,9 +67,10 @@ public class WeatherDataController {
public Result<?> uploadFile(FileVo fileVo){
if (!fileVo.getFileExt().equals(WeatherFileSuffixEnum.GRIB.getValue()) && !fileVo.getFileExt().equals(WeatherFileSuffixEnum.GRIB2.getValue())){
throw new RuntimeException("不支持当前上传的文件类型!");
}else{
FileUploadResultVo resultVo = weatherDataService.uploadFile(fileVo);
return Result.ok(resultVo);
}
FileUploadResultVo resultVo = weatherDataService.uploadFile(fileVo);
return Result.ok(resultVo);
}
/**
@ -113,36 +114,34 @@ public class WeatherDataController {
public static void main(String[] args) {
//reftime_ISO
String filePath = "F:\\工作\\五木\\放射性核素监测数据综合分析及氙本底源解析系统\\其他资料\\气象数据\\中国CRA40再分析数据\\CRA40\\20250524\\CRA40_AVO_2025052418_GLB_0P25_HOUR_V1_0_0.grib2";
String filePath1 = "F:\\工作\\五木\\放射性核素监测数据综合分析及氙本底源解析系统\\其他资料\\气象数据\\中国CRA40再分析数据\\GRAPES\\2024110100\\Z_NAFP_C_BABJ_20241101000000_P_NWPC-GRAPES-GFS-HNEHE-00000.grib2";
String filePath2 = "F:\\工作\\五木\\放射性核素监测数据综合分析及氙本底源解析系统\\其他资料\\气象数据\\盘古模型预测数据\\panguweather_2025073106.grib";
// try {
// String md5 = calculateMD5(filePath);
// System.out.println("MD5: " + md5);
// } catch (IOException e) {
// e.printStackTrace();
// }
try (NetcdfFile ncFile = NetcdfFile.open(filePath2)) {
String filePath = "F:\\工作\\五木\\放射性核素监测数据综合分析及氙本底源解析系统\\其他资料\\气象数据\\盘古模型预测数据\\panguweather_2025073118.grib";
try {
String md5 = calculateMD5(filePath);
System.out.println("MD5: " + md5);
} catch (IOException e) {
e.printStackTrace();
}
// try (NetcdfFile ncFile = NetcdfFile.open(filePath2)) {
// Variable variable = ncFile.findVariable("reftime_ISO");
// if (variable != null) {
// Array data = variable.read();
// System.out.println(variable.getFullName());
// System.out.println(data.getObject(0));
// }
int index = 0;
for (Variable variable : ncFile.getVariables()) {
if (variable != null) {
Array data = variable.read();
System.out.println(variable.getFullName());
System.out.println(data);
if (index == 7) {
break;
}
index++;
}
}
}catch (Exception e){
}
// int index = 0;
// for (Variable variable : ncFile.getVariables()) {
// if (variable != null) {
// Array data = variable.read();
// System.out.println(variable.getFullName());
// System.out.println(data);
// if (index == 7) {
// break;
// }
// index++;
// }
// }
// }catch (Exception e){
//
// }
}
}

View File

@ -27,7 +27,7 @@ public interface WeatherDataService {
* @param endDate
* @return
*/
IPage<WeatherData> page(PageRequest pageRequest, String fileExt, Integer dataSource, LocalDate startDate, LocalDate endDate);
IPage<WeatherData> page(PageRequest pageRequest, String fileExt, String dataSource, LocalDate startDate, LocalDate endDate);
/**
* 验证文件是否存在

View File

@ -1,5 +1,6 @@
package org.jeecg.service.impl;
import cn.hutool.core.collection.CollUtil;
import cn.hutool.core.io.FileUtil;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
@ -46,6 +47,7 @@ import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.*;
import java.util.stream.Collectors;
import static org.jeecg.common.constant.LatLonSizeConstants.*;
@ -161,7 +163,7 @@ public class WeatherDataServiceImpl extends ServiceImpl<WeatherDataMapper, Weath
* @return
*/
@Override
public IPage<WeatherData> page(PageRequest pageRequest, String fileExt, Integer dataSource, LocalDate startDate, LocalDate endDate) {
public IPage<WeatherData> page(PageRequest pageRequest, String fileExt, String dataSource, LocalDate startDate, LocalDate endDate) {
LocalDateTime startTime = null;
if(Objects.nonNull(startDate)){
startTime = LocalDateTime.of(startDate.getYear(), startDate.getMonth(), startDate.getDayOfMonth(), 0, 0, 0);
@ -171,7 +173,14 @@ public class WeatherDataServiceImpl extends ServiceImpl<WeatherDataMapper, Weath
endTime = LocalDateTime.of(endDate.getYear(), endDate.getMonth(), endDate.getDayOfMonth(), 23, 59, 59);
}
LambdaQueryWrapper<WeatherData> queryWrapper = new LambdaQueryWrapper<>();
queryWrapper.eq(Objects.nonNull(dataSource),WeatherData::getDataSource, dataSource);
List<Integer> dataSources = null;
if (StringUtils.isNotBlank(dataSource)) {
dataSources = Arrays.stream(dataSource.split(",")).map(Integer::parseInt).collect(Collectors.toList());
}else {
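//Fallback: treats fileExt as a numeric data-source id; a non-numeric extension such as "grib" would throw NumberFormatException here.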
dataSources = new ArrayList<>();
dataSources.add(Integer.parseInt(fileExt));
}
queryWrapper.in(CollUtil.isNotEmpty(dataSources),WeatherData::getDataSource,dataSources);
queryWrapper.between((Objects.nonNull(startTime) && Objects.nonNull(endTime)),WeatherData::getDataStartTime,startTime,endTime);
queryWrapper.eq(StringUtils.isNotBlank(fileExt),WeatherData::getFileExt, fileExt);
queryWrapper.select(WeatherData::getId,WeatherData::getFileName,WeatherData::getFileSize,WeatherData::getDataSource,WeatherData::getFileExt,WeatherData::getDataStartTime,WeatherData::getFilePath);
@ -249,6 +258,7 @@ public class WeatherDataServiceImpl extends ServiceImpl<WeatherDataMapper, Weath
this.transactionManager.commit(transactionStatus);
}catch (Exception e) {
flag = false;
e.printStackTrace();
}
if(fileVo.isFileShare()) {
@ -268,12 +278,13 @@ public class WeatherDataServiceImpl extends ServiceImpl<WeatherDataMapper, Weath
File dataFile = new File(storagePath);
if(dataFile.exists() && dataFile.length()>0){
//Get the data start date of the file
String reftime = NcUtil.getReftime(dataFile.getAbsolutePath());
if(StringUtils.isBlank(reftime)) {
throw new JeecgFileUploadException("解析气象文件起始时间数据异常,此文件可能损坏");
}
Instant instant = Instant.parse(reftime);
LocalDateTime utcDateTime = LocalDateTime.ofInstant(instant, ZoneId.of("UTC"));
// String reftime = "2025-10-16 00:00:00";//NcUtil.getReftime(dataFile.getAbsolutePath());
// if(StringUtils.isBlank(reftime)) {
// throw new JeecgFileUploadException("解析气象文件起始时间数据异常,此文件可能损坏");
// }
// Instant instant = Instant.parse(reftime);
// LocalDateTime utcDateTime = LocalDateTime.ofInstant(instant, ZoneId.of("UTC"));
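//Stopgap: stamps the current time instead of the file's parsed reftime (see the commented-out parsing above).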
LocalDateTime utcDateTime = LocalDateTime.now();
queryResult.setDataStartTime(utcDateTime);
//Compute the file size in MB
BigDecimal divideVal = new BigDecimal("1024");

View File

@ -42,7 +42,7 @@ spring:
#Sentinel configuration
sentinel:
transport:
dashboard: jeecg-boot-sentinel:9000
dashboard: stas-sentinel:9000
# Support per-route (chain) flow limiting
web-context-unify: false
filter:

View File

@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration debug="false">
<!-- Log file storage location -->
<property name="LOG_HOME" value="../logs" />
<property name="LOG_HOME" value="logs" />
<!--<property name="COLOR_PATTERN" value="%black(%contextName-) %red(%d{yyyy-MM-dd HH:mm:ss}) %green([%thread]) %highlight(%-5level) %boldMagenta( %replace(%caller{1}){'\t|Caller.{1}0|\r\n', ''})- %gray(%msg%xEx%n)" />-->
<!-- Console output -->
@ -17,7 +17,7 @@
<appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!-- Output log file name pattern -->
<FileNamePattern>${LOG_HOME}/jeecg-gateway-%d{yyyy-MM-dd}.%i.log</FileNamePattern>
<FileNamePattern>${LOG_HOME}/stas-gateway-%d{yyyy-MM-dd}.%i.log</FileNamePattern>
<!-- Days of log history to keep -->
<MaxHistory>30</MaxHistory>
<maxFileSize>10MB</maxFileSize>

View File

@ -0,0 +1,44 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration debug="false">
<!-- Log file storage location -->
<property name="LOG_HOME" value="logs" />
<!--<property name="COLOR_PATTERN" value="%black(%contextName-) %red(%d{yyyy-MM-dd HH:mm:ss}) %green([%thread]) %highlight(%-5level) %boldMagenta( %replace(%caller{1}){'\t|Caller.{1}0|\r\n', ''})- %gray(%msg%xEx%n)" />-->
<!-- Console output -->
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<!-- Format: %d = date, %thread = thread name, %-5level = level padded to 5 characters, %msg = log message, %n = newline
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50}:%L - %msg%n</pattern>-->
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %highlight(%-5level) %cyan(%logger{50}:%L) - %msg%n</pattern>
</encoder>
</appender>
<!-- Roll log files daily -->
<appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!-- Output log file name pattern -->
<FileNamePattern>${LOG_HOME}/stas-data-analyze-%d{yyyy-MM-dd}.%i.log</FileNamePattern>
<!-- Days of log history to keep -->
<MaxHistory>30</MaxHistory>
<maxFileSize>10MB</maxFileSize>
</rollingPolicy>
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<!-- Format: %d = date, %thread = thread name, %-5level = level padded to 5 characters, %msg = log message, %n = newline -->
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50}:%L - %msg%n</pattern>
</encoder>
</appender>
<!--mybatis log configure -->
<logger name="com.apache.ibatis" level="TRACE" />
<logger name="java.sql.Connection" level="DEBUG" />
<logger name="java.sql.Statement" level="DEBUG" />
<logger name="java.sql.PreparedStatement" level="DEBUG" />
<!-- Log output level -->
<root level="INFO">
<appender-ref ref="STDOUT" />
<appender-ref ref="FILE" />
</root>
</configuration>

View File

@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration debug="false">
<!-- Log file storage location -->
<property name="LOG_HOME" value="../logs" />
<property name="LOG_HOME" value="logs" />
<!--<property name="COLOR_PATTERN" value="%black(%contextName-) %red(%d{yyyy-MM-dd HH:mm:ss}) %green([%thread]) %highlight(%-5level) %boldMagenta( %replace(%caller{1}){'\t|Caller.{1}0|\r\n', ''})- %gray(%msg%xEx%n)" />-->
<!-- Console output -->
@ -17,7 +17,7 @@
<appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!-- Output log file name pattern -->
<FileNamePattern>${LOG_HOME}/jeecg-system-%d{yyyy-MM-dd}.%i.log</FileNamePattern>
<FileNamePattern>${LOG_HOME}/stas-event-%d{yyyy-MM-dd}.%i.log</FileNamePattern>
<!-- Days of log history to keep -->
<MaxHistory>30</MaxHistory>
<maxFileSize>10MB</maxFileSize>
@ -28,37 +28,6 @@
</encoder>
</appender>
<!-- 生成 error html格式日志开始 -->
<appender name="HTML" class="ch.qos.logback.core.FileAppender">
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<!--设置日志级别,过滤掉info日志,只输入error日志-->
<level>ERROR</level>
</filter>
<encoder class="ch.qos.logback.core.encoder.LayoutWrappingEncoder">
<layout class="ch.qos.logback.classic.html.HTMLLayout">
<pattern>%p%d%msg%M%F{32}%L</pattern>
</layout>
</encoder>
<file>${LOG_HOME}/error-log.html</file>
</appender>
<!-- 生成 error html格式日志结束 -->
<!-- 每天生成一个html格式的日志开始 -->
<appender name="FILE_HTML" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!--日志文件输出的文件名 -->
<FileNamePattern>${LOG_HOME}/jeecg-system-%d{yyyy-MM-dd}.%i.html</FileNamePattern>
<!--日志文件保留天数 -->
<MaxHistory>30</MaxHistory>
<MaxFileSize>10MB</MaxFileSize>
</rollingPolicy>
<encoder class="ch.qos.logback.core.encoder.LayoutWrappingEncoder">
<layout class="ch.qos.logback.classic.html.HTMLLayout">
<pattern>%p%d%msg%M%F{32}%L</pattern>
</layout>
</encoder>
</appender>
<!-- 每天生成一个html格式的日志结束 -->
<!--mybatis log configure -->
<logger name="com.apache.ibatis" level="TRACE" />
@ -70,8 +39,6 @@
<root level="INFO">
<appender-ref ref="STDOUT" />
<appender-ref ref="FILE" />
<appender-ref ref="HTML" />
<appender-ref ref="FILE_HTML" />
</root>
</configuration>

View File

@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration debug="false">
<!-- Log file storage location -->
<property name="LOG_HOME" value="../logs" />
<property name="LOG_HOME" value="logs" />
<!--<property name="COLOR_PATTERN" value="%black(%contextName-) %red(%d{yyyy-MM-dd HH:mm:ss}) %green([%thread]) %highlight(%-5level) %boldMagenta( %replace(%caller{1}){'\t|Caller.{1}0|\r\n', ''})- %gray(%msg%xEx%n)" />-->
<!-- Console output -->
@ -17,7 +17,7 @@
<appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!-- Output log file name pattern -->
<FileNamePattern>${LOG_HOME}/jeecg-system-%d{yyyy-MM-dd}.%i.log</FileNamePattern>
<FileNamePattern>${LOG_HOME}/stas-source-build-%d{yyyy-MM-dd}.%i.log</FileNamePattern>
<!-- Days of log history to keep -->
<MaxHistory>30</MaxHistory>
<maxFileSize>10MB</maxFileSize>
@ -28,37 +28,6 @@
</encoder>
</appender>
<!-- 生成 error html格式日志开始 -->
<appender name="HTML" class="ch.qos.logback.core.FileAppender">
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<!--设置日志级别,过滤掉info日志,只输入error日志-->
<level>ERROR</level>
</filter>
<encoder class="ch.qos.logback.core.encoder.LayoutWrappingEncoder">
<layout class="ch.qos.logback.classic.html.HTMLLayout">
<pattern>%p%d%msg%M%F{32}%L</pattern>
</layout>
</encoder>
<file>${LOG_HOME}/error-log.html</file>
</appender>
<!-- 生成 error html格式日志结束 -->
<!-- 每天生成一个html格式的日志开始 -->
<appender name="FILE_HTML" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!--日志文件输出的文件名 -->
<FileNamePattern>${LOG_HOME}/jeecg-system-%d{yyyy-MM-dd}.%i.html</FileNamePattern>
<!--日志文件保留天数 -->
<MaxHistory>30</MaxHistory>
<MaxFileSize>10MB</MaxFileSize>
</rollingPolicy>
<encoder class="ch.qos.logback.core.encoder.LayoutWrappingEncoder">
<layout class="ch.qos.logback.classic.html.HTMLLayout">
<pattern>%p%d%msg%M%F{32}%L</pattern>
</layout>
</encoder>
</appender>
<!-- 每天生成一个html格式的日志结束 -->
<!--mybatis log configure -->
<logger name="com.apache.ibatis" level="TRACE" />
@ -70,8 +39,6 @@
<root level="INFO">
<appender-ref ref="STDOUT" />
<appender-ref ref="FILE" />
<appender-ref ref="HTML" />
<appender-ref ref="FILE_HTML" />
</root>
</configuration>
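
Switching LOG_HOME from ../logs to logs means log paths now resolve inside the JVM working directory rather than its parent. A short self-contained sketch showing how the two values resolve; the printed paths depend on where the service is launched:

import java.nio.file.Path;
import java.nio.file.Paths;

public class LogHomeResolution {
    public static void main(String[] args) {
        // logback resolves relative paths against the JVM working directory
        Path oldHome = Paths.get("../logs").toAbsolutePath().normalize();
        Path newHome = Paths.get("logs").toAbsolutePath().normalize();
        System.out.println("old LOG_HOME -> " + oldHome);
        System.out.println("new LOG_HOME -> " + newHome);
    }
}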

View File

@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration debug="false">
<!-- Define the storage location for log files -->
<property name="LOG_HOME" value="../logs" />
<property name="LOG_HOME" value="logs" />
<!--<property name="COLOR_PATTERN" value="%black(%contextName-) %red(%d{yyyy-MM-dd HH:mm:ss}) %green([%thread]) %highlight(%-5level) %boldMagenta( %replace(%caller{1}){'\t|Caller.{1}0|\r\n', ''})- %gray(%msg%xEx%n)" />-->
<!-- Console output -->
@ -17,7 +17,7 @@
<appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!-- Output filename pattern for log files -->
<FileNamePattern>${LOG_HOME}/jeecg-system-%d{yyyy-MM-dd}.%i.log</FileNamePattern>
<FileNamePattern>${LOG_HOME}/stas-sync-%d{yyyy-MM-dd}.%i.log</FileNamePattern>
<!-- Number of days to retain log files -->
<MaxHistory>30</MaxHistory>
<maxFileSize>10MB</maxFileSize>
@ -28,37 +28,6 @@
</encoder>
</appender>
<!-- Begin: ERROR log in HTML format -->
<appender name="HTML" class="ch.qos.logback.core.FileAppender">
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<!-- Set the log level: filter out INFO logs and output only ERROR logs -->
<level>ERROR</level>
</filter>
<encoder class="ch.qos.logback.core.encoder.LayoutWrappingEncoder">
<layout class="ch.qos.logback.classic.html.HTMLLayout">
<pattern>%p%d%msg%M%F{32}%L</pattern>
</layout>
</encoder>
<file>${LOG_HOME}/error-log.html</file>
</appender>
<!-- End: ERROR log in HTML format -->
<!-- Begin: daily rolling HTML-format log -->
<appender name="FILE_HTML" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!-- Output filename pattern for log files -->
<FileNamePattern>${LOG_HOME}/jeecg-system-%d{yyyy-MM-dd}.%i.html</FileNamePattern>
<!-- Number of days to retain log files -->
<MaxHistory>30</MaxHistory>
<MaxFileSize>10MB</MaxFileSize>
</rollingPolicy>
<encoder class="ch.qos.logback.core.encoder.LayoutWrappingEncoder">
<layout class="ch.qos.logback.classic.html.HTMLLayout">
<pattern>%p%d%msg%M%F{32}%L</pattern>
</layout>
</encoder>
</appender>
<!-- End: daily rolling HTML-format log -->
<!-- MyBatis log configuration -->
<logger name="org.apache.ibatis" level="TRACE" />
@ -70,8 +39,6 @@
<root level="INFO">
<appender-ref ref="STDOUT" />
<appender-ref ref="FILE" />
<appender-ref ref="HTML" />
<appender-ref ref="FILE_HTML" />
</root>
</configuration>
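
The removed HTML appender relied on a ThresholdFilter pinned to ERROR. A self-contained Java sketch of that filter's decision logic; the logger and message names ("demo", "info msg") are illustrative only:

import ch.qos.logback.classic.Level;
import ch.qos.logback.classic.Logger;
import ch.qos.logback.classic.LoggerContext;
import ch.qos.logback.classic.filter.ThresholdFilter;
import ch.qos.logback.classic.spi.LoggingEvent;
import ch.qos.logback.core.spi.FilterReply;

public class ThresholdFilterDemo {
    public static void main(String[] args) {
        ThresholdFilter filter = new ThresholdFilter();
        filter.setLevel("ERROR"); // same threshold the removed appender used
        filter.start();

        LoggerContext ctx = new LoggerContext();
        Logger logger = ctx.getLogger("demo");

        LoggingEvent info = new LoggingEvent("demo", logger, Level.INFO, "info msg", null, null);
        LoggingEvent error = new LoggingEvent("demo", logger, Level.ERROR, "error msg", null, null);

        FilterReply infoReply = filter.decide(info);   // DENY: below the threshold
        FilterReply errorReply = filter.decide(error); // NEUTRAL: at or above it
        System.out.println("INFO -> " + infoReply + ", ERROR -> " + errorReply);
    }
}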

View File

@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration debug="false">
<!-- Define the storage location for log files -->
<property name="LOG_HOME" value="../logs" />
<property name="LOG_HOME" value="logs" />
<!--<property name="COLOR_PATTERN" value="%black(%contextName-) %red(%d{yyyy-MM-dd HH:mm:ss}) %green([%thread]) %highlight(%-5level) %boldMagenta( %replace(%caller{1}){'\t|Caller.{1}0|\r\n', ''})- %gray(%msg%xEx%n)" />-->
<!-- Console output -->
@ -17,7 +17,7 @@
<appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!-- Output filename pattern for log files -->
<FileNamePattern>${LOG_HOME}/jeecg-system-%d{yyyy-MM-dd}.%i.log</FileNamePattern>
<FileNamePattern>${LOG_HOME}/stas-system-%d{yyyy-MM-dd}.%i.log</FileNamePattern>
<!-- Number of days to retain log files -->
<MaxHistory>30</MaxHistory>
<maxFileSize>10MB</maxFileSize>
@ -28,37 +28,6 @@
</encoder>
</appender>
<!-- Begin: ERROR log in HTML format -->
<appender name="HTML" class="ch.qos.logback.core.FileAppender">
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<!-- Set the log level: filter out INFO logs and output only ERROR logs -->
<level>ERROR</level>
</filter>
<encoder class="ch.qos.logback.core.encoder.LayoutWrappingEncoder">
<layout class="ch.qos.logback.classic.html.HTMLLayout">
<pattern>%p%d%msg%M%F{32}%L</pattern>
</layout>
</encoder>
<file>${LOG_HOME}/error-log.html</file>
</appender>
<!-- End: ERROR log in HTML format -->
<!-- Begin: daily rolling HTML-format log -->
<appender name="FILE_HTML" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!-- Output filename pattern for log files -->
<FileNamePattern>${LOG_HOME}/jeecg-system-%d{yyyy-MM-dd}.%i.html</FileNamePattern>
<!-- Number of days to retain log files -->
<MaxHistory>30</MaxHistory>
<MaxFileSize>10MB</MaxFileSize>
</rollingPolicy>
<encoder class="ch.qos.logback.core.encoder.LayoutWrappingEncoder">
<layout class="ch.qos.logback.classic.html.HTMLLayout">
<pattern>%p%d%msg%M%F{32}%L</pattern>
</layout>
</encoder>
</appender>
<!-- End: daily rolling HTML-format log -->
<!-- MyBatis log configuration -->
<logger name="org.apache.ibatis" level="TRACE" />
@ -70,8 +39,6 @@
<root level="INFO">
<appender-ref ref="STDOUT" />
<appender-ref ref="FILE" />
<appender-ref ref="HTML" />
<appender-ref ref="FILE_HTML" />
</root>
</configuration>
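
The FILE appender these configs keep pairs SizeAndTimeBasedRollingPolicy's %d token (daily rollover) with %i (size-based splitting within a day). A minimal programmatic sketch of the equivalent setup, assuming logback 1.2+; the encoder pattern is illustrative, not copied from the omitted STDOUT section:

import ch.qos.logback.classic.LoggerContext;
import ch.qos.logback.classic.encoder.PatternLayoutEncoder;
import ch.qos.logback.classic.spi.ILoggingEvent;
import ch.qos.logback.core.rolling.RollingFileAppender;
import ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy;
import ch.qos.logback.core.util.FileSize;

public class RollingSetup {
    public static void main(String[] args) {
        LoggerContext ctx = new LoggerContext();

        PatternLayoutEncoder encoder = new PatternLayoutEncoder();
        encoder.setContext(ctx);
        encoder.setPattern("%d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{50} - %msg%n");
        encoder.start();

        RollingFileAppender<ILoggingEvent> appender = new RollingFileAppender<>();
        appender.setContext(ctx);
        appender.setName("FILE");
        appender.setEncoder(encoder);

        SizeAndTimeBasedRollingPolicy<ILoggingEvent> policy = new SizeAndTimeBasedRollingPolicy<>();
        policy.setContext(ctx);
        policy.setParent(appender);
        // %d rolls daily; %i splits files that exceed maxFileSize within a day
        policy.setFileNamePattern("logs/stas-system-%d{yyyy-MM-dd}.%i.log");
        policy.setMaxHistory(30);                        // keep 30 days of history
        policy.setMaxFileSize(FileSize.valueOf("10MB")); // then roll to the next %i index
        policy.start();

        appender.setRollingPolicy(policy);
        appender.start();
    }
}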

View File

@ -1,5 +1,5 @@
server:
port: 8002
port: 8003
spring:
application:
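
The port move from 8002 to 8003 can be smoke-tested once the service is up; a tiny sketch, assuming the service runs on localhost:

import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.Socket;

public class PortCheck {
    public static void main(String[] args) throws IOException {
        // After this change the service should listen on 8003, not 8002
        try (Socket socket = new Socket()) {
            socket.connect(new InetSocketAddress("localhost", 8003), 1000);
            System.out.println("port 8003 is accepting connections");
        }
    }
}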

View File

@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration debug="false">
<!-- Define the storage location for log files -->
<property name="LOG_HOME" value="../logs" />
<property name="LOG_HOME" value="logs" />
<!--<property name="COLOR_PATTERN" value="%black(%contextName-) %red(%d{yyyy-MM-dd HH:mm:ss}) %green([%thread]) %highlight(%-5level) %boldMagenta( %replace(%caller{1}){'\t|Caller.{1}0|\r\n', ''})- %gray(%msg%xEx%n)" />-->
<!-- Console output -->
@ -17,7 +17,7 @@
<appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!-- Output filename pattern for log files -->
<FileNamePattern>${LOG_HOME}/jeecg-system-%d{yyyy-MM-dd}.%i.log</FileNamePattern>
<FileNamePattern>${LOG_HOME}/stas-weather-%d{yyyy-MM-dd}.%i.log</FileNamePattern>
<!-- Number of days to retain log files -->
<MaxHistory>30</MaxHistory>
<maxFileSize>10MB</maxFileSize>
@ -28,37 +28,6 @@
</encoder>
</appender>
<!-- Begin: ERROR log in HTML format -->
<appender name="HTML" class="ch.qos.logback.core.FileAppender">
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<!-- Set the log level: filter out INFO logs and output only ERROR logs -->
<level>ERROR</level>
</filter>
<encoder class="ch.qos.logback.core.encoder.LayoutWrappingEncoder">
<layout class="ch.qos.logback.classic.html.HTMLLayout">
<pattern>%p%d%msg%M%F{32}%L</pattern>
</layout>
</encoder>
<file>${LOG_HOME}/error-log.html</file>
</appender>
<!-- End: ERROR log in HTML format -->
<!-- Begin: daily rolling HTML-format log -->
<appender name="FILE_HTML" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!-- Output filename pattern for log files -->
<FileNamePattern>${LOG_HOME}/jeecg-system-%d{yyyy-MM-dd}.%i.html</FileNamePattern>
<!-- Number of days to retain log files -->
<MaxHistory>30</MaxHistory>
<MaxFileSize>10MB</MaxFileSize>
</rollingPolicy>
<encoder class="ch.qos.logback.core.encoder.LayoutWrappingEncoder">
<layout class="ch.qos.logback.classic.html.HTMLLayout">
<pattern>%p%d%msg%M%F{32}%L</pattern>
</layout>
</encoder>
</appender>
<!-- End: daily rolling HTML-format log -->
<!-- MyBatis log configuration -->
<logger name="org.apache.ibatis" level="TRACE" />
@ -70,8 +39,6 @@
<root level="INFO">
<appender-ref ref="STDOUT" />
<appender-ref ref="FILE" />
<appender-ref ref="HTML" />
<appender-ref ref="FILE_HTML" />
</root>
</configuration>

View File

@ -535,7 +535,7 @@
<!-- Current environment -->
<profile.name>dev</profile.name>
<!-- Nacos server address -->
<config.server-addr>jeecg-boot-nacos:8848</config.server-addr>
<config.server-addr>stas-nacos:8848</config.server-addr>
<!-- Nacos config center namespace, used to support multiple environments. This must be the namespace ID, not the name; empty by default -->
<config.namespace></config.namespace>
<!-- Nacos configuration group name -->
@ -565,7 +565,7 @@
<!-- Current environment -->
<profile.name>test</profile.name>
<!-- Nacos server address -->
<config.server-addr>jeecg-boot-nacos:8848</config.server-addr>
<config.server-addr>stas-nacos:8848</config.server-addr>
<!-- Nacos config center namespace, used to support multiple environments. This must be the namespace ID, not the name; empty by default -->
<config.namespace></config.namespace>
<!-- Nacos configuration group name -->
@ -583,7 +583,7 @@
<!-- Current environment -->
<profile.name>prod</profile.name>
<!-- Nacos server address -->
<config.server-addr>jeecg-boot-nacos:8848</config.server-addr>
<config.server-addr>stas-nacos:8848</config.server-addr>
<!-- Nacos config center namespace, used to support multiple environments. This must be the namespace ID, not the name; empty by default -->
<config.namespace></config.namespace>
<!-- Nacos configuration group name -->
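
These per-profile config.server-addr values are normally stamped into the Spring bootstrap configuration through Maven resource filtering, with the profile selected at build time via mvn -Pdev, -Ptest, or -Pprod. A hedged Java sketch reading the resolved address at runtime, assuming the filtered placeholder lands in the standard Spring Cloud Alibaba property:

import org.springframework.core.env.Environment;

public final class NacosAddressSupport {

    private NacosAddressSupport() {
    }

    // Assumes the Maven-filtered placeholder maps to the standard
    // Spring Cloud Alibaba property; adjust if the project wires it elsewhere.
    public static String serverAddr(Environment env) {
        return env.getProperty("spring.cloud.nacos.config.server-addr", "stas-nacos:8848");
    }
}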