Modify data analysis

This commit is contained in:
duwenyuan 2025-12-31 21:31:52 +08:00
parent 76a8d95664
commit d0e8848d43
2 changed files with 148 additions and 95 deletions

View File

@ -131,9 +131,9 @@
INNER JOIN
RNAUTO.GARDS_XE_RESULTS b
ON a.SAMPLE_ID = b.SAMPLE_ID
WHERE a.SAMPLE_TYPE = '#{sampleType}'
WHERE a.SAMPLE_TYPE = #{sampleType}
AND a.STATION_ID = #{station}
AND b.NUCLIDENAME = #{nuclideName}
AND b.NUCLIDE_NAME = #{nuclideName}
AND a.COLLECT_START BETWEEN TO_DATE(#{startTime}, 'YYYY-MM-DD HH24:MI:SS')
AND TO_DATE(#{endTime}, 'YYYY-MM-DD HH24:MI:SS')
</select>
@ -155,9 +155,9 @@
INNER JOIN
RNMAN.GARDS_NUCL_IDED b
ON a.SAMPLE_ID = b.SAMPLE_ID
WHERE a.SAMPLE_TYPE = '#{sampleType}'
WHERE a.SAMPLE_TYPE = #{sampleType}
AND a.STATION_ID = #{station}
AND b.NUCLIDENAME = #{nuclideName}
AND b.NUCLIDE_NAME = #{nuclideName}
AND a.COLLECT_START BETWEEN TO_DATE(#{startTime}, 'YYYY-MM-DD HH24:MI:SS')
AND TO_DATE(#{endTime}, 'YYYY-MM-DD HH24:MI:SS')
</select>
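Both hunks above make the same two corrections: the quotes around #{sampleType} are removed, and the column name is fixed from NUCLIDENAME to NUCLIDE_NAME. In MyBatis, a #{...} token is turned into a JDBC bind marker even when it sits inside quotes, so '#{sampleType}' ends up as the quoted literal '?' and the value never binds as intended. A minimal JDBC sketch of the difference, assuming an illustrative sample table name (the table behind alias "a" is not shown in this hunk):

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;

public class SampleTypeQuerySketch {
    // Correct form: the placeholder is a real bind parameter, which is what
    // WHERE a.SAMPLE_TYPE = #{sampleType} compiles down to after the fix.
    static ResultSet findBySampleType(Connection conn, String sampleType) throws Exception {
        // Table name is illustrative only.
        String sql = "SELECT SAMPLE_ID FROM RNAUTO.GARDS_SAMPLE_DATA WHERE SAMPLE_TYPE = ?";
        PreparedStatement ps = conn.prepareStatement(sql);
        ps.setString(1, sampleType); // value bound safely at execution time
        return ps.executeQuery();
    }

    // Broken form, roughly what '#{sampleType}' produced: the marker ends up quoted,
    // so the driver sees the literal string '?' instead of a bindable parameter.
    // String brokenSql = "SELECT SAMPLE_ID FROM ... WHERE SAMPLE_TYPE = '?'";
}

The NUCLIDE_NAME rename simply matches the actual column in GARDS_XE_RESULTS / GARDS_NUCL_IDED as referenced by the bind parameter #{nuclideName}.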

View File

@ -6,11 +6,8 @@ import com.fasterxml.jackson.core.type.TypeReference;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.toolkit.StringUtils;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.PropertyNamingStrategies;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections.CollectionUtils;
import org.jeecg.common.api.vo.Result;
import org.jeecg.common.constant.CommonConstant;
import org.jeecg.common.util.DateUtils;
@ -27,7 +24,6 @@ import org.jeecg.vo.StationInfoVO;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.io.File;
import java.text.SimpleDateFormat;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
@ -37,7 +33,9 @@ import java.util.stream.Collectors;
@Slf4j
@Service
@DS("ora")
public class SampleStatAnalysisService extends ServiceImpl<GardsSampleStatAnalysisMapper, GardsSampleData> implements ISampleStatAnalysisService {
public class SampleStatAnalysisService
extends ServiceImpl<GardsSampleStatAnalysisMapper, GardsSampleData>
implements ISampleStatAnalysisService {
private static final SimpleDateFormat SDF = new SimpleDateFormat("yyyy-MM-dd");
// Dynamic color palette: cycles automatically as the set of stations changes
private static final String[] DETECTION_COLORS = {
@ -52,7 +50,8 @@ public class SampleStatAnalysisService extends ServiceImpl<GardsSampleStatAnalys
@Autowired
private SysDefaultNuclideMapper defaultNuclideMapper;
public Result getSampleMonitorResult(String sampleType, Integer dataSource, Date startDate, Date endDate) {
public Result getSampleMonitorResult(String sampleType, Integer dataSource, Date startDate,
Date endDate) {
Result result = new Result();
try {
result.setCode(CommonConstant.SC_OK_200);
@ -79,11 +78,13 @@ public class SampleStatAnalysisService extends ServiceImpl<GardsSampleStatAnalys
switch (dataSource) {
case 1:
StationInfoDataList = this.baseMapper.getRnAutoSampleResult(sampleType, startTime, endTime);
StationInfoDataList =
this.baseMapper.getRnAutoSampleResult(sampleType, startTime, endTime);
break;
case 2:
StationInfoDataList = this.baseMapper.getRnManSampleResult(sampleType, startTime, endTime);
StationInfoDataList =
this.baseMapper.getRnManSampleResult(sampleType, startTime, endTime);
break;
}
@ -115,9 +116,12 @@ public class SampleStatAnalysisService extends ServiceImpl<GardsSampleStatAnalys
//how many stations fall within the time period
Map<String, List<Map<String, Object>>> groupedByMonth =
sortedList.stream()
.filter(station -> station.getCollectStop() != null) // filter out records whose collectStop is null
.filter(station -> station.getStationCode() != null) // filter out records whose stationCode is null
.filter(station -> station.getCategory() != null) // filter out records whose category is null
.filter(station -> station.getCollectStop() !=
null) // filter out records whose collectStop is null
.filter(station -> station.getStationCode() !=
null) // filter out records whose stationCode is null
.filter(station -> station.getCategory() !=
null) // filter out records whose category is null
.collect(Collectors.groupingBy(
station -> station.getCollectStop().toInstant()
.atZone(ZoneId.of("UTC"))
@ -132,28 +136,37 @@ public class SampleStatAnalysisService extends ServiceImpl<GardsSampleStatAnalys
)
),
stationCodeToCategories -> {
List<Map<String, Object>> resultList = new ArrayList<>();
stationCodeToCategories.forEach((stationCode, categories) -> {
Map<String, Object> entry = new HashMap<>();
entry.put("stationCode", stationCode);
//entry.put("categorys", new HashSet<>(categories)); // deduplicated set
List<Map<String, Object>> resultList =
new ArrayList<>();
stationCodeToCategories.forEach(
(stationCode, categories) -> {
Map<String, Object> entry =
new HashMap<>();
entry.put("stationCode", stationCode);
//entry.put("categorys", new HashSet<>(categories)); // deduplicated set
// count how many times each category occurs
Map<Integer, Long> categoryCount = categories.stream()
.filter(Objects::nonNull) // filter nulls again (defensive programming)
.collect(Collectors.groupingBy(
category -> category,
Collectors.counting()
));
Map<String, Long> levelCount = new HashMap<>();
categoryCount.forEach((category, count) -> {
String levelKey = "level" + category; // e.g. 1 -> "level1"
levelCount.put(levelKey, count);
});
entry.put("categoryCount", levelCount);
// count how many times each category occurs
Map<Integer, Long> categoryCount =
categories.stream()
.filter(Objects::nonNull) // filter nulls again (defensive programming)
.collect(
Collectors.groupingBy(
category -> category,
Collectors.counting()
));
Map<String, Long> levelCount =
new HashMap<>();
categoryCount.forEach(
(category, count) -> {
String levelKey = "level" +
category; // e.g. 1 -> "level1"
levelCount.put(levelKey,
count);
});
entry.put("categoryCount", levelCount);
resultList.add(entry);
});
resultList.add(entry);
});
return resultList;
}
)
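The reworked block above groups the filtered samples by month (derived from collectStop in UTC), then by station code, and finally counts how many samples of each category each station produced, publishing the counts under level<N> keys. A self-contained sketch of the same grouping idea, using simplified, hypothetical field names (month, stationCode, category stand in for the entity getters used above):

import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public class LevelCountSketch {
    // Hypothetical, simplified stand-in for the sample entity used above.
    record Sample(String month, String stationCode, int category) {}

    // month -> stationCode -> ("level" + category) -> count
    static Map<String, Map<String, Map<String, Long>>> countLevels(List<Sample> samples) {
        return samples.stream()
                .collect(Collectors.groupingBy(Sample::month,
                        Collectors.groupingBy(Sample::stationCode,
                                Collectors.groupingBy(s -> "level" + s.category(),
                                        Collectors.counting()))));
    }

    public static void main(String[] args) {
        List<Sample> samples = List.of(
                new Sample("2025-01", "CNP01", 1),
                new Sample("2025-01", "CNP01", 1),
                new Sample("2025-01", "CNP01", 3),
                new Sample("2025-02", "CNP02", 2));
        // e.g. {2025-01={CNP01={level1=2, level3=1}}, 2025-02={CNP02={level2=1}}} (map order may vary)
        System.out.println(countLevels(samples));
    }
}

The production code flattens each station's level counts into a list of entry maps per month, but the grouping and counting collectors follow the same pattern.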
@ -178,7 +191,8 @@ public class SampleStatAnalysisService extends ServiceImpl<GardsSampleStatAnalys
* @return
*/
@Override
public Result getSampleStatAnalysis(String stationCode, Integer dataSource, Date startDate, Date endDate) {
public Result getSampleStatAnalysis(String stationCode, Integer dataSource, Date startDate,
Date endDate) {
//declare the result map used for the return value
Map<String, Object> resultMap = new HashMap<>();
List<NuclideActConcIntvl> nuclideActConcIntvlList = new ArrayList<>();
@ -209,34 +223,44 @@ public class SampleStatAnalysisService extends ServiceImpl<GardsSampleStatAnalys
//RNAUTO
case 1:
//get the set of nuclides identified in the samples
nuclideActConcIntvlList = this.baseMapper.getRnAutoIdentifiedNuclides(stationCode, startTime, endTime);
nuclideActConcIntvlList =
this.baseMapper.getRnAutoIdentifiedNuclides(stationCode, startTime,
endTime);
//nuclide level time-series analysis
sampleLevelDataList = this.baseMapper.getRnAutoNuclideTimeSeriesAnalysis(stationCode, startTime, endTime);
sampleLevelDataList =
this.baseMapper.getRnAutoNuclideTimeSeriesAnalysis(stationCode,
startTime, endTime);
break;
//RNMAN
case 2:
//get the set of nuclides identified in the samples
nuclideActConcIntvlList = this.baseMapper.getRnManIdentifiedNuclides(stationCode, startTime, endTime);
nuclideActConcIntvlList =
this.baseMapper.getRnManIdentifiedNuclides(stationCode, startTime,
endTime);
//nuclide level time-series analysis
sampleLevelDataList = this.baseMapper.getRnManNuclideTimeSeriesAnalysis(stationCode, startTime, endTime);
sampleLevelDataList =
this.baseMapper.getRnManNuclideTimeSeriesAnalysis(stationCode,
startTime, endTime);
break;
}
//key = nuclide name, value = the nuclides identified in the samples
Map<String, List<NuclideActConcIntvl>> groupedByNuclideName = nuclideActConcIntvlList.stream()
.filter(p -> p.getNuclideName() != null)
.collect(Collectors.groupingBy(NuclideActConcIntvl::getNuclideName));
Map<String, List<NuclideActConcIntvl>> sortedByCount = groupedByNuclideName.entrySet().stream()
.sorted(Map.Entry.<String, List<NuclideActConcIntvl>>comparingByValue(
Comparator.comparingInt(List::size)
).reversed()) // .reversed() orders from most to fewest
.collect(Collectors.toMap(
Map.Entry::getKey,
Map.Entry::getValue,
(e1, e2) -> e1,
LinkedHashMap::new // use a LinkedHashMap to preserve the sorted order
));
Map<String, List<NuclideActConcIntvl>> groupedByNuclideName =
nuclideActConcIntvlList.stream()
.filter(p -> p.getNuclideName() != null)
.collect(Collectors.groupingBy(NuclideActConcIntvl::getNuclideName));
Map<String, List<NuclideActConcIntvl>> sortedByCount =
groupedByNuclideName.entrySet().stream()
.sorted(Map.Entry.<String, List<NuclideActConcIntvl>>comparingByValue(
Comparator.comparingInt(List::size)
).reversed()) // .reversed() orders from most to fewest
.collect(Collectors.toMap(
Map.Entry::getKey,
Map.Entry::getValue,
(e1, e2) -> e1,
LinkedHashMap::new // use a LinkedHashMap to preserve the sorted order
));
resultMap.put("nuclideActConcIntvlList", sortedByCount);
result.setSuccess(true);
result.setResult(resultMap);
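The pipeline just above orders the nuclide groups from most to fewest occurrences and collects them into a LinkedHashMap so the iteration order survives; with a plain HashMap the sort would be lost. A compact sketch of the same sort-by-group-size pattern on plain strings:

import java.util.Comparator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public class SortByCountSketch {
    public static void main(String[] args) {
        List<String> nuclides = List.of("Xe-133", "Xe-133", "Xe-131m", "Xe-133", "Xe-135");

        // Group identical names, mirroring groupingBy(NuclideActConcIntvl::getNuclideName).
        Map<String, List<String>> grouped = nuclides.stream()
                .collect(Collectors.groupingBy(n -> n));

        // Order the entries by group size, largest first, and keep that order.
        Map<String, List<String>> sortedByCount = grouped.entrySet().stream()
                .sorted(Map.Entry.<String, List<String>>comparingByValue(
                        Comparator.comparingInt(List::size)).reversed())
                .collect(Collectors.toMap(
                        Map.Entry::getKey,
                        Map.Entry::getValue,
                        (e1, e2) -> e1,       // merge function; keys are already unique
                        LinkedHashMap::new)); // LinkedHashMap preserves the sorted order

        System.out.println(sortedByCount.keySet()); // Xe-133 first; the two singletons may appear in either order
    }
}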
@ -255,6 +279,7 @@ public class SampleStatAnalysisService extends ServiceImpl<GardsSampleStatAnalys
/**
* Sample statistical analysis -- nuclide concentration time-series analysis
*
* @param sampleType
* @param stationCode
* @param nuclideName
@ -264,7 +289,9 @@ public class SampleStatAnalysisService extends ServiceImpl<GardsSampleStatAnalys
* @return
*/
@Override
public Result getNuclideActConcChartData(String sampleType,String stationCode, String nuclideName, Integer dataSource, Date startDate, Date endDate) throws JsonProcessingException {
public Result getNuclideActConcChartData(String sampleType, String stationCode,
String nuclideName, Integer dataSource, Date startDate,
Date endDate) throws JsonProcessingException {
Result result = new Result();
result.setCode(CommonConstant.SC_OK_200);
if (StringUtils.isBlank(stationCode)) {
@ -290,7 +317,9 @@ public class SampleStatAnalysisService extends ServiceImpl<GardsSampleStatAnalys
Map<String, Object> dataMap = new LinkedHashMap<>();
String schemaName = dataSource == 1 ? "RNAUTO" : "RNMAN";
// 1. Simulate fetching the raw data for this nuclide from the database
List<NuclideActConcIntvl> rawList = this.baseMapper.getIdentifiedNuclides(schemaName, nuclideName, stationCode, startTime, endTime);
List<NuclideActConcIntvl> rawList =
this.baseMapper.getIdentifiedNuclides(schemaName, nuclideName, stationCode,
startTime, endTime);
// 2. Sort (crucial): the X axis is a time axis, so the data must be in ascending time order
rawList.sort(Comparator.comparing(NuclideActConcIntvl::getCollectStop));
@ -308,16 +337,17 @@ public class SampleStatAnalysisService extends ServiceImpl<GardsSampleStatAnalys
for (NuclideActConcIntvl item : rawList) {
String timeStr = sdf.format(item.getCollectStop());
mdcList.add(new Object[]{timeStr, item.getMdc()});
thresholdList.add(new Object[]{timeStr, item.getThresholdValue()});
mdcList.add(new Object[] {timeStr, item.getMdc()});
thresholdList.add(new Object[] {timeStr, item.getThresholdValue()});
// handle category levels dynamically
int cat = item.getCategory();
String categoryStr = "category" + cat;
levelGroup.putIfAbsent(categoryStr, new ArrayList<>());
//double err = item.getConcErr() != null ? item.getConcErr() : 0;
double err =15;
levelGroup.get(categoryStr).add(new Object[]{timeStr, item.getConc(), err, err});
//TODO test data
double err = 15;
levelGroup.get(categoryStr).add(new Object[] {timeStr, item.getConc(), err, err});
// if (cat == 3) {
// // Level 3: carry the error dimensions [time, activity, ConcErr, Conc+ConcErr]
// double err = item.getConcErr() != null ? item.getConcErr() : 0;
@ -347,7 +377,7 @@ public class SampleStatAnalysisService extends ServiceImpl<GardsSampleStatAnalys
//TODO test code, to be removed
ObjectMapper objectMapper = new ObjectMapper();
//region data
String DataResult="{\n" +
String DataResult = "{\n" +
"\t\"success\": true,\n" +
"\t\"message\": \"\",\n" +
"\t\"code\": 200,\n" +
@ -3194,8 +3224,9 @@ public class SampleStatAnalysisService extends ServiceImpl<GardsSampleStatAnalys
"\t\"timestamp\": 1766671165792\n" +
"}";
//endregion
Result<Map<String, Object>> resultData = objectMapper.readValue(DataResult, new TypeReference<>() {
});
Result<Map<String, Object>> resultData =
objectMapper.readValue(DataResult, new TypeReference<>() {
});
result.setSuccess(true);
result.setResult(resultData);
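The hard-coded DataResult JSON is parsed through an anonymous TypeReference so Jackson retains the full generic target type instead of falling back to untyped maps. A minimal sketch of that pattern; the Payload class below is a hypothetical stand-in for the project's Result wrapper:

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.Map;

public class TypeReferenceSketch {
    // Hypothetical stand-in for a generic response wrapper such as Result<T>.
    public static class Payload<T> {
        public boolean success;
        public T result;
    }

    public static void main(String[] args) throws Exception {
        String json = "{\"success\": true, \"result\": {\"count\": 3}}";
        ObjectMapper mapper = new ObjectMapper();
        // The anonymous TypeReference subclass captures the full generic type,
        // so "result" is bound as Map<String, Object> rather than a raw Object.
        Payload<Map<String, Object>> payload =
                mapper.readValue(json, new TypeReference<Payload<Map<String, Object>>>() {});
        System.out.println(payload.result.get("count")); // prints 3
    }
}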
@ -3203,16 +3234,14 @@ public class SampleStatAnalysisService extends ServiceImpl<GardsSampleStatAnalys
}
/**
* Get the sample grades within the specified time range
*
*
*/
@Override
public Result getSampleGradeAnalysis(String sampleType, String station, Date startDate, Date endDate, Integer dataSource) {
public Result getSampleGradeAnalysis(String sampleType, String station, Date startDate,
Date endDate, Integer dataSource) {
//declare the result map used for the return value
Map<String, Object> resultMap = new HashMap<>();
@ -3244,11 +3273,15 @@ public class SampleStatAnalysisService extends ServiceImpl<GardsSampleStatAnalys
switch (dataSource) {
case 1:
sampleDataList = this.baseMapper.getRnAutoSampleGradeAnalysis(sampleType, station, startTime, endTime);
sampleDataList =
this.baseMapper.getRnAutoSampleGradeAnalysis(sampleType, station,
startTime, endTime);
break;
case 2:
sampleDataList = this.baseMapper.getRnManSampleGradeAnalysis(sampleType, station, startTime, endTime);
sampleDataList =
this.baseMapper.getRnManSampleGradeAnalysis(sampleType, station,
startTime, endTime);
break;
}
@ -3265,8 +3298,6 @@ public class SampleStatAnalysisService extends ServiceImpl<GardsSampleStatAnalys
}
/*** Sample activity concentration interval frequency analysis
* Sample activity concentration interval frequency analysis
* @param sampleType sample type
@ -3279,7 +3310,9 @@ public class SampleStatAnalysisService extends ServiceImpl<GardsSampleStatAnalys
*/
@Override
public Result getSampleActConcIntvlAnalysis(String sampleType, String station, String nuclideName, Integer dataSource, Date startDate, Date endDate) {
public Result getSampleActConcIntvlAnalysis(String sampleType, String station,
String nuclideName, Integer dataSource,
Date startDate, Date endDate) {
//declare the result map used for the return value
Map<String, Object> resultMap = new HashMap<>();
List<NuclideActConcIntvl> nuclideActConcIntvls = new ArrayList<>();
@ -3317,21 +3350,30 @@ public class SampleStatAnalysisService extends ServiceImpl<GardsSampleStatAnalys
switch (dataSource) {
//RNAUTO
case 1:
nuclideActConcIntvls = this.baseMapper.getRnautoPNuclideActConcIntvl(sampleType, station, nuclideName, startTime, endTime);
nuclideActConcIntvls =
this.baseMapper.getRnautoPNuclideActConcIntvl(sampleType,
station, nuclideName, startTime, endTime);
break;
//RNMAN
case 2:
nuclideActConcIntvls = this.baseMapper.getRnautoNuclideActConcIntvl(sampleType, station, nuclideName, startTime, endTime);
nuclideActConcIntvls =
this.baseMapper.getRnmanPNuclideActConcIntvl(sampleType,
station, nuclideName, startTime, endTime);
break;
}
break;
case "B":
switch (dataSource) {
case 1:
nuclideActConcIntvls = this.baseMapper.getRnmanPNuclideActConcIntvl(sampleType, station, nuclideName, startTime, endTime);
nuclideActConcIntvls =
this.baseMapper.getRnautoNuclideActConcIntvl(sampleType,
station, nuclideName, startTime, endTime);
break;
case 2:
nuclideActConcIntvls = this.baseMapper.getRnmanNuclideActConcIntvl(sampleType, station, nuclideName, startTime, endTime);
nuclideActConcIntvls =
this.baseMapper.getRnmanNuclideActConcIntvl(sampleType, station,
nuclideName, startTime, endTime);
break;
}
@ -3340,26 +3382,27 @@ public class SampleStatAnalysisService extends ServiceImpl<GardsSampleStatAnalys
}
// File file=new File("C:\\Users\\cnndc\\Desktop\\数据导出\\无标题.json");
// ObjectMapper objectMapper = new ObjectMapper();
// Handle Double/Integer values such as "3.776103e+03" -> 3776.103
// objectMapper.configure(DeserializationFeature.ACCEPT_EMPTY_STRING_AS_NULL_OBJECT, true);
// nuclideActConcIntvls= objectMapper.readValue(file, new TypeReference<List<NuclideActConcIntvl>>() {});
//get the number of times each concentration value occurs
//get the list of concentration values
List<Double> data = DistributionAnalysisToolkit.convertConcToDoubleList(nuclideActConcIntvls);
List<Double> data =
DistributionAnalysisToolkit.convertConcToDoubleList(nuclideActConcIntvls);
// set the interval parameters
double start = 0; // interval start value
double step = 200; // interval step (bin width)
// 1. Interval statistics
List<DistributionAnalysisToolkit.IntervalStat> stats = DistributionAnalysisToolkit.calculateIntervalStats(nuclideActConcIntvls, start, step);
List<DistributionAnalysisToolkit.IntervalStat> stats =
DistributionAnalysisToolkit.calculateIntervalStats(nuclideActConcIntvls, start,
step);
// 3. Cumulative distribution function (CDF)
List<DistributionAnalysisToolkit.CDFPoint> cdfPoints = DistributionAnalysisToolkit.calculateCDF(data);
List<DistributionAnalysisToolkit.CDFPoint> cdfPoints =
DistributionAnalysisToolkit.calculateCDF(data);
// 4. Kernel density estimation (KDE)
List<DistributionAnalysisToolkit.KDEPoint> kdePoints = DistributionAnalysisToolkit.autoKDE(data, DistributionAnalysisToolkit.GAUSSIAN_KERNEL);
List<DistributionAnalysisToolkit.KDEPoint> kdePoints =
DistributionAnalysisToolkit.autoKDE(data,
DistributionAnalysisToolkit.GAUSSIAN_KERNEL);
//get the cumulative sum of all concentrations
List<Double> cumulative = DistributionAnalysisToolkit.cumulativeSum(data);
//get the 95% cumulative line
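The two comments above describe taking the running cumulative sum of the concentration values and locating the 95% cumulative line; the toolkit call itself lies outside this hunk. A hedged sketch of one way such a line can be computed, assuming it is the value at which the cumulative sum first reaches 95% of the total:

import java.util.List;

public class CumulativeLineSketch {
    // Returns the value at which the running sum first reaches the given fraction
    // of the total; the input is assumed to be sorted in ascending order.
    static double cumulativeLine(List<Double> sortedValues, double fraction) {
        double total = sortedValues.stream().mapToDouble(Double::doubleValue).sum();
        double running = 0;
        for (double v : sortedValues) {
            running += v;
            if (running >= fraction * total) {
                return v;
            }
        }
        return sortedValues.isEmpty() ? 0 : sortedValues.get(sortedValues.size() - 1);
    }

    public static void main(String[] args) {
        List<Double> conc = List.of(10.0, 20.0, 30.0, 40.0, 400.0);
        System.out.println(cumulativeLine(conc, 0.95)); // 400.0
    }
}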
@ -3392,7 +3435,9 @@ public class SampleStatAnalysisService extends ServiceImpl<GardsSampleStatAnalys
* @return the nuclide activity concentration information
*/
@Override
public Result getSampleActConcTimeSeqAnalysis(String sampleType, String station, String nuclideName, Integer dataSource, Date startDate, Date endDate) {
public Result getSampleActConcTimeSeqAnalysis(String sampleType, String station,
String nuclideName, Integer dataSource,
Date startDate, Date endDate) {
Result result = new Result();
//declare the result map used for the return value
Map<String, Object> resultMap = new HashMap<>();
@ -3459,7 +3504,9 @@ public class SampleStatAnalysisService extends ServiceImpl<GardsSampleStatAnalys
String schemaName = dataSource == 1 ? "RNAUTO" : "RNMAN";
List<SampleLevelData> sampleDatass = this.baseMapper.getNuclideTimeSeriesAnalysis(schemaName, station, nuclideName, startTime, endTime);
List<SampleLevelData> sampleDatass =
this.baseMapper.getNuclideTimeSeriesAnalysis(schemaName, station, nuclideName,
startTime, endTime);
// thresholdResultHisList = this.baseMapper.selectByCondition(schemaName, Arrays.asList(Integer.valueOf(station))
@ -3490,7 +3537,9 @@ public class SampleStatAnalysisService extends ServiceImpl<GardsSampleStatAnalys
* @param endDate end time
* @return
*/
public Result getNuclideActivityConcAnalyze(String sampleType, Integer[] stationIds, String nuclideName, Integer dataSource, Date startDate, Date endDate) {
public Result getNuclideActivityConcAnalyze(String sampleType, Integer[] stationIds,
String nuclideName, Integer dataSource,
Date startDate, Date endDate) {
Map<String, Object> resultMap = new HashMap<>();
List<NuclideActConcIntvl> nuclideActConcIntvls = new ArrayList<>();
@ -3519,17 +3568,22 @@ public class SampleStatAnalysisService extends ServiceImpl<GardsSampleStatAnalys
//endregion
switch (dataSource) {
case 1:
nuclideActConcIntvls = this.baseMapper.getRnAutoAnalyzeNuclideActivityConc(sampleType, nuclideName, stationIds, startTime, endTime);
nuclideActConcIntvls =
this.baseMapper.getRnAutoAnalyzeNuclideActivityConc(sampleType,
nuclideName, stationIds, startTime, endTime);
break;
case 2:
nuclideActConcIntvls = this.baseMapper.getRnManAnalyzeNuclideActivityConc(sampleType, nuclideName, stationIds, startTime, endTime);
nuclideActConcIntvls =
this.baseMapper.getRnManAnalyzeNuclideActivityConc(sampleType,
nuclideName, stationIds, startTime, endTime);
break;
}
//resultMap.put("nuclideInfoList", nuclideActConcIntvls);
resultMap.put("nuclideInfoList", convertToChartVO(nuclideActConcIntvls, startDate, endDate, unit));
resultMap.put("nuclideInfoList",
convertToChartVO(nuclideActConcIntvls, startDate, endDate, unit));
result.setSuccess(true);
result.setResult(resultMap);
return result;
@ -3539,7 +3593,8 @@ public class SampleStatAnalysisService extends ServiceImpl<GardsSampleStatAnalys
}
}
private NuclideActConcIntvlVO convertToChartVO(List<NuclideActConcIntvl> nuclideActConcIntvls, Date startDate, Date endDate, String unit) {
private NuclideActConcIntvlVO convertToChartVO(List<NuclideActConcIntvl> nuclideActConcIntvls,
Date startDate, Date endDate, String unit) {
if (nuclideActConcIntvls == null || nuclideActConcIntvls.isEmpty()) {
return new NuclideActConcIntvlVO();
}
@ -3568,7 +3623,8 @@ public class SampleStatAnalysisService extends ServiceImpl<GardsSampleStatAnalys
String stationCode = records.stream()
.findFirst()
.map(record -> record.getStationId() != null ? record.getStationId().toString() : null)
.map(record -> record.getStationId() != null ?
record.getStationId().toString() : null)
.orElse("UNKNOWN");
series.setStationCode(stationCode);
series.setStationName(displayCode); // can be extended later
@ -3651,9 +3707,6 @@ public class SampleStatAnalysisService extends ServiceImpl<GardsSampleStatAnalys
}
public static double findMaxValue(List<NuclideActConcIntvl> list) {
if (list == null || list.isEmpty()) {
// handle the empty-collection case and return a default value