Sync mdc branch

qiaoqinzheng 2024-01-18 17:50:52 +08:00
parent 2f1f658d33
commit df15a907e9
6 changed files with 203 additions and 29 deletions

View File

@@ -260,4 +260,9 @@ public class SpectrumAnalysesController {
         spectrumAnalysisService.saveToTxt(rrrLogInfo, request, response);
     }
 
+    @GetMapping("xeComparison")
+    public Result xeComparison(String sampleFileName, HttpServletRequest request) {
+        return spectrumAnalysisService.xeComparison(sampleFileName, request);
+    }
+
 }
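For orientation, a minimal sketch of how a client might call the new endpoint. The controller's base path, host and the sample file name are not shown in this diff, so they are placeholders below; only the "xeComparison" mapping and the sampleFileName parameter come from this commit, and authentication (the JWT token the service reads from the request) is omitted.

import org.springframework.web.client.RestTemplate;

public class XeComparisonClientSketch {
    public static void main(String[] args) {
        // Hypothetical call; "/spectrumAnalysis", host/port and the file name are assumptions.
        RestTemplate restTemplate = new RestTemplate();
        String url = "http://localhost:8080/spectrumAnalysis/xeComparison?sampleFileName={name}";
        String body = restTemplate.getForObject(url, String.class, "SAMPLE_FILE_NAME.PHD");
        System.out.println(body);
    }
}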

View File

@@ -1,7 +1,8 @@
 package org.jeecg.modules.service;
 
-import org.jeecg.modules.entity.vo.PeakInfo;
+import org.jeecg.modules.entity.GardsXeResultsView;
 import org.jeecg.modules.entity.vo.TableNuclideActivity;
+import org.jeecg.modules.entity.vo.TablePeak;
 
 import java.sql.Connection;
 import java.util.List;
@@ -12,10 +13,12 @@ public interface IDataService {
 
     Connection connectOverSea();
 
-    List<PeakInfo> viewPeaks(String siteDetCode, String spectralQualifier, Integer stationId, String collectStart, String acquisitionStop, Connection conn);
+    List<TablePeak> viewPeaks(String siteDetCode, String spectralQualifier, Integer stationId, String collectStart, String acquisitionStop, Connection conn);
 
     List<TableNuclideActivity> viewNucl(String siteDetCode, String spectralQualifier, Integer stationId, String collectStart, String acquisitionStop, Connection conn);
 
+    List<GardsXeResultsView> viewBetaXeResult(String siteDetCode, String spectralQualifier, Integer stationId, String collectStart, String acquisitionStop, Connection conn);
+
     void viewStations();
 }
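GardsXeResultsView itself is not part of this diff; judging from the setters called in DataServiceImpl further down, it can be read as roughly the following shape. This is a sketch only — the real class in org.jeecg.modules.entity may differ.

// Sketch of the fields implied by the setters used in viewBetaXeResult;
// the actual GardsXeResultsView is not shown in this commit.
public class GardsXeResultsView {
    private String nuclideName; // "XE131M", "XE133M", "XE133" or "XE135"
    private String conc;        // concentration, formatted with NumberFormatUtil
    private String concErr;     // concentration error
    private String mdc;         // minimum detectable concentration
    private Integer nidFlag;    // nuclide identification flag (NID_FLAG column)

    public String getNuclideName() { return nuclideName; }
    public void setNuclideName(String nuclideName) { this.nuclideName = nuclideName; }
    public void setConc(String conc) { this.conc = conc; }
    public void setConcErr(String concErr) { this.concErr = concErr; }
    public void setMdc(String mdc) { this.mdc = mdc; }
    public void setNidFlag(Integer nidFlag) { this.nidFlag = nidFlag; }
}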

View File

@@ -91,4 +91,7 @@ public interface ISpectrumAnalysisService {
     void saveToExcel(RRRLogInfo rrrLogInfo, HttpServletRequest request, HttpServletResponse response);
 
     void saveToTxt(RRRLogInfo rrrLogInfo, HttpServletRequest request, HttpServletResponse response);
+
+    Result xeComparison(String sampleFileName, HttpServletRequest request);
 }

View File

@@ -2,10 +2,13 @@ package org.jeecg.modules.service.impl;
 import com.baomidou.mybatisplus.core.toolkit.CollectionUtils;
 import com.baomidou.mybatisplus.core.toolkit.StringPool;
+import com.baomidou.mybatisplus.core.toolkit.StringUtils;
 import org.jeecg.common.util.NumberFormatUtil;
 import org.jeecg.common.util.RedisUtil;
+import org.jeecg.modules.entity.GardsXeResultsView;
 import org.jeecg.modules.entity.vo.PeakInfo;
 import org.jeecg.modules.entity.vo.TableNuclideActivity;
+import org.jeecg.modules.entity.vo.TablePeak;
 import org.jeecg.modules.service.IDataService;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Value;
@@ -62,10 +65,10 @@ public class DataServiceImpl implements IDataService {
     }
 
     @Override
-    public List<PeakInfo> viewPeaks(String siteDetCode, String spectralQualifier, Integer stationId, String collectStart, String acquisitionStop, Connection conn) {
+    public List<TablePeak> viewPeaks(String siteDetCode, String spectralQualifier, Integer stationId, String collectStart, String acquisitionStop, Connection conn) {
         Statement statement = null;
         Integer sampleId = null;
-        List<PeakInfo> peakInfoList = new LinkedList<>();
+        List<TablePeak> peakInfoList = new LinkedList<>();
         try {
             statement = conn.createStatement();
             // Build the SQL that looks up the sampleId by detector code, station id, collection start time and acquisition stop time
@@ -99,21 +102,22 @@ public class DataServiceImpl implements IDataService {
                     double significance = executeQuery.getDouble("DETECTABILITY");
                     int peakId = executeQuery.getInt("PEAK_ID");
                     // Declare a PeakInfo entity to store the query results
-                    PeakInfo peak = new PeakInfo();
-                    peak.index = peakId;
-                    peak.area = area;
-                    peak.areaErr = areaErr;
-                    peak.peakCentroid = centroChannel;
-                    peak.energy = energy;
-                    peak.fwhm = fwhm;
-                    peak.multiIndex = multiplet;
-                    peak.significance = significance;
+                    TablePeak peak = new TablePeak();
+                    peak.setNo(peakId);
+                    peak.setNetArea(String.format("%.3f", area));
+                    peak.setAreaErr(String.format("%.3f", areaErr));
+                    peak.setCentroid(String.format("%.3f", centroChannel));
+                    peak.setEnergy(String.format("%.3f", energy));
+                    peak.setFwhm(String.format("%.3f", fwhm));
+                    peak.setMultiplet(String.valueOf(multiplet));
+                    peak.setSignificant(String.format("%.3f", significance));
+                    peak.setIndentify("");
                     peakInfoList.add(peak);
                 }
                 // Check whether any peak information was found
                 if (CollectionUtils.isNotEmpty(peakInfoList)) {
                     // Sort by peak index
-                    peakInfoList = peakInfoList.stream().sorted(Comparator.comparing(item-> item.index)).collect(Collectors.toList());
+                    peakInfoList = peakInfoList.stream().sorted(Comparator.comparing(item-> item.getNo())).collect(Collectors.toList());
                     // Peak info is not empty: query the peak-nuclide table and read the names of the nuclides associated with each peak
                     String nuclIdedSql = "SELECT NAME,PEAK FROM RMSMAN.GARDS_NUCL_LINES_IDED " +
                             "WHERE SAMPLE_ID = "+sampleId;
@@ -121,11 +125,13 @@ public class DataServiceImpl implements IDataService {
                     while (resultSet.next()) {
                         String name = resultSet.getString("NAME");
                         int peak = resultSet.getInt("PEAK");
-                        peakInfoList.stream().forEach(item->{
-                            if (item.index == peak) {
-                                item.nuclides.add(name+ StringPool.SEMICOLON);
-                            }
-                        });
+                        for (TablePeak item : peakInfoList) {
+                            if (item.getNo() == peak) {
+                                String indentify = item.getIndentify();
+                                indentify+=name + StringPool.SEMICOLON;
+                                item.setIndentify(indentify);
+                            }
+                        }
                     }
                 }
             }
@@ -168,7 +174,7 @@ public class DataServiceImpl implements IDataService {
             if(Objects.nonNull(sampleId)) {
                 // Build the SQL that queries the nuclide information table
                 String nuclSql = "SELECT NAME,HALFLIFE,MDA,ACTIV_DECAY,ACTIV_KEY,ACTIV_KEY_ERR FROM RMSMAN.GARDS_NUCL_IDED " +
-                        "WHERE SAMPLE_ID = "+sampleId;
+                        "WHERE SAMPLE_ID = "+sampleId + " AND NID_FLAG = 1";
                 // Execute the SQL and read the result
                 ResultSet executeQuery = statement.executeQuery(nuclSql);
                 while (executeQuery.next()) {
@@ -223,6 +229,75 @@ public class DataServiceImpl implements IDataService {
         return nuclideActivityList;
     }
 
+    @Override
+    public List<GardsXeResultsView> viewBetaXeResult(String siteDetCode, String spectralQualifier, Integer stationId, String collectStart, String acquisitionStop, Connection conn) {
+        Statement statement = null;
+        Integer sampleId = null;
+        List<GardsXeResultsView> xeResultsViewList = new LinkedList<>();
+        try {
+            statement = conn.createStatement();
+            // Build the SQL that looks up the sampleId by detector code, station id, collection start time and acquisition stop time
+            String sql = "";
+            sql+="SELECT SAMPLE_ID FROM RMSMAN.GARDS_SAMPLE_DATA ";
+            sql+="WHERE TRIM(SITE_DET_CODE) = '"+siteDetCode+"' ";
+            if (Objects.nonNull(stationId)) {
+                sql+="AND STATION_ID = "+stationId+" ";
+            }
+            sql+="AND SPECTRAL_QUALIFIER = '" + spectralQualifier +"' ";
+            sql+="AND COLLECT_START = TO_DATE( '"+collectStart+"', 'YYYY-MM-DD HH24:MI:SS' ) ";
+            sql+="AND ACQUISITION_STOP = TO_DATE( '"+acquisitionStop+"', 'YYYY-MM-DD HH24:MI:SS' )";
+            ResultSet query = statement.executeQuery(sql);
+            while (query.next()) {
+                sampleId = query.getInt("SAMPLE_ID");
+            }
+            // Check whether a sampleId was found; if so, continue querying
+            if(Objects.nonNull(sampleId)) {
+                // Build the SQL that reads the beta analysis result table for this sampleId
+                String xeResultSql = "SELECT NUCLIDE_ID,CONC,CONC_ERR,MDC,NID_FLAG FROM RMSMAN.GARDS_BG_ISOTOPE_CONCS " +
+                        "WHERE SAMPLE_ID = "+sampleId;
+                // Execute the SQL to get the query result
+                ResultSet executeQuery = statement.executeQuery(xeResultSql);
+                while (executeQuery.next()) {
+                    int nuclideId = executeQuery.getInt("NUCLIDE_ID");
+                    double conc = executeQuery.getDouble("CONC");
+                    double concErr = executeQuery.getDouble("CONC_ERR");
+                    double mdc = executeQuery.getDouble("MDC");
+                    int nidFlag = executeQuery.getInt("NID_FLAG");
+                    // Declare a XeResult entity to store the query results
+                    GardsXeResultsView xeResultsView = new GardsXeResultsView();
+                    if (8 == nuclideId) {
+                        xeResultsView.setNuclideName("XE131M");
+                    } else if (9 == nuclideId) {
+                        xeResultsView.setNuclideName("XE133M");
+                    } else if (10 == nuclideId) {
+                        xeResultsView.setNuclideName("XE133");
+                    } else if (11 == nuclideId) {
+                        xeResultsView.setNuclideName("XE135");
+                    }
+                    xeResultsView.setConc(NumberFormatUtil.numberFormat(String.valueOf(conc)));
+                    xeResultsView.setConcErr(NumberFormatUtil.numberFormat(String.valueOf(concErr)));
+                    xeResultsView.setMdc(NumberFormatUtil.numberFormat(String.valueOf(mdc)));
+                    xeResultsView.setNidFlag(nidFlag);
+                    xeResultsViewList.add(xeResultsView);
+                }
+                if (CollectionUtils.isNotEmpty(xeResultsViewList)) {
+                    xeResultsViewList = xeResultsViewList.stream().sorted(Comparator.comparing(GardsXeResultsView::getNuclideName)).collect(Collectors.toList());
+                }
+            }
+        } catch (SQLException e) {
+            throw new RuntimeException(e);
+        } finally {
+            try {
+                if (Objects.nonNull(statement)) {
+                    statement.close();
+                }
+            } catch (SQLException e) {
+                throw new RuntimeException(e);
+            }
+        }
+        return xeResultsViewList;
+    }
+
     @Override
     public void viewStations() {
         Map<String, Integer> stationMap = new HashMap<>();
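Both viewPeaks and the new viewBetaXeResult build their SQL by string concatenation. As a point of comparison, a minimal sketch of the same sampleId lookup with bind variables — table and column names are taken from the diff, while the helper class and method are hypothetical and not part of this commit:

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

// Hypothetical helper: the same GARDS_SAMPLE_DATA lookup as above, but with a PreparedStatement.
public class SampleIdLookupSketch {
    public static Integer querySampleId(Connection conn, String siteDetCode, Integer stationId,
                                        String spectralQualifier, String collectStart,
                                        String acquisitionStop) throws SQLException {
        String sql = "SELECT SAMPLE_ID FROM RMSMAN.GARDS_SAMPLE_DATA "
                + "WHERE TRIM(SITE_DET_CODE) = ? "
                + (stationId != null ? "AND STATION_ID = ? " : "")
                + "AND SPECTRAL_QUALIFIER = ? "
                + "AND COLLECT_START = TO_DATE(?, 'YYYY-MM-DD HH24:MI:SS') "
                + "AND ACQUISITION_STOP = TO_DATE(?, 'YYYY-MM-DD HH24:MI:SS')";
        try (PreparedStatement ps = conn.prepareStatement(sql)) {
            int i = 1;
            ps.setString(i++, siteDetCode);
            if (stationId != null) {
                ps.setInt(i++, stationId);
            }
            ps.setString(i++, spectralQualifier);
            ps.setString(i++, collectStart);
            ps.setString(i++, acquisitionStop);
            try (ResultSet rs = ps.executeQuery()) {
                // Returns the last-read SAMPLE_ID semantics of the original loop collapses to the first row here.
                return rs.next() ? rs.getInt("SAMPLE_ID") : null;
            }
        }
    }
}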

View File

@@ -553,9 +553,24 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
             }
             // Read the cached MDC calculation info from redis
             Map<String, CalMDCInfo> mdcInfoMap = (Map<String, CalMDCInfo>) redisUtil.get("mdcInfoMap-"+phd.getHeader().getSystem_type());
+            // When loaded from the database: if the MDC calculation result is empty, fill in the cached data; otherwise keep the database-loaded MDC data
+            if (CollectionUtils.isEmpty(phd.getMdcInfoMap())) {
             if (CollectionUtils.isNotEmpty(mdcInfoMap)) {
                 phd.setMdcInfoMap(mdcInfoMap);
             }
+            } else {
+                if (CollectionUtils.isNotEmpty(mdcInfoMap)) {
+                    Map<String, CalMDCInfo> infoMap = phd.getMdcInfoMap();
+                    for (Map.Entry<String, CalMDCInfo> entry:infoMap.entrySet()) {
+                        String nuclName = entry.getKey();
+                        CalMDCInfo info = mdcInfoMap.get(nuclName);
+                        if (Objects.nonNull(info)) {
+                            CalMDCInfo mdcInfo = entry.getValue();
+                            mdcInfo.setHalflife(info.getHalflife());
+                        }
+                    }
+                }
+            }
         }
         // Get the color configuration of the current role
         Map<String, String> colorMap = sysUserColorService.initColor(userName);
@@ -4368,20 +4383,26 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
     }
 
     @Override
-    public void exportRadionuclideActivity(Integer sampleId, String fileName, HttpServletRequest request, HttpServletResponse response) {
+    public void exportRadionuclideActivity(Integer sampleId, String fileName, String arTime, String crTime, HttpServletRequest request, HttpServletResponse response) {
         Result<Map<String, Object>> result = radionuclideActivity(sampleId, fileName, request);
         Map<String, Object> dataMap = result.getResult();
         if (MapUtil.isEmpty(dataMap)) return;
+        String formater = DateConstant.DATE_TIME;
         Date actRef = (Date) dataMap.get("dateTime_act_ref");
         Date conRef = (Date) dataMap.get("dateTime_con_ref");
-        if (ObjectUtil.isNotNull(actRef)) {
-            String actTime = DateUtil.format(actRef, DateConstant.DATE_TIME);
-            dataMap.put("dateTime_act_ref", actTime);
+        if (StrUtil.isNotBlank(arTime)) {
+            dataMap.put("dateTime_act_ref", arTime);
+        } else {
+            if (ObjectUtil.isNotNull(actRef))
+                dataMap.put("dateTime_act_ref", DateUtil.format(actRef, formater));
         }
-        if (ObjectUtil.isNotNull(conRef)) {
-            String conTime = DateUtil.format(conRef, DateConstant.DATE_TIME);
-            dataMap.put("dateTime_con_ref", conTime);
+        if (StrUtil.isNotBlank(crTime)) {
+            dataMap.put("dateTime_con_ref", crTime);
+        } else {
+            if (ObjectUtil.isNotNull(conRef))
+                dataMap.put("dateTime_con_ref", DateUtil.format(conRef, formater));
         }
         String export = "RadionuclideActivity-Gamma.xls";
         String template = ExportTemplate.RadionuclideActivity_G.getName();
         ExportUtil.exportXls(response, template, dataMap, export);
@@ -5927,7 +5948,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
     public Result peakComparison(String fileName, HttpServletRequest request) {
         Result result = new Result();
         Connection conn = null;
-        List<PeakInfo> peakInfoList = new LinkedList<>();
+        List<TablePeak> peakInfoList = new LinkedList<>();
         // Get the user name
         String userName = JwtUtil.getUserNameByToken(request);
         // Read the cached content
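In the reworked exportRadionuclideActivity, a non-blank arTime/crTime supplied by the caller now takes precedence over the reference dates stored in the result map, which remain the fallback. The precedence can be summarised as follows; the class and method names in this sketch are illustrative only, and the diff itself uses StrUtil/DateUtil/ObjectUtil rather than the plain JDK calls shown here:

import java.text.SimpleDateFormat;
import java.util.Date;

// Illustrative only: the caller-supplied time string wins, otherwise the stored Date is formatted.
public class RefTimeSketch {
    static String resolveRefTime(String callerSupplied, Date stored, String pattern) {
        if (callerSupplied != null && !callerSupplied.trim().isEmpty()) {
            return callerSupplied;
        }
        return stored == null ? null : new SimpleDateFormat(pattern).format(stored);
    }
}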

View File

@@ -57,6 +57,8 @@ import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 import java.io.*;
 import java.net.URLEncoder;
+import java.sql.Connection;
+import java.sql.SQLException;
 import java.text.ParseException;
 import java.time.Instant;
 import java.time.LocalDateTime;
@@ -118,6 +120,8 @@ public class SpectrumAnalysisServiceImpl extends AbstractLogOrReport implements
     private RedisStreamUtil redisStreamUtil;
     @Autowired
     private NameStandUtil nameStandUtil;
+    @Autowired
+    private IDataService dataService;
 
     @Override
@@ -6036,4 +6040,67 @@ public class SpectrumAnalysisServiceImpl extends AbstractLogOrReport implements
         return spectrumData;
     }
 
+    @Override
+    public Result xeComparison(String sampleFileName, HttpServletRequest request) {
+        Result result = new Result();
+        Connection conn = null;
+        List<GardsXeResultsView> xeResultsViewList = new LinkedList<>();
+        // Get the user name
+        String userName = JwtUtil.getUserNameByToken(request);
+        // Get the local cache
+        Cache<String, BetaDataFile> cache = betaCache.getBetaCache();
+        BetaDataFile betaDataFile = cache.getIfPresent(sampleFileName + "-" + userName);
+        if (Objects.isNull(betaDataFile)) {
+            result.error500("Load basic file information first!");
+            return result;
+        }
+        Map<String, Integer> idcStationMap = (Map<String, Integer>) redisUtil.get("idcStationMap");
+        try {
+            EnergySpectrumStruct struct = betaDataFile.getSampleStruct();
+            String collectStart = "";
+            if (struct.collection_start_time.indexOf(StringPool.DOT) > 0) {
+                collectStart = struct.collection_start_date + " " + struct.collection_start_time.substring(0, struct.collection_start_time.indexOf(StringPool.DOT));
+            } else {
+                collectStart = struct.collection_start_date + " " + struct.collection_start_time;
+            }
+            // Get the acquisition start time
+            Date acqStart = DateUtils.parseDate(struct.acquisition_start_date + " " + struct.acquisition_start_time);
+            // Compute the milliseconds corresponding to the acquisition stop time
+            long stopTime = (long) (acqStart.getTime() + (struct.acquisition_real_time * 1000));
+            // Get the acquisition stop time from the milliseconds
+            Date acquisitionStopDate = new Date(stopTime);
+            // Format the acquisition stop time as a string
+            String acquisitionStop = DateUtils.formatDate(acquisitionStopDate, "yyyy-MM-dd HH:mm:ss");
+            // Look up the station id for the current station from the cached idc station info
+            Integer stationId = null;
+            if (CollectionUtils.isNotEmpty(idcStationMap)) {
+                stationId = idcStationMap.get(struct.site_code);
+            }
+            // Connect to the locally synchronized idc database
+            conn = dataService.connectInland();
+            // If the connection is null, connect to the international idc database instead
+            if (Objects.isNull(conn)) {
+                conn = dataService.connectOverSea();
+            }
+            // Check whether the connection succeeded
+            if (Objects.nonNull(conn)) {
+                // Query the comparison data from the xe result table produced by the beta analysis
+                xeResultsViewList = dataService.viewBetaXeResult(struct.detector_code, struct.spectrum_quantity, stationId, collectStart, acquisitionStop, conn);
+            }
+            result.setSuccess(true);
+            result.setResult(xeResultsViewList);
+        } catch (ParseException e) {
+            throw new RuntimeException(e);
+        } finally {
+            try {
+                if (Objects.nonNull(conn)) {
+                    conn.close();
+                }
+            } catch (SQLException e) {
+                throw new RuntimeException(e);
+            }
+        }
+        return result;
+    }
 }
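The NUCLIDE_ID to name mapping used by viewBetaXeResult (8 → XE131M, 9 → XE133M, 10 → XE133, 11 → XE135) could equally be expressed as a lookup table. A sketch, assuming those four IDs are the only ones of interest here; the class below is not part of the commit:

import java.util.HashMap;
import java.util.Map;

// Sketch of the same NUCLIDE_ID -> name mapping as the if/else chain in viewBetaXeResult;
// the IDs 8..11 and names are taken from this commit, everything else is illustrative.
public class XeNuclideNames {
    static final Map<Integer, String> XE_NUCLIDES = new HashMap<>();
    static {
        XE_NUCLIDES.put(8, "XE131M");
        XE_NUCLIDES.put(9, "XE133M");
        XE_NUCLIDES.put(10, "XE133");
        XE_NUCLIDES.put(11, "XE135");
    }

    static String nameOf(int nuclideId) {
        return XE_NUCLIDES.get(nuclideId); // null for IDs outside 8..11, as in the original chain
    }
}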