beta: implement the analyse and re-analyse functions, plus the related page data changes

gamma: implement the fitPeak feature of the InteractiveTool page
gamma: add the acceptBaseLine interface for the InteractiveTool page
gamma: adjust the alignment format of the log and report text content
AbstractLogOrReport: add the dedicated alignment methods required by the gamma log and report
AcceptInfo: add newPeak and flag fields to control peak changes on the InteractiveTool page
Add the BaseLineCtrls entity class
qiaoqinzheng 2023-10-13 19:37:03 +08:00
parent 0e4d702b96
commit 2714989491
15 changed files with 2210 additions and 881 deletions

View File

@ -1,5 +1,13 @@
package org.jeecg.modules.base.abstracts;
import com.baomidou.mybatisplus.core.toolkit.StringPool;
import org.apache.commons.lang3.StringUtils;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
/**
* 日志/报告格式化抽象类
*/
@ -39,4 +47,98 @@ public abstract class AbstractLogOrReport {
return String.format(source,args);
}
public String attributeFormat(String[] titles, String[] contents) {
//得到第一列的最大宽度
int a_width=0;
for(int pos=0;pos<titles.length;pos++) {
if(titles[pos].length()>a_width) {
a_width = titles[pos].length();
}
}
//得到第二列的最大宽度
int c_width=0;
for(int pos=0;pos<contents.length;pos++) {
if(contents[pos].length()>c_width) {
c_width = contents[pos].length();
}
}
//拼接最终结果
StringBuilder rData = new StringBuilder();
for(int pos=0;pos<titles.length;pos++) {
if(pos<contents.length) {
String model = "%s%s%-"+String.valueOf(a_width-titles[pos].length()+4)+"s%s%-"+String.valueOf(c_width-contents[pos].length()+4)+"s";
rData.append(String.format(model, titles[pos], " : ", " ", contents[pos], " "));
rData.append(System.lineSeparator());
rData.append(System.lineSeparator());
}
}
return rData.toString();
}
public String attributeGFormat(String[] titles, String[] contents) {
//得到第一列的最大宽度
int a_width=0;
for(int pos=0;pos<titles.length;pos++) {
if(titles[pos].length()>a_width) {
a_width = titles[pos].length();
}
}
//得到第二列的最大宽度
int c_width=0;
for(int pos=0;pos<contents.length;pos++) {
if(contents[pos].length()>c_width) {
c_width = contents[pos].length();
}
}
//拼接最终结果
StringBuilder rData = new StringBuilder();
for(int pos=0;pos<titles.length;pos++) {
if(pos<contents.length) {
String model = "%s%s%-"+String.valueOf(a_width-titles[pos].length()+4)+"s%s%-"+String.valueOf(c_width-contents[pos].length()+4)+"s";
rData.append(String.format(model, titles[pos], "", " ", contents[pos], " "));
//换行
rData.append(System.lineSeparator());
//换行
rData.append(System.lineSeparator());
}
}
return rData.toString();
}
public Map<String, List<String>> GetReportFixedWidth(String[] cells, String[] datas, int maxWidth) {
Map<String, List<String>> map =new HashMap<>();
List<String> titles = new LinkedList<>();
List<String> contents = new LinkedList<>();
//根据固定宽度切割过长的数据
for (int i=0; i<datas.length; i++) {
String cell = cells[i];
String data = datas[i];
if (data.length()> maxWidth) {
//计算需要切割成多少份
double numParts = Math.ceil((double) data.length() / maxWidth);
for (int j=0; j<numParts; j++) {
String dataSub = "";
//如果本次截取的末端下标没有越界 正常截取
if (((j + 1) * maxWidth) < data.length()) {
dataSub = data.substring(j * maxWidth, (j + 1) * maxWidth);
} else {
dataSub = data.substring(j * maxWidth, data.length());
}
if (j==0) {
titles.add(cell);
} else {
titles.add(" ");
}
contents.add(dataSub);
}
} else {
titles.add(cell);
contents.add(data);
}
}
map.put("title", titles);
map.put("content", contents);
return map;
}
}
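
A minimal usage sketch of the new alignment helpers, assuming AbstractLogOrReport has no abstract members that must be overridden; the subclass name, sample titles and contents below are purely illustrative, and imports of project types are omitted.

// Hypothetical concrete subclass, only to reach the new protected-free helpers.
class ReportFormatter extends AbstractLogOrReport {

    public static void main(String[] args) {
        ReportFormatter fmt = new ReportFormatter();

        // Two-column "title : content" alignment used by the gamma log/report.
        String[] titles   = {"SampleID", "Station", "Detector"};
        String[] contents = {"425880", "CAX05", "CAX05_001"};
        System.out.print(fmt.attributeFormat(titles, contents));

        // Wrap over-long cell values to a fixed width; continuation rows get a blank title.
        String[] cells = {"Comment"};
        String[] datas = {"A value that is longer than the allowed column width and therefore wrapped"};
        java.util.Map<String, java.util.List<String>> wrapped = fmt.GetReportFixedWidth(cells, datas, 30);
        java.util.List<String> wrappedTitles  = wrapped.get("title");
        java.util.List<String> wrappedContent = wrapped.get("content");
        for (int i = 0; i < wrappedContent.size(); i++) {
            System.out.println(wrappedTitles.get(i) + " : " + wrappedContent.get(i));
        }
    }
}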

View File

@ -10,6 +10,8 @@ public class AnalyseData implements Serializable {
private String applyType;
private String currentFileName;
private boolean sampleData;
private boolean gasBgData;

View File

@ -16,6 +16,7 @@ public class GStoreMiddleProcessData implements Serializable {
public String ErrorInfo; //错误信息
//gards_analyses数据表数据
public String IdAnalysis; //分析ID号
public String sample_id; //样品ID号
public String analyses_analysisBegin; //分析开始时
public String analyses_analysisEnd; //分析的结束时间

View File

@ -3,11 +3,13 @@ package org.jeecg.common.util;
import com.baomidou.mybatisplus.core.toolkit.CollectionUtils;
import com.baomidou.mybatisplus.core.toolkit.StringPool;
import com.baomidou.mybatisplus.core.toolkit.StringUtils;
import com.google.common.cache.Cache;
import org.apache.commons.io.FileUtils;
import org.apache.commons.net.ftp.FTP;
import org.apache.commons.net.ftp.FTPClient;
import org.apache.commons.net.ftp.FTPFile;
import org.jeecg.common.properties.SpectrumPathProperties;
import org.jeecg.modules.base.entity.rnman.GardsXeResults;
import org.jeecg.modules.base.enums.DataType;
import org.jeecg.modules.base.enums.DataTypeAbbr;
import org.jeecg.modules.base.enums.XeNuclideName;
@ -269,6 +271,7 @@ public class PHDFileUtil {
throw new RuntimeException("ftp连接失败!");
}
InputStream iStream= null;
File file = null;
try {
//被动模式
ftpClient.enterLocalPassiveMode();
@ -283,7 +286,7 @@ public class PHDFileUtil {
iStream=ftpClient.retrieveFileStream(fileName);
if (Objects.nonNull(iStream)) {
//声明一个临时文件
File file = File.createTempFile("betaGamma", null);
file = File.createTempFile("betaGamma", null);
//将ftp文件的输入流复制给临时文件
FileUtils.copyInputStreamToFile(iStream, file);
List<String> allLines = FileUtils.readLines(file, ftpUtil.getEncoding());
@ -299,6 +302,9 @@ public class PHDFileUtil {
if (Objects.nonNull(iStream)){
iStream.close();
}
if (Objects.nonNull(file)) {
file.delete();
}
} catch (IOException e) {
throw new RuntimeException(e);
}
@ -455,6 +461,7 @@ public class PHDFileUtil {
return map;
}
InputStream inputStream = null;
File file = null;
try {
//切换被动模式
ftpClient.enterLocalPassiveMode();
@ -468,7 +475,7 @@ public class PHDFileUtil {
inputStream = ftpClient.retrieveFileStream(sampleFileName);
if (Objects.nonNull(inputStream)) {
//声明一个临时文件
File file = File.createTempFile("betaGamma", null);
file = File.createTempFile("betaGamma", null);
//将ftp文件的输入流复制给临时文件
FileUtils.copyInputStreamToFile(inputStream, file);
//加载sampleFile内容
@ -501,7 +508,9 @@ public class PHDFileUtil {
if (inputStream!=null){
inputStream.close();
}
if (Objects.nonNull(file)) {
file.delete();
}
} catch (IOException e) {
throw new RuntimeException(e);
}
@ -514,76 +523,7 @@ public class PHDFileUtil {
FTPClient ftpClient = ftpUtil.LoginFTP();
InputStream inputStream = null;
File file = null;
try {
//被动模式
ftpClient.enterLocalPassiveMode();
//设置文件类型--二进制文件
ftpClient.setFileType(FTP.BINARY_FILE_TYPE);
//
ftpClient.setControlEncoding("UTF-8");
ftpClient.setFileTransferMode(FTPClient.STREAM_TRANSFER_MODE);
//切换文件路径
ftpClient.changeWorkingDirectory(filePath);
inputStream = ftpClient.retrieveFileStream(fileName);
if (Objects.nonNull(inputStream)){
file = File.createTempFile("betaGamma", null);
//将ftp文件的输入流复制给临时文件
FileUtils.copyInputStreamToFile(inputStream, file);
}
} catch (IOException e) {
throw new RuntimeException(e);
} finally {
try {
if (Objects.nonNull(ftpClient)){
ftpClient.disconnect();
}
if (inputStream != null){
inputStream.close();
}
} catch (IOException e) {
throw new RuntimeException(e);
}
}
//加载动态库
//System.loadLibrary("ReadPHDFile");
EnergySpectrumStruct sourceData = EnergySpectrumHandler.getSourceData(file.getAbsolutePath());
String systemType = sourceData.system_type;
String dataType = sourceData.data_type;
StringBuffer path = new StringBuffer();
if(systemType.contains("B")) {
path.append("Spectrum");
path.append(StringPool.SLASH+"Xenon");
path.append(StringPool.SLASH+"Sauna");
} else if(systemType.contains("G")) {
path.append("Spectrum");
path.append(StringPool.SLASH+"Xenon");
path.append(StringPool.SLASH+"Spalax");
}
if(dataType.contains("SAMPLEPHD")) {
path.append(StringPool.SLASH+"Samplephd");
} else if(dataType.contains("DETBKPHD")) {
path.append(StringPool.SLASH+"Detbkphd");
} else if(dataType.contains("GASBKPHD")) {
path.append(StringPool.SLASH+"Gasbkphd");
} else if(dataType.contains("QCPHD")) {
path.append(StringPool.SLASH+"Qcphd");
}
int pos = fileName.indexOf('-');
if(-1 == pos) {
} else if(fileName.length() >= pos+7) {
path.append(StringPool.SLASH+fileName.substring(pos+1,pos+5));
path.append(StringPool.SLASH+fileName.substring(pos+5,pos+7));
}
path.append(StringPool.SLASH+fileName);
return path.toString();
}
public BgBoundary CalBoundary(String filePath, String fileName){
//连接ftp
FTPClient ftpClient = ftpUtil.LoginFTP();
InputStream inputStream = null;
File file = null;
try {
//被动模式
ftpClient.enterLocalPassiveMode();
@ -599,6 +539,35 @@ public class PHDFileUtil {
file = File.createTempFile("betaGamma", null);
//将ftp文件的输入流复制给临时文件
FileUtils.copyInputStreamToFile(inputStream, file);
EnergySpectrumStruct sourceData = EnergySpectrumHandler.getSourceData(file.getAbsolutePath());
String systemType = sourceData.system_type;
String dataType = sourceData.data_type;
if(systemType.contains("B")) {
path.append("Spectrum");
path.append(StringPool.SLASH+"Xenon");
path.append(StringPool.SLASH+"Sauna");
} else if(systemType.contains("G")) {
path.append("Spectrum");
path.append(StringPool.SLASH+"Xenon");
path.append(StringPool.SLASH+"Spalax");
}
if(dataType.contains("SAMPLEPHD")) {
path.append(StringPool.SLASH+"Samplephd");
} else if(dataType.contains("DETBKPHD")) {
path.append(StringPool.SLASH+"Detbkphd");
} else if(dataType.contains("GASBKPHD")) {
path.append(StringPool.SLASH+"Gasbkphd");
} else if(dataType.contains("QCPHD")) {
path.append(StringPool.SLASH+"Qcphd");
}
int pos = fileName.indexOf('-');
if(-1 == pos) {
} else if(fileName.length() >= pos+7) {
path.append(StringPool.SLASH+fileName.substring(pos+1,pos+5));
path.append(StringPool.SLASH+fileName.substring(pos+5,pos+7));
}
path.append(StringPool.SLASH+fileName);
}
} catch (IOException e) {
throw new RuntimeException(e);
@ -610,31 +579,14 @@ public class PHDFileUtil {
if (inputStream != null){
inputStream.close();
}
if (Objects.nonNull(file)) {
file.delete();
}
} catch (IOException e) {
throw new RuntimeException(e);
}
}
//加载dll工具库
//System.loadLibrary("ReadPHDFile");
EnergySpectrumStruct struct = EnergySpectrumHandler.getSourceData(file.getAbsolutePath());
//计算边界值
List<Double> gCentroidChannel = struct.g_centroid_channel;
List<Double> gEnergy = struct.g_energy;
List<Double> bChannel = struct.b_channel;
List<Double> bElectronEnergy = struct.b_electron_energy;
CalcBgBoundaryParam calcBgBoundaryParam = new CalcBgBoundaryParam();
calcBgBoundaryParam.g_e_cal = EnergySpectrumHandler.GetFileFittingPara(gEnergy, gCentroidChannel);
calcBgBoundaryParam.b_e_cal = EnergySpectrumHandler.GetFileFittingPara(bElectronEnergy, bChannel);
calcBgBoundaryParam.b_energy = struct.b_electron_energy;
calcBgBoundaryParam.b_channel = struct.b_channel;
calcBgBoundaryParam.g_channel = struct.g_centroid_channel;
calcBgBoundaryParam.g_energy = struct.g_energy;
calcBgBoundaryParam.ROI_B_start_x1 = struct.POI_B_x1;
calcBgBoundaryParam.ROI_B_stop_x2 = struct.POI_B_x2;
calcBgBoundaryParam.ROI_G_start_y1 = struct.POI_G_y1;
calcBgBoundaryParam.ROI_G_stop_y2 = struct.POI_G_y2;
BgBoundary bgBoundary = EnergySpectrumHandler.CalcBgBoundary(calcBgBoundaryParam);
return bgBoundary;
return path.toString();
}
public List<String> FileNameByStandardForm(String filePath, String sampleFileName){
@ -744,42 +696,122 @@ public class PHDFileUtil {
return file;
}
public BgDataAnlyseResultIn analyzeSpectrum(File sampleTmp, File gasTmp, File detTmp, Map<String, Object> map) {
//加载dll工具库
//System.loadLibrary("ReadPHDFile");
public void analyzeSpectrum(File sampleTmp, File gasTmp, File detTmp, BgCalibratePara BgCalPara, Map<String, Object> map) {
//调用动态库解析文件
BgAnalyseResult bgAnalyseResult = EnergySpectrumHandler.bgAnalyse(sampleTmp.getAbsolutePath(), gasTmp.getAbsolutePath(), detTmp.getAbsolutePath());
BgDataAnlyseResultIn resultIn = new BgDataAnlyseResultIn();
resultIn.setXe131m_con(bgAnalyseResult.Xe131m_con);
resultIn.setXe131m_uncer(bgAnalyseResult.Xe131m_uncer);
resultIn.setMdc_Xe131m(bgAnalyseResult.MDC_Xe131m);
resultIn.setLc_Xe131m(bgAnalyseResult.LC_Xe131m);
resultIn.setXe131mFlag(bgAnalyseResult.XE_131m_NID_FLAG);
resultIn.setXe133_con(bgAnalyseResult.Xe133_con);
resultIn.setXe133_uncer(bgAnalyseResult.Xe133_uncer);
resultIn.setMdc_Xe133(bgAnalyseResult.MDC_Xe133);
resultIn.setLc_Xe133(bgAnalyseResult.LC_Xe133);
resultIn.setXe133Flag(bgAnalyseResult.XE_133_NID_FLAG);
resultIn.setXe133m_con(bgAnalyseResult.Xe133m_con);
resultIn.setXe133m_uncer(bgAnalyseResult.Xe133m_uncer);
resultIn.setMdc_Xe133m(bgAnalyseResult.MDC_Xe133m);
resultIn.setLc_Xe133m(bgAnalyseResult.LC_Xe133m);
resultIn.setXe133mFlag(bgAnalyseResult.XE_133m_NID_FLAG);
resultIn.setXe135_con(bgAnalyseResult.Xe135_con);
resultIn.setXe135_uncer(bgAnalyseResult.Xe135_uncer);
resultIn.setMdc_Xe135(bgAnalyseResult.MDC_Xe135);
resultIn.setLc_Xe135(bgAnalyseResult.LC_Xe135);
resultIn.setXe135Flag(bgAnalyseResult.XE_135_NID_FLAG);
map.put("bProcessed", true);
return resultIn;
BgAnalyseResult analyseResult = null;
if (Objects.isNull(BgCalPara)) {
analyseResult = EnergySpectrumHandler.bgAnalyse(sampleTmp.getAbsolutePath(), gasTmp.getAbsolutePath(), detTmp.getAbsolutePath());
} else {
analyseResult = EnergySpectrumHandler.bgReAnalyse(sampleTmp.getAbsolutePath(), gasTmp.getAbsolutePath(), detTmp.getAbsolutePath(), BgCalPara);
}
//需要返回到前端的XeData数据
List<GardsXeResultsSpectrum> xeResultsSpectrumList = new LinkedList<>();
//存入计算后得到的xeData数据
GardsXeResultsSpectrum xe131m = new GardsXeResultsSpectrum();
xe131m.setNuclideName(XeNuclideName.XE_131m.getType());
xe131m.setConc(analyseResult.Xe131m_con);
xe131m.setConcErr(analyseResult.Xe131m_uncer);
xe131m.setLc(analyseResult.LC_Xe131m);
xe131m.setMdc(analyseResult.MDC_Xe131m);
xe131m.setNidFlag(analyseResult.XE_131m_NID_FLAG);
xeResultsSpectrumList.add(xe131m);
GardsXeResultsSpectrum xe133 = new GardsXeResultsSpectrum();
xe133.setNuclideName(XeNuclideName.XE_133.getType());
xe133.setConc(analyseResult.Xe133_con);
xe133.setConcErr(analyseResult.Xe133_uncer);
xe133.setLc(analyseResult.LC_Xe133);
xe133.setMdc(analyseResult.MDC_Xe133);
xe133.setNidFlag(analyseResult.XE_133_NID_FLAG);
xeResultsSpectrumList.add(xe133);
GardsXeResultsSpectrum xe133m = new GardsXeResultsSpectrum();
xe133m.setNuclideName(XeNuclideName.XE_133m.getType());
xe133m.setConc(analyseResult.Xe133m_con);
xe133m.setConcErr(analyseResult.Xe133m_uncer);
xe133m.setLc(analyseResult.LC_Xe133m);
xe133m.setMdc(analyseResult.MDC_Xe133m);
xe133m.setNidFlag(analyseResult.XE_133m_NID_FLAG);
xeResultsSpectrumList.add(xe133m);
GardsXeResultsSpectrum xe135 = new GardsXeResultsSpectrum();
xe135.setNuclideName(XeNuclideName.XE_135.getType());
xe135.setConc(analyseResult.Xe135_con);
xe135.setConcErr(analyseResult.Xe135_uncer);
xe135.setLc(analyseResult.LC_Xe135);
xe135.setMdc(analyseResult.MDC_Xe135);
xe135.setNidFlag(analyseResult.XE_135_NID_FLAG);
xeResultsSpectrumList.add(xe135);
map.put("XeData", xeResultsSpectrumList);
//新计算得到的边界值
if (CollectionUtils.isNotEmpty(analyseResult.S_ROI_B_Boundary_start)) {
List<Boundary> boundaryList = new LinkedList<>();
for (int i=0; i<analyseResult.S_ROI_B_Boundary_start.size(); i++) {
Boundary boundary = new Boundary();
boundary.setMinX(analyseResult.S_ROI_B_Boundary_start.get(i));
boundary.setMaxX(analyseResult.S_ROI_B_Boundary_stop.get(i));
boundary.setMinY(analyseResult.S_ROI_G_Boundary_start.get(i));
boundary.setMaxY(analyseResult.S_ROI_G_Boundary_stop.get(i));
boundaryList.add(boundary);
}
map.put("SampleBoundary", boundaryList);
}
if (CollectionUtils.isNotEmpty(analyseResult.G_ROI_B_Boundary_start)) {
List<Boundary> boundaryList = new LinkedList<>();
for (int i=0; i<analyseResult.G_ROI_B_Boundary_start.size(); i++) {
Boundary boundary = new Boundary();
boundary.setMinX(analyseResult.G_ROI_B_Boundary_start.get(i));
boundary.setMaxX(analyseResult.G_ROI_B_Boundary_stop.get(i));
boundary.setMinY(analyseResult.G_ROI_G_Boundary_start.get(i));
boundary.setMaxY(analyseResult.G_ROI_G_Boundary_stop.get(i));
boundaryList.add(boundary);
}
map.put("GasBoundary", boundaryList);
}
if (CollectionUtils.isNotEmpty(analyseResult.D_ROI_B_Boundary_start)) {
List<Boundary> boundaryList = new LinkedList<>();
for (int i=0; i<analyseResult.D_ROI_B_Boundary_start.size(); i++) {
Boundary boundary = new Boundary();
boundary.setMinX(analyseResult.D_ROI_B_Boundary_start.get(i));
boundary.setMaxX(analyseResult.D_ROI_B_Boundary_stop.get(i));
boundary.setMinY(analyseResult.D_ROI_G_Boundary_start.get(i));
boundary.setMaxY(analyseResult.D_ROI_G_Boundary_stop.get(i));
boundaryList.add(boundary);
}
map.put("DetBoundary", boundaryList);
}
}
// public void CalQCBoundary() {
// //QC需要独立计算
// //从本地缓存获取beta gamma的数组
// Cache<String, Map<String, Object>> cache = betaCache.getBetaCache();
// //根据qc文件名称-用户名-beta的方式获取beta的内容
// Map<String, Object> betaMap = cache.getIfPresent(anlyseResultIn.getQcFileName() + "-" + userName + "-beta");
// List<SeriseData> betaList = new LinkedList<>();
// List<String> betaFittingPara = new LinkedList<>();
// List<String> betaFittingParaToUi = new LinkedList<>();
// if (CollectionUtils.isNotEmpty(betaMap)) {
// betaList = (List<SeriseData>)betaMap.get("Series");
// betaFittingPara = (List<String>) betaMap.get("fittingPara");
// betaFittingParaToUi = (List<String>) betaMap.get("fittingParaToUi");
// }
// //根据qc文件名称-用户名-gamma的方式获取gamma的内容
// Map<String, Object> gammaMap = cache.getIfPresent(anlyseResultIn.getQcFileName() + "-" + userName + "-gamma");
// List<SeriseData> gammaList = new LinkedList<>();
// List<String> gammaFittingPara = new LinkedList<>();
// List<String> gammaFittingParaToUi = new LinkedList<>();
// if (CollectionUtils.isNotEmpty(gammaMap)) {
// gammaList = (List<SeriseData>)gammaMap.get("Series");
// gammaFittingPara = (List<String>) gammaMap.get("fittingPara");
// gammaFittingParaToUi = (List<String>) gammaMap.get("fittingParaToUi");
// }
// }
public EnergySpectrumStruct analyzeFileSourceData(String filePath, String fileName) {
//加载dll工具库
//System.loadLibrary("ReadPHDFile");
EnergySpectrumStruct struct = new EnergySpectrumStruct();
FTPClient ftpClient = ftpUtil.LoginFTP();
InputStream inputStream = null;
File file = null;
try {
//切换被动模式
ftpClient.enterLocalPassiveMode();
@ -791,7 +823,7 @@ public class PHDFileUtil {
inputStream = ftpClient.retrieveFileStream(fileName);
if (Objects.nonNull(inputStream)){
//声明一个临时文件
File file = File.createTempFile("betaGamma", null);
file = File.createTempFile("betaGamma", null);
//将ftp文件的输入流复制给临时文件
FileUtils.copyInputStreamToFile(inputStream, file);
struct = EnergySpectrumHandler.getSourceData(file.getAbsolutePath());
@ -806,6 +838,9 @@ public class PHDFileUtil {
if (Objects.nonNull(inputStream)){
inputStream.close();
}
if (Objects.nonNull(file)) {
file.delete();
}
} catch (IOException e) {
throw new RuntimeException(e);
}
@ -945,4 +980,5 @@ public class PHDFileUtil {
result.put("xeResults", xeResultsSpectrumList);
return result;
}
}
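
A sketch of how the reworked analyzeSpectrum might be consumed now that it fills the caller's result map instead of returning a BgDataAnlyseResultIn: a null BgCalibratePara selects the plain bgAnalyse path, a populated one the bgReAnalyse path. File paths are placeholders and imports of project types (PHDFileUtil, GardsXeResultsSpectrum, Boundary) are omitted.

import java.io.File;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class AnalyzeSpectrumSketch {

    // phdFileUtil would normally be injected by Spring; the three files are the
    // temporary sample/gas/det spectra already pulled from FTP.
    @SuppressWarnings("unchecked")
    static void runOnce(PHDFileUtil phdFileUtil, File sampleTmp, File gasTmp, File detTmp) {
        Map<String, Object> map = new HashMap<>();

        // null BgCalPara -> first analysis (bgAnalyse); non-null -> re-analysis (bgReAnalyse).
        phdFileUtil.analyzeSpectrum(sampleTmp, gasTmp, detTmp, null, map);

        // Per-nuclide Xe concentration/uncertainty/LC/MDC rows for the page table.
        List<GardsXeResultsSpectrum> xeData = (List<GardsXeResultsSpectrum>) map.get("XeData");
        // Recalculated ROI boundaries for the sample/gas/det charts
        // (present only when the analysis returned boundary lists).
        List<Boundary> sampleBoundary = (List<Boundary>) map.get("SampleBoundary");
        List<Boundary> gasBoundary    = (List<Boundary>) map.get("GasBoundary");
        List<Boundary> detBoundary    = (List<Boundary>) map.get("DetBoundary");
    }
}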

View File

@ -7,7 +7,6 @@ import org.jeecg.common.base.BaseMap;
import org.jeecg.common.constant.GlobalConstants;
import org.jeecg.common.constant.WebSocketHandlerConst;
import org.jeecg.modules.base.bizVo.GammaRLR;
import org.jeecg.modules.base.entity.postgre.SysUser;
import org.jeecg.modules.entity.vo.*;
import org.jeecg.modules.feignclient.SystemClient;
import org.jeecg.modules.service.IGammaService;
@ -110,7 +109,7 @@ public class GammaController {
@PostMapping("acceptResults")
@ApiOperation(value = "InteractiveTool页面Insert页面save", notes = "InteractiveTool页面Insert页面save")
public Result acceptResults(@RequestBody AcceptInfo acceptInfo, HttpServletRequest request) {
return gammaService.acceptResults(acceptInfo.getFileName(), acceptInfo.isAccept(), acceptInfo.getOldPeak(), request);
return gammaService.acceptResults(acceptInfo.getFileName(), acceptInfo.isAccept(), acceptInfo.getOldPeak(), acceptInfo.getNewPeak(), acceptInfo.getFlag(), request);
}
@GetMapping("deletePeak")
@ -119,6 +118,11 @@ public class GammaController {
return gammaService.deletePeak(fileName, curRow, request);
}
@GetMapping("fitPeak")
public Result fitPeak(int left, int right, String fileName, HttpServletRequest request) {
return gammaService.fitPeak(left, right, fileName, request);
}
@GetMapping("getSelPosNuclide")
@ApiOperation(value = "InteractiveTool页面选择channel加载对应核素信息接口", notes = "InteractiveTool页面选择channel加载对应核素信息接口")
public Result getSelPosNuclide(Integer sampleId, String fileName, int channel, HttpServletRequest request) {
@ -179,6 +183,16 @@ public class GammaController {
return gammaService.searchNuclide(sampleId, fileName, energy, tolerance, request);
}
@PostMapping("replotBaseLine")
public Result replotBaseLine(@RequestBody BaseLineCtrls baseLineCtrls, HttpServletRequest request) {
return gammaService.replotBaseLine(baseLineCtrls, request);
}
@PostMapping("acceptBaseLine")
public Result acceptBaseLine(@RequestBody BaseLineCtrls baseLineCtrls, HttpServletRequest request) {
return gammaService.acceptBaseLine(baseLineCtrls, request);
}
@GetMapping("ZeroTime")
@ApiOperation(value = "Zero Time页面数据", notes = "Zero Time页面数据")
public Result ZeroTime() {
@ -477,6 +491,7 @@ public class GammaController {
}
@GetMapping("saveToTxt")
@ApiOperation(value = "Save To TXT接口", notes = "Save To TXT接口")
public void saveToTxt(String fileName, HttpServletRequest request, HttpServletResponse response) {
gammaService.saveToTxt(fileName, request, response);
}
@ -485,4 +500,10 @@ public class GammaController {
public void saveToExcel(String fileName, HttpServletResponse response) {
gammaService.saveToExcel(fileName, response);
}
@GetMapping("saveToPHD")
@ApiOperation(value = "Save To PHD接口", notes = "Save To PHD接口")
public void saveToPHD(String fileName, HttpServletRequest request, HttpServletResponse response) {
gammaService.saveToPHD(fileName, request, response);
}
}
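
A hedged client-side sketch of the new controller endpoints. The "/gamma" base path, host and port are assumptions (the class-level request mapping is not shown in this hunk), the JWT auth header is omitted, and spring-web plus the project's BaseLineCtrls import are assumed to be on the classpath.

import org.springframework.web.client.RestTemplate;

public class GammaClientSketch {
    public static void main(String[] args) {
        RestTemplate rest = new RestTemplate();
        String base = "http://localhost:8080/gamma";   // assumed base path

        // fitPeak: fit the peaks whose centroid lies between the two channels of the loaded file.
        String fitResult = rest.getForObject(
                base + "/fitPeak?left={l}&right={r}&fileName={f}",
                String.class, 260, 320, "SAMPLE.PHD");

        // acceptBaseLine: send the edited baseline controls back for this file.
        BaseLineCtrls ctrls = new BaseLineCtrls();
        ctrls.setFileName("SAMPLE.PHD");
        String acceptResult = rest.postForObject(base + "/acceptBaseLine", ctrls, String.class);

        System.out.println(fitResult);
        System.out.println(acceptResult);
    }
}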

View File

@ -197,13 +197,14 @@ public class SpectrumAnalysesController {
String sampleFileName = analyseBetaInfo.getSampleFileNames().get(0);
String gasFileName = analyseBetaInfo.getGasFileNames().get(0);
String detFileName = analyseBetaInfo.getDetFileNames().get(0);
return spectrumAnalysisService.analyseCurrentSpectrum(dbName, sampleId, sampleFileName, gasFileName, detFileName, request);
String qcFileName = analyseBetaInfo.getQcFileNames().get(0);
return spectrumAnalysisService.analyseCurrentSpectrum(dbName, sampleId, sampleFileName, gasFileName, detFileName, qcFileName, request);
}
@PostMapping("analyseAllSpectrum")
@ApiOperation(value = "解析全部加载文件数据", notes = "解析全部加载文件数据")
public Result analyseAllSpectrum(@RequestBody AnalyseBetaInfo analyseBetaInfo, HttpServletRequest request) {
return spectrumAnalysisService.analyseAllSpectrum(analyseBetaInfo.getDbNames(), analyseBetaInfo.getSampleIds(), analyseBetaInfo.getSampleFileNames(), analyseBetaInfo.getGasFileNames(), analyseBetaInfo.getDetFileNames(), request);
return spectrumAnalysisService.analyseAllSpectrum(analyseBetaInfo.getDbNames(), analyseBetaInfo.getSampleIds(), analyseBetaInfo.getSampleFileNames(), analyseBetaInfo.getGasFileNames(), analyseBetaInfo.getDetFileNames(), analyseBetaInfo.getQcFileNames(), analyseBetaInfo.getCurrentFileName(), request);
}
@PostMapping("saveToDB")

View File

@ -14,4 +14,8 @@ public class AcceptInfo implements Serializable {
private List<PeakInfo> oldPeak;
private List<PeakInfo> newPeak;
private String flag;
}
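
The new newPeak and flag fields drive how acceptResults merges peaks on the server: flag "fit" overwrites the peak at index-1 in place, while flag "insert" either overwrites or inserts depending on whether the formatted energy matches the old peak at the same index. A minimal construction sketch, assuming Lombok-generated setters on AcceptInfo and with project-type imports omitted.

import java.util.List;

public class AcceptInfoSketch {

    // oldPeaks/editedPeaks are the "oldPeaks"/"newPeaks" lists previously returned
    // by the fitPeak (or insertPeak) endpoint; setter names assume Lombok @Data.
    static AcceptInfo buildFitPayload(String fileName,
                                      List<PeakInfo> oldPeaks,
                                      List<PeakInfo> editedPeaks) {
        AcceptInfo acceptInfo = new AcceptInfo();
        acceptInfo.setFileName(fileName);
        acceptInfo.setAccept(true);         // false discards the pending peak changes
        acceptInfo.setFlag("fit");          // "fit" replaces peaks in place; "insert" splices new peaks in
        acceptInfo.setOldPeak(oldPeaks);
        acceptInfo.setNewPeak(editedPeaks);
        return acceptInfo;
    }
}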

View File

@ -18,4 +18,8 @@ public class AnalyseBetaInfo implements Serializable {
private List<String> detFileNames;
private List<String> qcFileNames;
private String currentFileName;
}

View File

@ -0,0 +1,10 @@
package org.jeecg.modules.entity.vo;
import lombok.Data;
@Data
public class BaseLineCtrls extends BaseControls {
private String fileName;
}

View File

@ -8,6 +8,7 @@ import org.jeecg.modules.entity.GardsROIResultsSpectrum;
import org.jeecg.modules.entity.GardsXeResultsSpectrum;
import java.io.Serializable;
import java.util.LinkedList;
import java.util.List;
@Data
@ -263,4 +264,90 @@ public class BgDataAnlyseResultIn implements Serializable {
List<GardsXeResults> XeData;
public BgDataAnlyseResultIn() {
comment = "";
bProcessed = false;
userName = "";
stationName = "";
dbName = "";
sampleFilePath = "";
sampleFileName = "";
gasFilePath = "";
gasFileName = "";
detFilePath = "";
detFileName = "";
qcFilePath = "";
qcFileName = "";
bGammaEnergyValidSample = false;
bBetaEnergyValidSample = false;
bGammaEnergyValidGas = false;
bBetaEnergyValidGas = false;
bGammaEnergyValidDet = false;
bBetaEnergyValidDet = false;
checkSample = false;
checkGas = false;
checkDet = false;
g_channel_sample = new LinkedList<>();
g_energy_sample = new LinkedList<>();
b_channel_sample = new LinkedList<>();
b_energy_sample = new LinkedList<>();
g_channel_gas = new LinkedList<>();
g_energy_gas = new LinkedList<>();
b_channel_gas = new LinkedList<>();
b_energy_gas = new LinkedList<>();
g_channel_det = new LinkedList<>();
g_energy_det = new LinkedList<>();
b_channel_det = new LinkedList<>();
b_energy_det = new LinkedList<>();
param_a_c2e_g_sample = "";
param_b_c2e_g_sample = "";
param_c_c2e_g_sample = "";
param_a_c2e_g_gas = "";
param_b_c2e_g_gas = "";
param_c_c2e_g_gas = "";
param_a_c2e_g_det = "";
param_b_c2e_g_det = "";
param_c_c2e_g_det = "";
param_a_c2e_b = "";
param_b_c2e_b = "";
param_c_c2e_b = "";
mdc_Xe135 = 0.0;
mdc_Xe131m = 0.0;
mdc_Xe133m = 0.0;
mdc_Xe133 = 0.0;
xe135_con = 0.0;
xe135_uncer = 0.0;
xe131m_con = 0.0;
xe131m_uncer = 0.0;
xe133m_con = 0.0;
xe133m_uncer = 0.0;
xe133_con = 0.0;
xe133_uncer = 0.0;
lc_Xe135 = 0.0;
lc_Xe131m = 0.0;
lc_Xe133m = 0.0;
lc_Xe133 = 0.0;
xe131mFlag = 0;
xe133Flag = 0;
xe133mFlag = 0;
xe135Flag = 0;
gammaCalibrationSpectrumList = new LinkedList<>();
gammaCalibrationSCE = new GardsCalibrationSpectrum();
gammaCalibrationSEC = new GardsCalibrationSpectrum();
gammaCalibrationGCE = new GardsCalibrationSpectrum();
gammaCalibrationGEC = new GardsCalibrationSpectrum();
gammaCalibrationDCE = new GardsCalibrationSpectrum();
gammaCalibrationDEC = new GardsCalibrationSpectrum();
betaCalibrationSpectrumList = new LinkedList<>();
betaCalibrationSCE = new GardsCalibrationSpectrum();
betaCalibrationSEC = new GardsCalibrationSpectrum();
betaCalibrationGCE = new GardsCalibrationSpectrum();
betaCalibrationGEC = new GardsCalibrationSpectrum();
betaCalibrationDCE= new GardsCalibrationSpectrum();
betaCalibrationDEC = new GardsCalibrationSpectrum();
roiChannelsSpectrumList = new LinkedList<>();
roiResultsSpectrumList = new LinkedList<>();
XeData = new LinkedList<>();
}
}

View File

@ -34,10 +34,12 @@ public interface IGammaService{
Result insertPeak(Integer sampleId, String fileName, Integer curChan, HttpServletRequest request);
Result acceptResults(String fileName, boolean accept, List<PeakInfo> oldPeak, HttpServletRequest request);
Result acceptResults(String fileName, boolean accept, List<PeakInfo> oldPeak, List<PeakInfo> newPeak, String flag, HttpServletRequest request);
Result deletePeak(String fileName, int curRow, HttpServletRequest request);
Result fitPeak(int left, int right, String fileName, HttpServletRequest request);
Result getSelPosNuclide(Integer sampleId, String fileName, int channel, HttpServletRequest request);
Result addNuclide(Integer curRow, String nuclideName, String fileName, List<String> list_identify, HttpServletRequest request);
@ -58,6 +60,10 @@ public interface IGammaService{
Result searchNuclide(Integer sampleId, String fileName, Double energy, Double tolerance, HttpServletRequest request);
Result replotBaseLine(BaseLineCtrls baseLineCtrls, HttpServletRequest request);
Result acceptBaseLine(BaseLineCtrls baseLineCtrls, HttpServletRequest request);
Result ZeroTime();
Result ZeroTimeAnalyse(String nuclide1, String nuclide2, Double product1, Double product2, String target, String energyTFH, String date, String time);
@ -163,4 +169,7 @@ public interface IGammaService{
void saveToTxt(String fileName, HttpServletRequest request, HttpServletResponse response);
void saveToExcel(String fileName, HttpServletResponse response);
void saveToPHD(String fileName, HttpServletRequest request, HttpServletResponse response);
}

View File

@ -66,9 +66,9 @@ public interface ISpectrumAnalysisService {
Result ReAnalyse(AnalyseData analyseData, HttpServletRequest request);
Result analyseCurrentSpectrum(String dbName, Integer sampleId, String sampleFileName, String gasFileName, String detFileName, HttpServletRequest request);
Result analyseCurrentSpectrum(String dbName, Integer sampleId, String sampleFileName, String gasFileName, String detFileName, String qcFileName, HttpServletRequest request);
Result analyseAllSpectrum(List<String> dbNames, List<Integer> sampleIds, List<String> sampleFileNames, List<String> gasFileNames, List<String> detFileNames, HttpServletRequest request);
Result analyseAllSpectrum(List<String> dbNames, List<Integer> sampleIds, List<String> sampleFileNames, List<String> gasFileNames, List<String> detFileNames, List<String> qcFileNames, String currentFileName, HttpServletRequest request);
Result saveToDB(BgDataAnlyseResultIn anlyseResultIn, HttpServletRequest request);

View File

@ -371,6 +371,16 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
ParameterInfo value = JSON.parseObject(JSON.toJSONString(entry.getValue()), ParameterInfo.class);
phd.setUsedTotEPara(value);
}
if (entry.getKey().equalsIgnoreCase("mapNucActMda")) {
HashMap<String, Object> jsonMap = JSON.parseObject(JSON.toJSONString(entry.getValue()), HashMap.class);
Map<String, NuclideActMda> value = new HashMap<>();
for (Map.Entry<String, Object> objectEntry:jsonMap.entrySet()) {
String key = objectEntry.getKey();
NuclideActMda entryValue = JSON.parseObject(JSON.toJSONString(objectEntry.getValue()), NuclideActMda.class);
value.put(key, entryValue);
}
phd.setMapNucActMda(value);
}
}
BeanUtils.copyProperties(phd.getSetting(), phd.getUsedSetting());
@ -382,7 +392,6 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
peak.recoilDeltaChan = "1";
}
}
gammaFileUtil.NuclidesIdent(phd, nuclideLinesMap);
gammaFileUtil.RunQC(phd);
result.setResult(phd);
} catch (JsonProcessingException e) {
@ -1137,15 +1146,10 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
insertInput.rg_high = phd.getBaseCtrls().getRg_high();
insertInput.curChan = curChan;
insertInput.vCount = m_vCount;
ObjectMapper mapper = new ObjectMapper();
try {
String value = mapper.writeValueAsString(insertInput);
System.out.println(value);
} catch (JsonProcessingException e) {
throw new RuntimeException(e);
}
StructInsertOutput structInsertOutput = CalValuesHandler.insertPeaks(insertInput);
List<Integer> indexs = new LinkedList<>();
List<PeakInfo> newPeak = new LinkedList<>();
List<PeakInfo> newPeaks = new LinkedList<>();
newPeaks.addAll(phd.getVPeak());
if (structInsertOutput.vIdx.size()>0){
for (int j=0; j<structInsertOutput.vIdx.size(); j++) {
int a = 0;
@ -1153,6 +1157,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
a++;
}
PeakInfo peak = new PeakInfo();
peak.index = a;
peak.left = structInsertOutput.vLeft.get(j).intValue();
peak.right = structInsertOutput.vRight.get(j).intValue();
peak.peakCentroid = structInsertOutput.peakCentroid.get(j);
@ -1171,17 +1176,16 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
peak.BWWidthChan = 0;
peak.recoilBetaChan = String.valueOf(structInsertOutput.recoilBetaChan.get(j));
peak.recoilDeltaChan = String.valueOf(structInsertOutput.recoilDeltaChan.get(j));
phd.getVPeak().add(a, peak);
indexs.add(a);
newPeaks.add(a, peak);
newPeak.add(peak);
}
}
int peakIdx = structInsertOutput.vIdx.get(0).intValue();
int left = structInsertOutput.vLeft.get(0).intValue();
int right = structInsertOutput.vRight.get(0).intValue();
List<Integer> vIdx = new LinkedList<>();
int ii = 0;
for (PeakInfo peak:phd.getVPeak()){
for (PeakInfo peak: newPeaks){
if(peak.peakCentroid >= right){
break;
} else if(peak.peakCentroid > left) {
@ -1190,21 +1194,21 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
ii++;
}
List<TablePeaks> tablePeaksList = FitPeakBaseLine(phd, vIdx);
List<TablePeaks> tablePeaksList = FitPeakBaseLine(newPeaks, phd.getUsedEnerPara().getP(), vIdx);
map.put("tablePeaksList", tablePeaksList);
map.put("oldPeaks", vOriPeaks);
map.put("newPeaks", newPeak);
result.setSuccess(true);
result.setResult(map);
return result;
}
public List<TablePeaks> FitPeakBaseLine(PHDFile phd, List<Integer> vIdx) {
public List<TablePeaks> FitPeakBaseLine(List<PeakInfo> vPeaks, List<Double> p, List<Integer> vIdx) {
List<TablePeaks> tablePeaksList = new LinkedList<>();
int peakNum = vIdx.size();
for (int i=0; i<peakNum; i++) {
int peakIdx = vIdx.get(i);
int row = i+1;
PeakInfo peak = phd.getVPeak().get(peakIdx);
PeakInfo peak = vPeaks.get(peakIdx);
TablePeaks tablePeaks = new TablePeaks();
tablePeaks.setLab(String.valueOf(peakIdx + 1));
String nuclide = "";
@ -1217,7 +1221,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
tablePeaks.setNetArea(NumberFormatUtil.numberFormat(String.valueOf(peak.area)));
tablePeaks.setFwhm(NumberFormatUtil.numberFormat(String.valueOf(peak.fwhm)));
tablePeaks.setStep(NumberFormatUtil.numberFormat(String.valueOf(peak.area * peak.stepRatio)));
double deriva = CalValuesHandler.calDerivaOut(peak.peakCentroid, phd.getUsedEnerPara().getP());
double deriva = CalValuesHandler.calDerivaOut(peak.peakCentroid, p);
tablePeaks.setBwGamma(NumberFormatUtil.numberFormat(String.valueOf(peak.BWWidthChan * deriva)));
tablePeaks.setNetAreaB(false);
tablePeaks.setCentroid(true);
@ -1228,7 +1232,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
}
@Override
public Result acceptResults(String fileName, boolean accept, List<PeakInfo> oldPeak, HttpServletRequest request) {
public Result acceptResults(String fileName, boolean accept, List<PeakInfo> oldPeak, List<PeakInfo> newPeak, String flag, HttpServletRequest request) {
Result result = new Result();
String userName = JwtUtil.getUserNameByToken(request);
Cache<String, PHDFile> phdCache = localCache.getPHDCache();
@ -1256,7 +1260,28 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
HashMap<String, Object> map = new HashMap<>();
//根据boolean 决定是否保留本次数据 如果保留则不需要操作vPeak 并重新拟合线
if (accept){
//算法有问题 --- 需要等等处理
if (flag.equalsIgnoreCase("fit")) {//如果传递的flag标识 fit则进行修改峰值等数据
for (int j=0; j<newPeak.size(); j++) {
PeakInfo peakInfo = newPeak.get(j);
phd.getVPeak().set(peakInfo.index-1, peakInfo);
}
} else if (flag.equalsIgnoreCase("insert")){//如果传递的flag标识 Insert则进行峰值的插入
for (int j=0; j<newPeak.size(); j++) {
//获取需要插入的第一个峰值信息
PeakInfo peakInfo = newPeak.get(j);
//根据下标获取旧的峰值数据
PeakInfo info = oldPeak.get(peakInfo.index - 1);
String newEnergy = NumberFormatUtil.numberFormat(String.valueOf(peakInfo.energy));
String oldEnergy = NumberFormatUtil.numberFormat(String.valueOf(info.energy));
//将旧的峰值能量与新的峰值能量格式化后 比较 如果一致则覆盖 不一致则在对应下标插入峰值
if (oldEnergy.equals(newEnergy) && peakInfo.index == info.index) {
phd.getVPeak().set(peakInfo.index-1, peakInfo);
} else {
phd.getVPeak().add(peakInfo.index-1, peakInfo);
}
}
}
//重新计算peak的改变
gammaFileUtil.PeaksChanged(phd);
List<PeakInfo> vPeak = gammaFileUtil.InitPeakTable(phd.getVPeak());
@ -1303,7 +1328,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
if(curRow >= 0 && curRow < peakNum) {
phd.getVPeak().remove(curRow);
// gammaFileUtil.PeaksChanged(phd);
gammaFileUtil.PeaksChanged(phd);
for (int i=0;i<phd.getVPeak().size(); i++) {
PeakInfo peakInfo = phd.getVPeak().get(i);
peakInfo.index = i+1;
@ -1319,6 +1344,62 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
return result;
}
@Override
public Result fitPeak(int left, int right, String fileName, HttpServletRequest request) {
Result result = new Result();
HashMap<String, Object> map = new HashMap<>();
//获取用户名
String userName = JwtUtil.getUserNameByToken(request);
Cache<String, PHDFile> phdCache = localCache.getPHDCache();
PHDFile phd = phdCache.getIfPresent(fileName+"-"+userName);
if (Objects.isNull(phd)){
result.error500("请先选择解析文件!");
return result;
}
//判断当前phd文件对应的peak的数据是否小于1
if(phd.getVPeak().size() < 1) {
result.error500("No peak to fit.");
return result;
}
//判断当前选择的左侧道值是否大于右侧道值
if(left > right) {
int temp = left;
left = right;
right = temp;
}
// 找出与插入峰相关联的峰的索引
List<Integer> vIdx = new LinkedList<>();
int ii = 0;
for(PeakInfo peak: phd.getVPeak()) {
if(peak.peakCentroid >= right) {
break;
} else if(peak.peakCentroid > left) {
vIdx.add(ii);
}
ii++;
}
if(CollectionUtils.isEmpty(vIdx)) {
result.error500("There are 0 peak between channel "+left+" and "+right);
return result;
}
// 备份原来的峰列表
List<PeakInfo> vOriPeaks = phd.getVPeak();
List<PeakInfo> newPeak = new LinkedList<>();
for (int i=0; i<vIdx.size(); i++) {
int peakIdx = vIdx.get(i);
PeakInfo peak = phd.getVPeak().get(peakIdx);
newPeak.add(peak);
}
List<TablePeaks> tablePeaksList = FitPeakBaseLine(phd.getVPeak(), phd.getUsedEnerPara().getP(), vIdx);
map.put("tablePeaksList", tablePeaksList);
map.put("oldPeaks", vOriPeaks);
map.put("newPeaks", newPeak);
result.setSuccess(true);
result.setResult(map);
return result;
}
@Override
public Result getSelPosNuclide(Integer sampleId, String fileName, int channel, HttpServletRequest request) {
Result result = new Result();
@ -1674,6 +1755,102 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
return result;
}
@Override
public Result replotBaseLine(BaseLineCtrls baseLineCtrls, HttpServletRequest request) {
// Result result = new Result();
// HashMap<String, Object> map = new HashMap<>();
// //获取用户名
// String userName = JwtUtil.getUserNameByToken(request);
// //获取文件名称
// String fileName = baseLineCtrls.getFileName();
// Cache<String, PHDFile> phdCache = localCache.getPHDCache();
// PHDFile phd = phdCache.getIfPresent(fileName+"-"+userName);
// if (Objects.isNull(phd)){
// result.error500("请先选择解析文件!");
// return result;
// }
// List<Long> m_vCount = new LinkedList<>();
// long m_nCount = phd.getSpec().getNum_g_channel();
// long m_nSChan = phd.getSpec().getBegin_channel();
// //获取当前角色的颜色配置
// Map<String, String> colorMap = sysUserColorService.initColor(userName);
// // 确保绘制曲线时所有谱都是从1道开始
// int i = 0;
// if(m_nSChan == 0){
// i = 1;
// }
// for(; i<m_nCount; ++i) {
// m_vCount.add(phd.getSpec().getCounts().get(i));
// }
// if(m_nSChan == 0) {
// m_vCount.add(0L);
// }
// if(baseLineCtrls.isReplotNeeded()) {
// baseLineCtrls.setReplotNeeded(false);
// List<ChartData> peakSet = gammaFileUtil.PeakSet(phd.getVPeak(), baseLineCtrls.getBaseline(), colorMap.get("Color_peak"), m_nCount, null, false);
// m_chart->AddData(CreateTempBaseLine(m_data->m_Color[Color_base], "BaseLine"));
// CreateShapeCP(MyChartSpace::Shape_Round);
//
// if(m_baseCtrl.BaseStack.size() > 2) m_baseCtrl.BaseStack.remove(1, m_baseCtrl.BaseStack.size()-2);
// }
return null;
}
@Override
public Result acceptBaseLine(BaseLineCtrls baseLineCtrls, HttpServletRequest request) {
Result result = new Result();
HashMap<String, Object> map = new HashMap<>();
//获取用户名
String userName = JwtUtil.getUserNameByToken(request);
//获取文件名称
String fileName = baseLineCtrls.getFileName();
Cache<String, PHDFile> phdCache = localCache.getPHDCache();
PHDFile phd = phdCache.getIfPresent(fileName+"-"+userName);
if (Objects.isNull(phd)){
result.error500("请先选择解析文件!");
return result;
}
if(baseLineCtrls.getBaseStack().size() > 1) {
for (int i=0; i<baseLineCtrls.getBaseStack().size(); i++) {
baseLineCtrls.getBaseStack().remove(i);
}
BaseControls m_baseCtrl = new BaseLineCtrls();
BeanUtils.copyProperties(baseLineCtrls, m_baseCtrl);
phd.setBaseCtrls(m_baseCtrl);
phd.setVBase(m_baseCtrl.getBaseline());
}
List<Long> m_vCount = new LinkedList<>();
long m_nCount = phd.getSpec().getNum_g_channel();
long m_nSChan = phd.getSpec().getBegin_channel();
//获取当前角色的颜色配置
Map<String, String> colorMap = sysUserColorService.initColor(userName);
// 确保绘制曲线时所有谱都是从1道开始
int i = 0;
if(m_nSChan == 0){
i = 1;
}
for(; i<m_nCount; ++i) {
m_vCount.add(phd.getSpec().getCounts().get(i));
}
if(m_nSChan == 0) {
m_vCount.add(0L);
}
List<SeriseData> differance = gammaFileUtil.Differance(phd, phd.getVPeak(), m_vCount, m_nCount);
map.put("barChart", differance);
ChartData channelBaseLine = gammaFileUtil.Channel_BaseLine(phd, m_nCount, colorMap.get("Color_Base"));
map.put("channelBaseLineChart", channelBaseLine);
List<ChartData> peakSet = gammaFileUtil.PeakSet(phd.getVPeak(), phd.getVBase(), colorMap.get("Color_peak"), m_nCount, null, false);
map.put("peakSet", peakSet);
List<ShapeData> shapeData = gammaFileUtil.CreateShapeCP(phd.getBaseCtrls());
map.put("shapeData", shapeData);
//更新主界面的 Chart
gammaFileUtil.UpdateChart(phd, map, colorMap);
result.setSuccess(true);
result.setResult(map);
return result;
}
@Override
public Result ZeroTime() {
Result result = new Result();
@ -3269,7 +3446,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
result.error500("请先选择解析文件!");
return result;
}
String spectrum = gammaFileUtil.MakeUpSpectrum(phd);
String spectrum = gammaFileUtil.makeUpSpectrum(phd);
result.setSuccess(true);
result.setResult(spectrum);
return result;
@ -3929,161 +4106,200 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
String userName = JwtUtil.getUserNameByToken(request);
Cache<String, PHDFile> phdCache = localCache.getPHDCache();
PHDFile phd = phdCache.getIfPresent(fileName+"-"+userName);
StringBuilder strBuild = new StringBuilder();
//txt文本内容
//文本内容第一块头部信息
String title1 = " %s The Results of Peak Searching %s";
//文本内容第一块头部信息匹配
strBuild.append(titleFormat(title1, 51, StringPool.ASTERISK, StringPool.ASTERISK));
//换行
strBuild.append(System.lineSeparator());
//换行
strBuild.append(System.lineSeparator());
//文本内容第二块
//文本内容第二块匹配格式
String title2 = "%-12s %-12s %-12s %-12s %-12s %-12s %-12s %-12s %-12s %-12s";
//文本内容第二块头部信息
String[] titleArgs2 = new String[]{"PeakID", "Energy(keV)", "Centroid", "Multiplet", "FWHM(keV)", "NetArea", "NAErr%", "Signif", "Sensit", "Nuclide"};
//文本内容第二块头部信息匹配
strBuild.append(rowFormat(title2, titleArgs2));
//换行
strBuild.append(System.lineSeparator());
//换行
strBuild.append(System.lineSeparator());
//遍历数组进行文本内容第二块数据匹配
for (int i=0; i<phd.getVPeak().size(); i++) {
PeakInfo peakInfo = phd.getVPeak().get(i);
String peakId = String.valueOf(i+1);
String energy = NumberFormatUtil.numberFormat(String.valueOf(peakInfo.energy));
String peakCentroid = NumberFormatUtil.numberFormat(String.valueOf(peakInfo.peakCentroid));
String multiIndex = String.valueOf(peakInfo.multiIndex);
String fwhm = NumberFormatUtil.numberFormat(String.valueOf(peakInfo.fwhm));
String area = NumberFormatUtil.numberFormat(String.valueOf(peakInfo.area));
String areaErr = NumberFormatUtil.numberFormat(String.valueOf(peakInfo.areaErr));
String signif = NumberFormatUtil.numberFormat(String.valueOf(peakInfo.significance));
String sensit = NumberFormatUtil.numberFormat(String.valueOf(peakInfo.sensitivity));
String nuclide = StringUtils.join(peakInfo.nuclides, StringPool.SEMICOLON);
strBuild.append(rowFormat(title2, peakId, energy, peakCentroid, multiIndex, fwhm, area, areaErr, signif, sensit, nuclide));
if (Objects.nonNull(phd)) {
StringBuilder strBuild = new StringBuilder();
//txt文本内容
//文本内容第一块头部信息
String title1 = " %s The Results of Peak Searching %s";
//文本内容第一块头部信息匹配
strBuild.append(titleFormat(title1, 51, StringPool.ASTERISK, StringPool.ASTERISK));
//换行
strBuild.append(System.lineSeparator());
}
//换行
strBuild.append(System.lineSeparator());
//文本内容第三块
//文本内容第三块头部信息
String title3 = " %s The Results of Nuclide Identify %s";
strBuild.append(titleFormat(title3, 12, StringPool.ASTERISK, StringPool.ASTERISK));
//换行
strBuild.append(System.lineSeparator());
//换行
strBuild.append(System.lineSeparator());
//文本内容第三块数据
List<List<String>> peakNuclides = phd.getVPeak().stream().map(item -> item.nuclides).collect(Collectors.toList());
List<String> nuclides = new LinkedList<>();
for (int i=0; i<peakNuclides.size(); i++) {
List<String> peakNuclide = peakNuclides.get(i);
nuclides.addAll(peakNuclide);
}
nuclides = nuclides.stream().distinct().collect(Collectors.toList());
String nuclideStr = "";
for (int i=0; i<nuclides.size(); i++) {
nuclideStr+=" "+nuclides.get(i);
}
strBuild.append(nuclideStr);
//换行
strBuild.append(System.lineSeparator());
//换行
strBuild.append(System.lineSeparator());
//文本内容第四块
//文本内容第四块头部信息
String title4 = " %s Nuclide's Activity and Concentration %s";
//文本内容第四块头部信息匹配
strBuild.append(titleFormat(title4, 28, StringPool.ASTERISK, StringPool.ASTERISK));
//换行
strBuild.append(System.lineSeparator());
//文本内容第四块第一部分数据
String data1 = "Activity Reference Time: %s";
String actRefTime = DateUtils.formatDate(phd.getUsedSetting().getRefTime_act(), "yyyy/MM/dd HH:mm:ss");
strBuild.append(rowFormat(data1, actRefTime));
//换行
strBuild.append(System.lineSeparator());
//文本内容第四块第二部分数据
String data2 = "Concentration Reference Time: %s";
String concRefTime = DateUtils.formatDate(phd.getUsedSetting().getRefTime_conc(), "yyyy/MM/dd HH:mm:ss");
strBuild.append(rowFormat(data2, concRefTime));
//换行
strBuild.append(System.lineSeparator());
//文本内容第五块
//文本内容第五块头部信息
String title5 = "%-12s %-12s %-12s %-12s %-12s %-12s %-12s %-12s %-12s %-12s";
String[] titleArgs5 = new String[]{"Nuclide", "Halflife", "Yield%", "Energy(keV)", "Efficiency", "Activity(Bq)", "ActErr%", "MDA(Bq)", "Conc(uBq/m^3)", "MDC(uBq/m^3)"};
strBuild.append(rowFormat(title5, titleArgs5));
//换行
strBuild.append(System.lineSeparator());
//文本内容第五块数据
Map<String, NuclideActMda> mapNucActMda = phd.getMapNucActMda();
for (Map.Entry<String, NuclideActMda> entry:mapNucActMda.entrySet()) {
String key = entry.getKey();
NuclideActMda nuc = entry.getValue();
String halflifeValue = "";
if(nuc.isBCalculateMDA()) {
String units = "S";
double halflife = nuc.getHalflife();
if(halflife >= 31556736) {// 1年 = 365.24 * 24 * 60 * 60 = 31556736s
halflife /= 31556736;
units = "A";
} else if(halflife >= 86400) {// 1天 = 24 * 60 * 60 = 86400s
halflife /= 86400;
units = "D";
} else if(halflife >= 3600) {
halflife /= 3600;
units = "H";
//换行
strBuild.append(System.lineSeparator());
//文本内容第二块
//文本内容第二块匹配格式
String title2 = "%-12s %-12s %-12s %-12s %-12s %-12s %-12s %-12s %-12s %-12s";
//文本内容第二块头部信息
String[] titleArgs2 = new String[]{"PeakID", "Energy(keV)", "Centroid", "Multiplet", "FWHM(keV)", "NetArea", "NAErr%", "Signif", "Sensit", "Nuclide"};
//文本内容第二块头部信息匹配
strBuild.append(rowFormat(title2, titleArgs2));
//换行
strBuild.append(System.lineSeparator());
//遍历数组进行文本内容第二块数据匹配
for (int i=0; i<phd.getVPeak().size(); i++) {
PeakInfo peakInfo = phd.getVPeak().get(i);
String peakId = String.valueOf(i+1);
String energy = NumberFormatUtil.numberFormat(String.valueOf(peakInfo.energy));
String peakCentroid = NumberFormatUtil.numberFormat(String.valueOf(peakInfo.peakCentroid));
String multiIndex = String.valueOf(peakInfo.multiIndex);
String fwhm = NumberFormatUtil.numberFormat(String.valueOf(peakInfo.fwhm));
String area = NumberFormatUtil.numberFormat(String.valueOf(peakInfo.area));
String areaErr = NumberFormatUtil.numberFormat(String.valueOf(peakInfo.areaErr));
String signif = NumberFormatUtil.numberFormat(String.valueOf(peakInfo.significance));
String sensit = NumberFormatUtil.numberFormat(String.valueOf(peakInfo.sensitivity));
String nuclide = StringUtils.join(peakInfo.nuclides, StringPool.SEMICOLON);
strBuild.append(rowFormat(title2, peakId, energy, peakCentroid, multiIndex, fwhm, area, areaErr, signif, sensit, nuclide));
//换行
strBuild.append(System.lineSeparator());
}
//换行
strBuild.append(System.lineSeparator());
//文本内容第三块
//文本内容第三块头部信息
String title3 = " %s The Results of Nuclide Identify %s";
strBuild.append(titleFormat(title3, 12, StringPool.ASTERISK, StringPool.ASTERISK));
//换行
strBuild.append(System.lineSeparator());
//文本内容第三块数据
List<List<String>> peakNuclides = phd.getVPeak().stream().map(item -> item.nuclides).collect(Collectors.toList());
List<String> nuclides = new LinkedList<>();
for (int i=0; i<peakNuclides.size(); i++) {
List<String> peakNuclide = peakNuclides.get(i);
nuclides.addAll(peakNuclide);
}
nuclides = nuclides.stream().distinct().collect(Collectors.toList());
String nuclideStr = "";
for (int i=0; i<nuclides.size(); i++) {
nuclideStr+=" "+nuclides.get(i);
}
strBuild.append(nuclideStr);
//换行
strBuild.append(System.lineSeparator());
//换行
strBuild.append(System.lineSeparator());
//文本内容第四块
//文本内容第四块头部信息
String title4 = " %s Nuclide's Activity and Concentration %s";
//文本内容第四块头部信息匹配
strBuild.append(titleFormat(title4, 28, StringPool.ASTERISK, StringPool.ASTERISK));
//换行
strBuild.append(System.lineSeparator());
//文本内容第四块第一部分数据
String data1 = "Activity Reference Time: %s";
String actRefTime = DateUtils.formatDate(phd.getUsedSetting().getRefTime_act(), "yyyy/MM/dd HH:mm:ss");
strBuild.append(rowFormat(data1, actRefTime));
//换行
strBuild.append(System.lineSeparator());
//文本内容第四块第二部分数据
String data2 = "Concentration Reference Time: %s";
String concRefTime = DateUtils.formatDate(phd.getUsedSetting().getRefTime_conc(), "yyyy/MM/dd HH:mm:ss");
strBuild.append(rowFormat(data2, concRefTime));
//换行
strBuild.append(System.lineSeparator());
//换行
strBuild.append(System.lineSeparator());
//文本内容第五块
//文本内容第五块头部信息
String title5 = "%-12s %-12s %-12s %-12s %-12s %-12s %-12s %-12s %-12s %-12s";
String[] titleArgs5 = new String[]{"Nuclide", "Halflife", "Yield%", "Energy(keV)", "Efficiency", "Activity(Bq)", "ActErr%", "MDA(Bq)", "Conc(uBq/m^3)", "MDC(uBq/m^3)"};
strBuild.append(rowFormat(title5, titleArgs5));
//换行
strBuild.append(System.lineSeparator());
//文本内容第五块数据
Map<String, NuclideActMda> mapNucActMda = phd.getMapNucActMda();
for (Map.Entry<String, NuclideActMda> entry:mapNucActMda.entrySet()) {
String key = entry.getKey();
NuclideActMda nuc = entry.getValue();
String halflifeValue = "";
if(nuc.isBCalculateMDA()) {
String units = "S";
double halflife = nuc.getHalflife();
if(halflife >= 31556736) {// 1年 = 365.24 * 24 * 60 * 60 = 31556736s
halflife /= 31556736;
units = "A";
} else if(halflife >= 86400) {// 1天 = 24 * 60 * 60 = 86400s
halflife /= 86400;
units = "D";
} else if(halflife >= 3600) {
halflife /= 3600;
units = "H";
}
halflifeValue = NumberFormatUtil.numberFormat(String.valueOf(halflife)) + units;
}
String efficiency = NumberFormatUtil.numberFormat(String.valueOf(nuc.getEfficiency()));
String activity = NumberFormatUtil.numberFormat(String.valueOf(nuc.getActivity()));
String actErr = NumberFormatUtil.numberFormat(String.valueOf(nuc.getAct_err()/nuc.getActivity()*100));
String mda = NumberFormatUtil.numberFormat(String.valueOf(nuc.getMda()));
String conc = NumberFormatUtil.numberFormat(String.valueOf(nuc.getConcentration()));
String mdc = NumberFormatUtil.numberFormat(String.valueOf(nuc.getMdc()));
if(nuc.getCalculateIdx() >= 0 && nuc.getCalculateIdx() < nuc.getVEnergy().size()) {
String yield = NumberFormatUtil.numberFormat(String.valueOf(nuc.getVYield().get(nuc.getCalculateIdx())*100));
String energy = NumberFormatUtil.numberFormat(String.valueOf(nuc.getVEnergy().get(nuc.getCalculateIdx())));
strBuild.append(rowFormat(title5, key, halflifeValue, yield, energy, efficiency, activity, actErr, mda, conc, mdc));
strBuild.append(System.lineSeparator());
} else {
strBuild.append(rowFormat(title5, key, halflifeValue, "NULL", "NULL", efficiency, activity, actErr, mda, conc, mdc));
strBuild.append(System.lineSeparator());
}
halflifeValue = NumberFormatUtil.numberFormat(String.valueOf(halflife)) + units;
}
String efficiency = NumberFormatUtil.numberFormat(String.valueOf(nuc.getEfficiency()));
String activity = NumberFormatUtil.numberFormat(String.valueOf(nuc.getActivity()));
String actErr = NumberFormatUtil.numberFormat(String.valueOf(nuc.getAct_err()/nuc.getActivity()*100));
String mda = NumberFormatUtil.numberFormat(String.valueOf(nuc.getMda()));
String conc = NumberFormatUtil.numberFormat(String.valueOf(nuc.getConcentration()));
String mdc = NumberFormatUtil.numberFormat(String.valueOf(nuc.getMdc()));
if(nuc.getCalculateIdx() >= 0 && nuc.getCalculateIdx() < nuc.getVEnergy().size()) {
String yield = NumberFormatUtil.numberFormat(String.valueOf(nuc.getVYield().get(nuc.getCalculateIdx())*100));
String energy = NumberFormatUtil.numberFormat(String.valueOf(nuc.getVEnergy().get(nuc.getCalculateIdx())));
strBuild.append(rowFormat(title5, key, halflifeValue, yield, energy, efficiency, activity, actErr, mda, conc, mdc));
strBuild.append(System.lineSeparator());
} else {
strBuild.append(rowFormat(title5, key, halflifeValue, "NULL", "NULL", efficiency, activity, actErr, mda, conc, mdc));
strBuild.append(System.lineSeparator());
}
}
strBuild.append(System.lineSeparator());
String detectorCode = phd.getHeader().getDetector_code();
String date = phd.getAcq().getAcquisition_start_date().replace("/", "");
String time = phd.getAcq().getAcquisition_start_time().replace(":", "").substring(0, 4);
String dataType = phd.getMsgInfo().getData_type().substring(0, 1);
String format = ".txt";
String txtFileName = String.format("%s-%s_%s_%s_RESULT%s", detectorCode, date, time, dataType, format);
//导出数据内容到txt文本
OutputStream fos = null;
try {
//设置响应类型
response.setContentType("application/octet-stream");
//解决中文不能生成文件
response.setHeader("Content-Disposition", "attachment; fileName=" + URLEncoder.encode(txtFileName,"UTF-8"));
fos = response.getOutputStream();
fos.write(strBuild.toString().getBytes());
} catch (FileNotFoundException e) {
throw new RuntimeException(e);
} catch (IOException e) {
throw new RuntimeException(e);
} finally {
strBuild.append(System.lineSeparator());
String detectorCode = phd.getHeader().getDetector_code();
String date = phd.getAcq().getAcquisition_start_date().replace("/", "");
String time = phd.getAcq().getAcquisition_start_time().replace(":", "").substring(0, 4);
String dataType = phd.getMsgInfo().getData_type().substring(0, 1);
String format = ".txt";
String txtFileName = String.format("%s-%s_%s_%s_RESULT%s", detectorCode, date, time, dataType, format);
//导出数据内容到txt文本
OutputStream fos = null;
try {
if (Objects.nonNull(fos)) {
fos.close();
}
//设置响应类型
response.setContentType("application/octet-stream");
//解决中文不能生成文件
response.setHeader("Content-Disposition", "attachment; fileName=" + URLEncoder.encode(txtFileName,"UTF-8"));
fos = response.getOutputStream();
fos.write(strBuild.toString().getBytes());
} catch (FileNotFoundException e) {
throw new RuntimeException(e);
} catch (IOException e) {
throw new RuntimeException(e);
} finally {
try {
if (Objects.nonNull(fos)) {
fos.close();
}
} catch (IOException e) {
throw new RuntimeException(e);
}
}
}
}
@Override
public void saveToPHD(String fileName, HttpServletRequest request, HttpServletResponse response) {
//获取当前登陆用户名
String userName = JwtUtil.getUserNameByToken(request);
//读取本地缓存的phd文件信息
Cache<String, PHDFile> phdCache = localCache.getPHDCache();
PHDFile phd = phdCache.getIfPresent(fileName+"-"+userName);
if (Objects.nonNull(phd)) {
String detectorCode = phd.getHeader().getDetector_code();
String date = phd.getAcq().getAcquisition_start_date().replace("/", "");
String time = phd.getAcq().getAcquisition_start_time().replace(":", "").substring(0, 4);
String dataType = phd.getMsgInfo().getData_type().substring(0, 1);
String phdFileName = String.format("%s-%s_%s_%s.PHD", detectorCode, date, time, dataType);
String spectrum = gammaFileUtil.makeUpSpectrum(phd);
//导出数据内容到txt文本
OutputStream fos = null;
try {
//设置响应类型
response.setContentType("application/octet-stream");
//解决中文不能生成文件
response.setHeader("Content-Disposition", "attachment; fileName=" + URLEncoder.encode(phdFileName,"UTF-8"));
fos = response.getOutputStream();
fos.write(spectrum.getBytes());
} catch (FileNotFoundException e) {
throw new RuntimeException(e);
} catch (IOException e) {
throw new RuntimeException(e);
} finally {
try {
if (Objects.nonNull(fos)) {
fos.close();
}
} catch (IOException e) {
throw new RuntimeException(e);
}
}
}
}
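
A hedged sketch of downloading the regenerated spectrum through the new saveToPHD endpoint. The base path, port and token header name are assumptions; the server builds the attachment name from the DETECTORCODE-DATE_TIME_DATATYPE.PHD pattern shown above and streams the text produced by makeUpSpectrum(phd).

import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLEncoder;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;

public class SaveToPhdSketch {
    public static void main(String[] args) throws Exception {
        String base = "http://localhost:8080/gamma";               // assumed base path
        String fileName = URLEncoder.encode("SAMPLE.PHD", "UTF-8");

        HttpURLConnection conn = (HttpURLConnection)
                new URL(base + "/saveToPHD?fileName=" + fileName).openConnection();
        conn.setRequestProperty("X-Access-Token", "<jwt>");        // assumed auth header

        // The response body is the PHD text rebuilt on the server; save it locally.
        try (InputStream in = conn.getInputStream()) {
            Files.copy(in, Paths.get("SAMPLE_RESULT.PHD"), StandardCopyOption.REPLACE_EXISTING);
        }
    }
}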