gammaController层增加方法注解
实现InteractiveTool页面新增行数确定新增接口 实现InteractiveTool页面delete按钮 实现Config User Library页面save按钮 实现InteractiveTool页面增加核素信息接口 实现InteractiveTool页面删除核素信息接口 实现InteractiveTool页面查看峰对应备注信息接口 实现InteractiveTool页面add峰对应备注信息接口 实现InteractiveTool页面查看备注信息接口 实现InteractiveTool页面add备注信息接口 查看Gamma Viewer Log页面数据修改接口内容 DBUtil存放位置修改 GammaFileUtil增加InitPeakTable方法 GammaFileUtil增加CalcNuclideMDA方法 GammaFileUtil增加ReCalcMdaMdc方法 NameStandUtil方法内容修改 PHDFileUtil新增FileNameByStandardForm方法 PHDFileUtil新增GetMatchFile方法 PHDFileUtil新增GetQCPHD方法 PeakInfo实体类comments增加默认赋值 新增AcceptInfo,AnalyseBetaInfo,CommentsInfo,NuclideInfo,UserLibraryInfo实体类 修改GardsXeResultsSpectrum实体类 新增deleteNuclidesByUserName方法
This commit is contained in:
parent
868f0b7a93
commit
5cc898bf83
|
@ -63,6 +63,7 @@ public class PeakInfo implements Serializable {
|
|||
|
||||
public PeakInfo(){
|
||||
nuclides = new LinkedList<>();
|
||||
comments = "";
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -1,5 +1,7 @@
|
|||
package org.jeecg.common.util;
|
||||
|
||||
import com.baomidou.mybatisplus.core.toolkit.StringPool;
|
||||
import org.springframework.beans.factory.annotation.Value;
|
||||
import org.springframework.stereotype.Component;
|
||||
|
||||
import java.io.Serializable;
|
||||
|
@ -11,6 +13,9 @@ import java.sql.Statement;
|
|||
@Component
|
||||
public class DBUtil implements Serializable {
|
||||
|
||||
@Value("${parameter.dbPath}")
|
||||
private String dbFilePath;
|
||||
|
||||
public Connection openDB(){
|
||||
Connection conn = null;
|
||||
try {
|
||||
|
@ -20,7 +25,7 @@ public class DBUtil implements Serializable {
|
|||
e.printStackTrace();
|
||||
}
|
||||
//sqlite3文件的绝对路径
|
||||
String db = "D:\\workspace\\AnalysisSystemForRadionuclide\\AnalysisSystemForRadionuclide\\AnalysisSystemForRadionuclide\\jeecg-module-spectrum-analysis\\src\\main\\java\\org\\jeecg\\common\\db\\GammaDatabase.db";
|
||||
String db = dbFilePath + StringPool.SLASH + "GammaDatabase.db";
|
||||
try {
|
||||
conn = DriverManager.getConnection("jdbc:sqlite:" + db);
|
||||
} catch (SQLException throwables) {
|
||||
|
@ -39,10 +44,8 @@ public class DBUtil implements Serializable {
|
|||
try {
|
||||
if(stat!=null)
|
||||
stat.close();
|
||||
|
||||
if(con!=null)
|
||||
con.close();
|
||||
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
|
|
|
@ -34,6 +34,8 @@ import javax.xml.parsers.ParserConfigurationException;
|
|||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.math.BigDecimal;
|
||||
import java.math.RoundingMode;
|
||||
import java.text.DecimalFormat;
|
||||
import java.text.NumberFormat;
|
||||
import java.text.ParseException;
|
||||
|
@ -72,7 +74,6 @@ public class GammaFileUtil {
|
|||
// 设置编码,当文件中存在中文且上传后文件乱码时可使用此配置项
|
||||
ftpClient.setControlEncoding("UTF-8");
|
||||
ftpClient.setFileTransferMode(FTPClient.STREAM_TRANSFER_MODE);
|
||||
pathName = StringPool.SLASH + spectrumPathProperties.getRootPath() + StringPool.SLASH + pathName;
|
||||
ftpClient.changeWorkingDirectory(pathName);
|
||||
List<FTPFile> ftpFiles = Arrays.asList(ftpClient.listFiles());
|
||||
ftpFiles = ftpFiles.stream().filter(item -> item.getName().equals(fileName)).collect(Collectors.toList());
|
||||
|
@ -88,8 +89,6 @@ public class GammaFileUtil {
|
|||
//将ftp文件的输入流复制给临时文件
|
||||
FileUtils.copyInputStreamToFile(inputStream, file);
|
||||
//读取文件信息
|
||||
//String path = "C:\\Users\\a\\Desktop\\AUX04_005-20230601_1405_S_FULL_40186.PHD";
|
||||
//EnergySpectrumStruct struct = EnergySpectrumHandler.getSourceData(path);
|
||||
EnergySpectrumStruct struct = EnergySpectrumHandler.getSourceData(file.getAbsolutePath());
|
||||
//MsgInfo
|
||||
phd.getMsgInfo().setMsg_id(struct.msg_id);
|
||||
|
@ -239,7 +238,7 @@ public class GammaFileUtil {
|
|||
}
|
||||
|
||||
// 初始化默认分析设置
|
||||
if (phd.getHeader().getSystem_type().equals("P")) {
|
||||
if(phd.getHeader().getSystem_type().equalsIgnoreCase("P")) {
|
||||
phd.getSetting().setECutAnalysis_Low(35.0);
|
||||
phd.getSetting().setBUpdateCal(true);
|
||||
}
|
||||
|
@ -565,7 +564,7 @@ public class GammaFileUtil {
|
|||
peakInfo.BWWidthChan = peaksSpectrum.getBwwidthchan()==null?0:peaksSpectrum.getBwwidthchan();
|
||||
peakInfo.recoilBetaChan = "1";
|
||||
peakInfo.recoilDeltaChan = peaksSpectrum.getRecoildeltachan()==null?"1":peaksSpectrum.getRecoildeltachan().toString();
|
||||
peakInfo.comments = peaksSpectrum.getPeakcomments();
|
||||
peakInfo.comments = StringUtils.isNotBlank(peaksSpectrum.getPeakcomments())?peaksSpectrum.getPeakcomments():"";
|
||||
phd.getVPeak().add(peakInfo);
|
||||
}
|
||||
}
|
||||
|
@ -792,12 +791,12 @@ public class GammaFileUtil {
|
|||
qcState.set(2, phd.getQcItems().get("decay_time").isBPass()?"BLUE":"RED");
|
||||
qcState.set(3, phd.getQcItems().get("samp_vol").isBPass()?"BLUE":"RED");
|
||||
|
||||
if(phd.getHeader().getSystem_type().equals("P")) {
|
||||
if(phd.getHeader().getSystem_type().equalsIgnoreCase("P")) {
|
||||
if(Objects.isNull(phd.getQcItems().get("Be7-FWHM"))) {
|
||||
qcState.set(4, phd.getQcItems().get("Be7-FWHM").isBPass()?"BLUE":"RED");
|
||||
qcState.set(5, phd.getQcItems().get("Ba140-MDC").isBPass()?"BLUE":"RED");
|
||||
}
|
||||
} else if(phd.getHeader().getSystem_type().equals("G")) {
|
||||
} else if(phd.getHeader().getSystem_type().equalsIgnoreCase("G")) {
|
||||
if(Objects.isNull(phd.getQcItems().get("Xe133-MDC"))) {
|
||||
qcState.set(6, phd.getQcItems().get("Xe133-MDC").isBPass()?"BLUE":"RED");
|
||||
}
|
||||
|
@ -806,8 +805,8 @@ public class GammaFileUtil {
|
|||
}
|
||||
|
||||
public void RunQC(PHDFile phd){
|
||||
System.loadLibrary("GammaAnaly");
|
||||
try {
|
||||
System.loadLibrary("GammaAnaly");
|
||||
Date start = DateUtils.parseDate(phd.getCollect().getCollection_start_date() + " " + phd.getCollect().getCollection_start_time().substring(0, phd.getCollect().getCollection_start_time().indexOf(StringPool.DOT)),"yyyy/MM/dd HH:mm:ss");
|
||||
Date end = DateUtils.parseDate(phd.getCollect().getCollection_stop_date() + " " + phd.getCollect().getCollection_stop_time().substring(0, phd.getCollect().getCollection_stop_time().indexOf(StringPool.DOT)),"yyyy/MM/dd HH:mm:ss");
|
||||
Date acq = DateUtils.parseDate(phd.getAcq().getAcquisition_start_date() + " " + phd.getAcq().getAcquisition_start_time().substring(0, phd.getAcq().getAcquisition_start_time().indexOf(StringPool.DOT)),"yyyy/MM/dd HH:mm:ss");
|
||||
|
@ -817,11 +816,18 @@ public class GammaFileUtil {
|
|||
double Decay_hour = (end.getTime()/1000 - acq.getTime()/1000) / 3600.0;
|
||||
|
||||
Double ener_Be7 = 0.0;
|
||||
Map<String, Double> vMdcInfoMap = new HashMap<>();
|
||||
List<Double> vMdcInfo = new LinkedList<>();
|
||||
Map<String, QcCheckItem> qcItems = new LinkedHashMap<>();
|
||||
if(!this.ReadQCLimit(qcItems, vMdcInfo, ener_Be7, phd.getHeader().getSystem_type())) {
|
||||
if(!this.ReadQCLimit(qcItems, vMdcInfoMap, ener_Be7, phd.getHeader().getSystem_type().toUpperCase())) {
|
||||
String WARNING = "Read QC Flags from SystemManager.xml Failed!";
|
||||
}
|
||||
if (CollectionUtils.isNotEmpty(vMdcInfoMap)) {
|
||||
vMdcInfo.add(vMdcInfoMap.get("0"));
|
||||
vMdcInfo.add(vMdcInfoMap.get("1"));
|
||||
vMdcInfo.add(vMdcInfoMap.get("2"));
|
||||
}
|
||||
|
||||
QcCheckItem colTime = qcItems.get("col_time");
|
||||
colTime.setValue(collect_hour);
|
||||
qcItems.put("col_time", colTime);
|
||||
|
@ -843,7 +849,7 @@ public class GammaFileUtil {
|
|||
qcItems.put("airFlow", airFlow);
|
||||
|
||||
if(phd.isValid() && phd.getVBase().size() == phd.getSpec().getNum_g_channel()) {
|
||||
if(phd.getHeader().getSystem_type().equals("P")) {
|
||||
if(phd.getHeader().getSystem_type().equalsIgnoreCase("P")) {
|
||||
List<Double> energy = new LinkedList<>();
|
||||
energy.add(ener_Be7);
|
||||
CalValuesOut calValuesOut = CalValuesHandler.calFcnEval(energy, phd.getUsedResoPara().getP());
|
||||
|
@ -977,7 +983,7 @@ public class GammaFileUtil {
|
|||
}
|
||||
}
|
||||
|
||||
public boolean ReadQCLimit(Map<String, QcCheckItem> qcItems, List<Double> vMdcInfo,Double ener_Be7, String systemType){
|
||||
public boolean ReadQCLimit(Map<String, QcCheckItem> qcItems, Map<String, Double> vMdcInfoMap, Double ener_Be7, String systemType){
|
||||
try {
|
||||
String filePath = parameterFilePath+ File.separator +"SystemManager.xml";
|
||||
//创建一个文档解析器工厂
|
||||
|
@ -1050,11 +1056,11 @@ public class GammaFileUtil {
|
|||
Node item = attributes.item(j);
|
||||
//判断属性名称是否是 number
|
||||
if(item.getNodeName().equals(CalType.ENERGY_CAL.getType())) {
|
||||
vMdcInfo.set(0,Double.valueOf(item.getNodeValue()));
|
||||
vMdcInfoMap.put("0", Double.valueOf(item.getNodeValue()));
|
||||
}else if (item.getNodeName().equals("yield")){
|
||||
vMdcInfo.set(1,Double.valueOf(item.getNodeValue()));
|
||||
vMdcInfoMap.put("1", Double.valueOf(item.getNodeValue()));
|
||||
}else if (item.getNodeName().equals("halflife")){
|
||||
vMdcInfo.set(2,Double.valueOf(item.getNodeValue()));
|
||||
vMdcInfoMap.put("2", Double.valueOf(item.getNodeValue()));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1098,11 +1104,11 @@ public class GammaFileUtil {
|
|||
Node item = attributes.item(j);
|
||||
//判断属性名称是否是 number
|
||||
if(item.getNodeName().equals(CalType.ENERGY_CAL.getType())) {
|
||||
vMdcInfo.set(0,Double.valueOf(item.getNodeValue()));
|
||||
vMdcInfoMap.put("0", Double.valueOf(item.getNodeValue()));
|
||||
}else if (item.getNodeName().equals("yield")){
|
||||
vMdcInfo.set(1,Double.valueOf(item.getNodeValue()));
|
||||
vMdcInfoMap.put("1", Double.valueOf(item.getNodeValue()));
|
||||
}else if (item.getNodeName().equals("halflife")){
|
||||
vMdcInfo.set(2,Double.valueOf(item.getNodeValue()));
|
||||
vMdcInfoMap.put("2", Double.valueOf(item.getNodeValue()));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1135,7 +1141,7 @@ public class GammaFileUtil {
|
|||
detailInfo.set(0, Objects.nonNull(sampleId)?sampleId.toString():""); // Sample_Id
|
||||
detailInfo.set(1, phd.getHeader().getSite_code()); // Station_Code
|
||||
detailInfo.set(2, phd.getHeader().getDetector_code()); // Detector_Code
|
||||
detailInfo.set(3, phd.getHeader().getSystem_type()); // System_Type
|
||||
detailInfo.set(3, phd.getHeader().getSystem_type().toUpperCase()); // System_Type
|
||||
detailInfo.set(4, phd.getMsgInfo().getData_type()); // Data_Type
|
||||
detailInfo.set(5, phd.getHeader().getSpectrum_quantity()); // Spectral_Qualifier
|
||||
detailInfo.set(6, phd.getHeader().getSample_ref_id()); // SRID
|
||||
|
@ -1181,7 +1187,7 @@ public class GammaFileUtil {
|
|||
m_vCount.add(0L);
|
||||
}
|
||||
ChartData shadowEnergyChart = this.Energy_Count(phd, m_vCount, m_nCount);
|
||||
ChartData shadowChannelChart = this.Channel_Count(phd, m_vCount, m_nCount);
|
||||
ChartData shadowChannelChart = this.Channel_Count(m_vCount, m_nCount);
|
||||
map.put("shadowEnergyChart", shadowEnergyChart);
|
||||
map.put("shadowChannelChart", shadowChannelChart);
|
||||
List<ChartData> allData = AllData(false, phd, m_vCount, m_nCount);
|
||||
|
@ -1194,7 +1200,7 @@ public class GammaFileUtil {
|
|||
|
||||
public ChartData Energy_Count(PHDFile phd, List<Long> m_vCount, long m_nCount){
|
||||
int start = 0;
|
||||
long end = 0;
|
||||
long end = -1;
|
||||
//Energy
|
||||
ChartData data1 = new ChartData();
|
||||
if(m_nCount > 1 && phd.getVEnergy().size() == m_nCount) {
|
||||
|
@ -1217,9 +1223,9 @@ public class GammaFileUtil {
|
|||
return data1;
|
||||
}
|
||||
|
||||
public ChartData Channel_Count(PHDFile phd, List<Long> m_vCount, long m_nCount){
|
||||
int start = 0;
|
||||
long end = 0;
|
||||
public ChartData Channel_Count(List<Long> m_vCount, long m_nCount){
|
||||
int start = 1;
|
||||
long end = -1;
|
||||
ChartData data1 = new ChartData();
|
||||
if(m_nCount > 1) {
|
||||
if(start < 1){
|
||||
|
@ -1244,7 +1250,7 @@ public class GammaFileUtil {
|
|||
public List<ChartData> Lc_Scac_base(boolean showLc, PHDFile phd, long m_nCount){
|
||||
List<ChartData> datalist = new LinkedList<>();
|
||||
int start = 0;
|
||||
long end = 0;
|
||||
long end = -1;
|
||||
if(m_nCount > 1 && phd.getVEnergy().size() == m_nCount && phd.getVLc().size() == m_nCount
|
||||
&& phd.getVScac().size() == m_nCount && phd.getVBase().size() == m_nCount) {
|
||||
if(start < 1) {
|
||||
|
@ -1347,7 +1353,7 @@ public class GammaFileUtil {
|
|||
return datalist;
|
||||
}
|
||||
int start = 0;
|
||||
long end = 0;
|
||||
long end = -1;
|
||||
if(start < 1){
|
||||
start = 1;
|
||||
}
|
||||
|
@ -1455,7 +1461,7 @@ public class GammaFileUtil {
|
|||
|
||||
public List<ChartData> AllData(boolean showLc, PHDFile phd, List<Long> m_vCount, long m_nCount){
|
||||
List<ChartData> datalist = new LinkedList<>();
|
||||
datalist.add(Channel_Count(phd, m_vCount, m_nCount));
|
||||
datalist.add(Channel_Count(m_vCount, m_nCount));
|
||||
datalist.add(Energy_Count(phd, m_vCount, m_nCount));
|
||||
datalist.addAll(Lc_Scac_base(showLc, phd, m_nCount));
|
||||
datalist.addAll(Channel_Peak(phd, m_nCount));
|
||||
|
@ -1643,7 +1649,7 @@ public class GammaFileUtil {
|
|||
}
|
||||
|
||||
NuclideActMda ActMda = phd.getMapNucActMda().get(iter.getKey());
|
||||
boolean bActBigger = CalculateMDCs(phd, ActMda, mainPeakIdx, lambda, maxFoundYield, 1);
|
||||
boolean bActBigger = CalculateMDCs(phd, ActMda, mainPeakIdx, lambda, maxFoundYield, 1.0);
|
||||
|
||||
if(rate > 0.8 || bActBigger) {
|
||||
ActMda.setHalflife(halflife);
|
||||
|
@ -1853,10 +1859,10 @@ public class GammaFileUtil {
|
|||
return mapLines;
|
||||
}
|
||||
|
||||
public ChartData Channel_BaseLine(PHDFile phd, List<Long> m_vCount, long m_nCount) {
|
||||
public ChartData Channel_BaseLine(PHDFile phd, long m_nCount) {
|
||||
ChartData cData = new ChartData();
|
||||
int start =0;
|
||||
long end = 0;
|
||||
long end = -1;
|
||||
if(m_nCount > 1 && phd.getVBase().size() == m_nCount) {
|
||||
if(start < 1){
|
||||
start = 1;
|
||||
|
@ -1879,12 +1885,11 @@ public class GammaFileUtil {
|
|||
return cData;
|
||||
}
|
||||
|
||||
//起始道 结束道
|
||||
public List<SeriseData> Differance(PHDFile phd, List<PeakInfo> vecPeak, List<Long> m_vCount, long m_nCount){
|
||||
System.loadLibrary("GammaAnaly");
|
||||
List<SeriseData> pointlist = new LinkedList<>();
|
||||
int start =0;
|
||||
long end = 0;
|
||||
long end = -1;
|
||||
int peakNum = vecPeak.size();
|
||||
if(peakNum < 1 || phd.getVBase().size() != m_nCount){
|
||||
return pointlist;
|
||||
|
@ -3371,7 +3376,6 @@ public class GammaFileUtil {
|
|||
return bRet;
|
||||
}
|
||||
|
||||
|
||||
public Map<String ,Object> UpdateDatasNuclideActivity(Map<String, NuclideActMda> mapNucAct, Date act_ref, Date con_ref) {
|
||||
Map<String ,Object> map = new HashMap<>();
|
||||
map.put("dateTime_act_ref", act_ref);
|
||||
|
@ -3472,7 +3476,7 @@ public class GammaFileUtil {
|
|||
spectrum.append("#Header "+phd.getHeader().getDesignator()+"\r\n");
|
||||
spectrum.append(RightFill(phd.getHeader().getSite_code(), 5)+
|
||||
" "+RightFill(phd.getHeader().getDetector_code(), 9)+
|
||||
" "+RightFill(phd.getHeader().getSystem_type(), 1)+
|
||||
" "+RightFill(phd.getHeader().getSystem_type().toUpperCase(), 1)+
|
||||
" "+RightFill(phd.getHeader().getSample_geometry(), 17)+
|
||||
" "+RightFill(phd.getHeader().getSpectrum_quantity(), 4)+"\r\n");
|
||||
spectrum.append(phd.getHeader().getSample_ref_id() + "\r\n");
|
||||
|
@ -3799,6 +3803,7 @@ public class GammaFileUtil {
|
|||
}
|
||||
|
||||
public void PeaksChanged(PHDFile phd) {
|
||||
System.loadLibrary("GammaAnaly");
|
||||
List<Double> vCentroid = new LinkedList<>();
|
||||
List<Double> vFwhmCh = new LinkedList<>();
|
||||
List<Double> vTail = new LinkedList<>();
|
||||
|
@ -3809,14 +3814,9 @@ public class GammaFileUtil {
|
|||
vTail.add(peak.tail);
|
||||
vUpperTail.add(peak.upperTail);
|
||||
}
|
||||
//m_data->m_phd->vPeak
|
||||
CalValuesHandler.ComputePeakRange(phd.getVPeak().size(), (int) phd.getSpec().getNum_g_channel(), vCentroid, vFwhmCh, vTail, vUpperTail);
|
||||
//
|
||||
// if(m_data->m_phd->header.system_type.toUpper() == "P")
|
||||
// {
|
||||
// AlgFunc::NuclidesIdent(m_data->m_phd, m_data->m_mapNucLineP);
|
||||
// }
|
||||
// else AlgFunc::NuclidesIdent(m_data->m_phd, m_data->m_mapNucLineG);
|
||||
//重新计算各个peak的左值 右值 multiIndex
|
||||
StructInsertOutput structInsertOutput = CalValuesHandler.ComputePeakRange(phd.getVPeak().size(), (int) phd.getSpec().getNum_g_channel(), vCentroid, vFwhmCh, vTail, vUpperTail);
|
||||
System.out.println(structInsertOutput.vLeft.size());
|
||||
}
|
||||
|
||||
public int FindNearPeak(List<PeakInfo> vPeak, int channel, boolean bFind) {
|
||||
|
@ -3866,12 +3866,18 @@ public class GammaFileUtil {
|
|||
nuclide+=peakNuclide+";";
|
||||
}
|
||||
tablePeaks.setNuclide(StringUtils.isBlank(nuclide)?nuclide:nuclide.substring(0,nuclide.length()-1));
|
||||
tablePeaks.setEnergy(String.valueOf(peak.energy));
|
||||
tablePeaks.setNetArea(String.valueOf(peak.area));
|
||||
tablePeaks.setFwhm(String.valueOf(peak.fwhm));
|
||||
BigDecimal energy = new BigDecimal(peak.energy);
|
||||
energy = energy.setScale(3, RoundingMode.HALF_UP);
|
||||
tablePeaks.setEnergy(String.valueOf(energy));
|
||||
BigDecimal area = new BigDecimal(peak.area);
|
||||
area = area.setScale(4, RoundingMode.HALF_UP);
|
||||
tablePeaks.setNetArea(String.valueOf(area));
|
||||
BigDecimal fwhm = new BigDecimal(peak.fwhm);
|
||||
fwhm = fwhm.setScale(5, RoundingMode.HALF_UP);
|
||||
tablePeaks.setFwhm(String.valueOf(fwhm));
|
||||
tablePeaks.setStep(String.valueOf(peak.area * peak.stepRatio));
|
||||
tablePeaks.setBwGamma("0");
|
||||
// QLineEdit *edit_bwGamma = new QLineEdit(QString::number(peak.BWWidthChan * AlgFunc::calDerivaOut(peak.peakCentroid, m_enerPara)));
|
||||
double deriva = CalValuesHandler.calDerivaOut(peak.peakCentroid, phd.getUsedEnerPara().getP());
|
||||
tablePeaks.setBwGamma(String.valueOf(peak.BWWidthChan * deriva));
|
||||
tablePeaks.setNetAreaB(false);
|
||||
tablePeaks.setCentroid(true);
|
||||
tablePeaks.setFwhmB(true);
|
||||
|
@ -3880,7 +3886,139 @@ public class GammaFileUtil {
|
|||
return tablePeaksList;
|
||||
}
|
||||
|
||||
public List<PeakInfo> InitPeakTable(List<PeakInfo> vPeak) {
|
||||
List<PeakInfo> result = new LinkedList<>();
|
||||
for(int i=0; i<vPeak.size(); i++) {
|
||||
PeakInfo peakInfo = new PeakInfo();
|
||||
peakInfo.index = i+1;
|
||||
BigDecimal energy = new BigDecimal(vPeak.get(i).energy);
|
||||
energy = energy.setScale(3, RoundingMode.HALF_UP);
|
||||
peakInfo.energy = energy.doubleValue();
|
||||
BigDecimal peakCentroid = new BigDecimal(vPeak.get(i).peakCentroid);
|
||||
peakCentroid = peakCentroid.setScale(3, RoundingMode.HALF_UP);
|
||||
peakInfo.peakCentroid = peakCentroid.doubleValue();
|
||||
BigDecimal fwhm = new BigDecimal(vPeak.get(i).fwhm);
|
||||
fwhm = fwhm.setScale(3, RoundingMode.HALF_UP);
|
||||
peakInfo.fwhm = fwhm.doubleValue();
|
||||
BigDecimal area = new BigDecimal(vPeak.get(i).area);
|
||||
area = area.setScale(3, RoundingMode.HALF_UP);
|
||||
peakInfo.area = area.doubleValue();
|
||||
BigDecimal significance = new BigDecimal(vPeak.get(i).significance);
|
||||
significance = significance.setScale(3, RoundingMode.HALF_UP);
|
||||
peakInfo.significance = significance.doubleValue();
|
||||
peakInfo.comments = vPeak.get(i).comments;
|
||||
peakInfo.nuclides = vPeak.get(i).nuclides;
|
||||
result.add(peakInfo);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
public void CalcNuclideMDA(PHDFile phd, NuclideLines lines, String nucName, List<Integer> vPeakIdx) {
|
||||
if(lines.halflife <= 0) {
|
||||
return;
|
||||
}
|
||||
// 过滤核素能量小于ECutAnalysis_Low的射线
|
||||
FilterNuclideLine(lines, phd.getUsedSetting().getECutAnalysis_Low());
|
||||
|
||||
// 获取需特殊处理的核素
|
||||
Map<String, Double> mapHalflife = new HashMap<>(); // 用其他核素半衰期计算活度/浓度的核素
|
||||
List<String> vNuclides = new LinkedList<>(); // 只识别不计算活度/浓度的核素
|
||||
ReadSpecialNuclides(mapHalflife, vNuclides);
|
||||
|
||||
double energyWidth = phd.getUsedSetting().getEnergyTolerance();
|
||||
List<Double> vEnergy = lines.vEnergy; // 该核素的所有γ射线能量
|
||||
double maxYield = 0;
|
||||
int mainPeakIdx = -1; // 记录核素主γ峰的索引下标
|
||||
|
||||
NuclideActMda ActMda = new NuclideActMda();
|
||||
ActMda.setHalflife(Objects.isNull(mapHalflife.get(nucName))?lines.halflife : mapHalflife.get(nucName));
|
||||
for (int i=0, j=0; i<vEnergy.size(); i++) {
|
||||
for(; j<vPeakIdx.size(); ++j) {
|
||||
double energy = phd.getVPeak().get(vPeakIdx.get(j)).energy;
|
||||
if(vEnergy.get(i) < energy - energyWidth) {
|
||||
break;
|
||||
} else if(vEnergy.get(i) <= energy + energyWidth) {
|
||||
ActMda.getVEnergy().add(vEnergy.get(i));
|
||||
ActMda.getVUncertE().add(lines.vUncertE.get(i));
|
||||
ActMda.getVYield().add(lines.vYield.get(i));
|
||||
ActMda.getVUncertY().add(lines.vUncertY.get(i));
|
||||
ActMda.getFullNames().add(lines.fullNames.get(i));
|
||||
ActMda.getVPeakIdx().add(vPeakIdx.get(j)+1);
|
||||
if(lines.key_flag == i) {
|
||||
ActMda.setKey_flag(ActMda.getVEnergy().size()-1);
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
for(int i=0; i<ActMda.getVYield().size(); i++) {
|
||||
if(ActMda.getVYield().get(i) > maxYield) {
|
||||
maxYield = ActMda.getVYield().get(i);
|
||||
mainPeakIdx = ActMda.getVPeakIdx().get(i)-1;
|
||||
ActMda.setCalculateIdx(i);
|
||||
}
|
||||
}
|
||||
if(mainPeakIdx < 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
// 如果该核素属特殊核素,则用“特殊核素配置文件”中指明的其他核素的半衰期
|
||||
double halflife = Objects.isNull(mapHalflife.get(nucName))?lines.halflife : mapHalflife.get(nucName);
|
||||
double lambda = Math.log(2.0) / halflife;
|
||||
|
||||
CalculateMDCs(phd, ActMda, mainPeakIdx, lambda, maxYield, 1.0);
|
||||
|
||||
ActMda.setBCalculateMDA(true);
|
||||
phd.getMapNucActMda().put(nucName, ActMda);
|
||||
}
|
||||
|
||||
public void ReCalcMdaMdc(PHDFile phd, String nuclide, int removePeakIdx) {
|
||||
NuclideActMda it = phd.getMapNucActMda().get(nuclide);
|
||||
if(Objects.isNull(it)) {
|
||||
return;
|
||||
}
|
||||
NuclideActMda nuc = it;
|
||||
// 如果该核素只与一个峰对应则直接删除该核素的 MDA 信息
|
||||
if(nuc.getVPeakIdx().size() == 1) {
|
||||
phd.getMapNucActMda().remove(nuclide);
|
||||
return;
|
||||
}
|
||||
// 否则删除该核素与该峰所有关联的射线信息
|
||||
boolean needReCalc = false;
|
||||
for(int i=nuc.getVPeakIdx().size()-1; i>=0; i--) {
|
||||
if(nuc.getVPeakIdx().get(i) == removePeakIdx) {
|
||||
nuc.getVEnergy().remove(i);
|
||||
nuc.getVUncertE().remove(i);
|
||||
nuc.getVYield().remove(i);
|
||||
nuc.getVUncertY().remove(i);
|
||||
nuc.getFullNames().remove(i);
|
||||
if(nuc.getCalculateIdx() == i && nuc.isBCalculateMDA()) {
|
||||
needReCalc = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
nuc.getVPeakIdx().remove(removePeakIdx);
|
||||
// 如果该核素只与一个峰对应则直接删除该核素的 MDA 信息
|
||||
if(nuc.getVPeakIdx().size() < 1) {
|
||||
phd.getMapNucActMda().remove(nuclide);
|
||||
return;
|
||||
}
|
||||
|
||||
if(needReCalc) {
|
||||
double maxYield = 0;
|
||||
int mainPeakIdx = -1;
|
||||
for(int j=0; j<nuc.getVYield().size(); ++j) {
|
||||
if(nuc.getVYield().get(j) > maxYield) {
|
||||
nuc.setCalculateIdx(j);
|
||||
maxYield = nuc.getVYield().get(j);
|
||||
mainPeakIdx = nuc.getVPeakIdx().get(j)-1;
|
||||
}
|
||||
}
|
||||
if(mainPeakIdx < 0) {
|
||||
return;
|
||||
}
|
||||
CalculateMDCs(phd, nuc, mainPeakIdx, Math.log(2.0)/nuc.getHalflife(), maxYield, 1.0);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -6,6 +6,8 @@ import org.jeecg.modules.base.enums.SystemType;
|
|||
import org.jeecg.modules.entity.vo.PHDFile;
|
||||
import org.springframework.stereotype.Component;
|
||||
|
||||
import java.math.BigDecimal;
|
||||
import java.math.RoundingMode;
|
||||
import java.text.ParseException;
|
||||
import java.util.HashMap;
|
||||
import java.util.LinkedList;
|
||||
|
@ -84,18 +86,20 @@ public class NameStandUtil {
|
|||
|
||||
public String GetSuffix(String dataType,String sysType,String Fulltype,String LT) {
|
||||
String rData = "";
|
||||
BigDecimal bd = new BigDecimal(LT);
|
||||
if(dataType.contains(DataType.SAMPLEPHD.getType())) {
|
||||
rData = "_S_%TYPE_%LT.PHD";
|
||||
}
|
||||
else if(dataType.contains(DataType.GASBKPHD.getType())){
|
||||
rData = "_G_%TYPE_%LT.PHD";
|
||||
bd = bd.setScale(1, RoundingMode.HALF_UP);
|
||||
rData = "_S_"+Fulltype+"_"+bd+".PHD";
|
||||
} else if(dataType.contains(DataType.GASBKPHD.getType())){
|
||||
bd = bd.setScale(1, RoundingMode.HALF_UP);
|
||||
rData = "_G_"+Fulltype+"_"+bd+".PHD";
|
||||
}else if(dataType.contains(DataType.DETBKPHD.getType())){
|
||||
rData = "_D_%TYPE_%LT.PHD";
|
||||
bd = bd.setScale(0, RoundingMode.HALF_UP);
|
||||
rData = "_D_"+Fulltype+"_"+bd+".PHD";
|
||||
}else if(dataType.contains(DataType.QCPHD.getType())){
|
||||
rData = "_Q_%TYPE_%LT.PHD";
|
||||
bd = bd.setScale(2, RoundingMode.HALF_UP);
|
||||
rData = "_Q_"+Fulltype+"_"+bd+".PHD";
|
||||
}
|
||||
rData.replace("%TYPE",Fulltype);
|
||||
rData.replace("%LT",LT);
|
||||
return rData;
|
||||
}
|
||||
|
||||
|
@ -134,12 +138,16 @@ public class NameStandUtil {
|
|||
|
||||
public String GetFileNameFromDateTime(String dateTimeFormat, String suffix){
|
||||
String rData = "";
|
||||
int pos = dateTimeFormat.indexOf('-');
|
||||
int pos = dateTimeFormat.indexOf("-");
|
||||
if(-1 != pos) {
|
||||
String dateTime = dateTimeFormat;
|
||||
dateTime = dateTime.substring(pos+1, pos+17);
|
||||
dateTime.replace(" ","-");
|
||||
String fileHeader = dateTime.substring(0, pos+1);
|
||||
if (pos+17>dateTime.length()){
|
||||
dateTime = dateTime.substring(pos+1);
|
||||
}else {
|
||||
dateTime = dateTime.substring(pos+1, pos+17);
|
||||
}
|
||||
dateTime = dateTime.replace(" ","-");
|
||||
String fileHeader = dateTimeFormat.substring(0, pos+1);
|
||||
String temp = DateTimeStandardToFileFormat(dateTime);
|
||||
rData = fileHeader+ temp + suffix;
|
||||
}
|
||||
|
@ -149,11 +157,11 @@ public class NameStandUtil {
|
|||
public String DateTimeStandardToFileFormat(String data) {
|
||||
String dateTime = "";
|
||||
try {
|
||||
if ( dateTime.indexOf("-") > 0 ){
|
||||
dateTime = DateUtils.formatDate(DateUtils.parseDate(data,"yyyy/MM/dd-hh:mm"), "yyyyMMdd_hhmm");
|
||||
} else if( dateTime.indexOf(" ") > 0 ) {
|
||||
dateTime = DateUtils.formatDate(DateUtils.parseDate(data,"yyyy/MM/dd hh:mm"), "yyyyMMdd_hhmm");
|
||||
} else if( dateTime.indexOf("-")<0 && dateTime.indexOf(" ") < 0) {
|
||||
if ( data.indexOf("-") > 0 ){
|
||||
dateTime = DateUtils.formatDate(DateUtils.parseDate(data,"yyyy/MM/dd-HH:mm"), "yyyyMMdd_HHmm");
|
||||
} else if( data.indexOf(" ") > 0 ) {
|
||||
dateTime = DateUtils.formatDate(DateUtils.parseDate(data,"yyyy/MM/dd HH:mm"), "yyyyMMdd_HHmm");
|
||||
} else if( data.indexOf("-")<0 && data.indexOf(" ") < 0) {
|
||||
dateTime = DateUtils.formatDate(DateUtils.parseDate(data,"yyyy/MM/dd"), "yyyyMMdd");
|
||||
}
|
||||
} catch (ParseException e) {
|
||||
|
|
|
@ -8,6 +8,7 @@ import org.apache.commons.net.ftp.FTP;
|
|||
import org.apache.commons.net.ftp.FTPClient;
|
||||
import org.apache.commons.net.ftp.FTPFile;
|
||||
import org.jeecg.common.properties.SpectrumPathProperties;
|
||||
import org.jeecg.modules.base.enums.DataTypeAbbr;
|
||||
import org.jeecg.modules.entity.GardsXeResultsSpectrum;
|
||||
import org.jeecg.modules.entity.vo.*;
|
||||
import org.jeecg.modules.native_jni.EnergySpectrumHandler;
|
||||
|
@ -30,6 +31,8 @@ public class PHDFileUtil {
|
|||
@Autowired
|
||||
private FTPUtil ftpUtil;
|
||||
@Autowired
|
||||
private NameStandUtil nameStandUtil;
|
||||
@Autowired
|
||||
private SpectrumPathProperties spectrumPathProperties;
|
||||
|
||||
public Map<String, Object> getSourceData(String filePath, List<GardsXeResultsSpectrum> xeDataList, Integer sampleId, String status){
|
||||
|
@ -49,12 +52,12 @@ public class PHDFileUtil {
|
|||
//Collection Start
|
||||
Date CollectionStart = null;
|
||||
if ( StringUtils.isNotBlank(struct.collection_start_date) && StringUtils.isNotBlank(struct.collection_start_time) ){
|
||||
CollectionStart = DateUtils.parseDate(struct.collection_start_date.replace(StringPool.SLASH,StringPool.DASH) + StringPool.SPACE + struct.collection_start_time.substring(0, struct.collection_start_time.indexOf(StringPool.DOT)), "yyyy-MM-dd HH:mm:ss");
|
||||
CollectionStart = DateUtils.parseDate(struct.collection_start_date + StringPool.SPACE + struct.collection_start_time);
|
||||
}
|
||||
//Collection Stop
|
||||
Date CollectionStop = null;
|
||||
if ( StringUtils.isNotBlank(struct.collection_stop_date) && StringUtils.isNotBlank(struct.collection_stop_time) ){
|
||||
CollectionStop = DateUtils.parseDate(struct.collection_stop_date.replace(StringPool.SLASH,StringPool.DASH) + StringPool.SPACE + struct.collection_stop_time.substring(0, struct.collection_stop_time.indexOf(StringPool.DOT)), "yyyy-MM-dd HH:mm:ss");
|
||||
CollectionStop = DateUtils.parseDate(struct.collection_stop_date + StringPool.SPACE + struct.collection_stop_time);
|
||||
}
|
||||
//Collection Time
|
||||
String CollectionTime = "";
|
||||
|
@ -64,7 +67,7 @@ public class PHDFileUtil {
|
|||
//Acquisition Start
|
||||
Date AcquisitionStart = null;
|
||||
if ( StringUtils.isNotBlank(struct.collection_start_date) && StringUtils.isNotBlank(struct.collection_start_time) ){
|
||||
AcquisitionStart = DateUtils.parseDate(struct.acquisition_start_date.replace(StringPool.SLASH,StringPool.DASH) + StringPool.SPACE + struct.acquisition_start_time);
|
||||
AcquisitionStart = DateUtils.parseDate(struct.acquisition_start_date + StringPool.SPACE + struct.acquisition_start_time);
|
||||
}
|
||||
//Acq Real Time
|
||||
double AcquisitionRealTime = struct.acquisition_real_time;
|
||||
|
@ -457,19 +460,74 @@ public class PHDFileUtil {
|
|||
|
||||
}
|
||||
|
||||
public Map<String, String> getFileData(String filePath){
|
||||
public Map<String, String> getFileData(String filePath, String sampleFileName){
|
||||
Map<String, String> map = new HashMap<>();
|
||||
//加载dll工具库
|
||||
System.loadLibrary("ReadPHDFile");
|
||||
EnergySpectrumStruct struct = EnergySpectrumHandler.getSourceData(filePath);
|
||||
//连接ftp 获取ftp文件数据
|
||||
FTPClient ftpClient = ftpUtil.LoginFTP();
|
||||
if (Objects.isNull(ftpClient)){
|
||||
return map;
|
||||
}
|
||||
InputStream inputStream = null;
|
||||
try {
|
||||
//切换被动模式
|
||||
ftpClient.enterLocalPassiveMode();
|
||||
ftpClient.setFileType(FTPClient.BINARY_FILE_TYPE);
|
||||
// 设置编码,当文件中存在中文且上传后文件乱码时可使用此配置项
|
||||
ftpClient.setControlEncoding("UTF-8");
|
||||
ftpClient.setFileTransferMode(FTPClient.STREAM_TRANSFER_MODE);
|
||||
//切换工作路径
|
||||
ftpClient.changeWorkingDirectory(filePath);
|
||||
//获取路径下所有的ftp文件信息
|
||||
List<FTPFile> ftpFiles = Arrays.asList(ftpClient.listFiles());
|
||||
if (CollectionUtils.isNotEmpty(ftpFiles)) {
|
||||
//获取sampleFileName
|
||||
ftpFiles = ftpFiles.stream().filter(item -> item.getName().equals(sampleFileName)).collect(Collectors.toList());
|
||||
if (CollectionUtils.isNotEmpty(ftpFiles)) {
|
||||
FTPFile sampleFile = ftpFiles.get(0);
|
||||
//解析sampleFile
|
||||
inputStream = ftpClient.retrieveFileStream(sampleFile.getName());
|
||||
//声明一个临时文件
|
||||
File file = File.createTempFile("tmp", null);
|
||||
//将ftp文件的输入流复制给临时文件
|
||||
FileUtils.copyInputStreamToFile(inputStream, file);
|
||||
//加载sampleFile内容
|
||||
EnergySpectrumStruct struct = EnergySpectrumHandler.getSourceData(file.getAbsolutePath());
|
||||
//获取所需要的数据
|
||||
String dataType = struct.data_type;
|
||||
String systemType = struct.system_type;
|
||||
String spectrumQuantity = struct.spectrum_quantity;
|
||||
double acquisitionLiveTime = struct.acquisition_live_time;
|
||||
String measurementId = struct.measurement_id;
|
||||
String gasBkMeasurementId = struct.gas_bk_measurement_id;
|
||||
String detectorBkMeasurementId = struct.detector_bk_measurement_id;
|
||||
//格式化文件名称
|
||||
String fileSuffix = nameStandUtil.GetSuffix(dataType, systemType, spectrumQuantity, String.valueOf(acquisitionLiveTime));
|
||||
String measurementName = nameStandUtil.GetFileNameFromDateTime(measurementId, fileSuffix);
|
||||
String gasFileName = nameStandUtil.GetFileNameFromDateTime(gasBkMeasurementId, "_G.PHD");
|
||||
String detaFileName = nameStandUtil.GetFileNameFromDateTime(detectorBkMeasurementId, "_D.PHD");
|
||||
map.put("measurementName", measurementName);
|
||||
map.put("gasFileName", gasFileName);
|
||||
map.put("detaFileName", detaFileName);
|
||||
map.put("sampleSystemType", systemType);
|
||||
}
|
||||
}
|
||||
} catch (IOException e) {
|
||||
throw new RuntimeException(e);
|
||||
} finally {
|
||||
try {
|
||||
if (ftpClient!=null){
|
||||
ftpClient.disconnect();
|
||||
}
|
||||
if (inputStream!=null){
|
||||
inputStream.close();
|
||||
}
|
||||
|
||||
String dataType = struct.data_type;
|
||||
String systemType = struct.system_type;
|
||||
String spectrumQuantity = struct.spectrum_quantity;
|
||||
double acquisitionLiveTime = struct.acquisition_live_time;
|
||||
String measurementId = struct.measurement_id;
|
||||
String gasBkMeasurementId = struct.gas_bk_measurement_id;
|
||||
String detectorBkMeasurementId = struct.detector_bk_measurement_id;
|
||||
} catch (IOException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
|
@ -615,4 +673,72 @@ public class PHDFileUtil {
|
|||
return bgBoundary;
|
||||
}
|
||||
|
||||
public List<String> FileNameByStandardForm(String filePath, String sampleFileName){
|
||||
//用于最后的结果
|
||||
List<String> fileNames = new LinkedList<>();
|
||||
String station = sampleFileName.substring(0, 9);
|
||||
//连接ftp
|
||||
FTPClient ftpClient = ftpUtil.LoginFTP();
|
||||
if (Objects.isNull(ftpClient)){
|
||||
return fileNames;
|
||||
}
|
||||
try {
|
||||
//被动模式
|
||||
ftpClient.enterLocalPassiveMode();
|
||||
//设置文件类型--二进制文件
|
||||
ftpClient.setFileType(FTP.BINARY_FILE_TYPE);
|
||||
//
|
||||
ftpClient.setControlEncoding("UTF-8");
|
||||
ftpClient.setFileTransferMode(FTPClient.STREAM_TRANSFER_MODE);
|
||||
//切换文件路径
|
||||
ftpClient.changeWorkingDirectory(filePath);
|
||||
//获取路径下所有文件信息
|
||||
List<FTPFile> ftpFiles = Arrays.asList(ftpClient.listFiles());
|
||||
if (CollectionUtils.isNotEmpty(ftpFiles)){
|
||||
fileNames = ftpFiles.stream().filter(item-> item.getName().contains(station)).map(FTPFile::getName).collect(Collectors.toList());
|
||||
}
|
||||
} catch (IOException e) {
|
||||
throw new RuntimeException(e);
|
||||
} finally {
|
||||
try {
|
||||
ftpClient.disconnect();
|
||||
} catch (IOException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
return fileNames;
|
||||
}
|
||||
|
||||
public String GetMatchFile(String tmpFileName, List<String> fileNames, String type) {
|
||||
String matchFileName = "";
|
||||
fileNames = fileNames.stream().filter(item-> item.contains(type)).sorted().collect(Collectors.toList());
|
||||
//用于比较的字符串
|
||||
String fileCompare = tmpFileName.substring(0,23);
|
||||
for (String fileName:fileNames) {
|
||||
String compare = fileName.substring(0, 23);
|
||||
if (compare.equals(fileCompare)) {
|
||||
matchFileName = fileName;
|
||||
}
|
||||
}
|
||||
return matchFileName;
|
||||
}
|
||||
|
||||
public String GetQCPHD(String sampleName, List<String> fileNames) {
|
||||
//过滤出所有包含Q的文件
|
||||
fileNames = fileNames.stream().filter(item-> item.contains(DataTypeAbbr.QCPHD.getType())).sorted().collect(Collectors.toList());
|
||||
Long dateTime = Long.valueOf(sampleName.substring(10, 23).replace("_", ""));
|
||||
String rData = "";
|
||||
for(int pos=0;pos<fileNames.size();pos++) {
|
||||
Long qcphdDateTime = Long.valueOf(fileNames.get(pos).substring(10, 23).replace("_", ""));
|
||||
//如果qc文件的日期 大于 sample文件的不要
|
||||
if(qcphdDateTime>dateTime) {
|
||||
break;
|
||||
} else {
|
||||
rData = fileNames.get(pos);
|
||||
}
|
||||
}
|
||||
return rData;
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
|
|
@ -1,6 +1,8 @@
|
|||
package org.jeecg.modules.controller;
|
||||
|
||||
import cn.hutool.core.collection.CollUtil;
|
||||
import io.swagger.annotations.Api;
|
||||
import io.swagger.annotations.ApiOperation;
|
||||
import org.jeecg.common.api.vo.Result;
|
||||
import org.jeecg.modules.entity.vo.*;
|
||||
import org.jeecg.modules.service.IGammaService;
|
||||
|
@ -13,83 +15,141 @@ import java.util.List;
|
|||
|
||||
@RestController
|
||||
@RequestMapping("gamma")
|
||||
@Api(value = "gamma人工分析",tags = "gamma人工分析")
|
||||
public class GammaController {
|
||||
|
||||
@Autowired
|
||||
private IGammaService gammaService;
|
||||
|
||||
@GetMapping("gammaByDB")
|
||||
@ApiOperation(value = "gamma页面loadFromDB加载数据", notes = "gamma页面loadFromDB加载数据")
|
||||
public Result gammaByDB(@RequestParam Integer sampleId, @RequestParam String dbName){
|
||||
return gammaService.gammaByDB(dbName, sampleId);
|
||||
}
|
||||
|
||||
@GetMapping("gammaByFile")
|
||||
public Result gammaByFile(String fileName, HttpServletRequest request){
|
||||
@ApiOperation(value = "gamma页面loadFromFile加载数据", notes = "gamma页面loadFromFile加载数据")
|
||||
public Result gammaByFile(String fileName, HttpServletRequest request) {
|
||||
return gammaService.gammaByFile(fileName, request);
|
||||
}
|
||||
|
||||
@GetMapping("configure")
|
||||
public Result configure(@RequestParam Integer sampleId, @RequestParam String fileName){
|
||||
@ApiOperation(value = "analyze菜单下configure页面数据", notes = "analyze菜单下configure页面数据")
|
||||
public Result configure(@RequestParam Integer sampleId, @RequestParam String fileName) {
|
||||
return gammaService.configure(sampleId, fileName);
|
||||
}
|
||||
|
||||
@PostMapping("Reprocessing")
|
||||
public Result Reprocessing(@RequestBody ConfigureData configureData, HttpServletRequest request){
|
||||
@ApiOperation(value = "analyze菜单下Reprocessing页面数据", notes = "analyze菜单下Reprocessing页面数据")
|
||||
public Result Reprocessing(@RequestBody ConfigureData configureData, HttpServletRequest request) {
|
||||
return gammaService.Reprocessing(configureData, request);
|
||||
}
|
||||
|
||||
@GetMapping("InteractiveTool")
|
||||
public Result InteractiveTool(@RequestParam Integer sampleId, @RequestParam String fileName){
|
||||
@ApiOperation(value = "analyze菜单下InteractiveTool页面数据", notes = "analyze菜单下InteractiveTool页面数据")
|
||||
public Result InteractiveTool(@RequestParam Integer sampleId, @RequestParam String fileName) {
|
||||
return gammaService.InteractiveTool(sampleId, fileName);
|
||||
}
|
||||
|
||||
@GetMapping("insertPeak")
|
||||
public Result insertPeak(Integer sampleId, String fileName, Integer curChan){
|
||||
@ApiOperation(value = "InteractiveTool页面Insert按钮页面", notes = "InteractiveTool页面Insert按钮页面")
|
||||
public Result insertPeak(Integer sampleId, String fileName, Integer curChan) {
|
||||
return gammaService.insertPeak(sampleId, fileName, curChan);
|
||||
}
|
||||
|
||||
@PostMapping("acceptResults")
|
||||
@ApiOperation(value = "InteractiveTool页面Insert页面save", notes = "InteractiveTool页面Insert页面save")
|
||||
public Result acceptResults(@RequestBody AcceptInfo acceptInfo) {
|
||||
return gammaService.acceptResults(acceptInfo.getFileName(), acceptInfo.isAccept(), acceptInfo.getOldPeak());
|
||||
}
|
||||
|
||||
@GetMapping("deletePeak")
|
||||
@ApiOperation(value = "InteractiveTool页面delete按钮", notes = "InteractiveTool页面delete按钮")
|
||||
public Result deletePeak(String fileName, int curRow) {
|
||||
return gammaService.deletePeak(fileName, curRow);
|
||||
}
|
||||
|
||||
@GetMapping("getSelPosNuclide")
|
||||
public Result getSelPosNuclide(Integer sampleId, String fileName, int channel, String nuclides, HttpServletRequest request){
|
||||
@ApiOperation(value = "InteractiveTool页面选择channel加载对应核素信息接口", notes = "InteractiveTool页面选择channel加载对应核素信息接口")
|
||||
public Result getSelPosNuclide(Integer sampleId, String fileName, int channel, String nuclides, HttpServletRequest request) {
|
||||
return gammaService.getSelPosNuclide(sampleId, fileName, channel, nuclides, request);
|
||||
}
|
||||
|
||||
@PostMapping("addNuclide")
|
||||
public Result addNuclide(Integer curRow, String nuclideName, Integer sampleId){
|
||||
return null;
|
||||
@ApiOperation(value = "InteractiveTool页面增加核素信息接口", notes = "InteractiveTool页面增加核素信息接口")
|
||||
public Result addNuclide(@RequestBody NuclideInfo nuclideInfo, HttpServletRequest request) {
|
||||
return gammaService.addNuclide(nuclideInfo.getCurRow(), nuclideInfo.getNuclideName(), nuclideInfo.getFileName(), nuclideInfo.getList_identify(), request);
|
||||
}
|
||||
|
||||
@PostMapping("deleteNuclide")
|
||||
@ApiOperation(value = "InteractiveTool页面删除核素信息接口", notes = "InteractiveTool页面删除核素信息接口")
|
||||
public Result deleteNuclide(@RequestBody NuclideInfo nuclideInfo) {
|
||||
return gammaService.deleteNuclide(nuclideInfo.getCurRow(), nuclideInfo.getNuclideName(), nuclideInfo.getFileName(), nuclideInfo.getList_identify());
|
||||
}
|
||||
|
||||
@GetMapping("viewPeakComment")
|
||||
@ApiOperation(value = "InteractiveTool页面查看峰对应备注信息接口", notes = "InteractiveTool页面查看峰对应备注信息接口")
|
||||
public Result viewPeakComment(String fileName, int curRow) {
|
||||
return gammaService.viewPeakComment(fileName, curRow);
|
||||
}
|
||||
|
||||
@PostMapping("addPeakComment")
|
||||
@ApiOperation(value = "InteractiveTool页面add峰对应备注信息接口", notes = "InteractiveTool页面add峰对应备注信息接口")
|
||||
public Result addPeakComment(@RequestBody CommentsInfo commentsInfo) {
|
||||
return gammaService.addPeakComment(commentsInfo.getFileName(), commentsInfo.getCurRow(), commentsInfo.getComments());
|
||||
}
|
||||
|
||||
@GetMapping("viewGenralComment")
|
||||
@ApiOperation(value = "InteractiveTool页面查看备注信息接口", notes = "InteractiveTool页面查看备注信息接口")
|
||||
public Result viewGenralComment(String fileName) {
|
||||
return gammaService.viewGenralComment(fileName);
|
||||
}
|
||||
|
||||
@PostMapping("addGeneralComment")
|
||||
@ApiOperation(value = "InteractiveTool页面add备注信息接口", notes = "InteractiveTool页面add备注信息接口")
|
||||
public Result addGeneralComment(@RequestBody CommentsInfo commentsInfo) {
|
||||
return gammaService.addGeneralComment(commentsInfo.getFileName(), commentsInfo.getComments());
|
||||
}
|
||||
|
||||
@GetMapping("nuclideReview")
|
||||
public Result nuclideReview(Integer sampleId, String fileName, Double channel, HttpServletRequest request){
|
||||
@ApiOperation(value = "InteractiveTool页面选择Nuclide Review信息接口", notes = "InteractiveTool页面选择Nuclide Review信息接口")
|
||||
public Result nuclideReview(Integer sampleId, String fileName, Double channel, HttpServletRequest request) {
|
||||
return gammaService.nuclideReview(sampleId, fileName, channel, request);
|
||||
}
|
||||
|
||||
@GetMapping("changeNuclide")
|
||||
public Result changeNuclide(Integer sampleId, String fileName, String nuclideName){
|
||||
@ApiOperation(value = "Nuclide Review页面切换nuclide数据", notes = "Nuclide Review页面切换nuclide数据")
|
||||
public Result changeNuclide(Integer sampleId, String fileName, String nuclideName) {
|
||||
return gammaService.changeNuclide(sampleId, fileName, nuclideName);
|
||||
}
|
||||
|
||||
@GetMapping("searchNuclide")
|
||||
public Result searchNuclide(Integer sampleId, String fileName, Double energy, Double tolerance, HttpServletRequest request){
|
||||
@ApiOperation(value = "Nuclide Review页面查询nuclide数据", notes = "Nuclide Review页面查询nuclide数据")
|
||||
public Result searchNuclide(Integer sampleId, String fileName, Double energy, Double tolerance, HttpServletRequest request) {
|
||||
return gammaService.searchNuclide(sampleId, fileName, energy, tolerance, request);
|
||||
}
|
||||
|
||||
@GetMapping("ZeroTime")
|
||||
public Result ZeroTime(){
|
||||
@ApiOperation(value = "Zero Time页面数据", notes = "Zero Time页面数据")
|
||||
public Result ZeroTime() {
|
||||
return gammaService.ZeroTime();
|
||||
}
|
||||
|
||||
@GetMapping("ZeroTimeAnalyse")
|
||||
public Result ZeroTimeAnalyse(String nuclide1, String nuclide2, Double product1, Double product2, String target, String energyTFH, String date, String time){
|
||||
@ApiOperation(value = "Zero Time工具分析时间按钮", notes = "Zero Time工具分析时间按钮")
|
||||
public Result ZeroTimeAnalyse(String nuclide1, String nuclide2, Double product1, Double product2, String target, String energyTFH, String date, String time) {
|
||||
return gammaService.ZeroTimeAnalyse(nuclide1, nuclide2, product1, product2, target, energyTFH, date, time);
|
||||
}
|
||||
|
||||
@GetMapping("Korsum")
|
||||
public Result Korsum(){
|
||||
@ApiOperation(value = "Korsum页面数据", notes = "Korsum页面数据")
|
||||
public Result Korsum() {
|
||||
return gammaService.Korsum();
|
||||
}
|
||||
|
||||
@PostMapping("KorSumInput")
|
||||
public Result KorSumInput(@RequestBody CoeffData coeffData){
|
||||
@ApiOperation(value = "Korsum页面点击Input计算数据", notes = "Korsum页面点击Input计算数据")
|
||||
public Result KorSumInput(@RequestBody CoeffData coeffData) {
|
||||
Result result = new Result();
|
||||
List<InputData> input = gammaService.KorSumInput(coeffData);
|
||||
result.setSuccess(true);
|
||||
|
@ -98,108 +158,132 @@ public class GammaController {
|
|||
}
|
||||
|
||||
@GetMapping("energyCalibration")
|
||||
public Result energyCalibration(@RequestParam Integer sampleId, String fileName){
|
||||
@ApiOperation(value = "查看Energy Calibration数据", notes = "查看Energy Calibration数据")
|
||||
public Result energyCalibration(@RequestParam Integer sampleId, String fileName) {
|
||||
return gammaService.energyCalibration(sampleId, fileName);
|
||||
}
|
||||
|
||||
@PostMapping("changeDataEnergy")
|
||||
public Result changeDataEnergy(@RequestBody ChangeData changeData){
|
||||
@ApiOperation(value = "修改Energy Calibration数据", notes = "修改Energy Calibration数据")
|
||||
public Result changeDataEnergy(@RequestBody ChangeData changeData) {
|
||||
return gammaService.changeDataEnergy(changeData.getM_vCurCentroid(), changeData.getM_vCurEnergy(), changeData.getM_vCurUncert(), changeData.getM_curParam(), changeData.getSampleId(), changeData.getFileName());
|
||||
}
|
||||
|
||||
@PostMapping("applyDataEnergy")
|
||||
public Result applyDataEnergy(@RequestBody ChangeData changeData){
|
||||
@ApiOperation(value = "Apply Energy Calibration数据", notes = "Apply Energy Calibration数据")
|
||||
public Result applyDataEnergy(@RequestBody ChangeData changeData) {
|
||||
return gammaService.applyDataEnergy(changeData.getM_vCurCentroid(), changeData.getM_vCurEnergy(), changeData.getM_vCurUncert(), changeData.getM_curParam(), changeData.getCurCalName(), changeData.getSampleId(), changeData.getFileName());
|
||||
}
|
||||
|
||||
@GetMapping("resolutionCalibration")
|
||||
public Result resolutionCalibration(@RequestParam Integer sampleId, String fileName){
|
||||
@ApiOperation(value = "查看Resolution Calibration数据", notes = "查看Resolution Calibration数据")
|
||||
public Result resolutionCalibration(@RequestParam Integer sampleId, String fileName) {
|
||||
return gammaService.resolutionCalibration(sampleId, fileName);
|
||||
}
|
||||
|
||||
@PostMapping("changeDataResolution")
|
||||
public Result changeDataResolution(@RequestBody ChangeData changeData){
|
||||
@ApiOperation(value = "修改Resolution Calibration数据", notes = "修改Resolution Calibration数据")
|
||||
public Result changeDataResolution(@RequestBody ChangeData changeData) {
|
||||
return gammaService.changeDataResolution(changeData.getM_vCurReso(), changeData.getM_vCurEnergy(), changeData.getM_vCurUncert(), changeData.getM_curParam(), changeData.getSampleId(), changeData.getFileName());
|
||||
}
|
||||
|
||||
@PostMapping("applyDataResolution")
|
||||
public Result applyDataResolution(@RequestBody ChangeData changeData){
|
||||
@ApiOperation(value = "Apply Resolution Calibration数据", notes = "Apply Resolution Calibration数据")
|
||||
public Result applyDataResolution(@RequestBody ChangeData changeData) {
|
||||
return gammaService.applyDataResolution(changeData.getM_vCurReso(), changeData.getM_vCurEnergy(), changeData.getM_vCurUncert(), changeData.getM_curParam(), changeData.getCurCalName(), changeData.getSampleId(), changeData.getFileName());
|
||||
}
|
||||
|
||||
@GetMapping("EfficiencyCalibration")
|
||||
public Result EfficiencyCalibration(@RequestParam Integer sampleId, String fileName){
|
||||
@ApiOperation(value = "查看Efficiency Calibration数据", notes = "查看Efficiency Calibration数据")
|
||||
public Result EfficiencyCalibration(@RequestParam Integer sampleId, String fileName) {
|
||||
return gammaService.EfficiencyCalibration(sampleId, fileName);
|
||||
}
|
||||
|
||||
@PostMapping("changeDataEfficiency")
|
||||
public Result changeDataEfficiency(@RequestBody ChangeData changeData){
|
||||
@ApiOperation(value = "修改Efficiency Calibration数据", notes = "修改Efficiency Calibration数据")
|
||||
public Result changeDataEfficiency(@RequestBody ChangeData changeData) {
|
||||
return gammaService.changeDataEfficiency(changeData.getM_vCurEffi(), changeData.getM_vCurEnergy(), changeData.getM_vCurUncert(), changeData.getM_curParam(), changeData.getFuncId(), changeData.getSampleId(), changeData.getFileName());
|
||||
}
|
||||
|
||||
@PostMapping("applyDataEfficiency")
|
||||
public Result applyDataEfficiency(@RequestBody ChangeData changeData){
|
||||
return gammaService.applyDataEfficiency(changeData.getM_vCurCentroid(), changeData.getM_vCurEnergy(), changeData.getM_vCurUncert(), changeData.getM_curParam(), changeData.getCurCalName(), changeData.getSampleId(), changeData.getFileName());
|
||||
@ApiOperation(value = "Apply Efficiency Calibration数据", notes = "Apply Efficiency Calibration数据")
|
||||
public Result applyDataEfficiency(@RequestBody ChangeData changeData) {
|
||||
return gammaService.applyDataEfficiency(changeData.getM_vCurEffi(), changeData.getM_vCurEnergy(), changeData.getM_vCurUncert(), changeData.getM_curParam(), changeData.getCurCalName(), changeData.getSampleId(), changeData.getFileName());
|
||||
}
|
||||
|
||||
@GetMapping("NuclideLibrary")
|
||||
public Result NuclideLibrary(Integer sampleId, String fileName, String editEnergy, double err, String libraryName, String nuclideName, HttpServletRequest request){
|
||||
@ApiOperation(value = "查看Nuclide Library页面数据", notes = "查看Nuclide Library页面数据")
|
||||
public Result NuclideLibrary(Integer sampleId, String fileName, String editEnergy, double err, String libraryName, String nuclideName, HttpServletRequest request) {
|
||||
return gammaService.NuclideLibrary(sampleId, fileName, editEnergy, err, libraryName, nuclideName, request);
|
||||
}
|
||||
|
||||
@GetMapping("configUserLibrary")
|
||||
public Result configUserLibrary(Integer sampleId, String fileName, HttpServletRequest request){
|
||||
@ApiOperation(value = "查看Config Nuclide Library页面数据", notes = "查看Config Nuclide Library页面数据")
|
||||
public Result configUserLibrary(Integer sampleId, String fileName, HttpServletRequest request) {
|
||||
return gammaService.configUserLibrary(sampleId, fileName, request);
|
||||
}
|
||||
|
||||
@PostMapping("saveUserLibrary")
|
||||
@ApiOperation(value = "Config User Library页面save按钮", notes = "Config User Library页面save按钮")
|
||||
public Result saveUserLibrary(@RequestBody UserLibraryInfo userLibraryInfo, HttpServletRequest request) {
|
||||
return gammaService.saveUserLibrary(userLibraryInfo.getUserLibraryName(), userLibraryInfo.getFileName(), request);
|
||||
}
|
||||
|
||||
@GetMapping("viewComment")
|
||||
public Result viewComment(@RequestParam Integer sampleId, String fileName){
|
||||
@ApiOperation(value = "查看Comment页面数据", notes = "查看Comment页面数据")
|
||||
public Result viewComment(@RequestParam Integer sampleId, String fileName) {
|
||||
return gammaService.viewComment(sampleId, fileName);
|
||||
}
|
||||
|
||||
@GetMapping("peakInformation")
|
||||
public Result peakInformation(@RequestParam Integer sampleId, String fileName){
|
||||
@ApiOperation(value = "查看Peak Information页面数据", notes = "查看Peak Information页面数据")
|
||||
public Result peakInformation(@RequestParam Integer sampleId, String fileName) {
|
||||
return gammaService.peakInformation(sampleId, fileName);
|
||||
}
|
||||
|
||||
@GetMapping("viewARR")
|
||||
public void viewARR(Integer sampleId, HttpServletResponse response){
|
||||
@ApiOperation(value = "查看ARR页面数据", notes = "查看ARR页面数据")
|
||||
public void viewARR(Integer sampleId, HttpServletResponse response) {
|
||||
gammaService.viewARR(sampleId, response);
|
||||
}
|
||||
|
||||
@GetMapping("viewRRR")
|
||||
public Result viewRRR(Integer sampleId, String fileName){
|
||||
@ApiOperation(value = "查看RRR页面数据", notes = "查看RRR页面数据")
|
||||
public Result viewRRR(Integer sampleId, String fileName) {
|
||||
return gammaService.viewRRR(sampleId, fileName);
|
||||
}
|
||||
|
||||
@GetMapping("radionuclideActivity")
|
||||
public Result radionuclideActivity(Integer sampleId, String fileName){
|
||||
@ApiOperation(value = "查看Radionuclide Activity页面数据", notes = "查看Radionuclide Activity页面数据")
|
||||
public Result radionuclideActivity(Integer sampleId, String fileName) {
|
||||
return gammaService.radionuclideActivity(sampleId, fileName);
|
||||
}
|
||||
|
||||
@GetMapping("Spectrum")
|
||||
public Result Spectrum(Integer sampleId, String fileName){
|
||||
@ApiOperation(value = "查看Spectrum页面数据", notes = "查看Spectrum页面数据")
|
||||
public Result Spectrum(Integer sampleId, String fileName) {
|
||||
return gammaService.Spectrum(sampleId, fileName);
|
||||
}
|
||||
|
||||
@GetMapping("sampleInformation")
|
||||
public Result sampleInformation(Integer sampleId, String fileName){
|
||||
@ApiOperation(value = "查看Sample Information页面数据", notes = "查看Sample Information页面数据")
|
||||
public Result sampleInformation(Integer sampleId, String fileName) {
|
||||
return gammaService.sampleInformation(sampleId, fileName);
|
||||
}
|
||||
|
||||
@GetMapping("exportSampleInformation")
|
||||
public void exportSampleInformation(Integer sampleId, String fileName,
|
||||
HttpServletResponse response){
|
||||
public void exportSampleInformation(Integer sampleId, String fileName, HttpServletResponse response){
|
||||
gammaService.exportSampleInformation(sampleId, fileName, response);
|
||||
}
|
||||
|
||||
@GetMapping("viewQCResult")
|
||||
public Result<List<TableQCResult>> viewQCResult(Integer sampleId, String fileName){
|
||||
@ApiOperation(value = "查看QC Result页面数据", notes = "查看QC Result页面数据")
|
||||
public Result<List<TableQCResult>> viewQCResult(Integer sampleId, String fileName) {
|
||||
return gammaService.viewQCResult(sampleId, fileName);
|
||||
}
|
||||
|
||||
@GetMapping("exportQCResult")
|
||||
public void exportQCResult(Integer sampleId, String fileName,HttpServletResponse response){
|
||||
public void exportQCResult(Integer sampleId, String fileName, HttpServletResponse response){
|
||||
Result<List<TableQCResult>> result = gammaService.viewQCResult(sampleId, fileName);
|
||||
List<TableQCResult> tableQCResults = result.getResult();
|
||||
if (CollUtil.isEmpty(tableQCResults))return;
|
||||
|
@ -207,22 +291,26 @@ public class GammaController {
|
|||
}
|
||||
|
||||
@GetMapping("viewRLR")
|
||||
public Result viewRLR(Integer sampleId, String fileName){
|
||||
@ApiOperation(value = "查看RLR页面数据", notes = "查看RLR页面数据")
|
||||
public Result viewRLR(Integer sampleId, String fileName) {
|
||||
return gammaService.viewRLR(sampleId, fileName);
|
||||
}
|
||||
|
||||
@GetMapping("viewAutomaticAnalysisLog")
|
||||
public void viewAutomaticAnalysisLog(Integer sampleId, HttpServletResponse response){
|
||||
@ApiOperation(value = "查看Automatic Analysis Log页面数据", notes = "查看Automatic Analysis Log页面数据")
|
||||
public void viewAutomaticAnalysisLog(Integer sampleId, HttpServletResponse response) {
|
||||
gammaService.viewAutomaticAnalysisLog(sampleId, response);
|
||||
}
|
||||
|
||||
@GetMapping("viewGammaviewerLog")
|
||||
public Result viewGammaviewerLog(Integer sampleId, String fileName){
|
||||
@GetMapping("viewGammaViewerLog")
|
||||
@ApiOperation(value = "查看Gamma Viewer Log页面数据", notes = "查看Gamma Viewer Log页面数据")
|
||||
public Result viewGammaviewerLog(Integer sampleId, String fileName) {
|
||||
return gammaService.viewGammaviewerLog(sampleId, fileName);
|
||||
}
|
||||
|
||||
@GetMapping("saveToDB")
|
||||
public Result saveToDB(String fileName, String userName){
|
||||
@ApiOperation(value = "Save To DB按钮", notes = "Save To DB按钮")
|
||||
public Result saveToDB(String fileName, String userName) {
|
||||
return gammaService.saveToDB(fileName, userName);
|
||||
}
|
||||
|
||||
|
|
|
@ -27,7 +27,7 @@ public class SpectrumAnalysesController {
|
|||
|
||||
@GetMapping("getDBSearchList")
|
||||
@ApiOperation(value = "查询查询条件数据接口", notes = "查询查询条件数据接口")
|
||||
public Result getDBSearchList(HttpServletRequest request, boolean AllUsers, String dbName, String[] menuTypes){
|
||||
public Result getDBSearchList(HttpServletRequest request, boolean AllUsers, String dbName, String[] menuTypes) {
|
||||
return spectrumAnalysisService.getDBSearchList(request, AllUsers, dbName, menuTypes);
|
||||
}
|
||||
|
||||
|
@ -39,66 +39,67 @@ public class SpectrumAnalysesController {
|
|||
}
|
||||
|
||||
@GetMapping("getSpectrumFiles")
|
||||
public Result getSpectrumFiles(HttpServletRequest request){
|
||||
public Result getSpectrumFiles(HttpServletRequest request) {
|
||||
return spectrumAnalysisService.getSpectrumFiles(request);
|
||||
}
|
||||
|
||||
@GetMapping("getFilesBySampleFile")
|
||||
@ApiOperation(value = "根据sampleFile查询出关联的文件信息", notes = "根据sampleFile查询出关联的文件信息")
|
||||
public Result getFilesBySampleFile(String sampleFileName, HttpServletRequest request){
|
||||
return null;
|
||||
public Result getFilesBySampleFile(String fileName, HttpServletRequest request) {
|
||||
return spectrumAnalysisService.getFilesBySampleFile(fileName, request);
|
||||
}
|
||||
|
||||
@GetMapping("getDBSpectrumChart")
|
||||
@ApiOperation(value = "查询折线图相关信息接口", notes = "查询折线图相关信息接口")
|
||||
public Result getDBSpectrumChart(String dbName, Integer sampleId){
|
||||
public Result getDBSpectrumChart(String dbName, Integer sampleId) {
|
||||
return spectrumAnalysisService.getDBSpectrumChart(dbName, sampleId);
|
||||
}
|
||||
|
||||
@GetMapping("getFileSpectrumChart")
|
||||
public Result getFileSpectrumChart(String sampleFileName, String gasFileName, String detFileName, String qcFileName, HttpServletRequest request){
|
||||
@ApiOperation(value = "查询文件折线图相关信息接口", notes = "查询文件折线图相关信息接口")
|
||||
public Result getFileSpectrumChart(String sampleFileName, String gasFileName, String detFileName, String qcFileName, HttpServletRequest request) {
|
||||
return spectrumAnalysisService.getFileSpectrumChart(sampleFileName, gasFileName, detFileName, qcFileName, request);
|
||||
}
|
||||
|
||||
@DeleteMapping("deleteDBSpectrumChartData")
|
||||
@ApiOperation(value = "删除折线图缓存数据",notes = "删除折线图缓存数据")
|
||||
public Result deleteDBSpectrumChartData(Integer[] sampleIds){
|
||||
public Result deleteDBSpectrumChartData(Integer[] sampleIds) {
|
||||
return spectrumAnalysisService.deleteDBSpectrumChartData(sampleIds);
|
||||
}
|
||||
|
||||
@GetMapping("viewComment")
|
||||
@ApiOperation(value = "查看comment数据", notes = "查看comment数据")
|
||||
public Result viewComment(Integer sampleId, HttpServletRequest request){
|
||||
public Result viewComment(Integer sampleId, HttpServletRequest request) {
|
||||
return spectrumAnalysisService.viewComment(sampleId, request);
|
||||
}
|
||||
|
||||
@GetMapping("viewARR")
|
||||
@ApiOperation(value = "查看ARR报告", notes = "查看ARR报告")
|
||||
public void viewARR(Integer sampleId, HttpServletResponse response){
|
||||
public void viewARR(Integer sampleId, HttpServletResponse response) {
|
||||
spectrumAnalysisService.viewARR(sampleId, response);
|
||||
}
|
||||
|
||||
@GetMapping("viewRRR")
|
||||
@ApiOperation(value = "查看RRR报告", notes = "查看RRR报告")
|
||||
public Result viewRRR(String dbName, Integer sampleId, boolean sampleData, boolean GasBgData, boolean DetBgData, boolean QCData, boolean bGammaEnergyValid, boolean bBetaEnergyValid){
|
||||
public Result viewRRR(String dbName, Integer sampleId, boolean sampleData, boolean GasBgData, boolean DetBgData, boolean QCData, boolean bGammaEnergyValid, boolean bBetaEnergyValid) {
|
||||
return spectrumAnalysisService.viewRRR(dbName, sampleId, sampleData, GasBgData, DetBgData, QCData, bGammaEnergyValid, bBetaEnergyValid);
|
||||
}
|
||||
|
||||
@GetMapping("viewSpectrum")
|
||||
@ApiOperation(value = "查看Spectrum数据", notes = "查看Spectrum数据")
|
||||
public Result viewSpectrum(Integer sampleId){
|
||||
public Result viewSpectrum(Integer sampleId) {
|
||||
return spectrumAnalysisService.viewSpectrum(sampleId);
|
||||
}
|
||||
|
||||
@GetMapping("viewSampleInformation")
|
||||
@ApiOperation(value = "查看SampleInformation数据", notes = "查看SampleInformation数据")
|
||||
public Result viewSampleInformation(Integer sampleId){
|
||||
public Result viewSampleInformation(Integer sampleId) {
|
||||
return spectrumAnalysisService.viewSampleInformation(sampleId);
|
||||
}
|
||||
|
||||
@GetMapping("viewQCResult")
|
||||
@ApiOperation(value = "查看QC Result数据", notes = "查看QC Result数据")
|
||||
public Result viewQCResult(Integer sampleId){
|
||||
public Result viewQCResult(Integer sampleId) {
|
||||
return spectrumAnalysisService.viewQCResult(sampleId);
|
||||
}
|
||||
|
||||
|
@ -114,90 +115,89 @@ public class SpectrumAnalysesController {
|
|||
|
||||
@GetMapping("viewRLR")
|
||||
@ApiOperation(value = "查看RLR数据", notes = "查看RLR数据")
|
||||
public Result viewRLR(Integer sampleId){
|
||||
public Result viewRLR(Integer sampleId) {
|
||||
return spectrumAnalysisService.viewRLR(sampleId);
|
||||
}
|
||||
|
||||
@GetMapping("viewGammaDetectorCalibration")
|
||||
@ApiOperation(value = "查询GammaDetectorCalibration数据", notes = "查询GammaDetectorCalibration数据")
|
||||
public Result viewGammaDetectorCalibration(Integer sampleId){
|
||||
public Result viewGammaDetectorCalibration(Integer sampleId) {
|
||||
return spectrumAnalysisService.viewGammaDetectorCalibration(sampleId);
|
||||
}
|
||||
|
||||
@GetMapping("viewBetaDetectorCalibration")
|
||||
@ApiOperation(value = "查询BetaDetectorCalibration数据", notes = "查询BetaDetectorCalibration数据")
|
||||
public Result viewBetaDetectorCalibration(Integer sampleId){
|
||||
public Result viewBetaDetectorCalibration(Integer sampleId) {
|
||||
return spectrumAnalysisService.viewBetaDetectorCalibration(sampleId);
|
||||
}
|
||||
|
||||
@GetMapping("viewExtrapolation")
|
||||
@ApiOperation(value = "查询Extrapolation数据", notes = "查询Extrapolation数据")
|
||||
public Result viewExtrapolation(Integer sampleId){
|
||||
public Result viewExtrapolation(Integer sampleId) {
|
||||
return spectrumAnalysisService.viewExtrapolation(sampleId);
|
||||
}
|
||||
|
||||
@GetMapping("viewMDC")
|
||||
@ApiOperation(value = "查看MDC数据", notes = "查看MDC数据")
|
||||
public Result viewMDC(Integer sampleId){
|
||||
public Result viewMDC(Integer sampleId) {
|
||||
return spectrumAnalysisService.viewMDC(sampleId);
|
||||
}
|
||||
|
||||
@GetMapping("changeDetector")
|
||||
@ApiOperation(value = "切换台站及探测器信息", notes = "切换台站及探测器信息")
|
||||
public Result changeDetector(String stationName){
|
||||
public Result changeDetector(String stationName) {
|
||||
return spectrumAnalysisService.changeDetector(stationName);
|
||||
}
|
||||
|
||||
@PostMapping("statisticsQuery")
|
||||
@ApiOperation(value = "MDC分析数据", notes = "MDC分析数据")
|
||||
public Result statisticsQuery(@RequestBody StatisticsQueryData statisticsQueryData){
|
||||
public Result statisticsQuery(@RequestBody StatisticsQueryData statisticsQueryData) {
|
||||
return spectrumAnalysisService.statisticsQuery(statisticsQueryData);
|
||||
}
|
||||
|
||||
@GetMapping("statisticsQueryBtn")
|
||||
@ApiOperation(value = "报警按钮分析数据", notes = "报警按钮分析数据")
|
||||
public Result statisticsQueryBtn(String detectorName, Integer stationId, String statisticsType, @DateTimeFormat(pattern = "yyyy-MM-dd") Date startTime, @DateTimeFormat(pattern = "yyyy-MM-dd") Date endTime){
|
||||
public Result statisticsQueryBtn(String detectorName, Integer stationId, String statisticsType, @DateTimeFormat(pattern = "yyyy-MM-dd") Date startTime, @DateTimeFormat(pattern = "yyyy-MM-dd") Date endTime) {
|
||||
return spectrumAnalysisService.statisticsQueryBtn(detectorName, stationId, statisticsType, startTime, endTime);
|
||||
}
|
||||
|
||||
@PostMapping("fitting")
|
||||
@ApiOperation(value = "公式计算新的曲线", notes = "公式计算新的曲线")
|
||||
public Result fitting(@RequestBody FittingBody fittingBody){
|
||||
public Result fitting(@RequestBody FittingBody fittingBody) {
|
||||
return spectrumAnalysisService.fitting(fittingBody.getParamA(), fittingBody.getParamB(), fittingBody.getParamC(), fittingBody.getTempPoints(), fittingBody.getCount());
|
||||
}
|
||||
|
||||
@GetMapping("getGammaGated")
|
||||
@ApiOperation(value = "获取gamma对应count数据", notes = "获取gamma对应count数据")
|
||||
public Result getGammaGated(Integer chartHeight, Integer channelWidth, Integer gammaChannel, Integer sampleId){
|
||||
public Result getGammaGated(Integer chartHeight, Integer channelWidth, Integer gammaChannel, Integer sampleId) {
|
||||
return spectrumAnalysisService.getGammaGated(chartHeight, channelWidth, gammaChannel, sampleId);
|
||||
}
|
||||
|
||||
@PostMapping("ReAnalyse")
|
||||
public Result ReAnalyse(@RequestBody AnalyseData analyseData, HttpServletRequest request){
|
||||
public Result ReAnalyse(@RequestBody AnalyseData analyseData, HttpServletRequest request) {
|
||||
return spectrumAnalysisService.ReAnalyse(analyseData, request);
|
||||
}
|
||||
|
||||
@GetMapping("analyseCurrentSpectrum")
|
||||
public Result analyseCurrentSpectrum(String dbName, Integer sampleId, String sampleFileName, String gasFileName, String detFileName, HttpServletRequest request){
|
||||
@PostMapping("analyseCurrentSpectrum")
|
||||
@ApiOperation(value = "解析当前加载文件数据", notes = "解析当前加载文件数据")
|
||||
public Result analyseCurrentSpectrum(@RequestBody AnalyseBetaInfo analyseBetaInfo, HttpServletRequest request) {
|
||||
String dbName = analyseBetaInfo.getDbNames().get(0);
|
||||
Integer sampleId = analyseBetaInfo.getSampleIds().get(0);
|
||||
String sampleFileName = analyseBetaInfo.getSampleFileNames().get(0);
|
||||
String gasFileName = analyseBetaInfo.getGasFileNames().get(0);
|
||||
String detFileName = analyseBetaInfo.getDetFileNames().get(0);
|
||||
return spectrumAnalysisService.analyseCurrentSpectrum(dbName, sampleId, sampleFileName, gasFileName, detFileName, request);
|
||||
}
|
||||
|
||||
@PostMapping("analyseAllSpectrum")
|
||||
public Result analyseAllSpectrum(){
|
||||
return null;
|
||||
@ApiOperation(value = "解析全部加载文件数据", notes = "解析全部加载文件数据")
|
||||
public Result analyseAllSpectrum(@RequestBody AnalyseBetaInfo analyseBetaInfo, HttpServletRequest request) {
|
||||
return spectrumAnalysisService.analyseAllSpectrum(analyseBetaInfo.getDbNames(), analyseBetaInfo.getSampleIds(), analyseBetaInfo.getSampleFileNames(), analyseBetaInfo.getGasFileNames(), analyseBetaInfo.getDetFileNames(), request);
|
||||
}
|
||||
|
||||
@PostMapping("saveToDB")
|
||||
public Result saveToDB(@RequestBody BgDataAnlyseResultIn anlyseResultIn){
|
||||
public Result saveToDB(@RequestBody BgDataAnlyseResultIn anlyseResultIn) {
|
||||
return spectrumAnalysisService.saveToDB(anlyseResultIn);
|
||||
}
|
||||
|
||||
public static void main(String[] args) {
|
||||
String str = "\n#FILE INFORMATION\n SampleMeasID: AUX04_005-2023/06/18-14:05\n GASBKMeasID: AUX04_005-2023/06/18-02:05\n DETBKMeasID: AUX04_005-2022/04/22-13:27\n SRID: 04202306171811X\n\n#COLLECTION INFORMATION\n Station CODE: AUX04\n Detector CODE: AUX04_005\n Sample ID: 426132\n Collection Start: 2023/06/17 18:54:10\n Collection Stop: 2023/06/18 06:54:08\n Collection TIME: 43198.00\n Sample Volume[m3]: 14.410883\n Xe Volume[cm3]: 1.00978\n\n#ACQUISITION INFORMATION\n Acquisition Start: 2023/06/18 14:05:09\n Acq Real Time: 40200.803\n Acq Live Time: 40187.202\n\n";
|
||||
String[] parts = str.split("\\n");
|
||||
|
||||
for (String part : parts) {
|
||||
System.out.println(part);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -11,7 +11,6 @@ import java.util.Date;
|
|||
@Data
|
||||
public class GardsROIChannelsSpectrum implements Serializable {
|
||||
|
||||
|
||||
/**
|
||||
* 样品id
|
||||
*/
|
||||
|
|
|
@ -1,16 +1,56 @@
|
|||
package org.jeecg.modules.entity;
|
||||
|
||||
|
||||
import com.baomidou.mybatisplus.annotation.TableField;
|
||||
import com.baomidou.mybatisplus.annotation.TableName;
|
||||
import com.fasterxml.jackson.annotation.JsonFormat;
|
||||
import lombok.Data;
|
||||
import org.jeecg.modules.base.entity.rnauto.GardsXeResults;
|
||||
import org.springframework.format.annotation.DateTimeFormat;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.util.Date;
|
||||
|
||||
@Data
|
||||
@TableName(value = "GARDS_XE_RESULTS")
|
||||
public class GardsXeResultsSpectrum extends GardsXeResults {
|
||||
public class GardsXeResultsSpectrum implements Serializable {
|
||||
|
||||
/**
|
||||
* 样品id
|
||||
*/
|
||||
private Integer sampleId;
|
||||
|
||||
/**
|
||||
* 分析ID号
|
||||
*/
|
||||
private Integer idAnalysis;
|
||||
|
||||
/**
|
||||
* 核素名称
|
||||
*/
|
||||
private String nuclideName;
|
||||
/**
|
||||
* 感兴趣区活度浓度
|
||||
*/
|
||||
private Double conc;
|
||||
|
||||
/**
|
||||
* 感兴趣区活度浓度不确定度
|
||||
*/
|
||||
private Double concErr;
|
||||
/**
|
||||
* 感兴趣区MDC
|
||||
*/
|
||||
private Double mdc;
|
||||
/**
|
||||
* 感兴趣区LC
|
||||
*/
|
||||
private Double lc;
|
||||
/**
|
||||
* 感兴趣区识别标示;1:识别到,0,未识别到
|
||||
*/
|
||||
private Integer nidFlag;
|
||||
|
||||
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss")
|
||||
@DateTimeFormat(pattern = "yyyy-MM-dd HH:mm:ss")
|
||||
private Date moddate;
|
||||
|
||||
@TableField(exist = false)
|
||||
private String color;
|
||||
|
||||
}
|
||||
|
|
|
@ -0,0 +1,17 @@
|
|||
package org.jeecg.modules.entity.vo;
|
||||
|
||||
import lombok.Data;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.util.List;
|
||||
|
||||
@Data
|
||||
public class AcceptInfo implements Serializable {
|
||||
|
||||
private String fileName;
|
||||
|
||||
private boolean accept;
|
||||
|
||||
private List<PeakInfo> oldPeak;
|
||||
|
||||
}
|
|
@ -0,0 +1,21 @@
|
|||
package org.jeecg.modules.entity.vo;
|
||||
|
||||
import lombok.Data;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.util.List;
|
||||
|
||||
@Data
|
||||
public class AnalyseBetaInfo implements Serializable {
|
||||
|
||||
private List<String> dbNames;
|
||||
|
||||
private List<Integer> sampleIds;
|
||||
|
||||
private List<String> sampleFileNames;
|
||||
|
||||
private List<String> gasFileNames;
|
||||
|
||||
private List<String> detFileNames;
|
||||
|
||||
}
|
|
@ -0,0 +1,16 @@
|
|||
package org.jeecg.modules.entity.vo;
|
||||
|
||||
import lombok.Data;
|
||||
|
||||
import java.io.Serializable;
|
||||
|
||||
@Data
|
||||
public class CommentsInfo implements Serializable {
|
||||
|
||||
private Integer curRow;
|
||||
|
||||
private String fileName;
|
||||
|
||||
private String comments;
|
||||
|
||||
}
|
|
@ -0,0 +1,19 @@
|
|||
package org.jeecg.modules.entity.vo;
|
||||
|
||||
import lombok.Data;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.util.List;
|
||||
|
||||
@Data
|
||||
public class NuclideInfo implements Serializable {
|
||||
|
||||
private Integer curRow;
|
||||
|
||||
private String nuclideName;
|
||||
|
||||
private String fileName;
|
||||
|
||||
private List<String> list_identify;
|
||||
|
||||
}
|
|
@ -0,0 +1,16 @@
|
|||
package org.jeecg.modules.entity.vo;
|
||||
|
||||
import lombok.Data;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.util.List;
|
||||
|
||||
@Data
|
||||
public class UserLibraryInfo implements Serializable {
|
||||
|
||||
|
||||
private List<String> userLibraryName;
|
||||
|
||||
private String fileName;
|
||||
|
||||
}
|
|
@ -10,4 +10,6 @@ public interface SysDefaultNuclideSpectrumMapper extends BaseMapper<SysDefaultNu
|
|||
|
||||
List<String> findNuclidesByUserName(@Param(value = "userName") String userName, @Param(value = "systemType") String systemType);
|
||||
|
||||
void deleteNuclidesByUserName(@Param(value = "userName") String userName, @Param(value = "systemType") String systemType);
|
||||
|
||||
}
|
||||
|
|
|
@ -12,5 +12,12 @@
|
|||
and create_by = #{userName}
|
||||
</select>
|
||||
|
||||
<delete id="deleteNuclidesByUserName">
|
||||
delete from
|
||||
sys_default_nuclide
|
||||
where use_type = 3
|
||||
and nuclide_type = #{systemType}
|
||||
and create_by = #{userName}
|
||||
</delete>
|
||||
|
||||
</mapper>
|
|
@ -3,36 +3,93 @@ package org.jeecg.modules.native_jni.struct;
|
|||
import java.util.List;
|
||||
|
||||
public class BgAnalyseResult {
|
||||
/************************** BgMDCPara **************************/
|
||||
/** MDC XE135 */
|
||||
public double MDC_Xe135;
|
||||
/** MDC XE131m */
|
||||
public double MDC_Xe131m;
|
||||
/** MDC XE133m*/
|
||||
public double MDC_Xe133m;
|
||||
/** MDC XE133 */
|
||||
public double MDC_Xe133;
|
||||
public List<Double> MDC;
|
||||
public List<Double> MDC_CTS;
|
||||
|
||||
/************************** BgXeConUncer **************************/
|
||||
/** 135不浓度 */
|
||||
public double Xe135_con;
|
||||
/** 135不确定度 */
|
||||
public double Xe135_uncer;
|
||||
/************************** GARDS_XE_RESULTS START**************************/
|
||||
/************************** XE_131m **************************/
|
||||
public double Xe131m_con;
|
||||
public double Xe131m_uncer;
|
||||
public double Xe133m_con;
|
||||
public double Xe133m_uncer;
|
||||
public double MDC_Xe131m;
|
||||
public double LC_Xe131m;
|
||||
public int XE_131m_NID_FLAG;
|
||||
|
||||
/************************** XE_133 **************************/
|
||||
public double Xe133_con;
|
||||
public double Xe133_uncer;
|
||||
public double MDC_Xe133;
|
||||
public double LC_Xe133;
|
||||
public int XE_133_NID_FLAG;
|
||||
/************************** XE_133m **************************/
|
||||
public double Xe133m_con;
|
||||
public double Xe133m_uncer;
|
||||
public double MDC_Xe133m;
|
||||
public double LC_Xe133m;
|
||||
public int XE_133m_NID_FLAG;
|
||||
|
||||
// public double LC_Xe135; //LC XE135
|
||||
// public double LC_Xe131m; //LC XE131m
|
||||
// public double LC_Xe133m; //LC XE133m
|
||||
// public double LC_Xe133; //LC XE133
|
||||
// public List<Double> LC;
|
||||
// public List<Double> LC_CTS;
|
||||
/************************** XE_135 **************************/
|
||||
public double Xe135_con;
|
||||
public double Xe135_uncer;
|
||||
public double MDC_Xe135;
|
||||
public double LC_Xe135;
|
||||
public int XE_135_NID_FLAG;
|
||||
/************************** GARDS_XE_RESULTS end **************************/
|
||||
|
||||
/************************** GARDS_ROI_RESULTS START**************************/
|
||||
public List<Integer> ROI;
|
||||
public List<Double> LC;
|
||||
public List<Double> s_roi_cts;
|
||||
public List<Double> g_roi_cts;
|
||||
public List<Double> d_roi_cts;
|
||||
public List<Double> s_deduct_d_cts;
|
||||
public List<Double> g_deduct_d_cts;
|
||||
public List<Double> ROI_net_coutns;
|
||||
public List<Double> ROI_net_coutns_err;
|
||||
public List<Double> ROI_con_uncer;
|
||||
public List<Double> ROI_con_uncer_err;
|
||||
|
||||
public List<Integer> MDC;
|
||||
public List<Integer> dNidFlag;
|
||||
|
||||
/************************** GARDS_ROI_RESULTS end **************************/
|
||||
|
||||
/************************** GARDS_ROI_RESULTS START**************************/
|
||||
public List<Double> s_b_fitting_e_c;
|
||||
public int s_b_fitting_type;
|
||||
public String s_b_fitting_type_def;
|
||||
public List<Double> s_g_fitting_e_c;
|
||||
public int s_g_fitting_type;
|
||||
public String s_g_fitting_type_def;
|
||||
|
||||
public List<Double> g_b_fitting_e_c;
|
||||
public int g_b_fitting_type;
|
||||
public String g_b_fitting_type_def;
|
||||
public List<Double> g_g_fitting_e_c;
|
||||
public int g_g_fitting_type;
|
||||
public String g_g_fitting_type_def;
|
||||
|
||||
public List<Double> d_b_fitting_e_c;
|
||||
public int d_b_fitting_type;
|
||||
public String d_b_fitting_type_def;
|
||||
public List<Double> d_g_fitting_e_c;
|
||||
public int d_g_fitting_type;
|
||||
public String d_g_fitting_type_def;
|
||||
/************************** GARDS_CALIBRATION end **************************/
|
||||
|
||||
/************************** GARDS_CALIBRATION START**************************/
|
||||
public List<Integer> S_ROI;
|
||||
public List<Integer> S_ROI_B_Boundary_start;
|
||||
public List<Integer> S_ROI_B_Boundary_stop;
|
||||
public List<Integer> S_ROI_G_Boundary_start;
|
||||
public List<Integer> S_ROI_G_Boundary_stop;
|
||||
public List<Integer> G_ROI;
|
||||
public List<Integer> G_ROI_B_Boundary_start;
|
||||
public List<Integer> G_ROI_B_Boundary_stop;
|
||||
public List<Integer> G_ROI_G_Boundary_start;
|
||||
public List<Integer> G_ROI_G_Boundary_stop;
|
||||
public List<Integer> D_ROI;
|
||||
public List<Integer> D_ROI_B_Boundary_start;
|
||||
public List<Integer> D_ROI_B_Boundary_stop;
|
||||
public List<Integer> D_ROI_G_Boundary_start;
|
||||
public List<Integer> D_ROI_G_Boundary_stop;
|
||||
/************************** GARDS_ROI_CHANNELS end **************************/
|
||||
|
||||
/**
|
||||
* 分析结果标记,true成功,false失败
|
||||
|
@ -42,4 +99,79 @@ public class BgAnalyseResult {
|
|||
* 失败原因
|
||||
*/
|
||||
public String error_log;
|
||||
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "BgAnalyseResult{" +
|
||||
"Xe131m_con=" + Xe131m_con +
|
||||
", Xe131m_uncer=" + Xe131m_uncer +
|
||||
", MDC_Xe131m=" + MDC_Xe131m +
|
||||
", LC_Xe131m=" + LC_Xe131m +
|
||||
", XE_131m_NID_FLAG=" + XE_131m_NID_FLAG +
|
||||
", Xe133_con=" + Xe133_con +
|
||||
", Xe133_uncer=" + Xe133_uncer +
|
||||
", MDC_Xe133=" + MDC_Xe133 +
|
||||
", LC_Xe133=" + LC_Xe133 +
|
||||
", XE_133_NID_FLAG=" + XE_133_NID_FLAG +
|
||||
", Xe133m_con=" + Xe133m_con +
|
||||
", Xe133m_uncer=" + Xe133m_uncer +
|
||||
", MDC_Xe133m=" + MDC_Xe133m +
|
||||
", LC_Xe133m=" + LC_Xe133m +
|
||||
", XE_133m_NID_FLAG=" + XE_133m_NID_FLAG +
|
||||
", Xe135_con=" + Xe135_con +
|
||||
", Xe135_uncer=" + Xe135_uncer +
|
||||
", MDC_Xe135=" + MDC_Xe135 +
|
||||
", LC_Xe135=" + LC_Xe135 +
|
||||
", XE_135_NID_FLAG=" + XE_135_NID_FLAG +
|
||||
", ROI=" + ROI +
|
||||
", LC=" + LC +
|
||||
", s_roi_cts=" + s_roi_cts +
|
||||
", g_roi_cts=" + g_roi_cts +
|
||||
", d_roi_cts=" + d_roi_cts +
|
||||
", s_deduct_d_cts=" + s_deduct_d_cts +
|
||||
", g_deduct_d_cts=" + g_deduct_d_cts +
|
||||
", ROI_net_coutns=" + ROI_net_coutns +
|
||||
", ROI_net_coutns_err=" + ROI_net_coutns_err +
|
||||
", ROI_con_uncer=" + ROI_con_uncer +
|
||||
", ROI_con_uncer_err=" + ROI_con_uncer_err +
|
||||
", MDC=" + MDC +
|
||||
", dNidFlag=" + dNidFlag +
|
||||
", s_b_fitting_e_c=" + s_b_fitting_e_c +
|
||||
", s_b_fitting_type=" + s_b_fitting_type +
|
||||
", s_b_fitting_type_def='" + s_b_fitting_type_def + '\'' +
|
||||
", s_g_fitting_e_c=" + s_g_fitting_e_c +
|
||||
", s_g_fitting_type=" + s_g_fitting_type +
|
||||
", s_g_fitting_type_def='" + s_g_fitting_type_def + '\'' +
|
||||
", g_b_fitting_e_c=" + g_b_fitting_e_c +
|
||||
", g_b_fitting_type=" + g_b_fitting_type +
|
||||
", g_b_fitting_type_def='" + g_b_fitting_type_def + '\'' +
|
||||
", g_g_fitting_e_c=" + g_g_fitting_e_c +
|
||||
", g_g_fitting_type=" + g_g_fitting_type +
|
||||
", g_g_fitting_type_def='" + g_g_fitting_type_def + '\'' +
|
||||
", d_b_fitting_e_c=" + d_b_fitting_e_c +
|
||||
", d_b_fitting_type=" + d_b_fitting_type +
|
||||
", d_b_fitting_type_def='" + d_b_fitting_type_def + '\'' +
|
||||
", d_g_fitting_e_c=" + d_g_fitting_e_c +
|
||||
", d_g_fitting_type=" + d_g_fitting_type +
|
||||
", d_g_fitting_type_def='" + d_g_fitting_type_def + '\'' +
|
||||
", S_ROI=" + S_ROI +
|
||||
", S_ROI_B_Boundary_start=" + S_ROI_B_Boundary_start +
|
||||
", S_ROI_B_Boundary_stop=" + S_ROI_B_Boundary_stop +
|
||||
", S_ROI_G_Boundary_start=" + S_ROI_G_Boundary_start +
|
||||
", S_ROI_G_Boundary_stop=" + S_ROI_G_Boundary_stop +
|
||||
", G_ROI=" + G_ROI +
|
||||
", G_ROI_B_Boundary_start=" + G_ROI_B_Boundary_start +
|
||||
", G_ROI_B_Boundary_stop=" + G_ROI_B_Boundary_stop +
|
||||
", G_ROI_G_Boundary_start=" + G_ROI_G_Boundary_start +
|
||||
", G_ROI_G_Boundary_stop=" + G_ROI_G_Boundary_stop +
|
||||
", D_ROI=" + D_ROI +
|
||||
", D_ROI_B_Boundary_start=" + D_ROI_B_Boundary_start +
|
||||
", D_ROI_B_Boundary_stop=" + D_ROI_B_Boundary_stop +
|
||||
", D_ROI_G_Boundary_start=" + D_ROI_G_Boundary_start +
|
||||
", D_ROI_G_Boundary_stop=" + D_ROI_G_Boundary_stop +
|
||||
", analyse_flag=" + analyse_flag +
|
||||
", error_log='" + error_log + '\'' +
|
||||
'}';
|
||||
}
|
||||
}
|
||||
|
|
|
@ -10,6 +10,8 @@ import java.util.Map;
|
|||
|
||||
public interface IGammaService{
|
||||
|
||||
Result initValue(Integer sampleId, String dbName);
|
||||
|
||||
Result gammaByDB(String dbName, Integer sampleId);
|
||||
|
||||
Result gammaByFile(String fileName, HttpServletRequest request);
|
||||
|
@ -22,8 +24,23 @@ public interface IGammaService{
|
|||
|
||||
Result insertPeak(Integer sampleId, String fileName, Integer curChan);
|
||||
|
||||
Result acceptResults(String fileName, boolean accept, List<PeakInfo> oldPeak);
|
||||
|
||||
Result deletePeak(String fileName, int curRow);
|
||||
|
||||
Result getSelPosNuclide(Integer sampleId, String fileName, int channel, String nuclides, HttpServletRequest request);
|
||||
|
||||
Result addNuclide(Integer curRow, String nuclideName, String fileName, List<String> list_identify, HttpServletRequest request);
|
||||
|
||||
Result deleteNuclide(Integer curRow, String nuclideName, String fileName, List<String> list_identify);
|
||||
|
||||
Result viewPeakComment(String fileName, int curRow);
|
||||
|
||||
Result addPeakComment(String fileName, int curRow, String comments);
|
||||
|
||||
Result viewGenralComment(String fileName);
|
||||
|
||||
Result addGeneralComment(String fileName, String comments);
|
||||
|
||||
Result nuclideReview(Integer sampleId, String fileName, Double channel, HttpServletRequest request);
|
||||
|
||||
|
@ -61,6 +78,8 @@ public interface IGammaService{
|
|||
|
||||
Result configUserLibrary(Integer sampleId, String fileName, HttpServletRequest request);
|
||||
|
||||
Result saveUserLibrary(List<String> userLibraryName, String fileName, HttpServletRequest request);
|
||||
|
||||
Result viewComment(Integer sampleId, String fileName);
|
||||
|
||||
Result peakInformation(Integer sampleId, String fileName);
|
||||
|
|
|
@ -19,7 +19,7 @@ public interface ISpectrumAnalysisService {
|
|||
|
||||
Result getSpectrumFiles(HttpServletRequest request);
|
||||
|
||||
Result getFilesBySampleFile(String sampleFileName, HttpServletRequest request);
|
||||
Result getFilesBySampleFile(String fileName, HttpServletRequest request);
|
||||
|
||||
Result getDBSpectrumChart(String dbName, Integer sampleId);
|
||||
|
||||
|
@ -65,7 +65,7 @@ public interface ISpectrumAnalysisService {
|
|||
|
||||
Result analyseCurrentSpectrum(String dbName, Integer sampleId, String sampleFileName, String gasFileName, String detFileName, HttpServletRequest request);
|
||||
|
||||
Result analyseAllSpectrum();
|
||||
Result analyseAllSpectrum(List<String> dbNames, List<Integer> sampleIds, List<String> sampleFileNames, List<String> gasFileNames, List<String> detFileNames, HttpServletRequest request);
|
||||
|
||||
Result saveToDB(BgDataAnlyseResultIn anlyseResultIn);
|
||||
|
||||
|
|
|
@ -9,4 +9,6 @@ public interface ISysDefaultNuclideSpectrumService extends IService<SysDefaultNu
|
|||
|
||||
List<String> findNuclidesByUserName(String userName, String systemType);
|
||||
|
||||
boolean saveNuclidesByUserName(List<String> userLibraryName, String userName, String systemType);
|
||||
|
||||
}
|
||||
|
|
|
@ -98,6 +98,38 @@ public class GammaServiceImpl implements IGammaService {
|
|||
@Autowired
|
||||
private IGardsAnalySettingSpectrumService analySettingSpectrumService;
|
||||
|
||||
@Override
|
||||
public Result initValue(Integer sampleId, String dbName) {
|
||||
Result result = new Result();
|
||||
Cache<String, PHDFile> phdCache = localCache.getPHDCache();
|
||||
PHDFile phd = new PHDFile();
|
||||
//读取文件内容
|
||||
//根据sampleId获取sample文件路径
|
||||
String sampleFilePath = spectrumAnalysisMapper.getSampleFilePath(sampleId);
|
||||
if (StringUtils.isBlank(sampleFilePath)){
|
||||
result.error500("样品文件不存在!");
|
||||
return result;
|
||||
}
|
||||
String pathName = StringPool.SLASH + spectrumPathProperties.getRootPath() + StringPool.SLASH + sampleFilePath.substring(0, sampleFilePath.lastIndexOf(StringPool.SLASH));
|
||||
String fileName = sampleFilePath.substring(sampleFilePath.lastIndexOf(StringPool.SLASH)+1);
|
||||
boolean flag = gammaFileUtil.loadFile(pathName, fileName, phd, result);
|
||||
if (!flag){
|
||||
return result;
|
||||
}
|
||||
//声明基础数组信息
|
||||
gammaFileUtil.SetBaseInfo(phd);
|
||||
//从数据库中读取相关信息
|
||||
boolean bRet = gammaFileUtil.getResultFromDB(dbName, sampleId, phd, result);
|
||||
if (!bRet){
|
||||
return result;
|
||||
}
|
||||
phdCache.put(fileName, phd);
|
||||
localCache.setPHDCache(phdCache);
|
||||
result.setSuccess(true);
|
||||
result.setResult(phd);
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Result gammaByDB(String dbName, Integer sampleId) {
|
||||
Result result = new Result();
|
||||
|
@ -111,7 +143,7 @@ public class GammaServiceImpl implements IGammaService {
|
|||
result.error500("样品文件不存在!");
|
||||
return result;
|
||||
}
|
||||
String pathName = sampleFilePath.substring(0, sampleFilePath.lastIndexOf(StringPool.SLASH));
|
||||
String pathName = StringPool.SLASH + spectrumPathProperties.getRootPath() + StringPool.SLASH + sampleFilePath.substring(0, sampleFilePath.lastIndexOf(StringPool.SLASH));
|
||||
String fileName = sampleFilePath.substring(sampleFilePath.lastIndexOf(StringPool.SLASH)+1);
|
||||
boolean flag = gammaFileUtil.loadFile(pathName, fileName, phd, result);
|
||||
if (!flag){
|
||||
|
@ -225,9 +257,9 @@ public class GammaServiceImpl implements IGammaService {
|
|||
//查询当前用户关联的核素信息
|
||||
List<String> nuclides = new LinkedList<>();
|
||||
//从postgreSql中获取当前用户关注的核素信息 如果当前用户没有 则返回管理员的
|
||||
nuclides = defaultNuclideSpectrumService.findNuclidesByUserName(userName, phd.getHeader().getSystem_type());
|
||||
nuclides = defaultNuclideSpectrumService.findNuclidesByUserName(userName, phd.getHeader().getSystem_type().toUpperCase());
|
||||
if (CollectionUtils.isEmpty(nuclides)){
|
||||
nuclides = defaultNuclideSpectrumService.findNuclidesByUserName("admin", phd.getHeader().getSystem_type());
|
||||
nuclides = defaultNuclideSpectrumService.findNuclidesByUserName("admin", phd.getHeader().getSystem_type().toUpperCase());
|
||||
}
|
||||
boolean flag = gammaFileUtil.AnalyseData(phd, configureData, nuclides, true);
|
||||
if (flag){
|
||||
|
@ -266,7 +298,7 @@ public class GammaServiceImpl implements IGammaService {
|
|||
return result;
|
||||
}
|
||||
//表单
|
||||
List<PeakInfo> vPeak = phd.getVPeak();
|
||||
List<PeakInfo> vPeak = gammaFileUtil.InitPeakTable(phd.getVPeak());
|
||||
map.put("table", vPeak);
|
||||
//Chart 折线图
|
||||
List<Long> m_vCount = new LinkedList<>();
|
||||
|
@ -283,8 +315,8 @@ public class GammaServiceImpl implements IGammaService {
|
|||
if(m_nSChan == 0) {
|
||||
m_vCount.add(0L);
|
||||
}
|
||||
ChartData channelCountChart = gammaFileUtil.Channel_Count(phd, m_vCount, m_nCount);
|
||||
ChartData channelBaseLineChart = gammaFileUtil.Channel_BaseLine(phd, m_vCount, m_nCount);
|
||||
ChartData channelCountChart = gammaFileUtil.Channel_Count(m_vCount, m_nCount);
|
||||
ChartData channelBaseLineChart = gammaFileUtil.Channel_BaseLine(phd, m_nCount);
|
||||
List<ChartData> channelPeakChart = gammaFileUtil.Channel_Peak(phd, m_nCount);
|
||||
List<ShapeData> channelBaseCPChart = gammaFileUtil.Channel_BaseCP(phd);
|
||||
map.put("channelCountChart", channelCountChart);
|
||||
|
@ -292,7 +324,7 @@ public class GammaServiceImpl implements IGammaService {
|
|||
map.put("channelPeakChart", channelPeakChart);
|
||||
map.put("channelBaseCPChart", channelBaseCPChart);
|
||||
//Bar Chart 柱状图
|
||||
List<SeriseData> differance = gammaFileUtil.Differance(phd, vPeak, m_vCount, m_nCount);
|
||||
List<SeriseData> differance = gammaFileUtil.Differance(phd, phd.getVPeak(), m_vCount, m_nCount);
|
||||
map.put("barChart", differance);
|
||||
//赋值energy
|
||||
map.put("energy", phd.getVEnergy());
|
||||
|
@ -304,6 +336,7 @@ public class GammaServiceImpl implements IGammaService {
|
|||
@Override
|
||||
public Result insertPeak(Integer sampleId, String fileName, Integer curChan) {
|
||||
Result result = new Result();
|
||||
Map<String, Object> map = new HashMap<>();
|
||||
System.loadLibrary("GammaAnaly");
|
||||
Cache<String, PHDFile> phdCache = localCache.getPHDCache();
|
||||
PHDFile phd = phdCache.getIfPresent(fileName);
|
||||
|
@ -317,7 +350,8 @@ public class GammaServiceImpl implements IGammaService {
|
|||
return result;
|
||||
}
|
||||
// 备份原来的峰列表
|
||||
List<PeakInfo> vOriPeaks = phd.getVPeak();
|
||||
List<PeakInfo> vOriPeaks = new LinkedList<>();
|
||||
vOriPeaks.addAll(phd.getVPeak());
|
||||
List<Double> peakCentroid = new LinkedList<>();
|
||||
List<Double> fwhmc = new LinkedList<>();
|
||||
List<Double> tail = new LinkedList<>();
|
||||
|
@ -380,6 +414,36 @@ public class GammaServiceImpl implements IGammaService {
|
|||
insertInput.curChan = curChan;
|
||||
insertInput.vCount = m_vCount;
|
||||
StructInsertOutput structInsertOutput = CalValuesHandler.insertPeaks(insertInput);
|
||||
List<Integer> indexs = new LinkedList<>();
|
||||
if (structInsertOutput.vIdx.size()>0){
|
||||
for (int j=0; j<structInsertOutput.vIdx.size(); j++) {
|
||||
int a = 0;
|
||||
while (a < phd.getVPeak().size() && structInsertOutput.peakCentroid.get(j) > phd.getVPeak().get(a).peakCentroid){
|
||||
a++;
|
||||
}
|
||||
PeakInfo peak = new PeakInfo();
|
||||
peak.left = structInsertOutput.vLeft.get(j).intValue();;
|
||||
peak.right = structInsertOutput.vRight.get(j).intValue();;
|
||||
peak.peakCentroid = structInsertOutput.peakCentroid.get(j);
|
||||
peak.energy = structInsertOutput.energy.get(j);
|
||||
peak.area = structInsertOutput.area.get(j);
|
||||
Double sensitivity = String.valueOf(structInsertOutput.sensitivity.get(j)).equalsIgnoreCase("nan") ? 0.0 : structInsertOutput.sensitivity.get(j);
|
||||
peak.sensitivity = sensitivity;
|
||||
peak.fwhm = structInsertOutput.fwhm.get(j);
|
||||
peak.fwhmc = structInsertOutput.fwhmc.get(j);
|
||||
peak.stepRatio = structInsertOutput.stepRatio.get(j);
|
||||
peak.tail = structInsertOutput.tail.get(j);
|
||||
peak.tailAlpha = structInsertOutput.tailAlpha.get(j);
|
||||
peak.upperTail = structInsertOutput.upperTail.get(j);
|
||||
peak.upperTailAlpha = structInsertOutput.upperTailAlpha.get(j);
|
||||
peak.efficiency = structInsertOutput.efficiency.get(j);
|
||||
peak.BWWidthChan = 0;
|
||||
peak.recoilBetaChan = String.valueOf(structInsertOutput.recoilBetaChan.get(j));
|
||||
peak.recoilDeltaChan = String.valueOf(structInsertOutput.recoilDeltaChan.get(j));
|
||||
phd.getVPeak().add(a, peak);
|
||||
indexs.add(a);
|
||||
}
|
||||
}
|
||||
int peakIdx = structInsertOutput.vIdx.get(0).intValue();
|
||||
int left = structInsertOutput.vLeft.get(0).intValue();
|
||||
int right = structInsertOutput.vRight.get(0).intValue();
|
||||
|
@ -392,68 +456,102 @@ public class GammaServiceImpl implements IGammaService {
|
|||
} else if(peak.peakCentroid > left) {
|
||||
vIdx.add(ii);
|
||||
}
|
||||
++ii;
|
||||
}
|
||||
|
||||
if (vIdx.size()>0){
|
||||
for (Integer index:vIdx) {
|
||||
PeakInfo peak = new PeakInfo();
|
||||
peak.peakCentroid = structInsertOutput.peakCentroid.get(0);
|
||||
peak.energy = structInsertOutput.energy.get(0);
|
||||
peak.area = structInsertOutput.area.get(0);
|
||||
Double sensitivity = String.valueOf(structInsertOutput.sensitivity.get(0)).equalsIgnoreCase("nan") ? 0.0 : structInsertOutput.sensitivity.get(0);
|
||||
peak.sensitivity = sensitivity;
|
||||
peak.fwhm = structInsertOutput.fwhm.get(0);
|
||||
peak.fwhmc = structInsertOutput.fwhmc.get(0);
|
||||
peak.stepRatio = structInsertOutput.stepRatio.get(0);
|
||||
peak.tail = structInsertOutput.tail.get(0);
|
||||
peak.tailAlpha = structInsertOutput.tailAlpha.get(0);
|
||||
peak.upperTail = structInsertOutput.upperTail.get(0);
|
||||
peak.upperTailAlpha = structInsertOutput.upperTailAlpha.get(0);
|
||||
peak.efficiency = structInsertOutput.efficiency.get(0);
|
||||
peak.BWWidthChan = 0;
|
||||
peak.recoilBetaChan = String.valueOf(structInsertOutput.recoilBetaChan.get(0));
|
||||
peak.recoilDeltaChan = String.valueOf(structInsertOutput.recoilDeltaChan.get(0));
|
||||
phd.getVPeak().add(index, peak);
|
||||
}
|
||||
ii++;
|
||||
}
|
||||
|
||||
List<TablePeaks> tablePeaksList = gammaFileUtil.FitPeakBaseLine(phd, vIdx);
|
||||
// if(CollectionUtils.isNotEmpty(tablePeaksList)) {
|
||||
map.put("tablePeaksList", tablePeaksList);
|
||||
map.put("oldPeaks", vOriPeaks);
|
||||
result.setSuccess(true);
|
||||
result.setResult(map);
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Result acceptResults(String fileName, boolean accept, List<PeakInfo> oldPeak) {
|
||||
Result result = new Result();
|
||||
Cache<String, PHDFile> phdCache = localCache.getPHDCache();
|
||||
PHDFile phd = phdCache.getIfPresent(fileName);
|
||||
if (Objects.isNull(phd)){
|
||||
result.error500("请先选择解析文件!");
|
||||
return result;
|
||||
}
|
||||
List<Long> m_vCount = new LinkedList<>();
|
||||
long m_nCount = phd.getSpec().getNum_g_channel();
|
||||
long m_nSChan = phd.getSpec().getBegin_channel();
|
||||
|
||||
// 确保绘制曲线时所有谱都是从1道开始
|
||||
int i = 0;
|
||||
if(m_nSChan == 0){
|
||||
i = 1;
|
||||
}
|
||||
for(; i<m_nCount; ++i) {
|
||||
m_vCount.add(phd.getSpec().getCounts().get(i));
|
||||
}
|
||||
if(m_nSChan == 0) {
|
||||
m_vCount.add(0L);
|
||||
}
|
||||
HashMap<String, Object> map = new HashMap<>();
|
||||
//根据boolean 决定是否保留本次数据 如果保留则不需要操作vPeak 并重新拟合线
|
||||
if (accept){
|
||||
//算法有问题 --- 需要等等处理
|
||||
// gammaFileUtil.PeaksChanged(phd);
|
||||
|
||||
// List<PeakInfo> list = gammaFileUtil.vAddPeaks(m_data->m_phd->vPeak.begin()+vIdx.first(), m_data->m_phd->vPeak.begin()+vIdx.last()+1);
|
||||
// m_chart->AddDatas(m_data->PeakSet(vAddPeaks, m_data->m_phd->vBase, Qt::white));
|
||||
//
|
||||
// if(MyMessageBox::Ensure("Accept results?") > 0)
|
||||
// {
|
||||
// m_data->m_phd->baseCtrls.Baseline = m_data->m_phd->vBase;
|
||||
// m_baseCtrl.Baseline = m_data->m_phd->vBase;
|
||||
//
|
||||
// InitTable();
|
||||
//
|
||||
// m_chart->RemoveData("Peak");
|
||||
// m_chart->RemoveData("BaseLine");
|
||||
// m_chart->AddDatas(m_data->Channel_Peak());
|
||||
// m_chart->AddData(m_data->Channel_BaseLine());
|
||||
// m_barChart->SetPointData(m_data->Differance(m_data->m_phd->vPeak));
|
||||
// m_barChart->SetRangeX(m_chart->Range().minX, m_chart->Range().maxX);
|
||||
//
|
||||
// m_pMainWin->UpdateChart(true, false);
|
||||
// }else {
|
||||
// m_data->m_phd->vBase = m_data->m_phd->baseCtrls.Baseline;
|
||||
// m_data->m_phd->vPeak = vOriPeaks;
|
||||
//
|
||||
// m_chart->RemoveData("Peak");
|
||||
// m_chart->AddDatas(m_data->Channel_Peak());
|
||||
// }
|
||||
// }
|
||||
// else {
|
||||
// m_data->m_phd->vPeak.erase(m_data->m_phd->vPeak.begin() + peakIdx);
|
||||
// }
|
||||
|
||||
List<PeakInfo> vPeak = gammaFileUtil.InitPeakTable(phd.getVPeak());
|
||||
map.put("table", vPeak);
|
||||
List<ChartData> channelPeak = gammaFileUtil.Channel_Peak(phd, m_nCount);
|
||||
map.put("channelPeakChart", channelPeak);
|
||||
ChartData channelBaseLine = gammaFileUtil.Channel_BaseLine(phd, m_nCount);
|
||||
map.put("channelBaseLineChart", channelBaseLine);
|
||||
List<SeriseData> differance = gammaFileUtil.Differance(phd, phd.getVPeak(), m_vCount, m_nCount);
|
||||
map.put("barChart", differance);
|
||||
gammaFileUtil.UpdateChart(phd, map);
|
||||
} else {//如果不保留 根据下标移除对应的vPeak数据
|
||||
if (CollectionUtils.isNotEmpty(oldPeak)) {
|
||||
phd.getVPeak().clear();
|
||||
phd.setVPeak(oldPeak);
|
||||
map.put("table", phd.getVPeak());
|
||||
List<ChartData> channelPeak = gammaFileUtil.Channel_Peak(phd, m_nCount);
|
||||
map.put("channelPeakChart", channelPeak);
|
||||
}
|
||||
}
|
||||
result.setSuccess(true);
|
||||
result.setResult(tablePeaksList);
|
||||
result.setResult(map);
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
||||
|
@ -476,9 +574,9 @@ public class GammaServiceImpl implements IGammaService {
|
|||
//查询当前用户关联的核素信息
|
||||
List<String> userLib = new LinkedList<>();
|
||||
//从postgreSql中获取当前用户关注的核素信息 如果当前用户没有 则返回管理员的
|
||||
userLib = defaultNuclideSpectrumService.findNuclidesByUserName(userName, phd.getHeader().getSystem_type());
|
||||
userLib = defaultNuclideSpectrumService.findNuclidesByUserName(userName, phd.getHeader().getSystem_type().toUpperCase());
|
||||
if (CollectionUtils.isEmpty(userLib)){
|
||||
userLib = defaultNuclideSpectrumService.findNuclidesByUserName("admin", phd.getHeader().getSystem_type());
|
||||
userLib = defaultNuclideSpectrumService.findNuclidesByUserName("admin", phd.getHeader().getSystem_type().toUpperCase());
|
||||
}
|
||||
double min = phd.getVPeak().get(index).energy - phd.getSetting().getEnergyTolerance();
|
||||
double max = phd.getVPeak().get(index).energy + phd.getSetting().getEnergyTolerance();
|
||||
|
@ -495,6 +593,161 @@ public class GammaServiceImpl implements IGammaService {
|
|||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Result addNuclide(Integer curRow, String nuclideName, String fileName, List<String> list_identify, HttpServletRequest request) {
|
||||
Result result = new Result();
|
||||
Map<String, Object> map = new HashMap<>();
|
||||
String userName = JwtUtil.getUserNameByToken(request);
|
||||
Cache<String, PHDFile> phdCache = localCache.getPHDCache();
|
||||
PHDFile phd = phdCache.getIfPresent(fileName);
|
||||
if (Objects.isNull(phd)){
|
||||
result.error500("请先选择解析文件!");
|
||||
return result;
|
||||
}
|
||||
//获取需要新增的核素名称
|
||||
if(StringUtils.isBlank(nuclideName)) {
|
||||
result.error500("核素名称不能为空!");
|
||||
return result;
|
||||
}
|
||||
//判断当前用户的核素列表是否有核素信息 如果不为空就返回 不进行改变
|
||||
if(list_identify.indexOf(nuclideName)>0) {
|
||||
return result;
|
||||
}
|
||||
//用户当前的核素信息新增核素名称
|
||||
list_identify.add(nuclideName);
|
||||
//根据要进行修改的列的数据下标 操作Vpeak数据
|
||||
phd.getVPeak().get(curRow).nuclides.add(nuclideName);
|
||||
//查询当前用户所关心的核素名称
|
||||
//查询当前用户关联的核素信息
|
||||
List<String> userLib = new LinkedList<>();
|
||||
//从postgreSql中获取当前用户关注的核素信息 如果当前用户没有 则返回管理员的
|
||||
userLib = defaultNuclideSpectrumService.findNuclidesByUserName(userName, phd.getHeader().getSystem_type().toUpperCase());
|
||||
if (CollectionUtils.isEmpty(userLib)){
|
||||
userLib = defaultNuclideSpectrumService.findNuclidesByUserName("admin", phd.getHeader().getSystem_type().toUpperCase());
|
||||
}
|
||||
Map<String, NuclideLines> mapNucLines = gammaFileUtil.GetNuclideLines(userLib);
|
||||
//查询出核素信息
|
||||
NuclideLines it_line = mapNucLines.get(nuclideName);
|
||||
//如果核素信息不存在返回
|
||||
if(Objects.isNull(it_line)){
|
||||
return result;
|
||||
}
|
||||
List<Integer> vPeakIdx = new LinkedList<>(); // 从 0 开始
|
||||
int t_idx = 0;
|
||||
for (PeakInfo peak: phd.getVPeak()) {
|
||||
if(peak.nuclides.contains(nuclideName)) {
|
||||
vPeakIdx.add(t_idx);
|
||||
}
|
||||
t_idx++;
|
||||
}
|
||||
gammaFileUtil.CalcNuclideMDA(phd, it_line, nuclideName, vPeakIdx);
|
||||
map.put("identify", list_identify);
|
||||
List<PeakInfo> vPeak = gammaFileUtil.InitPeakTable(phd.getVPeak());
|
||||
map.put("table", vPeak);
|
||||
result.setSuccess(true);
|
||||
result.setResult(map);
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Result deleteNuclide(Integer curRow, String nuclideName, String fileName, List<String> list_identify) {
|
||||
Result result = new Result();
|
||||
Map<String, Object> map = new HashMap<>();
|
||||
Cache<String, PHDFile> phdCache = localCache.getPHDCache();
|
||||
PHDFile phd = phdCache.getIfPresent(fileName);
|
||||
if (Objects.isNull(phd)){
|
||||
result.error500("请先选择解析文件!");
|
||||
return result;
|
||||
}
|
||||
int index = list_identify.indexOf(nuclideName);
|
||||
if(index>0) {
|
||||
// 如果所选的行下标小于0 或者 超出界限 则不进行处理
|
||||
if(curRow < 0 || curRow >= phd.getVPeak().size()) {
|
||||
return result;
|
||||
}
|
||||
// 更新峰信息列表和表格
|
||||
//根据核素名称获取对应的下标并从list_identify,phd.getVPeak()移除
|
||||
list_identify.remove(index);
|
||||
int peakNuclIndex = phd.getVPeak().get(curRow).nuclides.indexOf(nuclideName);
|
||||
phd.getVPeak().get(curRow).nuclides.remove(peakNuclIndex);
|
||||
List<PeakInfo> vPeak = gammaFileUtil.InitPeakTable(phd.getVPeak());
|
||||
// 处理核素MDA、MDC
|
||||
gammaFileUtil.ReCalcMdaMdc(phd, nuclideName, curRow+1);
|
||||
map.put("identify", list_identify);
|
||||
map.put("table", vPeak);
|
||||
result.setSuccess(true);
|
||||
result.setResult(map);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Result viewPeakComment(String fileName, int curRow) {
|
||||
Result result = new Result();
|
||||
Cache<String, PHDFile> phdCache = localCache.getPHDCache();
|
||||
PHDFile phd = phdCache.getIfPresent(fileName);
|
||||
if (Objects.isNull(phd)) {
|
||||
result.error500("请先选择解析文件!");
|
||||
return result;
|
||||
}
|
||||
if(curRow >= 0 && curRow < phd.getVPeak().size()) {
|
||||
result.setSuccess(true);
|
||||
result.setResult(phd.getVPeak().get(curRow).comments);
|
||||
} else {
|
||||
result.error500("请先选择要添加注释的峰!");
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Result addPeakComment(String fileName, int curRow, String comments) {
|
||||
Result result = new Result();
|
||||
Map<String, Object> map = new HashMap<>();
|
||||
Cache<String, PHDFile> phdCache = localCache.getPHDCache();
|
||||
PHDFile phd = phdCache.getIfPresent(fileName);
|
||||
if (Objects.isNull(phd)) {
|
||||
result.error500("请先选择解析文件!");
|
||||
return result;
|
||||
}
|
||||
if(curRow >= 0 && curRow < phd.getVPeak().size()) {
|
||||
phd.getVPeak().get(curRow).comments = comments;
|
||||
List<PeakInfo> vPeak = gammaFileUtil.InitPeakTable(phd.getVPeak());
|
||||
map.put("table", vPeak);
|
||||
result.setSuccess(true);
|
||||
result.setResult(map);
|
||||
} else {
|
||||
result.error500("请先选择要添加注释的峰!");
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Result viewGenralComment(String fileName) {
|
||||
Result result = new Result();
|
||||
Cache<String, PHDFile> phdCache = localCache.getPHDCache();
|
||||
PHDFile phd = phdCache.getIfPresent(fileName);
|
||||
if (Objects.isNull(phd)) {
|
||||
result.error500("请先选择解析文件!");
|
||||
return result;
|
||||
}
|
||||
result.setSuccess(true);
|
||||
result.setResult(phd.getTotalCmt());
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Result addGeneralComment(String fileName, String comments) {
|
||||
Result result = new Result();
|
||||
Cache<String, PHDFile> phdCache = localCache.getPHDCache();
|
||||
PHDFile phd = phdCache.getIfPresent(fileName);
|
||||
if (Objects.isNull(phd)) {
|
||||
result.error500("请先选择解析文件!");
|
||||
return result;
|
||||
}
|
||||
phd.setTotalCmt(comments);
|
||||
return Result.ok();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Result nuclideReview(Integer sampleId, String fileName, Double channel, HttpServletRequest request) {
|
||||
Result result = new Result();
|
||||
|
@ -516,9 +769,9 @@ public class GammaServiceImpl implements IGammaService {
|
|||
//查询当前用户关联的核素信息
|
||||
List<String> nuclides = new LinkedList<>();
|
||||
//从postgreSql中获取当前用户关注的核素信息 如果当前用户没有 则返回管理员的
|
||||
nuclides = defaultNuclideSpectrumService.findNuclidesByUserName(userName, phd.getHeader().getSystem_type());
|
||||
nuclides = defaultNuclideSpectrumService.findNuclidesByUserName(userName, phd.getHeader().getSystem_type().toUpperCase());
|
||||
if (CollectionUtils.isEmpty(nuclides)){
|
||||
nuclides = defaultNuclideSpectrumService.findNuclidesByUserName("admin", phd.getHeader().getSystem_type());
|
||||
nuclides = defaultNuclideSpectrumService.findNuclidesByUserName("admin", phd.getHeader().getSystem_type().toUpperCase());
|
||||
}
|
||||
List<String> nuclideList = gammaFileUtil.InitList(bigDecimal.doubleValue(), 0.5, nuclides);
|
||||
if(CollectionUtils.isNotEmpty(nuclideList)) {
|
||||
|
@ -572,9 +825,9 @@ public class GammaServiceImpl implements IGammaService {
|
|||
//查询当前用户关联的核素信息
|
||||
List<String> nuclides = new LinkedList<>();
|
||||
//从postgreSql中获取当前用户关注的核素信息 如果当前用户没有 则返回管理员的
|
||||
nuclides = defaultNuclideSpectrumService.findNuclidesByUserName(userName, phd.getHeader().getSystem_type());
|
||||
nuclides = defaultNuclideSpectrumService.findNuclidesByUserName(userName, phd.getHeader().getSystem_type().toUpperCase());
|
||||
if (CollectionUtils.isEmpty(nuclides)){
|
||||
nuclides = defaultNuclideSpectrumService.findNuclidesByUserName("admin", phd.getHeader().getSystem_type());
|
||||
nuclides = defaultNuclideSpectrumService.findNuclidesByUserName("admin", phd.getHeader().getSystem_type().toUpperCase());
|
||||
}
|
||||
List<String> nuclideList = gammaFileUtil.InitList(bigDecimal.doubleValue(), tolerance, nuclides);
|
||||
if(CollectionUtils.isNotEmpty(nuclideList)) {
|
||||
|
@ -616,6 +869,7 @@ public class GammaServiceImpl implements IGammaService {
|
|||
@Override
|
||||
public Result ZeroTimeAnalyse(String nuclide1, String nuclide2, Double product1, Double product2, String target, String energyTFH, String date, String time) {
|
||||
Result result = new Result();
|
||||
Map<String, Object> map = new HashMap<>();
|
||||
//建立数据库连接
|
||||
Connection conn = dbUtil.openDB();
|
||||
Statement statement = null;
|
||||
|
@ -632,8 +886,8 @@ public class GammaServiceImpl implements IGammaService {
|
|||
return result;
|
||||
}
|
||||
//根据核素名称查询halflife值
|
||||
double halflife1 = fission_changed(nuclide1);
|
||||
double halflife2 = fission_changed(nuclide2);
|
||||
long halflife1 = fission_changed(nuclide1);
|
||||
long halflife2 = fission_changed(nuclide2);
|
||||
//读取数据库获取计算值
|
||||
double FY_N1_C = 0, FY_N1_I = 0, FY_N2_C = 0, FY_N2_I = 0;
|
||||
String N1_A = "", N2_A = "";
|
||||
|
@ -663,33 +917,36 @@ public class GammaServiceImpl implements IGammaService {
|
|||
while(query4.next()){
|
||||
FY_N2_I = query4.getDouble("FY");
|
||||
}
|
||||
map.put("FY_N1_C", FY_N1_C);
|
||||
map.put("FY_N2_C", FY_N2_C);
|
||||
map.put("FY_N1_I", FY_N1_I);
|
||||
map.put("FY_N2_I", FY_N2_I);
|
||||
map.put("halflife1", halflife1);
|
||||
map.put("halflife2", halflife2);
|
||||
if(FY_N1_C > 0 && FY_N2_C > 0 && halflife1 != halflife2) {
|
||||
// Zero Time of the Nuclides
|
||||
double lamada_n1 = Math.log(2) / halflife1;
|
||||
double lamada_n2 = Math.log(2) / halflife2;
|
||||
double temp = 1.0 / (lamada_n2 - lamada_n1);
|
||||
double t;
|
||||
if(N1_A == N2_A)
|
||||
{
|
||||
if(N1_A == N2_A) {
|
||||
t = -temp * Math.log(((1/lamada_n2)*(active2/active1)-temp) / ((FY_N2_I/(FY_N1_C*lamada_n1))-temp));
|
||||
}
|
||||
else
|
||||
{
|
||||
} else {
|
||||
t = temp * Math.log((active1/active2) * (FY_N2_C/FY_N1_C) * (lamada_n2/lamada_n1));
|
||||
}
|
||||
|
||||
// date of Zero Time
|
||||
Date datetime = DateUtils.parseDate(date + " " + time, "yyyy-MM-dd HH:mm:ss");
|
||||
Date oriDate = DateUtils.parseDate("1970-01-01 00:00:00", "yyyy-MM-dd HH:mm:ss");
|
||||
double second = (datetime.getTime()/1000 - oriDate.getTime()/1000) - t - 8*60*60 + 9;
|
||||
double second = (datetime.getTime()/1000 - oriDate.getTime()/1000) - t - 8*60*60;
|
||||
|
||||
Date resultDateTime = DateUtils.getDate((long)second*1000);
|
||||
|
||||
String zerotime = DateUtils.formatDate(resultDateTime, "yyyy-MM-dd HH:mm:ss");
|
||||
result.setResult(zerotime);
|
||||
} else{
|
||||
result.setResult("");
|
||||
|
||||
map.put("zeroTime", zerotime);
|
||||
}
|
||||
result.setResult(map);
|
||||
} catch (SQLException e) {
|
||||
throw new RuntimeException(e);
|
||||
} catch (ParseException e) {
|
||||
|
@ -701,8 +958,8 @@ public class GammaServiceImpl implements IGammaService {
|
|||
return result;
|
||||
}
|
||||
|
||||
public double fission_changed(String name){
|
||||
double halflife = 0;
|
||||
public long fission_changed(String name) {
|
||||
long halflife = 0;
|
||||
//建立数据库连接
|
||||
Connection conn = dbUtil.openDB();
|
||||
Statement statement = null;
|
||||
|
@ -712,7 +969,7 @@ public class GammaServiceImpl implements IGammaService {
|
|||
String sql = "SELECT HALFLIFE FROM libdata WHERE NAME = '"+name+"'";
|
||||
ResultSet rs = statement.executeQuery(sql);
|
||||
while (rs.next()){
|
||||
halflife = 24 * 60 * 60 * rs.getDouble("HALFLIFE");
|
||||
halflife = (long) (24 * 60 * 60 * rs.getDouble("HALFLIFE"));
|
||||
}
|
||||
} catch (SQLException e) {
|
||||
throw new RuntimeException(e);
|
||||
|
@ -839,7 +1096,6 @@ public class GammaServiceImpl implements IGammaService {
|
|||
public Result changeDataEnergy(List<Double> m_vCurCentroid, List<Double> m_vCurEnergy, List<Double> m_vCurUncert, ParameterInfo m_curParam, Integer sampleId, String fileName) {
|
||||
Result result = new Result();
|
||||
Map<String, Object> map = new HashMap<>();
|
||||
System.loadLibrary("GammaAnaly");
|
||||
Cache<String, PHDFile> phdCache = localCache.getPHDCache();
|
||||
PHDFile phd = phdCache.getIfPresent(fileName);
|
||||
if (Objects.isNull(phd)){
|
||||
|
@ -852,7 +1108,7 @@ public class GammaServiceImpl implements IGammaService {
|
|||
return result;
|
||||
}
|
||||
|
||||
private void DataChangeEnergy(List<Double> m_vCurCentroid, List<Double> m_vCurEnergy, List<Double> m_vCurUncert, ParameterInfo m_curParam, PHDFile phd, Map<String, Object> map){
|
||||
private void DataChangeEnergy(List<Double> m_vCurCentroid, List<Double> m_vCurEnergy, List<Double> m_vCurUncert, ParameterInfo m_curParam, PHDFile phd, Map<String, Object> map) {
|
||||
System.loadLibrary("GammaAnaly");
|
||||
if(m_vCurEnergy.size() < 1) {
|
||||
return;
|
||||
|
@ -986,7 +1242,7 @@ public class GammaServiceImpl implements IGammaService {
|
|||
return result;
|
||||
}
|
||||
|
||||
public void DataChangeResolution(List<Double> m_vCurReso, List<Double> m_vCurEnergy, List<Double> m_vCurUncert, ParameterInfo m_curParam, PHDFile phd, Map<String, Object> map){
|
||||
public void DataChangeResolution(List<Double> m_vCurReso, List<Double> m_vCurEnergy, List<Double> m_vCurUncert, ParameterInfo m_curParam, PHDFile phd, Map<String, Object> map) {
|
||||
System.loadLibrary("GammaAnaly");
|
||||
m_curParam.setP(CalValuesHandler.calFitPara("Cal_Resolution", 4, m_vCurEnergy, m_vCurReso, m_vCurUncert));
|
||||
map.put("uncert", m_vCurUncert);
|
||||
|
@ -1004,8 +1260,7 @@ public class GammaServiceImpl implements IGammaService {
|
|||
resolutionData.setDelta(String.format("%.3f", delta));
|
||||
resolutionDataList.add(resolutionData);
|
||||
}
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
for(int i=0; i<m_vCurEnergy.size(); ++i) {
|
||||
ResolutionData resolutionData = new ResolutionData();
|
||||
resolutionData.setEnergy(String.format("%.3f", m_vCurEnergy.get(i)));
|
||||
|
@ -1119,7 +1374,7 @@ public class GammaServiceImpl implements IGammaService {
|
|||
return result;
|
||||
}
|
||||
|
||||
public void DataChangeEfficiency(List<Double> m_vCurEffi, List<Double> m_vCurEnergy, List<Double> m_vCurUncert, ParameterInfo m_curParam, Integer funcId, PHDFile phd, Map<String, Object> map){
|
||||
public void DataChangeEfficiency(List<Double> m_vCurEffi, List<Double> m_vCurEnergy, List<Double> m_vCurUncert, ParameterInfo m_curParam, Integer funcId, PHDFile phd, Map<String, Object> map) {
|
||||
System.loadLibrary("GammaAnaly");
|
||||
m_curParam.setP(CalValuesHandler.calFitPara("Cal_Efficiency", funcId, m_vCurEnergy, m_vCurEffi, m_vCurUncert));
|
||||
map.put("uncert", m_vCurUncert);
|
||||
|
@ -1200,9 +1455,9 @@ public class GammaServiceImpl implements IGammaService {
|
|||
double max = editEnergyDou + err;
|
||||
if (libraryName.equals("UserLibrary")){
|
||||
//从postgreSql中获取当前用户关注的核素信息 如果当前用户没有 则返回管理员的
|
||||
nuclides = defaultNuclideSpectrumService.findNuclidesByUserName(userName, phd.getHeader().getSystem_type());
|
||||
nuclides = defaultNuclideSpectrumService.findNuclidesByUserName(userName, phd.getHeader().getSystem_type().toUpperCase());
|
||||
if (CollectionUtils.isEmpty(nuclides)){
|
||||
nuclides = defaultNuclideSpectrumService.findNuclidesByUserName("admin", phd.getHeader().getSystem_type());
|
||||
nuclides = defaultNuclideSpectrumService.findNuclidesByUserName("admin", phd.getHeader().getSystem_type().toUpperCase());
|
||||
}
|
||||
nuclides = spectrumAnalysisMapper.getUserNuclideNames(nuclides, min, max);
|
||||
}else if (libraryName.equals("FULLLibrary")){
|
||||
|
@ -1210,12 +1465,12 @@ public class GammaServiceImpl implements IGammaService {
|
|||
}else if (libraryName.equals("RelevantLibrary")){
|
||||
nuclides = spectrumAnalysisMapper.getRelevantNuclideNames(min, max);
|
||||
}
|
||||
}else {
|
||||
} else {
|
||||
if(libraryName.equals("UserLibrary")) {
|
||||
//从postgreSql中获取当前用户关注的核素信息 如果当前用户没有 则返回管理员的
|
||||
nuclides = defaultNuclideSpectrumService.findNuclidesByUserName(userName, phd.getHeader().getSystem_type());
|
||||
nuclides = defaultNuclideSpectrumService.findNuclidesByUserName(userName, phd.getHeader().getSystem_type().toUpperCase());
|
||||
if (CollectionUtils.isEmpty(nuclides)){
|
||||
nuclides = defaultNuclideSpectrumService.findNuclidesByUserName("admin", phd.getHeader().getSystem_type());
|
||||
nuclides = defaultNuclideSpectrumService.findNuclidesByUserName("admin", phd.getHeader().getSystem_type().toUpperCase());
|
||||
}
|
||||
} else if (libraryName.equals("FULLLibrary")){
|
||||
nuclides = spectrumAnalysisMapper.getNuclideNames("CONFIGURATION.GARDS_NUCL_LIB");
|
||||
|
@ -1250,9 +1505,9 @@ public class GammaServiceImpl implements IGammaService {
|
|||
return result;
|
||||
}
|
||||
List<String> nuclides = spectrumAnalysisMapper.getNuclideNames("CONFIGURATION.GARDS_NUCL_LIB");
|
||||
List<String> userNuclides = defaultNuclideSpectrumService.findNuclidesByUserName(userName, phd.getHeader().getSystem_type());
|
||||
List<String> userNuclides = defaultNuclideSpectrumService.findNuclidesByUserName(userName, phd.getHeader().getSystem_type().toUpperCase());
|
||||
if (CollectionUtils.isEmpty(userNuclides)){
|
||||
userNuclides = defaultNuclideSpectrumService.findNuclidesByUserName("admin", phd.getHeader().getSystem_type());
|
||||
userNuclides = defaultNuclideSpectrumService.findNuclidesByUserName("admin", phd.getHeader().getSystem_type().toUpperCase());
|
||||
}
|
||||
map.put("AllNuclides", nuclides);
|
||||
map.put("UserNuclides", userNuclides);
|
||||
|
@ -1261,6 +1516,30 @@ public class GammaServiceImpl implements IGammaService {
|
|||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Result saveUserLibrary(List<String> userLibraryName, String fileName, HttpServletRequest request) {
|
||||
Result result = new Result();
|
||||
String userName = JwtUtil.getUserNameByToken(request);
|
||||
Cache<String, PHDFile> phdCache = localCache.getPHDCache();
|
||||
PHDFile phd = phdCache.getIfPresent(fileName);
|
||||
if (Objects.isNull(phd)){
|
||||
result.error500("请先选择解析文件!");
|
||||
return result;
|
||||
}
|
||||
if (CollectionUtils.isEmpty(userLibraryName)) {
|
||||
result.error500("The user custom nuclide library can't be null!");
|
||||
return result;
|
||||
}
|
||||
userLibraryName = userLibraryName.stream().distinct().collect(Collectors.toList());
|
||||
boolean save = defaultNuclideSpectrumService.saveNuclidesByUserName(userLibraryName, userName, phd.getHeader().getSystem_type().toUpperCase());
|
||||
if (save) {
|
||||
result.success("修改成功!");
|
||||
} else {
|
||||
result.success("修改失败!");
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Result viewComment(Integer sampleId, String fileName) {
|
||||
Result result = new Result();
|
||||
|
@ -1468,7 +1747,7 @@ public class GammaServiceImpl implements IGammaService {
|
|||
|
||||
map.put("detectorId", phd.getHeader().getDetector_code());
|
||||
map.put("sampleGeometry", phd.getHeader().getSample_geometry());
|
||||
map.put("sampleType", phd.getHeader().getSystem_type());
|
||||
map.put("sampleType", phd.getHeader().getSystem_type().toUpperCase());
|
||||
map.put("samplingTime", String.format("%.2f", Sampling_Time)+" hours");
|
||||
map.put("decayTime", String.format("%.2f", Decay_Time)+" hours");
|
||||
map.put("acquisitionTime", String.format("%.2f", phd.getAcq().getAcquisition_real_time() / 3600.0)+" hours");
|
||||
|
|
|
@ -41,6 +41,7 @@ import javax.servlet.http.HttpServletResponse;
|
|||
import java.io.*;
|
||||
import java.text.ParseException;
|
||||
import java.util.*;
|
||||
import java.util.regex.Pattern;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
@Service("spectrumAnalysisService")
|
||||
|
@ -223,51 +224,86 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Result getFilesBySampleFile(String sampleFileName, HttpServletRequest request) {
|
||||
public Result getFilesBySampleFile(String fileName, HttpServletRequest request) {
|
||||
Result result = new Result();
|
||||
List<Map<String, Object>> resultList = new LinkedList<>();
|
||||
String userName = JwtUtil.getUserNameByToken(request);
|
||||
String filePath = StringPool.SLASH + spectrumPathProperties.getUploadPath() + StringPool.SLASH +userName;
|
||||
//连接ftp 获取ftp文件数据
|
||||
FTPClient ftpClient = ftpUtil.LoginFTP();
|
||||
if (Objects.isNull(ftpClient)){
|
||||
result.error500("ftp连接失败");
|
||||
return result;
|
||||
String sampleRx = "[a-zA-Z]{3}[0-9]{2}_[0-9]{3}-[0-9]{8}_[0-9]{4}_S_(FULL_|PREL_)\\d+\\.PHD";
|
||||
Pattern regexPattern = Pattern.compile(sampleRx);
|
||||
String sampleRx1 = "[a-zA-Z]{3}[0-9]{2}_[0-9]{3}-[0-9]{8}_[0-9]{4}_S_(FULL_|PREL_)\\d+\\.\\d+\\.PHD";
|
||||
Pattern regexPattern1 = Pattern.compile(sampleRx1);
|
||||
//判断传递的文件名称是否包含,
|
||||
List<String> matchFileNames = new LinkedList<>();
|
||||
if (fileName.contains(StringPool.COMMA)) {
|
||||
matchFileNames = Arrays.asList(fileName.split(StringPool.COMMA));
|
||||
} else {
|
||||
matchFileNames.add(fileName);
|
||||
}
|
||||
InputStream inputStream = null;
|
||||
try {
|
||||
//切换被动模式
|
||||
ftpClient.enterLocalPassiveMode();
|
||||
ftpClient.setFileType(FTPClient.BINARY_FILE_TYPE);
|
||||
// 设置编码,当文件中存在中文且上传后文件乱码时可使用此配置项
|
||||
ftpClient.setControlEncoding("UTF-8");
|
||||
ftpClient.setFileTransferMode(FTPClient.STREAM_TRANSFER_MODE);
|
||||
//切换工作路径
|
||||
ftpClient.changeWorkingDirectory(filePath);
|
||||
//获取路径下所有的ftp文件信息
|
||||
List<FTPFile> ftpFiles = Arrays.asList(ftpClient.listFiles());
|
||||
//获取sampleFileName
|
||||
List<FTPFile> sampleFiles = ftpFiles.stream().filter(item -> item.getName().equals(sampleFileName)).collect(Collectors.toList());
|
||||
FTPFile sampleFile = sampleFiles.get(0);
|
||||
//解析sampleFile
|
||||
inputStream = ftpClient.retrieveFileStream(sampleFile.getName());
|
||||
//声明一个临时文件
|
||||
File file = File.createTempFile("tmp", null);
|
||||
//将ftp文件的输入流复制给临时文件
|
||||
FileUtils.copyInputStreamToFile(inputStream, file);
|
||||
|
||||
phdFileUtil.getFileData(file.getAbsolutePath());
|
||||
} catch (IOException e) {
|
||||
throw new RuntimeException(e);
|
||||
} finally {
|
||||
try {
|
||||
ftpClient.disconnect();
|
||||
inputStream.close();
|
||||
} catch (IOException e) {
|
||||
throw new RuntimeException(e);
|
||||
if (CollectionUtils.isNotEmpty(matchFileNames)) {
|
||||
for (String matchFileName :matchFileNames) {
|
||||
Map<String, Object> map =new HashMap<>();
|
||||
//判断sample文件名称是否匹配正则表达式 如果满足 则查询出对应的文件信息
|
||||
if ( regexPattern.matcher(matchFileName).find() || regexPattern1.matcher(matchFileName).find() ){
|
||||
//查询sampleFile文件内容信息 获取文件内容 获取大致的gas det文件名称
|
||||
Map<String, String> fileData = phdFileUtil.getFileData(filePath, matchFileName);
|
||||
if (CollectionUtils.isEmpty(fileData)) {
|
||||
result.error500("ftp文件查询失败");
|
||||
return result;
|
||||
}
|
||||
String sampleFileName = fileData.get("measurementName");
|
||||
String sampleSystemType = fileData.get("sampleSystemType");
|
||||
if (sampleSystemType.equals(SystemType.BETA.getType())) {
|
||||
//加载并获取当前路径下所有的文件名称并进行名称格式化 仅需要格式化和sample文件同一个台站 名称格式化为最终名称
|
||||
List<String> fileNames = phdFileUtil.FileNameByStandardForm(filePath, sampleFileName);
|
||||
//匹配获取QCFile
|
||||
boolean qcStatus = true;
|
||||
String qcphd = phdFileUtil.GetQCPHD(sampleFileName, fileNames);
|
||||
if (StringUtils.isBlank(qcphd)) {
|
||||
qcphd = sampleFileName.substring(0, 23)+"_Q.PHD";
|
||||
qcStatus = false;
|
||||
}
|
||||
//匹配gasFile
|
||||
boolean gasStatus = false;
|
||||
String gasFileName = fileData.get("gasFileName");
|
||||
String gasphd = phdFileUtil.GetMatchFile(gasFileName, fileNames, DataTypeAbbr.GASBKPHD.getType());
|
||||
//如果匹配到的文件名称不为空
|
||||
if (StringUtils.isNotBlank(gasphd)) {
|
||||
gasFileName = gasphd;
|
||||
gasStatus = true;
|
||||
}
|
||||
//匹配detFile
|
||||
boolean detStatus = false;
|
||||
String detaFileName = fileData.get("detaFileName");
|
||||
String detphd = phdFileUtil.GetMatchFile(detaFileName, fileNames, DataTypeAbbr.DETBKPHD.getType());
|
||||
if (StringUtils.isNotBlank(detphd)) {
|
||||
detaFileName = detphd;
|
||||
detStatus = true;
|
||||
}
|
||||
map.put("sampleFileName", sampleFileName);
|
||||
map.put("gasFileName", gasFileName);
|
||||
map.put("gasFileStatus", gasStatus);
|
||||
map.put("detFileName", detaFileName);
|
||||
map.put("detFileStatus", detStatus);
|
||||
map.put("qcFileName", qcphd);
|
||||
map.put("qcFileStatus", qcStatus);
|
||||
map.put("sampleSystemType", sampleSystemType);
|
||||
} else {
|
||||
map.put("sampleFileName", sampleFileName);
|
||||
map.put("gasFileName", "");
|
||||
map.put("detFileName", "");
|
||||
map.put("qcFileName", "");
|
||||
map.put("sampleSystemType", sampleSystemType);
|
||||
}
|
||||
}
|
||||
if (CollectionUtils.isNotEmpty(map)) {
|
||||
resultList.add(map);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
result.setSuccess(true);
|
||||
result.setResult(resultList);
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -300,7 +336,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService {
|
|||
String sampleFilePath = dbSpectrumFilePath.getSampleFilePath();
|
||||
filePath.add(sampleFilePath);
|
||||
GardsSampleData sample = spectrumAnalysisMapper.findSampleByFilePath(sampleFilePath);
|
||||
String pathName = sampleFilePath.substring(0, sampleFilePath.lastIndexOf(StringPool.SLASH));
|
||||
String pathName = StringPool.SLASH + spectrumPathProperties.getRootPath() + StringPool.SLASH + sampleFilePath.substring(0, sampleFilePath.lastIndexOf(StringPool.SLASH));
|
||||
String fileName = sampleFilePath.substring(sampleFilePath.lastIndexOf(StringPool.SLASH)+1);
|
||||
sampleMap = this.fenxi(pathName, fileName, xeDataList, sample.getSampleId(), sample.getStatus());
|
||||
resultMap.put("sample",sampleMap);
|
||||
|
@ -309,7 +345,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService {
|
|||
String gasBgFilePath = dbSpectrumFilePath.getGasBgFilePath();
|
||||
filePath.add(gasBgFilePath);
|
||||
GardsSampleData gasBg = spectrumAnalysisMapper.findSampleByFilePath(gasBgFilePath);
|
||||
String pathName = gasBgFilePath.substring(0, gasBgFilePath.lastIndexOf(StringPool.SLASH));
|
||||
String pathName = StringPool.SLASH + spectrumPathProperties.getRootPath() + StringPool.SLASH + gasBgFilePath.substring(0, gasBgFilePath.lastIndexOf(StringPool.SLASH));
|
||||
String fileName = gasBgFilePath.substring(gasBgFilePath.lastIndexOf(StringPool.SLASH)+1);
|
||||
gasBgMap = this.fenxi(pathName, fileName, xeDataList, gasBg.getSampleId(), gasBg.getStatus());
|
||||
resultMap.put("gasBg",gasBgMap);
|
||||
|
@ -318,7 +354,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService {
|
|||
String detBgFilePath = dbSpectrumFilePath.getDetBgFilePath();
|
||||
filePath.add(detBgFilePath);
|
||||
GardsSampleData detBg = spectrumAnalysisMapper.findSampleByFilePath(detBgFilePath);
|
||||
String pathName = detBgFilePath.substring(0, detBgFilePath.lastIndexOf(StringPool.SLASH));
|
||||
String pathName = StringPool.SLASH + spectrumPathProperties.getRootPath() + StringPool.SLASH + detBgFilePath.substring(0, detBgFilePath.lastIndexOf(StringPool.SLASH));
|
||||
String fileName = detBgFilePath.substring(detBgFilePath.lastIndexOf(StringPool.SLASH)+1);
|
||||
detBgMap = this.fenxi(pathName, fileName, xeDataList, detBg.getSampleId(), detBg.getStatus());
|
||||
resultMap.put("detBg",detBgMap);
|
||||
|
@ -329,7 +365,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService {
|
|||
dbSpectrumFilePath.setQcFilePath(dbQcFilePath);
|
||||
filePath.add(dbQcFilePath);
|
||||
GardsSampleData qc = spectrumAnalysisMapper.findSampleByFilePath(dbQcFilePath);
|
||||
String pathName = dbQcFilePath.substring(0, dbQcFilePath.lastIndexOf(StringPool.SLASH));
|
||||
String pathName = StringPool.SLASH + spectrumPathProperties.getRootPath() + StringPool.SLASH + dbQcFilePath.substring(0, dbQcFilePath.lastIndexOf(StringPool.SLASH));
|
||||
String fileName = dbQcFilePath.substring(dbQcFilePath.lastIndexOf(StringPool.SLASH)+1);
|
||||
qcMap = this.fenxi(pathName, fileName, xeDataList, qc.getSampleId(), qc.getStatus());
|
||||
resultMap.put("qc",qcMap);
|
||||
|
@ -377,6 +413,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService {
|
|||
resultMap.put("qc",qcMap);
|
||||
}
|
||||
phdFileUtil.getLightColor(sampleMap,gasBgMap,detBgMap,qcMap);
|
||||
resultMap.put("savedAnalysisResult", false);
|
||||
result.setSuccess(true);
|
||||
result.setResult(resultMap);
|
||||
return result;
|
||||
|
@ -396,12 +433,11 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService {
|
|||
// 设置编码,当文件中存在中文且上传后文件乱码时可使用此配置项
|
||||
ftpClient.setControlEncoding("UTF-8");
|
||||
ftpClient.setFileTransferMode(FTPClient.STREAM_TRANSFER_MODE);
|
||||
pathName=StringPool.SLASH + spectrumPathProperties.getRootPath() + StringPool.SLASH + pathName;
|
||||
ftpClient.changeWorkingDirectory(pathName);
|
||||
List<FTPFile> ftpFiles = Arrays.asList(ftpClient.listFiles());
|
||||
ftpFiles=ftpFiles.stream().filter(item -> item.getName().equals(fileName)).collect(Collectors.toList());
|
||||
if (CollectionUtils.isEmpty(ftpFiles)){
|
||||
throw new RuntimeException("ftp下对应文件不存在");
|
||||
throw new RuntimeException("ftp查询文件内容失败");
|
||||
}
|
||||
FTPFile ftpFile = ftpFiles.get(0);
|
||||
if (Objects.nonNull(ftpFile)){
|
||||
|
@ -575,10 +611,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Result viewRRR(String dbName, Integer sampleId,
|
||||
boolean sampleData, boolean GasBgData,
|
||||
boolean DetBgData, boolean QCData,
|
||||
boolean bGammaEnergyValid, boolean bBetaEnergyValid) {
|
||||
public Result viewRRR(String dbName, Integer sampleId, boolean sampleData, boolean GasBgData, boolean DetBgData, boolean QCData, boolean bGammaEnergyValid, boolean bBetaEnergyValid) {
|
||||
Result result = new Result();
|
||||
//GetAnalysisID sample_id
|
||||
if (dbName.equalsIgnoreCase("auto")){
|
||||
|
@ -912,7 +945,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService {
|
|||
strBuffer.append("\n");
|
||||
|
||||
result.setSuccess(true);
|
||||
result.setResult(strBuffer);
|
||||
result.setResult(strBuffer.toString());
|
||||
return result;
|
||||
}
|
||||
|
||||
|
@ -1033,7 +1066,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService {
|
|||
inputStream.close();
|
||||
}
|
||||
result.setSuccess(true);
|
||||
result.setResult(strBuffer);
|
||||
result.setResult(strBuffer.toString());
|
||||
}
|
||||
} catch (IOException e) {
|
||||
throw new RuntimeException(e);
|
||||
|
@ -1339,17 +1372,14 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService {
|
|||
//加载dll工具库
|
||||
System.loadLibrary("ReadPHDFile");
|
||||
EnergySpectrumStruct struct = EnergySpectrumHandler.getSourceData(file.getAbsolutePath());
|
||||
|
||||
rlrDataValues.setSrid(struct.sample_ref_id);
|
||||
rlrDataValues.setColloct_start_date(struct.collection_start_date);
|
||||
rlrDataValues.setColloct_start_time(struct.collection_start_time);
|
||||
rlrDataValues.setColloct_stop_date(struct.collection_stop_date);
|
||||
rlrDataValues.setColloct_stop_time(struct.collection_stop_time);
|
||||
|
||||
rlrDataValues.setAcq_start_date(struct.acquisition_start_date);
|
||||
rlrDataValues.setAcq_start_time(struct.acquisition_start_time);
|
||||
rlrDataValues.setAcq_live_time(String.valueOf(struct.acquisition_live_time));
|
||||
|
||||
if (CollectionUtils.isNotEmpty(xeDataList)){
|
||||
List<GardsXeResultsSpectrum> xe131mDataList = xeDataList.stream().filter(item -> item.getNuclideName().equals("Xe131m")).collect(Collectors.toList());
|
||||
GardsXeResultsSpectrum xe131m = xe131mDataList.get(0);
|
||||
|
@ -1802,7 +1832,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService {
|
|||
return result;
|
||||
}
|
||||
|
||||
private List<Long> handleHistogram(List<Long> hcounts, long bChannels, long gChannels, String type){
|
||||
private List<Long> handleHistogram(List<Long> hcounts, long bChannels, long gChannels, String type) {
|
||||
List<Long> projected_data_value = new LinkedList<>();
|
||||
if (type.equals("Vertical")){
|
||||
for (int i=0; i<gChannels; i++) {
|
||||
|
@ -2491,16 +2521,20 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService {
|
|||
@Override
|
||||
public Result analyseCurrentSpectrum(String dbName, Integer sampleId, String sampleFileName, String gasFileName, String detFileName, HttpServletRequest request) {
|
||||
Result result = new Result();
|
||||
Map<String, Object> map = new HashMap<>();
|
||||
//声明一个实体类获取数据库中文件路径
|
||||
SpectrumFileRecord dbSpectrumFilePath = new SpectrumFileRecord();
|
||||
//如果sampleId不为空 说明数据来源数据库 查询出对应的文件路径
|
||||
if (Objects.nonNull(sampleId)){
|
||||
if (Objects.nonNull(sampleId) && StringUtils.isNotBlank(dbName)){
|
||||
if (dbName.equalsIgnoreCase("auto")){
|
||||
dbName = "RNAUTO.GARDS_ANALYSES";
|
||||
}else if (dbName.equalsIgnoreCase("man")){
|
||||
} else if (dbName.equalsIgnoreCase("man")){
|
||||
dbName = "RNMAN.GARDS_ANALYSES";
|
||||
}
|
||||
dbSpectrumFilePath = spectrumAnalysisMapper.getDBSpectrumFilePath(dbName, sampleId);
|
||||
} else {
|
||||
result.error500("Data load From DB need to pass in sampleId and dbName");
|
||||
return result;
|
||||
}
|
||||
//拼接ftp上传临时文件路径
|
||||
String path = StringPool.SLASH + spectrumPathProperties.getUploadPath() + StringPool.SLASH +JwtUtil.getUserNameByToken(request);
|
||||
|
@ -2516,13 +2550,49 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService {
|
|||
File detTmp = this.analyze(detPath, detFileName);
|
||||
//调用动态库解析文件
|
||||
BgAnalyseResult bgAnalyseResult = EnergySpectrumHandler.bgAnalyse(sampleTmp.getAbsolutePath(), gasTmp.getAbsolutePath(), detTmp.getAbsolutePath());
|
||||
|
||||
BgDataAnlyseResultIn resultIn = new BgDataAnlyseResultIn();
|
||||
resultIn.setXe131m_con(bgAnalyseResult.Xe131m_con);
|
||||
resultIn.setXe131m_uncer(bgAnalyseResult.Xe131m_uncer);
|
||||
resultIn.setMDC_Xe131m(bgAnalyseResult.MDC_Xe131m);
|
||||
resultIn.setLC_Xe131m(bgAnalyseResult.LC_Xe131m);
|
||||
resultIn.setXe131mFlag(bgAnalyseResult.XE_131m_NID_FLAG);
|
||||
resultIn.setXe133_con(bgAnalyseResult.Xe133_con);
|
||||
resultIn.setXe133_uncer(bgAnalyseResult.Xe133_uncer);
|
||||
resultIn.setMDC_Xe133(bgAnalyseResult.MDC_Xe133);
|
||||
resultIn.setLC_Xe133(bgAnalyseResult.LC_Xe133);
|
||||
resultIn.setXe133Flag(bgAnalyseResult.XE_133_NID_FLAG);
|
||||
resultIn.setXe133m_con(bgAnalyseResult.Xe133m_con);
|
||||
resultIn.setXe133m_uncer(bgAnalyseResult.Xe133m_uncer);
|
||||
resultIn.setMDC_Xe133m(bgAnalyseResult.MDC_Xe133m);
|
||||
resultIn.setLC_Xe133m(bgAnalyseResult.LC_Xe133m);
|
||||
resultIn.setXe133mFlag(bgAnalyseResult.XE_133m_NID_FLAG);
|
||||
resultIn.setXe135_con(bgAnalyseResult.Xe135_con);
|
||||
resultIn.setXe135_uncer(bgAnalyseResult.Xe135_uncer);
|
||||
resultIn.setMDC_Xe135(bgAnalyseResult.MDC_Xe135);
|
||||
resultIn.setLC_Xe135(bgAnalyseResult.LC_Xe135);
|
||||
resultIn.setXe135Flag(bgAnalyseResult.XE_135_NID_FLAG);
|
||||
List<GardsXeResultsSpectrum> xeDataList = getXeResults(resultIn, null, null);
|
||||
if (CollectionUtils.isNotEmpty(xeDataList)){
|
||||
for (GardsXeResultsSpectrum xeData:xeDataList) {
|
||||
Double conc = xeData.getConc();
|
||||
Double mdc = xeData.getMdc();
|
||||
if (conc < 0){
|
||||
xeData.setColor("red");
|
||||
} else if (0<conc && conc < mdc) {
|
||||
xeData.setColor("#ffcc30");
|
||||
} else if (conc > mdc) {
|
||||
xeData.setColor("green");
|
||||
}
|
||||
}
|
||||
map.put("XeData", xeDataList);
|
||||
}
|
||||
map.put("savedAnalysisResult", true);
|
||||
result.setSuccess(true);
|
||||
result.setResult(bgAnalyseResult);
|
||||
result.setResult(map);
|
||||
return result;
|
||||
}
|
||||
|
||||
private File analyze(String path, String fileName){
|
||||
private File analyze(String path, String fileName) {
|
||||
//连接ftp
|
||||
FTPClient ftpClient = ftpUtil.LoginFTP();
|
||||
InputStream inputStream = null;
|
||||
|
@ -2568,7 +2638,11 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Result analyseAllSpectrum() {
|
||||
public Result analyseAllSpectrum(List<String> dbNames, List<Integer> sampleIds, List<String> sampleFileNames, List<String> gasFileNames, List<String> detFileNames, HttpServletRequest request) {
|
||||
for (int i=0; i<sampleFileNames.size(); i++) {
|
||||
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
|
@ -2760,7 +2834,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService {
|
|||
}
|
||||
}
|
||||
|
||||
public List<GardsCalibrationPairsSpectrum> getCalibrationPairs(BgDataAnlyseResultIn anlyseResultIn, Integer sampleId, Integer gasId, Integer detId, Integer idAnalysis){
|
||||
public List<GardsCalibrationPairsSpectrum> getCalibrationPairs(BgDataAnlyseResultIn anlyseResultIn, Integer sampleId, Integer gasId, Integer detId, Integer idAnalysis) {
|
||||
List<GardsCalibrationPairsSpectrum> calibrationPairsList = new LinkedList<>();
|
||||
for (int i=0; i< anlyseResultIn.getB_channel_sample().size(); i++){
|
||||
GardsCalibrationPairsSpectrum calibrationPairs = new GardsCalibrationPairsSpectrum();
|
||||
|
@ -2841,7 +2915,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService {
|
|||
return calibrationPairsList;
|
||||
}
|
||||
|
||||
public void getROILimit(BgDataAnlyseResultIn anlyseResultIn){
|
||||
public void getROILimit(BgDataAnlyseResultIn anlyseResultIn) {
|
||||
//获取ftp文件路径下临时文件
|
||||
File sampleTmp = this.analyze(anlyseResultIn.getSampleFilePath(), anlyseResultIn.getSampleFileName());
|
||||
System.loadLibrary("ReadPHDFile");
|
||||
|
@ -2903,7 +2977,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService {
|
|||
|
||||
}
|
||||
|
||||
public List<GardsCalibrationSpectrum> getCalibration(BgDataAnlyseResultIn anlyseResultIn, Integer sampleId, Integer gasId, Integer detId, Integer idAnalysis){
|
||||
public List<GardsCalibrationSpectrum> getCalibration(BgDataAnlyseResultIn anlyseResultIn, Integer sampleId, Integer gasId, Integer detId, Integer idAnalysis) {
|
||||
List<GardsCalibrationSpectrum> calibrationSpectrumList = new LinkedList<>();
|
||||
if (Objects.nonNull(sampleId)){
|
||||
GardsCalibrationSpectrum calibrationB = new GardsCalibrationSpectrum();
|
||||
|
@ -2994,7 +3068,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService {
|
|||
return calibrationSpectrumList;
|
||||
}
|
||||
|
||||
public void getROIChannel(Integer sampleId, Integer idAnalysis, BgBoundary bgBoundary, List<GardsROIChannelsSpectrum> roiChannelsList){
|
||||
public void getROIChannel(Integer sampleId, Integer idAnalysis, BgBoundary bgBoundary, List<GardsROIChannelsSpectrum> roiChannelsList) {
|
||||
for (int i=0; i<bgBoundary.ROI_B_Boundary_start.size(); i++){
|
||||
GardsROIChannelsSpectrum roiChannels = new GardsROIChannelsSpectrum();
|
||||
roiChannels.setSampleId(sampleId);
|
||||
|
@ -3008,7 +3082,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService {
|
|||
}
|
||||
}
|
||||
|
||||
public List<GardsXeResultsSpectrum> getXeResults(BgDataAnlyseResultIn anlyseResultIn, Integer sampleId, Integer idAnalysis){
|
||||
public List<GardsXeResultsSpectrum> getXeResults(BgDataAnlyseResultIn anlyseResultIn, Integer sampleId, Integer idAnalysis) {
|
||||
List<GardsXeResultsSpectrum> xeResultsList = new LinkedList<>();
|
||||
//Xe131m
|
||||
GardsXeResultsSpectrum xe131m = new GardsXeResultsSpectrum();
|
||||
|
@ -3057,7 +3131,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService {
|
|||
return xeResultsList;
|
||||
}
|
||||
|
||||
public boolean OriginalDataStore(String filePath, String fileName, String filePathName, String userName){
|
||||
public boolean OriginalDataStore(String filePath, String fileName, String filePathName, String userName) {
|
||||
//连接ftp
|
||||
FTPClient ftpClient = ftpUtil.LoginFTP();
|
||||
InputStream inputStream = null;
|
||||
|
@ -3162,7 +3236,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService {
|
|||
return true;
|
||||
}
|
||||
|
||||
public List<String> getFileBlockList(File file){
|
||||
public List<String> getFileBlockList(File file) {
|
||||
List<String> readLines = new LinkedList<>();
|
||||
try {
|
||||
List<String> allLines = FileUtils.readLines(file, "UTF-8");
|
||||
|
|
|
@ -2,6 +2,7 @@ package org.jeecg.modules.service.impl;
|
|||
|
||||
import com.baomidou.dynamic.datasource.annotation.DS;
|
||||
import com.baomidou.mybatisplus.core.toolkit.CollectionUtils;
|
||||
import com.baomidou.mybatisplus.core.toolkit.StringUtils;
|
||||
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
|
||||
import org.jeecg.modules.base.entity.postgre.SysDefaultNuclide;
|
||||
import org.jeecg.modules.mapper.SysDefaultNuclideSpectrumMapper;
|
||||
|
@ -10,6 +11,7 @@ import org.springframework.stereotype.Service;
|
|||
import org.springframework.transaction.annotation.Propagation;
|
||||
import org.springframework.transaction.annotation.Transactional;
|
||||
|
||||
import java.util.LinkedList;
|
||||
import java.util.List;
|
||||
|
||||
@Service("defaultNuclideSpectrumService")
|
||||
|
@ -27,4 +29,26 @@ public class SysDefaultNuclideSpectrumServiceImpl extends ServiceImpl<SysDefault
|
|||
return nuclides;
|
||||
}
|
||||
|
||||
@Override
|
||||
@Transactional(propagation = Propagation.REQUIRES_NEW)
|
||||
public boolean saveNuclidesByUserName(List<String> userLibraryName, String userName, String systemType) {
|
||||
//删除当前用户名,当前使用类型下所有的数据
|
||||
this.baseMapper.deleteNuclidesByUserName(userName, systemType);
|
||||
//删除后重新插入本次数据
|
||||
List<SysDefaultNuclide> defaultNuclideList = new LinkedList<>();
|
||||
for (String nuclideName: userLibraryName) {
|
||||
SysDefaultNuclide nuclide = new SysDefaultNuclide();
|
||||
if (StringUtils.isBlank(nuclideName)) {
|
||||
continue;
|
||||
}
|
||||
nuclide.setNuclideName(nuclideName);
|
||||
nuclide.setUseType(3);
|
||||
nuclide.setNuclideType(systemType);
|
||||
nuclide.setCreateBy(userName);
|
||||
defaultNuclideList.add(nuclide);
|
||||
}
|
||||
boolean saveBatch = this.saveBatch(defaultNuclideList);
|
||||
return saveBatch;
|
||||
}
|
||||
|
||||
}
|
||||
|
|
Loading…
Reference in New Issue
Block a user