Merge remote-tracking branch 'origin/station' into station
commit f39dc57682
@@ -50,6 +50,10 @@ public class NumberFormatUtil {
 
     //总数字个数是6位的数
     public static String numberSixLen(String number) {
+        String value = "";
+        if (number.equalsIgnoreCase("nan")) {
+            value = number;
+        } else {
         Double preNum = null;
         String suffixNum = "";
         //判断传进来的数据是否是科学计数法的数据
@@ -75,10 +79,11 @@ public class NumberFormatUtil {
         double absNum = Math.abs(preNum);
         //获取长度
         int length = String.valueOf(absNum).substring(0, String.valueOf(absNum).indexOf(StringPool.DOT)).length();
-        String value = String.format("%." + (6 - length) + "f", preNum);
+        value = String.format("%." + (6 - length) + "f", preNum);
         if (StringUtils.isNotBlank(suffixNum)) {
             value = value + suffixNum;
         }
+        }
         return value;
     }
 
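The two hunks above make numberSixLen() return "nan" inputs untouched and otherwise format the value so that roughly six digits survive in total. A standalone sketch of that padding rule, assuming only what the hunk shows (the real method also strips scientific-notation suffixes, which is skipped here):

    public class SixLenDemo {
        // Keep six digits in total: count the integer digits, spend the rest on decimals.
        static String sixLen(double d) {
            int intLen = String.valueOf((long) Math.abs(d)).length();
            return String.format("%." + (6 - intLen) + "f", d);
        }

        public static void main(String[] args) {
            System.out.println(sixLen(3.14159265));  // 3.14159
            System.out.println(sixLen(123.4));       // 123.400
            System.out.println(sixLen(12345.6789));  // 12345.7
        }
    }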
@@ -4,10 +4,7 @@ import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
 import lombok.Data;
 
 import java.io.Serializable;
-import java.util.HashMap;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
+import java.util.*;
 
 @Data
 @JsonIgnoreProperties(ignoreUnknown = true)
@@ -17,6 +14,8 @@ public class PHDFile implements Serializable {
 
     private String xmlFilePath;
 
+    private String tmpFilePath;
+
     private boolean isValid; // 是否有效谱
 
     private boolean bAnalyed; // 记录是否被分析
@@ -215,8 +214,8 @@ public class PHDFile implements Serializable {
         calibration = new CalibrationBlock();
         sampleBlock = new SampleBlock();
         certificate = new CertificateBlock();
-        QcItems = new HashMap<>();
-        mapNucActMda = new HashMap<>();
+        QcItems = new TreeMap<>();
+        mapNucActMda = new TreeMap<>();
     }
 
 }
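Replacing HashMap with TreeMap for QcItems and mapNucActMda presumably trades raw lookup speed for a deterministic, key-sorted iteration order, which keeps serialized reports stable across runs. A generic illustration (not project code):

    import java.util.HashMap;
    import java.util.Map;
    import java.util.TreeMap;

    public class MapOrderDemo {
        public static void main(String[] args) {
            Map<String, Integer> hash = new HashMap<>();
            Map<String, Integer> tree = new TreeMap<>();
            for (String k : new String[]{"Xe133", "Be7", "Cs137"}) {
                hash.put(k, 1);
                tree.put(k, 1);
            }
            System.out.println(hash.keySet()); // order depends on hashing and may change
            System.out.println(tree.keySet()); // always [Be7, Cs137, Xe133]
        }
    }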
@@ -65,6 +65,7 @@ public class GammaFileUtil extends AbstractLogOrReport {
         phd.setFilename(fileName);
         String fromPath = pathName + StringPool.SLASH + fileName;
         File file = ftpUtil.downloadFile(fromPath, "betaGamma");
+        phd.setTmpFilePath(file.getAbsolutePath());
         if (Objects.isNull(file)) {
             result.error500("ftp file can't find");
             return false;
@@ -235,9 +236,6 @@ public class GammaFileUtil extends AbstractLogOrReport {
 
         }catch (ParseException e) {
             throw new RuntimeException(e);
-        } finally {
-            if (Objects.nonNull(file))
-                file.delete();
         }
         return true;
     }
@@ -274,9 +272,7 @@ public class GammaFileUtil extends AbstractLogOrReport {
 
             String baselineFileName = prefixName+"_"+subFileName + ".baseline";
             String fromPathBase = pathName + StringPool.SLASH + baselineFileName;
-            File file = ftpUtil.downloadFile(fromPathBase, "betaGamma");
-            inputStreamBase = new FileInputStream(file);
-            // inputStreamBase = ftpUtil.downloadFileStream(fromPathBase);
+            inputStreamBase = ftpUtil.downloadFileStream(fromPathBase);
             // 调用处理BaseCtrl的方法
             ReadBaseCtrlInfo(phd, inputStreamBase);
             // 将phdFile的BaseCtrls的BaseLine部分数据 赋值给 phdFile的vBase
@@ -504,15 +500,15 @@ public class GammaFileUtil extends AbstractLogOrReport {
         //计算得到衰减耗时
         double Decay_hour = (acq.getTime()/1000 - end.getTime()/1000) / 3600.0;
         //声明一个double数据
-        Double ener_Be7 = 0.0;
+        List<Double> Be7Value = new LinkedList<>();
         //声明一个map用于存储计算数据
         Map<String, Double> vMdcInfoMap = new HashMap<>();
         //声明一个数组存储计算数据
         List<Double> vMdcInfo = new LinkedList<>();
         //声明一个数组存储QcItems数据
-        Map<String, QcCheckItem> qcItems = new LinkedHashMap<>();
+        Map<String, QcCheckItem> qcItems = new TreeMap<>();
         //调用方法 读取文件信息 判断QC数据
-        if(!ReadQCLimit(qcItems, vMdcInfoMap, ener_Be7, phd.getHeader().getSystem_type().toUpperCase())) {
+        if(!ReadQCLimit(qcItems, vMdcInfoMap, Be7Value, phd.getHeader().getSystem_type().toUpperCase())) {
             String WARNING = "Read QC Flags from SystemManager.xml Failed!";
         }
         //判断map是否为空
@@ -555,12 +551,8 @@ public class GammaFileUtil extends AbstractLogOrReport {
         if(phd.isValid() && phd.getVBase().size() == phd.getSpec().getNum_g_channel()) {
             //判断system_type是否匹配P
             if(phd.getHeader().getSystem_type().equalsIgnoreCase("P")) {
-                //声明一个energy集合
-                List<Double> energy = new LinkedList<>();
-                //集合增加数据
-                energy.add(ener_Be7);
                 //调用算法计算
-                CalValuesOut calValuesOut = CalValuesHandler.calFcnEval(energy, phd.getUsedResoPara().getP());
+                CalValuesOut calValuesOut = CalValuesHandler.calFcnEval(Be7Value, phd.getUsedResoPara().getP());
                 //获取计算结果的counts赋值给 fwhm集合
                 List<Double> fwhm = calValuesOut.counts;
                 //获取QcItems中Be7-FWHM数据
@@ -646,22 +638,24 @@ public class GammaFileUtil extends AbstractLogOrReport {
         //获取能谱获取时间
         Date acqStart = DateUtils.parseDate(phd.getAcq().getAcquisition_start_date() + StringPool.SPACE + phd.getAcq().getAcquisition_start_time().substring(0,phd.getAcq().getAcquisition_start_time().indexOf(StringPool.DOT)), "yyyy/MM/dd HH:mm:ss");
         //计算采样时间
-        double Ts = (collectStart.getTime()/1000 - collectStop.getTime()/1000);
+        double Ts = (collectStop.getTime()/1000 - collectStart.getTime()/1000);
         //计算衰变时间
-        double Td = (collectStop.getTime()/1000 - acqStart.getTime()/1000);
+        double Td = (acqStart.getTime()/1000 - collectStop.getTime()/1000);
         //获取能谱获取实时间
         double Ta = phd.getAcq().getAcquisition_real_time();
         //获取能谱获取活时间
         double Tl = phd.getAcq().getAcquisition_live_time();
         //获取样品采样体积
         double Svol = phd.getCollect().getAir_volume();
-        double DCF1, DCF2, DCF3;
+        double DCF1 = 0;
+        double DCF2 = 0;
+        double DCF3 = 0;
         //计算得到lamda计算值
         double lambda = Math.log(2.0) / (vMdcInfo.get(2) * 86400);
         if ( Ts == 0 ) {
             DCF1 = 1;
         } else {
-            DCF1 = lambda * Ts / (1-Math.exp(-lambda*Ts));
+            DCF1 = lambda * Ts / (1-Math.exp(-lambda * Ts));
         }
         if ( Td == 0 ) {
             DCF2 = 1;
@@ -674,7 +668,7 @@ public class GammaFileUtil extends AbstractLogOrReport {
             DCF3 = lambda * Ta / (1-Math.exp(-lambda*Ta));
         }
         //计算得到DCF_conc
-        double DCF_conc = Math.exp(lambda * (phd.getUsedSetting().getRefTime_conc().getTime()/1000 - collectStart.getTime()/1000));
+        double DCF_conc = Math.exp(lambda * (collectStart.getTime()/1000 - phd.getUsedSetting().getRefTime_conc().getTime()/1000));
         //声明一个集合
         List<Double> energy = new LinkedList<>();
         energy.add(vMdcInfo.get(0));
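The reversed subtractions above make Ts (collection stop minus start) and Td (acquisition start minus collection stop) come out positive, which the decay-correction factors assume. A self-contained sketch with made-up numbers; the exp(lambda*Td) form for DCF2 is an assumption, since its else-branch lies outside this hunk:

    public class DcfSketch {
        public static void main(String[] args) {
            double halfLifeDays = 5.243;               // Xe-133 half-life, illustration only
            double lambda = Math.log(2.0) / (halfLifeDays * 86400);
            double Ts = 24 * 3600;                     // sampling time [s], assumed
            double Td = 6 * 3600;                      // decay time [s], assumed
            double Ta = 12 * 3600;                     // acquisition real time [s], assumed

            double DCF1 = (Ts == 0) ? 1 : lambda * Ts / (1 - Math.exp(-lambda * Ts));
            double DCF2 = (Td == 0) ? 1 : Math.exp(lambda * Td);
            double DCF3 = (Ta == 0) ? 1 : lambda * Ta / (1 - Math.exp(-lambda * Ta));

            // With the old, inverted subtractions Ts and Td would be negative and the
            // factors would fall below 1 instead of correcting the activity upward.
            System.out.printf("DCF1=%.4f DCF2=%.4f DCF3=%.4f%n", DCF1, DCF2, DCF3);
        }
    }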
@@ -705,7 +699,7 @@ public class GammaFileUtil extends AbstractLogOrReport {
         for(int i=1; i<phd.getVEnergy().size(); i++) {
             if(phd.getVEnergy().get(i) >= vMdcInfo.get(0)) {
                 index = i;
-                if(phd.getVEnergy().get(i) - vMdcInfo.get(0) > vMdcInfo.get(0) - phd.getVEnergy().get(i-1)){
+                if( (phd.getVEnergy().get(i) - vMdcInfo.get(0)) > (vMdcInfo.get(0) - phd.getVEnergy().get(i-1)) ){
                     index = i-1;
                 }
                 break;
@@ -720,7 +714,7 @@ public class GammaFileUtil extends AbstractLogOrReport {
         }
     }
 
-    public boolean ReadQCLimit(Map<String, QcCheckItem> qcItems, Map<String, Double> vMdcInfoMap, Double ener_Be7, String systemType) {
+    public boolean ReadQCLimit(Map<String, QcCheckItem> qcItems, Map<String, Double> vMdcInfoMap, List<Double> Be7Value, String systemType) {
         try {
             String filePath = parameterProperties.getFilePath()+ File.separator +"SystemManager.xml";
             //创建一个文档解析器工厂
@@ -778,7 +772,7 @@ public class GammaFileUtil extends AbstractLogOrReport {
                     Node item = attributes.item(j);
                     //判断属性名称是否是 number
                     if(item.getNodeName().equals(CalType.ENERGY_CAL.getType())) {
-                        ener_Be7 = Double.valueOf(item.getNodeValue());
+                        Be7Value.add(Double.valueOf(item.getNodeValue()));
                    }
                 }
             }
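Passing a List<Double> instead of a Double into ReadQCLimit looks like a fix for Java's pass-by-value semantics: rebinding a boxed Double parameter inside the callee never reaches the caller, whereas adding to a caller-owned list does. A minimal, generic illustration:

    import java.util.ArrayList;
    import java.util.List;

    public class OutParamDemo {
        static void setBoxed(Double d) { d = 42.0; }                 // rebinding stays local
        static void addToList(List<Double> out) { out.add(42.0); }   // mutation is visible to the caller

        public static void main(String[] args) {
            Double boxed = 0.0;
            setBoxed(boxed);
            System.out.println(boxed);   // still 0.0

            List<Double> list = new ArrayList<>();
            addToList(list);
            System.out.println(list);    // [42.0]
        }
    }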
@@ -1264,8 +1258,6 @@ public class GammaFileUtil extends AbstractLogOrReport {
     }
 
     public boolean AnalyseSpectrum(PHDFile phd, Map<String, NuclideLines> mapLines){
-        //解析获取临时文件信息
-        File tmpFile = analyzeFile(phd.getFilepath(), phd.getFilename());
         ObjectMapper mapper = new ObjectMapper();
         try {
             String phdStr = mapper.writeValueAsString(phd);
@@ -1273,7 +1265,7 @@ public class GammaFileUtil extends AbstractLogOrReport {
             if (ObjectUtil.isNull(analysisProcess)){
                 analysisProcess = ApplicationContextUtil.getContext().getBean(AnalysisProcess.class);
             }
-            String strValue = CalValuesHandler.analyseSpectrum(phdStr, nuclideLinesMap, tmpFile.getAbsolutePath(), analysisProcess);
+            String strValue = CalValuesHandler.analyseSpectrum(phdStr, nuclideLinesMap, phd.getTmpFilePath(), analysisProcess);
             Map<String, Object> parseMap = JSON.parseObject(strValue, Map.class);
             for (Map.Entry<String, Object> entry:parseMap.entrySet()) {
                 if (entry.getKey().equalsIgnoreCase("bAnalyed")) {
@@ -1441,8 +1433,8 @@ public class GammaFileUtil extends AbstractLogOrReport {
                     phd.setUsedTotEPara(value);
                 }
                 if (entry.getKey().equalsIgnoreCase("mapNucActMda")) {
-                    HashMap<String, Object> jsonMap = JSON.parseObject(JSON.toJSONString(entry.getValue()), HashMap.class);
-                    Map<String, NuclideActMda> value = new HashMap<>();
+                    TreeMap<String, Object> jsonMap = JSON.parseObject(JSON.toJSONString(entry.getValue()), TreeMap.class);
+                    Map<String, NuclideActMda> value = new TreeMap<>();
                     for (Map.Entry<String, Object> objectEntry:jsonMap.entrySet()) {
                         String key = objectEntry.getKey();
                         NuclideActMda entryValue = JSON.parseObject(JSON.toJSONString(objectEntry.getValue()), NuclideActMda.class);
@@ -1467,10 +1459,6 @@ public class GammaFileUtil extends AbstractLogOrReport {
         } catch (RuntimeException e) {
             e.printStackTrace();
             log.error("AnalyseSpectrum(): {}", e.getMessage());
-        } finally {
-            if (Objects.nonNull(tmpFile)) {
-                tmpFile.delete();
-            }
         }
         return true;
     }
@@ -1618,8 +1606,8 @@ public class GammaFileUtil extends AbstractLogOrReport {
             if(rate > 0.8 || bActBigger) {
                 ActMda.setHalflife(halflife);
                 ActMda.setKey_flag(-1);
-                if(!vNuclides.contains(iter.getKey())) // 需要计算活度浓度的核素
-                {
+                // 需要计算活度浓度的核素
+                if(!vNuclides.contains(iter.getKey())) {
                     ActMda.setBCalculateMDA(true);
                 } else {
                     ActMda.setActivity(0);
@@ -1677,12 +1665,11 @@ public class GammaFileUtil extends AbstractLogOrReport {
                         if(strList.size() == 3) {
                             mapHalflife.put(strList.get(0), Double.valueOf(strList.get(2)) * 86400);
                         }
-                    }else {
+                    } else {
                         i=j;
                         break;
                     }
                 }
-                break;
             }
             if(line.contains("#Identify")) {
                 for (int j=i+1; j<readLines.size(); j++){
@@ -1742,8 +1729,8 @@ public class GammaFileUtil extends AbstractLogOrReport {
             Date collectStart = DateUtils.parseDate(phd.getCollect().getCollection_start_date() + StringPool.SPACE + phd.getCollect().getCollection_start_time().substring(0, phd.getCollect().getCollection_start_time().indexOf(StringPool.DOT)), "yyyy/MM/dd HH:mm:ss");
             Date collectStop = DateUtils.parseDate(phd.getCollect().getCollection_stop_date() + StringPool.SPACE + phd.getCollect().getCollection_stop_time().substring(0, phd.getCollect().getCollection_stop_time().indexOf(StringPool.DOT)), "yyyy/MM/dd HH:mm:ss");
             Date acqStart = DateUtils.parseDate(phd.getAcq().getAcquisition_start_date() + StringPool.SPACE + phd.getAcq().getAcquisition_start_time().substring(0, phd.getAcq().getAcquisition_start_time().indexOf(StringPool.DOT)), "yyyy/MM/dd HH:mm:ss");
-            double Ts = collectStart.getTime()/1000 - collectStop.getTime()/1000;  // 采样时间
-            double Td = collectStop.getTime()/1000 - acqStart.getTime()/1000;      // 衰变时间
+            double Ts = collectStop.getTime()/1000 - collectStart.getTime()/1000;  // 采样时间
+            double Td = acqStart.getTime()/1000 - collectStop.getTime()/1000;      // 衰变时间
             double Ta = phd.getAcq().getAcquisition_real_time();   // 能谱获取实时间
             double Tl = phd.getAcq().getAcquisition_live_time();   // 能谱获取活时间
             double Svol = phd.getCollect().getAir_volume();        // 样品采样体积
@@ -1756,8 +1743,8 @@ public class GammaFileUtil extends AbstractLogOrReport {
             if ( Ta == 0 ) DCF3 = 1;
             else DCF3 = lambda * Ta / (1-Math.exp(-lambda*Ta));
 
-            double DCF_act = Math.exp(lambda * (phd.getUsedSetting().getRefTime_act().getTime()/1000 - acqStart.getTime()/1000));
-            double DCF_conc = Math.exp(lambda * (phd.getUsedSetting().getRefTime_conc().getTime()/1000 - collectStart.getTime()/1000));
+            double DCF_act = Math.exp(lambda * (acqStart.getTime()/1000 - phd.getUsedSetting().getRefTime_act().getTime()/1000));
+            double DCF_conc = Math.exp(lambda * (collectStart.getTime()/1000 - phd.getUsedSetting().getRefTime_conc().getTime()/1000));
 
             PeakInfo peak = phd.getVPeak().get(mainPeakIdx);
             double netKeyPeakArea = peak.area;
|
||||||
strBuffer.append(System.lineSeparator());
|
strBuffer.append(System.lineSeparator());
|
||||||
//日志文本内容第五部分数据
|
//日志文本内容第五部分数据
|
||||||
String[] dataTitles = new String[]{"Collection Start", "Collection Stop", "Sampling Time[h]", "Sample Quantity[m3]", "Decay Time[h]", "Acquisition Start", "Acquisition Stop", "Acquisition Time[s]"};
|
String[] dataTitles = new String[]{"Collection Start", "Collection Stop", "Sampling Time[h]", "Sample Quantity[m3]", "Decay Time[h]", "Acquisition Start", "Acquisition Stop", "Acquisition Time[s]"};
|
||||||
String[] dataDetail = new String[]{middleData.sample_collection_start, middleData.sample_collection_stop, middleData.sample_time, middleData.sample_quantity, middleData.sample_decay_time, middleData.sample_acquisiton_start, middleData.sample_acquistion_stop, middleData.sample_acquistion_time};
|
String[] dataDetail = new String[]{middleData.sample_collection_start, middleData.sample_collection_stop, middleData.sample_time, middleData.sample_quantity, NumberFormatUtil.numberFormat(middleData.sample_decay_time), middleData.sample_acquisiton_start, middleData.sample_acquistion_stop, middleData.sample_acquistion_time};
|
||||||
strBuffer.append(attributeFormat(dataTitles, dataDetail));
|
strBuffer.append(attributeFormat(dataTitles, dataDetail));
|
||||||
//日志文本内容第八部分头部信息
|
//日志文本内容第八部分头部信息
|
||||||
String logHead8 = "%s Read calibration finished %s";
|
String logHead8 = "%s Read calibration finished %s";
|
||||||
|
@ -2346,13 +2333,6 @@ public class GammaFileUtil extends AbstractLogOrReport {
|
||||||
strBuffer.append(System.lineSeparator());
|
strBuffer.append(System.lineSeparator());
|
||||||
//换行
|
//换行
|
||||||
strBuffer.append(System.lineSeparator());
|
strBuffer.append(System.lineSeparator());
|
||||||
//日志文本内容第九部分头部信息
|
|
||||||
String logHead9 = "%s Read calibration finished %s";
|
|
||||||
strBuffer.append(titleFormat(logHead9, 38, StringPool.DOT, StringPool.DOT));
|
|
||||||
//换行
|
|
||||||
strBuffer.append(System.lineSeparator());
|
|
||||||
//换行
|
|
||||||
strBuffer.append(System.lineSeparator());
|
|
||||||
//文本日志内容第十部分头部信息
|
//文本日志内容第十部分头部信息
|
||||||
String logHead10 = "%s Starting Calibration %s";
|
String logHead10 = "%s Starting Calibration %s";
|
||||||
strBuffer.append(titleFormat(logHead10, 38, StringPool.DOT, StringPool.DOT));
|
strBuffer.append(titleFormat(logHead10, 38, StringPool.DOT, StringPool.DOT));
|
||||||
|
@@ -2415,7 +2395,7 @@ public class GammaFileUtil extends AbstractLogOrReport {
                 String.valueOf(middleData.setting_specSetup.getEnergyTolerance()), String.valueOf(middleData.setting_specSetup.getBaseImprovePSS()),
                 String.valueOf(middleData.setting_specSetup.getPss_low()), String.valueOf(middleData.setting_specSetup.getK_back()), String.valueOf(middleData.setting_specSetup.getK_alpha()),
                 String.valueOf(middleData.setting_specSetup.getK_beta()), String.valueOf(middleData.setting_specSetup.getRiskLevelK()),
-                String.valueOf(middleData.setting_specSetup.getRefTime_act()), String.valueOf(middleData.setting_specSetup.getRefTime_conc())};
+                DateUtils.formatDate(middleData.setting_specSetup.getRefTime_act(), "yyyy/MM/dd HH:mm:ss"), DateUtils.formatDate(middleData.setting_specSetup.getRefTime_conc(), "yyyy/MM/dd HH:mm:ss")};
         strBuffer.append(attributeFormat(dataTitles10, dataContent10));
         //文本内容第十四部分头部信息
         String logHead14 = "CALIBRATION PARAMETERS%s";
@@ -2429,7 +2409,7 @@ public class GammaFileUtil extends AbstractLogOrReport {
         String[] dataContent11 = new String[]{String.valueOf(middleData.setting_specSetup.getCalibrationPSS_low()), String.valueOf(middleData.setting_specSetup.getCalibrationPSS_high()), (middleData.setting_specSetup.isBUpdateCal()?"1":"0"), (middleData.setting_specSetup.isKeepCalPeakSearchPeaks()?"1":"0")};
         strBuffer.append(attributeFormat(dataTitle11, dataContent11));
         //获取peak的数据
-        double peaksUsed = 0;
+        int peaksUsed = 0;
         for(int m=0;m<middleData.peaks_Nuclide_name.size();m++) {
             if(StringUtils.isNotBlank(middleData.peaks_Nuclide_name.get(m))) {
                 peaksUsed++;
@@ -2444,8 +2424,8 @@ public class GammaFileUtil extends AbstractLogOrReport {
         strBuffer.append(System.lineSeparator());
         //文本内容第十二部分数据
         String data1 = String.format("%s peaks reported", String.valueOf(middleData.peaks_idPeak.size()));
-        String data2 = String.format("%s peaks with ID (%s%%)", String.valueOf(peaksUsed), String.valueOf(peaksUsed / middleData.peaks_idPeak.size() * 100 ) );
-        String data3 = String.format("%s peaks without ID(%s%%)", String.valueOf(middleData.peaks_idPeak.size() - peaksUsed), String.valueOf(Double.valueOf(middleData.peaks_idPeak.size() - peaksUsed) / middleData.peaks_idPeak.size() * 100));
+        String data2 = String.format("%s peaks with ID (%s%%)", String.valueOf(peaksUsed), NumberFormatUtil.numberSixLen(String.valueOf(Double.valueOf(peaksUsed) / middleData.peaks_idPeak.size() * 100 )) );
+        String data3 = String.format("%s peaks without ID(%s%%)", String.valueOf(middleData.peaks_idPeak.size() - peaksUsed), NumberFormatUtil.numberSixLen(String.valueOf(Double.valueOf(middleData.peaks_idPeak.size() - peaksUsed) / middleData.peaks_idPeak.size() * 100)));
         String[] dataTitle12 = new String[]{data1, data2, data3};
         String[] dataContent12 = new String[]{"", "", ""};
         strBuffer.append(attributeFormat(dataTitle12, dataContent12));
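The Double.valueOf(...) wrappers above matter because peaksUsed is now an int: in Java, int / int truncates before the multiplication by 100, so any ratio below 1 collapses to 0%. A quick generic check:

    public class IntDivDemo {
        public static void main(String[] args) {
            int peaksUsed = 7;
            int total = 20;
            System.out.println(peaksUsed / total * 100);                  // 0   (7/20 truncates to 0)
            System.out.println(Double.valueOf(peaksUsed) / total * 100);  // 35.0
        }
    }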
@@ -2453,14 +2433,14 @@ public class GammaFileUtil extends AbstractLogOrReport {
         String data14= "PeakID : %-7s Energy : %-13s Centroid : %-13s FWHM : %-10s NetArea : %-16s NAErr%% : %-23s Signif : %-13s Sensit : %-13s Nuclide : %-30s";
         for (int i=0; i<middleData.peaks_idPeak.size();i++){
             String idPeak = middleData.peaks_idPeak.get(i);
-            String energy = middleData.peaks_Energy.get(i);
+            String energy = String.format("%.3f", Double.valueOf(middleData.peaks_Energy.get(i)));
             String peakCentroid = String.format("%.3f", Double.valueOf(middleData.peaks_peakCentroid.get(i)));
             String fwhm = String.format("%.3f", Double.valueOf(middleData.peaks_Fwhm.get(i)));
             String area = String.format("%.3f", Double.valueOf(middleData.peaks_Area.get(i)));
             String areaErr = String.format("%.3f", Double.valueOf(middleData.peaks_areaErr.get(i)) / Double.valueOf(middleData.peaks_Area.get(i)) * 100);
             String signif = String.format("%.3f", Double.valueOf(middleData.peaks_Significance.get(i)));
             String sensit = String.format("%.3f", Double.valueOf(middleData.peaks_Sensitivity.get(i)));
-            String nuclide = StringUtils.join(middleData.peaks_Nuclide_name.get(i), StringPool.SEMICOLON);
+            String nuclide = middleData.peaks_Nuclide_name.get(i).length()>0?StringUtils.join(middleData.peaks_Nuclide_name.get(i), StringPool.SEMICOLON):"";
             strBuffer.append(rowFormat(data14, idPeak, energy, peakCentroid, fwhm, area, areaErr, signif, sensit, nuclide));
             //换行
             strBuffer.append(System.lineSeparator());
@@ -2499,7 +2479,7 @@ public class GammaFileUtil extends AbstractLogOrReport {
             String activity = String.format("%.3f", Double.valueOf(middleData.nucl_ided_activ_key.get(i))) + " bq";
             String relErr = String.format("%.3f", (Double.valueOf(middleData.nucl_ided_activ_key_err.get(i)) / Double.valueOf(middleData.nucl_ided_activ_key.get(i)) * 100)) + " %";
             String conc = middleData.nucl_ided_Concentration.get(i) + " uBq/m^3";
-            String mdc = StringUtils.join(middleData.nucl_ided_MDC, " ");
+            String mdc = StringUtils.join(middleData.nucl_ided_MDC.get(i), " ");
             strBuffer.append(rowFormat(data15, nuclideName, halflife, activity, relErr, conc, mdc));
             //换行
             strBuffer.append(System.lineSeparator());
@@ -2689,10 +2669,10 @@ public class GammaFileUtil extends AbstractLogOrReport {
                 peaksUsed++;
             }
         }
-        strBuffer.append(StringPool.SPACE+peaksUsed+" peaks with ID ("+(peaksUsed/middleData.peaks_idPeak.size()*100)+"%):");
+        strBuffer.append(StringPool.SPACE+peaksUsed+" peaks with ID ("+NumberFormatUtil.numberSixLen(String.valueOf(peaksUsed/middleData.peaks_idPeak.size()*100))+"%):");
         //换行
         strBuffer.append(System.lineSeparator());
-        strBuffer.append(StringPool.SPACE+(middleData.peaks_idPeak.size()-peaksUsed)+" peaks without ID("+Double.valueOf(middleData.peaks_idPeak.size()-peaksUsed)/middleData.peaks_idPeak.size()*100+"%):");
+        strBuffer.append(StringPool.SPACE+(middleData.peaks_idPeak.size()-peaksUsed)+" peaks without ID("+NumberFormatUtil.numberSixLen(String.valueOf(Double.valueOf(middleData.peaks_idPeak.size()-peaksUsed)/middleData.peaks_idPeak.size()*100))+"%):");
         //换行
         strBuffer.append(System.lineSeparator());
         //换行
@@ -2916,7 +2896,7 @@ public class GammaFileUtil extends AbstractLogOrReport {
         //换行
         strBuffer.append(System.lineSeparator());
         for (int i=0;i<middleData.QC_CHECK_QC_NAME.size(); i++){
-            strBuffer.append(rowFormat(qualityTitle, StringPool.SPACE+middleData.QC_CHECK_QC_NAME.get(i), (middleData.QC_CHECK_QC_RESULT.get(i).equals("0")?"Fail":"Pass"), String.format("%.3f", Double.valueOf(middleData.QC_CHECK_QC_VALUE.get(i))), middleData.QC_CHECK_QC_STANDARD.get(i)));
+            strBuffer.append(rowFormat(qualityTitle, StringPool.SPACE+middleData.QC_CHECK_QC_NAME.get(i), (Double.valueOf(middleData.QC_CHECK_QC_RESULT.get(i)) < 1?"Fail":"Pass"), String.format("%.3f", Double.valueOf(middleData.QC_CHECK_QC_VALUE.get(i))), middleData.QC_CHECK_QC_STANDARD.get(i)));
             //换行
             strBuffer.append(System.lineSeparator());
         }
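Switching the QC flag test from equals("0") to Double.valueOf(...) < 1 presumably guards against results serialized as "0.0" or "0.00", which the string comparison would treat as a pass. A small generic check:

    public class QcFlagDemo {
        public static void main(String[] args) {
            for (String r : new String[]{"0", "0.0", "1", "1.0"}) {
                boolean failByEquals = r.equals("0");
                boolean failByValue = Double.valueOf(r) < 1;
                System.out.printf("%-4s equals(\"0\")=%-5b value<1=%b%n", r, failByEquals, failByValue);
            }
        }
    }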
@@ -3041,7 +3021,7 @@ public class GammaFileUtil extends AbstractLogOrReport {
         //上传本次文件到ftp人工交互存储路径下
         File sampleTmp = null;
         try {
-            sampleTmp = analyzeFile(StringPool.SLASH + ftpUtil.getFtpRootPath() + fileAnlyse.getFilepath(), fileAnlyse.getFilename());
+            sampleTmp = new File(fileAnlyse.getTmpFilePath());
             if (Objects.nonNull(sampleTmp)) {
                 bRet = ftpUtil.saveFile(StringPool.SLASH + ftpUtil.getFtpRootPath() + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + middleData.analyses_save_filePath.substring(0, middleData.analyses_save_filePath.lastIndexOf(StringPool.SLASH)), middleData.analyses_save_filePath.substring(middleData.analyses_save_filePath.lastIndexOf(StringPool.SLASH)+1), new FileInputStream(sampleTmp));
             }
@@ -3596,7 +3576,7 @@ public class GammaFileUtil extends AbstractLogOrReport {
         middleData.sample_acquisiton_start = fileAnlyse.getAcq().getAcquisition_start_date()+StringPool.SPACE+fileAnlyse.getAcq().getAcquisition_start_time();
         String acquisition_start = middleData.sample_acquisiton_start;
         Date dataTime = DateUtils.parseDate(acquisition_start);
-        middleData.sample_acquistion_stop = DateUtils.formatDate(new Date((long) (dataTime.getTime() + fileAnlyse.getAcq().getAcquisition_live_time())), "yyyy/MM/dd HH:mm:ss");
+        middleData.sample_acquistion_stop = DateUtils.formatDate(new Date((long) (dataTime.getTime() + fileAnlyse.getAcq().getAcquisition_live_time()*1000)), "yyyy/MM/dd HH:mm:ss");
         middleData.sample_acquistion_time = String.format("%.2f", fileAnlyse.getAcq().getAcquisition_real_time()) ;
         middleData.sample_stationID = fileAnlyse.getHeader().getSite_code();
         middleData.sample_detectID = fileAnlyse.getHeader().getDetector_code();
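The *1000 above converts the acquisition live time, presumably stored in seconds in the PHD data, into the milliseconds that java.util.Date arithmetic expects; without it the computed stop time lands a fraction of a second after the start. A generic sketch with an assumed live time:

    import java.text.SimpleDateFormat;
    import java.util.Date;

    public class StopTimeDemo {
        public static void main(String[] args) throws Exception {
            SimpleDateFormat fmt = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss");
            Date start = fmt.parse("2024/01/01 10:00:00");
            double liveTimeSec = 3600.0;  // assumed acquisition live time in seconds

            Date wrong = new Date((long) (start.getTime() + liveTimeSec));         // adds only 3.6 seconds
            Date right = new Date((long) (start.getTime() + liveTimeSec * 1000));  // adds one hour

            System.out.println("without *1000: " + fmt.format(wrong));
            System.out.println("with    *1000: " + fmt.format(right));
        }
    }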
@@ -3655,17 +3635,17 @@ public class GammaFileUtil extends AbstractLogOrReport {
 
             String str_mda = (nuc.getMda() <= 0 ? "null" : NumberFormatUtil.numberFormat(String.valueOf(nuc.getMda())));
             tableNuclideActivity.setMda(str_mda);
-            if (nuc.getConcentration() > 1000000) {
+            if (Double.isFinite(nuc.getConcentration())) {
                 DecimalFormat decimalFormat = new DecimalFormat("0.###E0");
                 nuc.setConcentration(Double.valueOf(decimalFormat.format(nuc.getConcentration())));
             }
-            String str_con = (nuc.getConcentration() <= 0 ? "null" : NumberFormatUtil.numberFormat(String.valueOf(nuc.getConcentration())));
+            String str_con = (Double.isFinite(nuc.getConcentration())? nuc.getConcentration() <= 0 ? "null" : NumberFormatUtil.numberFormat(String.valueOf(nuc.getConcentration())) : "inf");
             tableNuclideActivity.setConc(str_con);
-            if (nuc.getMdc() > 1000000) {
+            if (Double.isFinite(nuc.getMdc())) {
                 DecimalFormat decimalFormat = new DecimalFormat("0.###E0");
-                nuc.setConcentration(Double.valueOf(decimalFormat.format(nuc.getMdc())));
+                nuc.setMdc(Double.valueOf(decimalFormat.format(nuc.getMdc())));
             }
-            String str_mdc = (nuc.getMdc() <= 0 ? "null" : NumberFormatUtil.numberFormat(String.valueOf(nuc.getMdc())));
+            String str_mdc = (Double.isFinite(nuc.getMdc())? nuc.getMdc() <= 0 ? "null" : NumberFormatUtil.numberFormat(String.valueOf(nuc.getMdc())) : "inf");
             tableNuclideActivity.setMdc(str_mdc);
             nuclideActivityList.add(tableNuclideActivity);
         }
Binary file not shown.
@@ -281,17 +281,13 @@ public class PHDFileUtil extends AbstractLogOrReport {
     }
 
     public List<String> readLine(String filePath) {
-        File file = null;
         List<String> allLines = new ArrayList<>();
         try {
-            file = ftpUtil.downloadFile(filePath, "betaGamma");
-            return FileUtils.readLines(file, ftpUtil.getEncoding());
+            File file = new File(filePath);
+            return FileUtils.readLines(file, "UTF-8");
         }catch (IOException e){
            e.printStackTrace();
            return allLines;
-        } finally {
-            if (ObjectUtil.isNotNull(file))
-                file.delete();
        }
    }
 
@@ -431,7 +427,6 @@ public class PHDFileUtil extends AbstractLogOrReport {
                 detBgMap.put("DetBgBtn", "RedLight");
                 qcMap.put("DetBgBtn", "RedLight");
             }
 
     }
-
     public Map<String, String> getFileData(String filePath, String sampleFileName) {
@@ -583,47 +578,46 @@ public class PHDFileUtil extends AbstractLogOrReport {
         return rData;
     }
 
-    public File analyzeFile(String path, String fileName) {
-        //连接ftp
-        FTPClient ftpClient = ftpUtil.LoginFTP();
-        InputStream inputStream = null;
-        File file = null;
-        try {
-            //被动模式
-            ftpClient.enterLocalPassiveMode();
-            //设置文件类型--二进制文件
-            ftpClient.setFileType(FTP.BINARY_FILE_TYPE);
-            //
-            ftpClient.setControlEncoding("UTF-8");
-            ftpClient.setFileTransferMode(FTPClient.STREAM_TRANSFER_MODE);
-            //切换文件路径
-            ftpClient.changeWorkingDirectory(path);
-            inputStream = ftpClient.retrieveFileStream(fileName);
-            if (Objects.nonNull(inputStream)){
-                file = File.createTempFile("betaGamma", null);
-                //将ftp文件的输入流复制给临时文件
-                FileUtils.copyInputStreamToFile(inputStream, file);
-            }
-        } catch (IOException e) {
-            throw new RuntimeException(e);
-        } finally {
-            try {
-                if (Objects.nonNull(ftpClient)){
-                    ftpClient.disconnect();
-                }
-                if (Objects.nonNull(inputStream)){
-                    inputStream.close();
-                }
-            } catch (IOException e) {
-                throw new RuntimeException(e);
-            }
-        }
-        return file;
-    }
+//    public File analyzeFile(String path, String fileName) {
+//        //连接ftp
+//        FTPClient ftpClient = ftpUtil.LoginFTP();
+//        InputStream inputStream = null;
+//        File file = null;
+//        try {
+//            //被动模式
+//            ftpClient.enterLocalPassiveMode();
+//            //设置文件类型--二进制文件
+//            ftpClient.setFileType(FTP.BINARY_FILE_TYPE);
+//            //
+//            ftpClient.setControlEncoding("UTF-8");
+//            ftpClient.setFileTransferMode(FTPClient.STREAM_TRANSFER_MODE);
+//            //切换文件路径
+//            ftpClient.changeWorkingDirectory(path);
+//            inputStream = ftpClient.retrieveFileStream(fileName);
+//            if (Objects.nonNull(inputStream)){
+//                file = File.createTempFile("betaGamma", null);
+//                //将ftp文件的输入流复制给临时文件
+//                FileUtils.copyInputStreamToFile(inputStream, file);
+//            }
+//        } catch (IOException e) {
+//            throw new RuntimeException(e);
+//        } finally {
+//            try {
+//                if (Objects.nonNull(ftpClient)){
+//                    ftpClient.disconnect();
+//                }
+//                if (Objects.nonNull(inputStream)){
+//                    inputStream.close();
+//                }
+//            } catch (IOException e) {
+//                throw new RuntimeException(e);
+//            }
+//        }
+//        return file;
+//    }
 
     public boolean analyzeSpectrum(File sampleTmp, File gasTmp, File detTmp, BgCalibratePara BgCalPara, Map<String, Object> map, BetaDataFile betaDataFile) {
         boolean bRet = true;
-        long start = System.currentTimeMillis();
         //调用动态库解析文件
         BgAnalyseResult analyseResult = null;
         if (Objects.isNull(BgCalPara)) {
@@ -631,8 +625,8 @@ public class PHDFileUtil extends AbstractLogOrReport {
         } else {
             analyseResult = EnergySpectrumHandler.bgReAnalyse(sampleTmp.getAbsolutePath(), gasTmp.getAbsolutePath(), detTmp.getAbsolutePath(), BgCalPara);
         }
-        System.out.println("beta分析总耗时:"+ (System.currentTimeMillis() - start));
         if (StringUtils.isNotBlank(analyseResult.error_log) && !analyseResult.error_log.equalsIgnoreCase("no error.")) {
+            System.out.println(analyseResult.error_log);
             bRet = false;
             return bRet;
         } else {
@@ -723,47 +717,6 @@ public class PHDFileUtil extends AbstractLogOrReport {
             }
         }
 
-    public EnergySpectrumStruct analyzeFileSourceData(String filePath, String fileName) {
-        EnergySpectrumStruct struct = null;
-        FTPClient ftpClient = ftpUtil.LoginFTP();
-        InputStream inputStream = null;
-        File file = null;
-        try {
-            //切换被动模式
-            ftpClient.enterLocalPassiveMode();
-            ftpClient.setFileType(FTPClient.BINARY_FILE_TYPE);
-            // 设置编码,当文件中存在中文且上传后文件乱码时可使用此配置项
-            ftpClient.setControlEncoding("UTF-8");
-            ftpClient.setFileTransferMode(FTPClient.STREAM_TRANSFER_MODE);
-            ftpClient.changeWorkingDirectory(filePath);
-            inputStream = ftpClient.retrieveFileStream(fileName);
-            if (Objects.nonNull(inputStream)){
-                //声明一个临时文件
-                file = File.createTempFile("betaGamma", null);
-                //将ftp文件的输入流复制给临时文件
-                FileUtils.copyInputStreamToFile(inputStream, file);
-                struct = EnergySpectrumHandler.getSourceData(file.getAbsolutePath());
-            }
-        } catch (IOException e) {
-            throw new RuntimeException(e);
-        } finally {
-            try {
-                if (Objects.nonNull(ftpClient)){
-                    ftpClient.disconnect();
-                }
-                if (Objects.nonNull(inputStream)){
-                    inputStream.close();
-                }
-                if (Objects.nonNull(file)) {
-                    file.delete();
-                }
-            } catch (IOException e) {
-                throw new RuntimeException(e);
-            }
-        }
-        return struct;
-    }
-
     public EnergySpectrumStruct analyzeFileSourceData(File uploadFile) {
         EnergySpectrumStruct struct = null;
         InputStream inputStream = null;
@@ -1378,39 +1331,39 @@ public class PHDFileUtil extends AbstractLogOrReport {
         out.append(System.lineSeparator());
         //第十五行数据
         String dataFormat2 = "%-45s%-11s";
-        out.append(rowFormat(dataFormat2, "Acquisition_live_sec[s] :", String.valueOf(sample.acquisition_live_time)));
+        out.append(rowFormat(dataFormat2, "Acquisition_live_sec[s] :", NumberFormatUtil.numberSixLen(String.valueOf(sample.acquisition_live_time))));
         //换行
         out.append(System.lineSeparator());
         //第十六行数据
-        out.append(rowFormat(dataFormat2, "Acquisition_real_sec[s] :", String.valueOf(sample.acquisition_real_time)));
+        out.append(rowFormat(dataFormat2, "Acquisition_real_sec[s] :", NumberFormatUtil.numberSixLen(String.valueOf(sample.acquisition_real_time))));
         //换行
         out.append(System.lineSeparator());
         //第十七行数据
-        long secs = 0;
+        double secs = 0.0;
         try {
             Date stop = DateUtils.parseDate(sample.collection_stop_date + StringPool.SPACE + sample.collection_stop_time);
             Date start = DateUtils.parseDate(sample.collection_start_date + StringPool.SPACE + sample.collection_start_time);
-            secs = (stop.getTime() - start.getTime()) / 1000;
+            secs = (Double.valueOf(stop.getTime()) - Double.valueOf(start.getTime()))/1000;
         } catch (ParseException e) {
             throw new RuntimeException(e);
         }
-        out.append(rowFormat(dataFormat2, "Collection time[s] :", String.valueOf(secs)));
+        out.append(rowFormat(dataFormat2, "Collection time[s] :", String.valueOf(Math.round(secs))));
         //换行
         out.append(System.lineSeparator());
         //第十八行数据
-        out.append(rowFormat(dataFormat2, "GASBK Spectrum acquisition_live_sec[s] :", String.valueOf(gas.acquisition_live_time)));
+        out.append(rowFormat(dataFormat2, "GASBK Spectrum acquisition_live_sec[s] :", NumberFormatUtil.numberSixLen(String.valueOf(gas.acquisition_live_time))));
         //换行
         out.append(System.lineSeparator());
         //第十九行数据
-        out.append(rowFormat(dataFormat2, "GASBK Spectrum acquisition_real_sec[s] :", String.valueOf(gas.acquisition_real_time)));
+        out.append(rowFormat(dataFormat2, "GASBK Spectrum acquisition_real_sec[s] :", NumberFormatUtil.numberSixLen(String.valueOf(gas.acquisition_real_time))));
         //换行
         out.append(System.lineSeparator());
         //第二十行数据
-        out.append(rowFormat(dataFormat2, "DETBK Spectrum acquisition_live_sec[s] :", String.valueOf(det.acquisition_live_time)));
+        out.append(rowFormat(dataFormat2, "DETBK Spectrum acquisition_live_sec[s] :", NumberFormatUtil.numberSixLen(String.valueOf(det.acquisition_live_time))));
         //换行
         out.append(System.lineSeparator());
         //第二十一行数据
-        out.append(rowFormat(dataFormat2, "DETBK Spectrum acquisition_real_sec[s] :", String.valueOf(det.acquisition_real_time)));
+        out.append(rowFormat(dataFormat2, "DETBK Spectrum acquisition_real_sec[s] :", NumberFormatUtil.numberSixLen(String.valueOf(det.acquisition_real_time))));
         //换行
         out.append(System.lineSeparator());
         //换行
@@ -1421,11 +1374,11 @@ public class PHDFileUtil extends AbstractLogOrReport {
         out.append(System.lineSeparator());
         //第二十三行数据
         String dataFormat3 = "%-21s%-11s";
-        out.append(rowFormat(dataFormat3, "XE volume[ml] :", String.valueOf(sample.sample_volume_of_Xe)));
+        out.append(rowFormat(dataFormat3, "XE volume[ml] :", NumberFormatUtil.numberSixLen(String.valueOf(sample.air_volume))));
         //换行
         out.append(System.lineSeparator());
         //第二十四行数据
-        out.append(rowFormat(dataFormat3, "Air volume[m3] :", String.valueOf(sample.air_volume)));
+        out.append(rowFormat(dataFormat3, "Air volume[m3] :", NumberFormatUtil.numberSixLen(String.valueOf(sample.sample_volume_of_Xe))));
         //换行
         out.append(System.lineSeparator());
         //换行
@@ -1469,9 +1422,9 @@ public class PHDFileUtil extends AbstractLogOrReport {
         for (int i = 0; i < sample.ratio_id.size(); i++) {
             String low = String.valueOf(sample.ROI_num_lower_G_energy_ROI.get(i));
             String high = String.valueOf(sample.ROI_num_highter_G_energy_ROI.get(i));
-            String ratio = String.valueOf(sample.count_ratio.get(i));
-            String uncert = String.valueOf(sample.count_ratio_uncertainty.get(i));
-            out.append(rowFormat(dataFormat5, low, high, ratio, uncert));
+            String ratio = String.format("%.6f", sample.count_ratio.get(i));
+            String uncert = String.format("%.6f", sample.count_ratio_uncertainty.get(i));
+            out.append(rowFormat(dataFormat5, high, low, ratio, uncert));
             //换行
             out.append(System.lineSeparator());
             //换行
@@ -1524,9 +1477,9 @@ public class PHDFileUtil extends AbstractLogOrReport {
         //换行
         out.append(System.lineSeparator());
         //第三十六行数据
-        if (bgAnalyseResult.s_b_fitting_c_e != null && CollectionUtils.isNotEmpty(bgAnalyseResult.s_b_fitting_c_e)) {
-            if (bgAnalyseResult.s_b_fitting_c_e.size() > 0) {
-                out.append(rowFormat("energy to channel equation: CH(x) = (%s)+(%s)*x+(%s)x*x", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.s_b_fitting_c_e.get(0))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.s_b_fitting_c_e.get(1))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.s_b_fitting_c_e.get(2)))));
+        if (bgAnalyseResult.s_b_fitting_e_c != null && CollectionUtils.isNotEmpty(bgAnalyseResult.s_b_fitting_e_c)) {
+            if (bgAnalyseResult.s_b_fitting_e_c.size() > 0) {
+                out.append(rowFormat("energy to channel equation: CH(x) = (%s)+(%s)*x+(%s)x*x", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.s_b_fitting_e_c.get(0))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.s_b_fitting_e_c.get(1))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.s_b_fitting_e_c.get(2)))));
             } else {
                 out.append(rowFormat("energy to channel equation: CH(x) = (%s)+(%s)*x+(%s)x*x", "?1", "?2", "?3"));
             }
@@ -1536,9 +1489,9 @@ public class PHDFileUtil extends AbstractLogOrReport {
         //换行
         out.append(System.lineSeparator());
         //第三十七行数据
-        if (bgAnalyseResult.s_b_fitting_e_c != null && CollectionUtils.isNotEmpty(bgAnalyseResult.s_b_fitting_e_c)) {
-            if (bgAnalyseResult.s_b_fitting_e_c.size() > 0) {
-                out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.s_b_fitting_e_c.get(0))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.s_b_fitting_e_c.get(1))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.s_b_fitting_e_c.get(2)))));
+        if (bgAnalyseResult.s_b_fitting_c_e != null && CollectionUtils.isNotEmpty(bgAnalyseResult.s_b_fitting_c_e)) {
+            if (bgAnalyseResult.s_b_fitting_c_e.size() > 0) {
+                out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.s_b_fitting_c_e.get(0))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.s_b_fitting_c_e.get(1))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.s_b_fitting_c_e.get(2)))));
             } else {
                 out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", "?1", "?2", "?3"));
             }
@@ -1554,9 +1507,9 @@ public class PHDFileUtil extends AbstractLogOrReport {
         //换行
         out.append(System.lineSeparator());
         //第三十六行数据
-        if (bgAnalyseResult.s_g_fitting_c_e !=null && CollectionUtils.isNotEmpty(bgAnalyseResult.s_g_fitting_c_e)) {
-            if (bgAnalyseResult.s_g_fitting_c_e.size() > 0) {
-                out.append(rowFormat("energy to channel equation: CH(x) = (%s)+(%s)*x+(%s)x*x", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.s_g_fitting_c_e.get(0))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.s_g_fitting_c_e.get(1))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.s_g_fitting_c_e.get(2)))));
+        if (bgAnalyseResult.s_g_fitting_e_c !=null && CollectionUtils.isNotEmpty(bgAnalyseResult.s_g_fitting_e_c)) {
+            if (bgAnalyseResult.s_g_fitting_e_c.size() > 0) {
+                out.append(rowFormat("energy to channel equation: CH(x) = (%s)+(%s)*x+(%s)x*x", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.s_g_fitting_e_c.get(0))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.s_g_fitting_e_c.get(1))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.s_g_fitting_e_c.get(2)))));
             } else {
                 out.append(rowFormat("energy to channel equation: CH(x) = (%s)+(%s)*x+(%s)x*x", "?1", "?2", "?3"));
             }
@@ -1566,9 +1519,9 @@ public class PHDFileUtil extends AbstractLogOrReport {
         //换行
         out.append(System.lineSeparator());
         //第三十七行数据
-        if (bgAnalyseResult.s_g_fitting_e_c != null && CollectionUtils.isNotEmpty(bgAnalyseResult.s_g_fitting_e_c)) {
-            if (bgAnalyseResult.s_g_fitting_e_c.size() > 0) {
-                out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.s_g_fitting_e_c.get(0))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.s_g_fitting_e_c.get(1))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.s_g_fitting_e_c.get(2)))));
+        if (bgAnalyseResult.s_g_fitting_c_e != null && CollectionUtils.isNotEmpty(bgAnalyseResult.s_g_fitting_c_e)) {
+            if (bgAnalyseResult.s_g_fitting_c_e.size() > 0) {
+                out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.s_g_fitting_c_e.get(0))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.s_g_fitting_c_e.get(1))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.s_g_fitting_c_e.get(2)))));
             } else {
                 out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", "?1", "?2", "?3"));
             }
@@ -1598,7 +1551,7 @@ public class PHDFileUtil extends AbstractLogOrReport {
         //换行
         out.append(System.lineSeparator());
         //第四十一行数据
-        String dataFormat7 = "ROI : %-7sBeta : %-2sto %-4schannels Gamma : %-4sto %-8s";
+        String dataFormat7 = "ROI : %-7sBeta : %-2s to %-4s channels Gamma : %-4s to %-8s";
         for (int i = 0; i < bgAnalyseResult.S_ROI.size(); i++) {
             String roi = String.valueOf(bgAnalyseResult.S_ROI.get(i));
             String bStart = String.valueOf(bgAnalyseResult.S_ROI_B_Boundary_start.get(i));
@@ -1624,9 +1577,9 @@ public class PHDFileUtil extends AbstractLogOrReport {
         //换行
         out.append(System.lineSeparator());
         //第四十四行数据
-        if (bgAnalyseResult.d_b_fitting_c_e != null && CollectionUtils.isNotEmpty(bgAnalyseResult.d_b_fitting_c_e)) {
-            if (bgAnalyseResult.d_b_fitting_c_e.size() > 0) {
-                out.append(rowFormat("energy to channel equation: CH(x) = (%s)+(%s)*x+(%s)x*x", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.d_b_fitting_c_e.get(0))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.d_b_fitting_c_e.get(1))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.d_b_fitting_c_e.get(2)))));
+        if (bgAnalyseResult.d_b_fitting_e_c != null && CollectionUtils.isNotEmpty(bgAnalyseResult.d_b_fitting_e_c)) {
+            if (bgAnalyseResult.d_b_fitting_e_c.size() > 0) {
+                out.append(rowFormat("energy to channel equation: CH(x) = (%s)+(%s)*x+(%s)x*x", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.d_b_fitting_e_c.get(0))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.d_b_fitting_e_c.get(1))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.d_b_fitting_e_c.get(2)))));
             } else {
                 out.append(rowFormat("energy to channel equation: CH(x) = (%s)+(%s)*x+(%s)x*x", "?1", "?2", "?3"));
             }
@@ -1637,9 +1590,9 @@ public class PHDFileUtil extends AbstractLogOrReport {
         //换行
         out.append(System.lineSeparator());
         //第四十五行数据
-        if (bgAnalyseResult.d_b_fitting_e_c != null && CollectionUtils.isNotEmpty(bgAnalyseResult.d_b_fitting_e_c)) {
-            if (bgAnalyseResult.d_b_fitting_e_c.size() > 0) {
-                out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.d_b_fitting_e_c.get(0))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.d_b_fitting_e_c.get(1))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.d_b_fitting_e_c.get(2)))));
+        if (bgAnalyseResult.d_b_fitting_c_e != null && CollectionUtils.isNotEmpty(bgAnalyseResult.d_b_fitting_c_e)) {
+            if (bgAnalyseResult.d_b_fitting_c_e.size() > 0) {
+                out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.d_b_fitting_c_e.get(0))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.d_b_fitting_c_e.get(1))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.d_b_fitting_c_e.get(2)))));
             } else {
                 out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", "?1", "?2", "?3"));
|
out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", "?1", "?2", "?3"));
|
||||||
}
|
}
|
||||||
|
@ -1655,9 +1608,9 @@ public class PHDFileUtil extends AbstractLogOrReport {
|
||||||
//换行
|
//换行
|
||||||
out.append(System.lineSeparator());
|
out.append(System.lineSeparator());
|
||||||
//第四十四行数据
|
//第四十四行数据
|
||||||
if (bgAnalyseResult.d_g_fitting_c_e != null && CollectionUtils.isNotEmpty(bgAnalyseResult.d_g_fitting_c_e)) {
|
if (bgAnalyseResult.d_g_fitting_e_c != null && CollectionUtils.isNotEmpty(bgAnalyseResult.d_g_fitting_e_c)) {
|
||||||
if (bgAnalyseResult.d_g_fitting_c_e.size() > 0) {
|
if (bgAnalyseResult.d_g_fitting_e_c.size() > 0) {
|
||||||
out.append(rowFormat("energy to channel equation: CH(x) = (%s)+(%s)*x+(%s)x*x", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.d_g_fitting_c_e.get(0))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.d_g_fitting_c_e.get(1))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.d_g_fitting_c_e.get(2)))));
|
out.append(rowFormat("energy to channel equation: CH(x) = (%s)+(%s)*x+(%s)x*x", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.d_g_fitting_e_c.get(0))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.d_g_fitting_e_c.get(1))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.d_g_fitting_e_c.get(2)))));
|
||||||
} else {
|
} else {
|
||||||
out.append(rowFormat("energy to channel equation: CH(x) = (%s)+(%s)*x+(%s)x*x", "?1", "?2", "?3"));
|
out.append(rowFormat("energy to channel equation: CH(x) = (%s)+(%s)*x+(%s)x*x", "?1", "?2", "?3"));
|
||||||
}
|
}
|
||||||
|
@ -1667,9 +1620,9 @@ public class PHDFileUtil extends AbstractLogOrReport {
|
||||||
//换行
|
//换行
|
||||||
out.append(System.lineSeparator());
|
out.append(System.lineSeparator());
|
||||||
//第四十五行数据
|
//第四十五行数据
|
||||||
if (bgAnalyseResult.d_g_fitting_e_c != null && CollectionUtils.isNotEmpty(bgAnalyseResult.d_g_fitting_e_c)) {
|
if (bgAnalyseResult.d_g_fitting_c_e != null && CollectionUtils.isNotEmpty(bgAnalyseResult.d_g_fitting_c_e)) {
|
||||||
if (bgAnalyseResult.d_g_fitting_e_c.size() > 0) {
|
if (bgAnalyseResult.d_g_fitting_c_e.size() > 0) {
|
||||||
out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.d_g_fitting_e_c.get(0))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.d_g_fitting_e_c.get(1))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.d_g_fitting_e_c.get(2)))));
|
out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.d_g_fitting_c_e.get(0))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.d_g_fitting_c_e.get(1))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.d_g_fitting_c_e.get(2)))));
|
||||||
} else {
|
} else {
|
||||||
out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", "?1", "?2", "?3"));
|
out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", "?1", "?2", "?3"));
|
||||||
}
|
}
|
||||||
|
@ -1693,12 +1646,12 @@ public class PHDFileUtil extends AbstractLogOrReport {
|
||||||
//换行
|
//换行
|
||||||
out.append(System.lineSeparator());
|
out.append(System.lineSeparator());
|
||||||
//第四十八行数据
|
//第四十八行数据
|
||||||
for (int i = 0; i < bgAnalyseResult.S_ROI.size(); i++) {
|
for (int i = 0; i < bgAnalyseResult.D_ROI.size(); i++) {
|
||||||
String roi = String.valueOf(bgAnalyseResult.S_ROI.get(i));
|
String roi = String.valueOf(bgAnalyseResult.D_ROI.get(i));
|
||||||
String bStart = String.valueOf(bgAnalyseResult.S_ROI_B_Boundary_start.get(i));
|
String bStart = String.valueOf(bgAnalyseResult.D_ROI_B_Boundary_start.get(i));
|
||||||
String bStop = String.valueOf(bgAnalyseResult.S_ROI_B_Boundary_stop.get(i));
|
String bStop = String.valueOf(bgAnalyseResult.D_ROI_B_Boundary_stop.get(i));
|
||||||
String gStart = String.valueOf(bgAnalyseResult.S_ROI_G_Boundary_start.get(i));
|
String gStart = String.valueOf(bgAnalyseResult.D_ROI_G_Boundary_start.get(i));
|
||||||
String gStop = String.valueOf(bgAnalyseResult.S_ROI_G_Boundary_stop.get(i));
|
String gStop = String.valueOf(bgAnalyseResult.D_ROI_G_Boundary_stop.get(i));
|
||||||
out.append(rowFormat(dataFormat7, roi, bStart, bStop, gStart, gStop));
|
out.append(rowFormat(dataFormat7, roi, bStart, bStop, gStart, gStop));
|
||||||
//换行
|
//换行
|
||||||
out.append(System.lineSeparator());
|
out.append(System.lineSeparator());
|
||||||
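The loop above walks five index-aligned lists (D_ROI plus the beta/gamma boundary lists) in lockstep. A standalone sketch of that iteration is shown here with a defensive minimum-size bound, under the assumption that the lists are meant to have the same length; the real code iterates D_ROI.size() directly.

import java.util.Arrays;
import java.util.List;

// Sketch of the parallel-list ROI loop: all five lists are assumed to be
// index-aligned, so iterate over the smallest size to stay in bounds.
public class RoiRowsSketch {

    static void printRoiRows(List<Integer> roi,
                             List<Integer> bStart, List<Integer> bStop,
                             List<Integer> gStart, List<Integer> gStop) {
        int n = Math.min(roi.size(),
                Math.min(Math.min(bStart.size(), bStop.size()),
                         Math.min(gStart.size(), gStop.size())));
        String fmt = "ROI : %-7sBeta : %-2s to %-4s channels Gamma : %-4s to %-8s%n";
        for (int i = 0; i < n; i++) {
            System.out.printf(fmt, roi.get(i), bStart.get(i), bStop.get(i), gStart.get(i), gStop.get(i));
        }
    }

    public static void main(String[] args) {
        printRoiRows(Arrays.asList(1, 2), Arrays.asList(5, 9), Arrays.asList(8, 20),
                     Arrays.asList(1, 40), Arrays.asList(38, 120));
    }
}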
|
@ -1720,9 +1673,9 @@ public class PHDFileUtil extends AbstractLogOrReport {
|
||||||
//换行
|
//换行
|
||||||
out.append(System.lineSeparator());
|
out.append(System.lineSeparator());
|
||||||
//第五十一行数据
|
//第五十一行数据
|
||||||
if (bgAnalyseResult.g_b_fitting_c_e != null && CollectionUtils.isNotEmpty(bgAnalyseResult.g_b_fitting_c_e)) {
|
if (bgAnalyseResult.g_b_fitting_e_c != null && CollectionUtils.isNotEmpty(bgAnalyseResult.g_b_fitting_e_c)) {
|
||||||
if (bgAnalyseResult.g_b_fitting_c_e.size() > 0) {
|
if (bgAnalyseResult.g_b_fitting_e_c.size() > 0) {
|
||||||
out.append(rowFormat("energy to channel equation: CH(x) = (%s)+(%s)*x+(%s)x*x", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.g_b_fitting_c_e.get(0))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.g_b_fitting_c_e.get(1))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.g_b_fitting_c_e.get(2)))));
|
out.append(rowFormat("energy to channel equation: CH(x) = (%s)+(%s)*x+(%s)x*x", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.g_b_fitting_e_c.get(0))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.g_b_fitting_e_c.get(1))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.g_b_fitting_e_c.get(2)))));
|
||||||
} else {
|
} else {
|
||||||
out.append(rowFormat("energy to channel equation: CH(x) = (%s)+(%s)*x+(%s)x*x", "?1", "?2", "?3"));
|
out.append(rowFormat("energy to channel equation: CH(x) = (%s)+(%s)*x+(%s)x*x", "?1", "?2", "?3"));
|
||||||
}
|
}
|
||||||
|
@ -1732,9 +1685,9 @@ public class PHDFileUtil extends AbstractLogOrReport {
|
||||||
//换行
|
//换行
|
||||||
out.append(System.lineSeparator());
|
out.append(System.lineSeparator());
|
||||||
//第五十二行数据
|
//第五十二行数据
|
||||||
if (bgAnalyseResult.g_b_fitting_e_c != null && CollectionUtils.isNotEmpty(bgAnalyseResult.g_b_fitting_e_c)) {
|
if (bgAnalyseResult.g_b_fitting_c_e != null && CollectionUtils.isNotEmpty(bgAnalyseResult.g_b_fitting_c_e)) {
|
||||||
if (bgAnalyseResult.g_b_fitting_e_c.size() > 0) {
|
if (bgAnalyseResult.g_b_fitting_c_e.size() > 0) {
|
||||||
out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.g_b_fitting_e_c.get(0))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.g_b_fitting_e_c.get(1))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.g_b_fitting_e_c.get(2)))));
|
out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.g_b_fitting_c_e.get(0))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.g_b_fitting_c_e.get(1))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.g_b_fitting_c_e.get(2)))));
|
||||||
} else {
|
} else {
|
||||||
out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", "?1", "?2", "?3"));
|
out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", "?1", "?2", "?3"));
|
||||||
}
|
}
|
||||||
|
@ -1750,9 +1703,9 @@ public class PHDFileUtil extends AbstractLogOrReport {
|
||||||
//换行
|
//换行
|
||||||
out.append(System.lineSeparator());
|
out.append(System.lineSeparator());
|
||||||
//第五十四行数据
|
//第五十四行数据
|
||||||
if (bgAnalyseResult.g_g_fitting_c_e != null && CollectionUtils.isNotEmpty(bgAnalyseResult.g_g_fitting_c_e)) {
|
if (bgAnalyseResult.g_g_fitting_e_c != null && CollectionUtils.isNotEmpty(bgAnalyseResult.g_g_fitting_e_c)) {
|
||||||
if (bgAnalyseResult.g_g_fitting_c_e.size() > 0) {
|
if (bgAnalyseResult.g_g_fitting_e_c.size() > 0) {
|
||||||
out.append(rowFormat("energy to channel equation: CH(x) = (%s)+(%s)*x+(%s)x*x", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.g_g_fitting_c_e.get(0))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.g_g_fitting_c_e.get(1))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.g_g_fitting_c_e.get(2)))));
|
out.append(rowFormat("energy to channel equation: CH(x) = (%s)+(%s)*x+(%s)x*x", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.g_g_fitting_e_c.get(0))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.g_g_fitting_e_c.get(1))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.g_g_fitting_e_c.get(2)))));
|
||||||
} else {
|
} else {
|
||||||
out.append(rowFormat("energy to channel equation: CH(x) = (%s)+(%s)*x+(%s)x*x", "?1", "?2", "?3"));
|
out.append(rowFormat("energy to channel equation: CH(x) = (%s)+(%s)*x+(%s)x*x", "?1", "?2", "?3"));
|
||||||
}
|
}
|
||||||
|
@ -1762,9 +1715,9 @@ public class PHDFileUtil extends AbstractLogOrReport {
|
||||||
//换行
|
//换行
|
||||||
out.append(System.lineSeparator());
|
out.append(System.lineSeparator());
|
||||||
//第五十五行数据
|
//第五十五行数据
|
||||||
if (bgAnalyseResult.g_g_fitting_e_c != null && CollectionUtils.isNotEmpty(bgAnalyseResult.g_g_fitting_e_c)) {
|
if (bgAnalyseResult.g_g_fitting_c_e != null && CollectionUtils.isNotEmpty(bgAnalyseResult.g_g_fitting_c_e)) {
|
||||||
if (bgAnalyseResult.g_g_fitting_e_c.size() > 0) {
|
if (bgAnalyseResult.g_g_fitting_c_e.size() > 0) {
|
||||||
out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.g_g_fitting_e_c.get(0))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.g_g_fitting_e_c.get(1))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.g_g_fitting_e_c.get(2)))));
|
out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.g_g_fitting_c_e.get(0))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.g_g_fitting_c_e.get(1))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.g_g_fitting_c_e.get(2)))));
|
||||||
} else {
|
} else {
|
||||||
out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", "?1", "?2", "?3"));
|
out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", "?1", "?2", "?3"));
|
||||||
}
|
}
|
||||||
|
@ -1788,12 +1741,12 @@ public class PHDFileUtil extends AbstractLogOrReport {
|
||||||
//换行
|
//换行
|
||||||
out.append(System.lineSeparator());
|
out.append(System.lineSeparator());
|
||||||
//第五十八行数据
|
//第五十八行数据
|
||||||
for (int i = 0; i < bgAnalyseResult.S_ROI.size(); i++) {
|
for (int i = 0; i < bgAnalyseResult.G_ROI.size(); i++) {
|
||||||
String roi = String.valueOf(bgAnalyseResult.S_ROI.get(i));
|
String roi = String.valueOf(bgAnalyseResult.G_ROI.get(i));
|
||||||
String bStart = String.valueOf(bgAnalyseResult.S_ROI_B_Boundary_start.get(i));
|
String bStart = String.valueOf(bgAnalyseResult.G_ROI_B_Boundary_start.get(i));
|
||||||
String bStop = String.valueOf(bgAnalyseResult.S_ROI_B_Boundary_stop.get(i));
|
String bStop = String.valueOf(bgAnalyseResult.G_ROI_B_Boundary_stop.get(i));
|
||||||
String gStart = String.valueOf(bgAnalyseResult.S_ROI_G_Boundary_start.get(i));
|
String gStart = String.valueOf(bgAnalyseResult.G_ROI_G_Boundary_start.get(i));
|
||||||
String gStop = String.valueOf(bgAnalyseResult.S_ROI_G_Boundary_stop.get(i));
|
String gStop = String.valueOf(bgAnalyseResult.G_ROI_G_Boundary_stop.get(i));
|
||||||
out.append(rowFormat(dataFormat7, roi, bStart, bStop, gStart, gStop));
|
out.append(rowFormat(dataFormat7, roi, bStart, bStop, gStart, gStop));
|
||||||
//换行
|
//换行
|
||||||
out.append(System.lineSeparator());
|
out.append(System.lineSeparator());
|
||||||
|
@ -1812,8 +1765,8 @@ public class PHDFileUtil extends AbstractLogOrReport {
|
||||||
out.append(System.lineSeparator());
|
out.append(System.lineSeparator());
|
||||||
//第六十行数据
|
//第六十行数据
|
||||||
String dataFormat8 = "ROI : %-8sSample : %-8sGasBkgnd : %-7sDetBkgnd : %-8s";
|
String dataFormat8 = "ROI : %-8sSample : %-8sGasBkgnd : %-7sDetBkgnd : %-8s";
|
||||||
for (int i = 0; i < bgAnalyseResult.ROI.size(); i++) {
|
for (int i = 0; i < bgAnalyseResult.G_ROI_G_Boundary_stop.size(); i++) {
|
||||||
String roi = String.valueOf(bgAnalyseResult.ROI.get(i));
|
String roi = String.valueOf(bgAnalyseResult.G_ROI_G_Boundary_stop.get(i));
|
||||||
String sCts = String.valueOf(bgAnalyseResult.s_roi_cts.get(i));
|
String sCts = String.valueOf(bgAnalyseResult.s_roi_cts.get(i));
|
||||||
String gCts = String.valueOf(bgAnalyseResult.g_roi_cts.get(i));
|
String gCts = String.valueOf(bgAnalyseResult.g_roi_cts.get(i));
|
||||||
String dCts = String.valueOf(bgAnalyseResult.d_roi_cts.get(i));
|
String dCts = String.valueOf(bgAnalyseResult.d_roi_cts.get(i));
|
||||||
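dataFormat8 above relies on left-justified width specifiers (%-8s and similar) to keep the report columns aligned. A small illustration of how those specifiers behave, independent of the surrounding report code:

// Sketch: left-justified width specifiers pad each value to a fixed column
// width, so successive rows line up even when the numbers differ in length.
public class ColumnFormatSketch {
    public static void main(String[] args) {
        String fmt = "ROI : %-8sSample : %-8sGasBkgnd : %-7sDetBkgnd : %-8s%n";
        System.out.printf(fmt, 1, 1234, 56, 7);
        System.out.printf(fmt, 10, 9, 87654, 321);
    }
}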
|
@ -1833,22 +1786,19 @@ public class PHDFileUtil extends AbstractLogOrReport {
|
||||||
out.append(System.lineSeparator());
|
out.append(System.lineSeparator());
|
||||||
//换行
|
//换行
|
||||||
out.append(System.lineSeparator());
|
out.append(System.lineSeparator());
|
||||||
//第六十二行信息
|
// //第六十二行信息
|
||||||
out.append(titleFormat("Net counts and Lc per ROI%s", 5, StringPool.DOT));
|
// out.append(titleFormat("Net counts and Lc per ROI%s", 5, StringPool.DOT));
|
||||||
//换行
|
// //换行
|
||||||
out.append(System.lineSeparator());
|
// out.append(System.lineSeparator());
|
||||||
//换行
|
// //换行
|
||||||
out.append(System.lineSeparator());
|
// out.append(System.lineSeparator());
|
||||||
//第六十三行数据
|
//第六十三行数据
|
||||||
String dataFormat9 = "ROI : %-8sNet count : %-2s+/- %-20sLC : %-12s";
|
String dataFormat9 = "ROI : %-8sNet count : %-2s+/- %-20sLC : %-12s";
|
||||||
for (int i = 0; i < bgAnalyseResult.ROI.size(); i++) {
|
for (int i = 0; i < bgAnalyseResult.G_ROI_G_Boundary_stop.size(); i++) {
|
||||||
String roi = String.valueOf(bgAnalyseResult.ROI.get(i));
|
String roi = String.valueOf(bgAnalyseResult.G_ROI_G_Boundary_stop.get(i));
|
||||||
String net = String.valueOf(bgAnalyseResult.ROI_net_coutns.get(i));
|
String net = NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.ROI_net_coutns.get(i)));
|
||||||
String netErr = String.valueOf(bgAnalyseResult.ROI_net_coutns_err.get(i));
|
String netErr = NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.ROI_net_coutns_err.get(i)));
|
||||||
String lcCts = "0";
|
String lcCts = NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.LC_CTS.get(i)));
|
||||||
if (i > 0) {
|
|
||||||
lcCts = String.valueOf(bgAnalyseResult.LC_CTS.get(i-1));
|
|
||||||
}
|
|
||||||
out.append(rowFormat(dataFormat9, roi, net, netErr, lcCts));
|
out.append(rowFormat(dataFormat9, roi, net, netErr, lcCts));
|
||||||
//换行
|
//换行
|
||||||
out.append(System.lineSeparator());
|
out.append(System.lineSeparator());
|
||||||
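This hunk drops the old "if (i > 0) ... LC_CTS.get(i-1)" lookup and reads LC_CTS.get(i) for the same row, which keeps the Lc value aligned with the ROI it belongs to, assuming the lists are index-aligned. A standalone sketch of the per-row formatting, with illustrative list names only:

import java.util.Arrays;
import java.util.List;

// Sketch: net counts, uncertainty and Lc are read at the same index so each
// report row describes a single ROI. The list contents here are made up.
public class NetCountRowSketch {

    static void printRows(List<Double> net, List<Double> netErr, List<Double> lc) {
        String fmt = "ROI : %-8sNet count : %-2s+/- %-20sLC : %-12s%n";
        for (int i = 0; i < net.size(); i++) {
            System.out.printf(fmt, i + 1, net.get(i), netErr.get(i), lc.get(i));
        }
    }

    public static void main(String[] args) {
        printRows(Arrays.asList(120.5, 33.0), Arrays.asList(4.2, 1.1), Arrays.asList(12.7, 3.3));
    }
}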
|
@ -1867,18 +1817,12 @@ public class PHDFileUtil extends AbstractLogOrReport {
|
||||||
out.append(System.lineSeparator());
|
out.append(System.lineSeparator());
|
||||||
//第六十五行数据
|
//第六十五行数据
|
||||||
String dataFormat10 = "ROI : %-8sConc : %-2s+/- %-2smBq/m3 LC : %-2smBq/m3 MDC : %-2smBq/m3 ";
|
String dataFormat10 = "ROI : %-8sConc : %-2s+/- %-2smBq/m3 LC : %-2smBq/m3 MDC : %-2smBq/m3 ";
|
||||||
for (int i = 0; i < bgAnalyseResult.ROI.size(); i++) {
|
for (int i = 0; i < bgAnalyseResult.G_ROI_G_Boundary_stop.size(); i++) {
|
||||||
String roi = String.valueOf(bgAnalyseResult.ROI.get(i));
|
String roi = String.valueOf(bgAnalyseResult.G_ROI_G_Boundary_stop.get(i));
|
||||||
String conc = String.valueOf(bgAnalyseResult.ROI_con_uncer.get(i));
|
String conc = NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.ROI_con_uncer.get(i)));
|
||||||
String concErr = String.valueOf(bgAnalyseResult.ROI_con_uncer_err.get(i));
|
String concErr = NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.ROI_con_uncer_err.get(i)));
|
||||||
String lc = "0";
|
String lc = NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.LC.get(i)));
|
||||||
if (i > 0) {
|
String mdc = NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.MDC.get(i)));
|
||||||
lc = String.valueOf(bgAnalyseResult.LC.get(i-1));
|
|
||||||
}
|
|
||||||
String mdc = "0";
|
|
||||||
if (i > 0) {
|
|
||||||
mdc = String.valueOf(bgAnalyseResult.MDC.get(i-1));
|
|
||||||
}
|
|
||||||
out.append(rowFormat(dataFormat10, roi, conc, concErr, lc, mdc));
|
out.append(rowFormat(dataFormat10, roi, conc, concErr, lc, mdc));
|
||||||
//换行
|
//换行
|
||||||
out.append(System.lineSeparator());
|
out.append(System.lineSeparator());
|
||||||
|
@ -1896,7 +1840,7 @@ public class PHDFileUtil extends AbstractLogOrReport {
|
||||||
//换行
|
//换行
|
||||||
out.append(System.lineSeparator());
|
out.append(System.lineSeparator());
|
||||||
//第六十七行数据
|
//第六十七行数据
|
||||||
out.append(rowFormat("XE-135 Conc : %-9s+/- %-8smBq/m3 LC : %-9smBq/m3 MDC : %-9smBq/m3 Nid flag : %-6s", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.Xe135_con)),
|
out.append(rowFormat("XE-135 Conc : %-9s +/- %-8smBq/m3 LC : %-9smBq/m3 MDC : %-9s mBq/m3 Nid flag : %-6s", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.Xe135_con)),
|
||||||
NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.Xe135_uncer)), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.LC_Xe135)),
|
NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.Xe135_uncer)), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.LC_Xe135)),
|
||||||
NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.MDC_Xe135)), String.valueOf(bgAnalyseResult.XE_135_NID_FLAG)));
|
NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.MDC_Xe135)), String.valueOf(bgAnalyseResult.XE_135_NID_FLAG)));
|
||||||
//换行
|
//换行
|
||||||
|
@ -1904,29 +1848,29 @@ public class PHDFileUtil extends AbstractLogOrReport {
|
||||||
//换行
|
//换行
|
||||||
out.append(System.lineSeparator());
|
out.append(System.lineSeparator());
|
||||||
//第六十八行数据
|
//第六十八行数据
|
||||||
out.append(rowFormat("XE-131M Conc : %-9s+/- %-8smBq/m3 LC : %-9smBq/m3 MDC : %-9smBq/m3 Nid flag : %-6s", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.Xe131m_con)),
|
out.append(rowFormat("XE-131M Conc : %-9s +/- %-8smBq/m3 LC : %-9smBq/m3 MDC : %-9s mBq/m3 Nid flag : %-6s", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.Xe131m_con)),
|
||||||
NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.Xe131m_uncer)), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.LC_Xe131m)),
|
NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.Xe131m_uncer)), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.LC_Xe131m)),
|
||||||
NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.MDC_Xe131m)), String.valueOf(bgAnalyseResult.XE_131m_NID_FLAG)));
|
NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.MDC_Xe131m)), String.valueOf(bgAnalyseResult.XE_131m_NID_FLAG)));
|
||||||
//换行
|
//换行
|
||||||
out.append(System.lineSeparator());
|
out.append(System.lineSeparator());
|
||||||
//换行
|
//换行
|
||||||
out.append(System.lineSeparator());
|
out.append(System.lineSeparator());
|
||||||
//第六十九行数据
|
|
||||||
out.append(rowFormat("XE-133 Conc : %-9s+/- %-8smBq/m3 LC : %-9smBq/m3 MDC : %-9smBq/m3 Nid flag : %-6s", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.Xe133_con)),
|
|
||||||
NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.Xe133_uncer)), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.LC_Xe133)),
|
|
||||||
NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.MDC_Xe133)), String.valueOf(bgAnalyseResult.XE_133_NID_FLAG)));
|
|
||||||
//换行
|
|
||||||
out.append(System.lineSeparator());
|
|
||||||
//换行
|
|
||||||
out.append(System.lineSeparator());
|
|
||||||
//第七十行数据
|
//第七十行数据
|
||||||
out.append(rowFormat("XE-133M Conc : %-9s+/- %-8smBq/m3 LC : %-9smBq/m3 MDC : %-9smBq/m3 Nid flag : %-6s", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.Xe133m_con)),
|
out.append(rowFormat("XE-133M Conc : %-9s +/- %-8smBq/m3 LC : %-9smBq/m3 MDC : %-9s mBq/m3 Nid flag : %-6s", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.Xe133m_con)),
|
||||||
NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.Xe133m_uncer)), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.LC_Xe133m)),
|
NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.Xe133m_uncer)), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.LC_Xe133m)),
|
||||||
NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.MDC_Xe133m)), String.valueOf(bgAnalyseResult.XE_133m_NID_FLAG)));
|
NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.MDC_Xe133m)), String.valueOf(bgAnalyseResult.XE_133m_NID_FLAG)));
|
||||||
//换行
|
//换行
|
||||||
out.append(System.lineSeparator());
|
out.append(System.lineSeparator());
|
||||||
//换行
|
//换行
|
||||||
out.append(System.lineSeparator());
|
out.append(System.lineSeparator());
|
||||||
|
//第六十九行数据
|
||||||
|
out.append(rowFormat("XE-133 Conc : %-9s +/- %-8smBq/m3 LC : %-9smBq/m3 MDC : %-9s mBq/m3 Nid flag : %-6s", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.Xe133_con)),
|
||||||
|
NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.Xe133_uncer)), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.LC_Xe133)),
|
||||||
|
NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.MDC_Xe133)), String.valueOf(bgAnalyseResult.XE_133_NID_FLAG)));
|
||||||
|
//换行
|
||||||
|
out.append(System.lineSeparator());
|
||||||
|
//换行
|
||||||
|
out.append(System.lineSeparator());
|
||||||
//第七十一行信息
|
//第七十一行信息
|
||||||
out.append(titleFormat("%s NCC analysis finished %s", 40, StringPool.DOT, StringPool.DOT));
|
out.append(titleFormat("%s NCC analysis finished %s", 40, StringPool.DOT, StringPool.DOT));
|
||||||
//换行
|
//换行
|
||||||
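The four xenon result rows above use the same template with only the isotope-specific values changing, and this commit moves the XE-133 row after XE-133M while widening the spacing. A hedged sketch of driving all four rows from one format string follows; the parameter values are made up and the method is not the project's BgAnalyseResult API.

// Sketch only: one template drives all four xenon rows. The numeric values
// here are placeholders; the real values come from the analysis result.
public class XenonRowSketch {

    static void printRow(String isotope, double conc, double uncert, double lc, double mdc, int nidFlag) {
        System.out.printf(
            "%-8s Conc : %-9s +/- %-8smBq/m3 LC : %-9smBq/m3 MDC : %-9s mBq/m3 Nid flag : %-6s%n",
            isotope, conc, uncert, lc, mdc, nidFlag);
    }

    public static void main(String[] args) {
        printRow("XE-135", 0.42, 0.05, 0.11, 0.20, 1);
        printRow("XE-131M", 0.08, 0.02, 0.06, 0.12, 0);
        printRow("XE-133M", 0.03, 0.01, 0.05, 0.10, 0);
        printRow("XE-133", 1.75, 0.12, 0.09, 0.18, 1);
    }
}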
|
|
|
@ -226,21 +226,21 @@ public class SpectrumAnalysesController {
|
||||||
}
|
}
|
||||||
|
|
||||||
@PostMapping("saveToHTML")
|
@PostMapping("saveToHTML")
|
||||||
public void saveToHTML(HttpServletResponse response,
|
public void saveToHTML(HttpServletRequest request, HttpServletResponse response,
|
||||||
@RequestBody BgDataAnlyseResultIn anlyseResultIn){
|
@RequestBody BgDataAnlyseResultIn anlyseResultIn){
|
||||||
spectrumAnalysisService.saveToHTML(anlyseResultIn, response);
|
spectrumAnalysisService.saveToHTML(anlyseResultIn, request, response);
|
||||||
}
|
}
|
||||||
|
|
||||||
@PostMapping("saveToExcel")
|
@PostMapping("saveToExcel")
|
||||||
public void saveToExcel(HttpServletResponse response,
|
public void saveToExcel(HttpServletRequest request, HttpServletResponse response,
|
||||||
@RequestBody BgDataAnlyseResultIn anlyseResultIn){
|
@RequestBody BgDataAnlyseResultIn anlyseResultIn){
|
||||||
spectrumAnalysisService.saveToExcel(anlyseResultIn, response);
|
spectrumAnalysisService.saveToExcel(anlyseResultIn, request, response);
|
||||||
}
|
}
|
||||||
|
|
||||||
@PostMapping("saveToTxt")
|
@PostMapping("saveToTxt")
|
||||||
public void saveToTxt(HttpServletResponse response,
|
public void saveToTxt(HttpServletRequest request, HttpServletResponse response,
|
||||||
@RequestBody BgDataAnlyseResultIn anlyseResultIn){
|
@RequestBody BgDataAnlyseResultIn anlyseResultIn){
|
||||||
spectrumAnalysisService.saveToTxt(anlyseResultIn, response);
|
spectrumAnalysisService.saveToTxt(anlyseResultIn, request, response);
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
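The three export endpoints now receive the HttpServletRequest alongside the response, so the service layer can see request-scoped data (for example the logged-in user) when producing the file. A minimal sketch of that controller pattern, using hypothetical ExportService and ExportParams names rather than the project's actual types:

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RestController;

// Sketch: pass both request and response through to the service layer.
@RestController
public class ExportControllerSketch {

    private final ExportService exportService;

    public ExportControllerSketch(ExportService exportService) {
        this.exportService = exportService;
    }

    @PostMapping("saveToTxt")
    public void saveToTxt(HttpServletRequest request, HttpServletResponse response,
                          @RequestBody ExportParams params) {
        exportService.saveToTxt(params, request, response);
    }

    interface ExportService {
        void saveToTxt(ExportParams params, HttpServletRequest request, HttpServletResponse response);
    }

    static class ExportParams { /* request body fields omitted */ }
}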
|
|
|
@ -13,6 +13,14 @@ import java.util.List;
|
||||||
public class BetaDataFile implements Serializable {
|
public class BetaDataFile implements Serializable {
|
||||||
|
|
||||||
//基本数据信息
|
//基本数据信息
|
||||||
|
private String sampleTmpPath;
|
||||||
|
|
||||||
|
private String detTmpPath;
|
||||||
|
|
||||||
|
private String gasTmpPath;
|
||||||
|
|
||||||
|
private String qcTmpPath;
|
||||||
|
|
||||||
private String sampleFilePathName;
|
private String sampleFilePathName;
|
||||||
|
|
||||||
private String sampleFileName;
|
private String sampleFileName;
|
||||||
|
|
|
@ -580,7 +580,6 @@
|
||||||
WHERE
|
WHERE
|
||||||
ROI_CHANNELS.SAMPLE_ID = SAMPLE_DATA.SAMPLE_ID
|
ROI_CHANNELS.SAMPLE_ID = SAMPLE_DATA.SAMPLE_ID
|
||||||
AND ROI_CHANNELS.IDANALYSIS = #{idAnalysis}
|
AND ROI_CHANNELS.IDANALYSIS = #{idAnalysis}
|
||||||
AND ROI_CHANNELS.SAMPLE_ID = #{sampleId}
|
|
||||||
ORDER BY ROI_CHANNELS.ROI ASC
|
ORDER BY ROI_CHANNELS.ROI ASC
|
||||||
</select>
|
</select>
|
||||||
|
|
||||||
|
|
|
@ -80,9 +80,9 @@ public interface ISpectrumAnalysisService {
|
||||||
|
|
||||||
Result saveToDB(BgDataAnlyseResultIn anlyseResultIn, HttpServletRequest request);
|
Result saveToDB(BgDataAnlyseResultIn anlyseResultIn, HttpServletRequest request);
|
||||||
|
|
||||||
void saveToHTML(BgDataAnlyseResultIn anlyseResultIn, HttpServletResponse response);
|
void saveToHTML(BgDataAnlyseResultIn anlyseResultIn, HttpServletRequest request, HttpServletResponse response);
|
||||||
|
|
||||||
void saveToExcel(BgDataAnlyseResultIn anlyseResultIn, HttpServletResponse response);
|
void saveToExcel(BgDataAnlyseResultIn anlyseResultIn, HttpServletRequest request, HttpServletResponse response);
|
||||||
|
|
||||||
void saveToTxt(BgDataAnlyseResultIn anlyseResultIn, HttpServletResponse response);
|
void saveToTxt(BgDataAnlyseResultIn anlyseResultIn, HttpServletRequest request, HttpServletResponse response);
|
||||||
}
|
}
|
||||||
|
|
|
@ -157,17 +157,20 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
|
||||||
}
|
}
|
||||||
String pathName = ftpUtil.getFtpRootPath() + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + sampleFilePath.substring(0, sampleFilePath.lastIndexOf(StringPool.SLASH));
|
String pathName = ftpUtil.getFtpRootPath() + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + sampleFilePath.substring(0, sampleFilePath.lastIndexOf(StringPool.SLASH));
|
||||||
String fileName = sampleFilePath.substring(sampleFilePath.lastIndexOf(StringPool.SLASH) + 1);
|
String fileName = sampleFilePath.substring(sampleFilePath.lastIndexOf(StringPool.SLASH) + 1);
|
||||||
|
// 读取文件内容
|
||||||
|
// 调用加载文件的方法 传入文件路径,文件名称,全局变量phd,响应结果result
|
||||||
boolean flag = gammaFileUtil.loadFile(pathName, fileName, phd, result);
|
boolean flag = gammaFileUtil.loadFile(pathName, fileName, phd, result);
|
||||||
|
// 如果文件加载失败 返回失败原因
|
||||||
if (!flag) {
|
if (!flag) {
|
||||||
return result;
|
return result;
|
||||||
}
|
}
|
||||||
// 声明基础数组信息
|
// 加载phd数据所需的lc,scac,baseline数据
|
||||||
if (dbName.equals("auto")) {
|
if (dbName.equals("auto")) {
|
||||||
gammaFileUtil.SetBaseInfo(phd, "RNAUTO");
|
gammaFileUtil.SetBaseInfo(phd, "RNAUTO");
|
||||||
} else if (dbName.equals("man")) {
|
} else if (dbName.equals("man")) {
|
||||||
gammaFileUtil.SetBaseInfo(phd, userName);
|
gammaFileUtil.SetBaseInfo(phd, userName);
|
||||||
}
|
}
|
||||||
// 从数据库中读取相关信息
|
// 从数据库中读取phd其他相关信息
|
||||||
boolean bRet = getResultFromDB(dbName, userName, sampleId, phd, result);
|
boolean bRet = getResultFromDB(dbName, userName, sampleId, phd, result);
|
||||||
if (!redisUtil.hasKey(userName+"-"+phd.getHeader().getSystem_type())) {
|
if (!redisUtil.hasKey(userName+"-"+phd.getHeader().getSystem_type())) {
|
||||||
// 查询当前用户关联的核素信息
|
// 查询当前用户关联的核素信息
|
||||||
|
@ -177,17 +180,24 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
|
||||||
if (CollectionUtils.isEmpty(userLib)) {
|
if (CollectionUtils.isEmpty(userLib)) {
|
||||||
userLib = defaultNuclideSpectrumService.findNuclidesByUserName("admin", phd.getHeader().getSystem_type().toUpperCase());
|
userLib = defaultNuclideSpectrumService.findNuclidesByUserName("admin", phd.getHeader().getSystem_type().toUpperCase());
|
||||||
}
|
}
|
||||||
|
userLib = userLib.stream().sorted().collect(Collectors.toList());
|
||||||
Map<String, NuclideLines> nuclideMap = GetNuclideLines(userLib);
|
Map<String, NuclideLines> nuclideMap = GetNuclideLines(userLib);
|
||||||
redisUtil.set(userName+"-"+phd.getHeader().getSystem_type(), nuclideMap);
|
redisUtil.set(userName+"-"+phd.getHeader().getSystem_type(), nuclideMap);
|
||||||
}
|
}
|
||||||
|
// 判断数据库信息是否读取正常
|
||||||
if (!bRet) {
|
if (!bRet) {
|
||||||
return result;
|
return result;
|
||||||
}
|
}
|
||||||
|
gammaFileUtil.Qcstate(phd);
|
||||||
key = fileName + "-" + userName;
|
key = fileName + "-" + userName;
|
||||||
} else {
|
} else {
|
||||||
String pathName = ftpUtil.getFtpRootPath() + spectrumPathProperties.getUploadPath() + StringPool.SLASH + userName;
|
String pathName = ftpUtil.getFtpRootPath() + spectrumPathProperties.getUploadPath() + StringPool.SLASH + userName;
|
||||||
String fileName = samfileName;
|
String fileName = samfileName;
|
||||||
boolean flag = gammaFileUtil.loadFile(pathName, fileName, phd, result);
|
// 加载文件内容
|
||||||
|
boolean bRet = gammaFileUtil.loadFile(pathName, fileName, phd, result);
|
||||||
|
if (!bRet) {
|
||||||
|
return result;
|
||||||
|
}
|
||||||
if (!redisUtil.hasKey(userName+"-"+phd.getHeader().getSystem_type())) {
|
if (!redisUtil.hasKey(userName+"-"+phd.getHeader().getSystem_type())) {
|
||||||
// 查询当前用户关联的核素信息
|
// 查询当前用户关联的核素信息
|
||||||
List<String> userLib = new LinkedList<>();
|
List<String> userLib = new LinkedList<>();
|
||||||
|
@ -196,12 +206,10 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
|
||||||
if (CollectionUtils.isEmpty(userLib)) {
|
if (CollectionUtils.isEmpty(userLib)) {
|
||||||
userLib = defaultNuclideSpectrumService.findNuclidesByUserName("admin", phd.getHeader().getSystem_type().toUpperCase());
|
userLib = defaultNuclideSpectrumService.findNuclidesByUserName("admin", phd.getHeader().getSystem_type().toUpperCase());
|
||||||
}
|
}
|
||||||
Map<String, NuclideLines> nuclideMap = GetNuclideLines(userLib);
|
userLib = userLib.stream().sorted().collect(Collectors.toList());
|
||||||
|
Map<String, NuclideLines> nuclideMap = GetNuclideLinesLocal(userLib);
|
||||||
redisUtil.set(userName+"-"+phd.getHeader().getSystem_type(), nuclideMap);
|
redisUtil.set(userName+"-"+phd.getHeader().getSystem_type(), nuclideMap);
|
||||||
}
|
}
|
||||||
if (!flag) {
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
key = fileName + "-" + userName;
|
key = fileName + "-" + userName;
|
||||||
}
|
}
|
||||||
phdCache.put(key, phd);
|
phdCache.put(key, phd);
|
||||||
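Both branches above follow the same cache-aside shape: look for the per-user nuclide map under the key userName + "-" + systemType, and if it is absent, sort the user's nuclide list, build the lines map and store it. A standalone sketch of that shape, using an in-memory map where the real code uses redisUtil and reducing the "nuclide lines" value to a plain string:

import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Function;
import java.util.stream.Collectors;

// Sketch of the cache-aside pattern: build the per-user nuclide map once,
// then reuse it on later requests for the same user and system type.
public class NuclideCacheSketch {

    private final Map<String, Map<String, String>> cache = new ConcurrentHashMap<>();

    Map<String, String> nuclideLines(String userName, String systemType, List<String> userLib) {
        String key = userName + "-" + systemType;
        return cache.computeIfAbsent(key, k ->
                userLib.stream()
                       .sorted()
                       .collect(Collectors.toMap(Function.identity(), name -> "lines for " + name)));
    }

    public static void main(String[] args) {
        NuclideCacheSketch sketch = new NuclideCacheSketch();
        System.out.println(sketch.nuclideLines("analyst", "B", Arrays.asList("Xe-133", "Cs-137")));
        // Second call hits the cached entry instead of rebuilding it.
        System.out.println(sketch.nuclideLines("analyst", "B", Collections.emptyList()));
    }
}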
|
@ -480,6 +488,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
|
||||||
if (CollectionUtils.isEmpty(userLib)) {
|
if (CollectionUtils.isEmpty(userLib)) {
|
||||||
userLib = defaultNuclideSpectrumService.findNuclidesByUserName("admin", phd.getHeader().getSystem_type().toUpperCase());
|
userLib = defaultNuclideSpectrumService.findNuclidesByUserName("admin", phd.getHeader().getSystem_type().toUpperCase());
|
||||||
}
|
}
|
||||||
|
userLib = userLib.stream().sorted().collect(Collectors.toList());
|
||||||
Map<String, NuclideLines> nuclideMap = GetNuclideLines(userLib);
|
Map<String, NuclideLines> nuclideMap = GetNuclideLines(userLib);
|
||||||
redisUtil.set(userName+"-"+phd.getHeader().getSystem_type(), nuclideMap);
|
redisUtil.set(userName+"-"+phd.getHeader().getSystem_type(), nuclideMap);
|
||||||
}
|
}
|
||||||
|
@ -845,7 +854,8 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
|
||||||
if (CollectionUtils.isEmpty(userLib)) {
|
if (CollectionUtils.isEmpty(userLib)) {
|
||||||
userLib = defaultNuclideSpectrumService.findNuclidesByUserName("admin", phd.getHeader().getSystem_type().toUpperCase());
|
userLib = defaultNuclideSpectrumService.findNuclidesByUserName("admin", phd.getHeader().getSystem_type().toUpperCase());
|
||||||
}
|
}
|
||||||
Map<String, NuclideLines> nuclideMap = GetNuclideLines(userLib);
|
userLib = userLib.stream().sorted().collect(Collectors.toList());
|
||||||
|
Map<String, NuclideLines> nuclideMap = GetNuclideLinesLocal(userLib);//GetNuclideLinesLocal(userLib);
|
||||||
redisUtil.set(userName+"-"+phd.getHeader().getSystem_type(), nuclideMap);
|
redisUtil.set(userName+"-"+phd.getHeader().getSystem_type(), nuclideMap);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -880,6 +890,12 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
|
||||||
LoginUser loginUser = (LoginUser) SecurityUtils.getSubject().getPrincipal();
|
LoginUser loginUser = (LoginUser) SecurityUtils.getSubject().getPrincipal();
|
||||||
String username = loginUser.getUsername();
|
String username = loginUser.getUsername();
|
||||||
String key = fileName + StrUtil.DASHED + username;
|
String key = fileName + StrUtil.DASHED + username;
|
||||||
|
Cache<String, PHDFile> phdCache = localCache.getPHDCache();
|
||||||
|
PHDFile phdFile = phdCache.getIfPresent(key);
|
||||||
|
if (StringUtils.isNotBlank(phdFile.getTmpFilePath())) {
|
||||||
|
File file = new File(phdFile.getTmpFilePath());
|
||||||
|
file.delete();
|
||||||
|
}
|
||||||
// 删除指定key的Cache
|
// 删除指定key的Cache
|
||||||
localCache.deletePHDCache(key);
|
localCache.deletePHDCache(key);
|
||||||
}
|
}
|
||||||
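The cache-removal path above now also deletes the temporary file that was downloaded for the spectrum, but it reads phdFile.getTmpFilePath() without checking whether the cache still holds an entry for the key. A hedged sketch of a null-safe version of that cleanup; Files.deleteIfExists also tolerates a path that no longer exists.

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;

// Sketch: delete a cached spectrum's temporary file only when the cache entry
// and its path are actually present. CacheEntry is a stand-in for PHDFile.
public class TmpFileCleanupSketch {

    static class CacheEntry {
        String tmpFilePath;
    }

    static void cleanup(CacheEntry entry) {
        if (entry == null || entry.tmpFilePath == null || entry.tmpFilePath.isEmpty()) {
            return; // nothing cached for this key, or no file was downloaded
        }
        try {
            Files.deleteIfExists(Paths.get(entry.tmpFilePath));
        } catch (IOException e) {
            // A failed delete should not break cache removal; log and continue.
            System.err.println("temp file cleanup failed: " + e.getMessage());
        }
    }

    public static void main(String[] args) {
        cleanup(null); // safe no-op
        CacheEntry entry = new CacheEntry();
        entry.tmpFilePath = System.getProperty("java.io.tmpdir") + "/does-not-exist.phd";
        cleanup(entry); // deleteIfExists simply returns false when the file is absent
    }
}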
|
@ -1036,13 +1052,6 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
|
||||||
phd.setXmlFilePath(parameterProperties.getFilePath());
|
phd.setXmlFilePath(parameterProperties.getFilePath());
|
||||||
// 获取当前角色的颜色配置
|
// 获取当前角色的颜色配置
|
||||||
Map<String, String> colorMap = sysUserColorService.initColor(userName);
|
Map<String, String> colorMap = sysUserColorService.initColor(userName);
|
||||||
// // 查询当前用户关联的核素信息
|
|
||||||
// List<String> nuclides = new LinkedList<>();
|
|
||||||
// // 从postgreSql中获取当前用户关注的核素信息 如果当前用户没有 则返回管理员的
|
|
||||||
// nuclides = defaultNuclideSpectrumService.findNuclidesByUserName(userName, phd.getHeader().getSystem_type().toUpperCase());
|
|
||||||
// if (CollectionUtils.isEmpty(nuclides)) {
|
|
||||||
// nuclides = defaultNuclideSpectrumService.findNuclidesByUserName("admin", phd.getHeader().getSystem_type().toUpperCase());
|
|
||||||
// }
|
|
||||||
// 分析文件数据
|
// 分析文件数据
|
||||||
int flag = gammaFileUtil.AnalyseData(phd);
|
int flag = gammaFileUtil.AnalyseData(phd);
|
||||||
if (flag == 0) {
|
if (flag == 0) {
|
||||||
|
@ -1063,10 +1072,11 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
|
||||||
Map<String, NuclideLines> nuclideLinesMap = (Map<String, NuclideLines>) redisUtil.get(userName+"-"+phd.getHeader().getSystem_type());//GetNuclideLines(nuclides);
|
Map<String, NuclideLines> nuclideLinesMap = (Map<String, NuclideLines>) redisUtil.get(userName+"-"+phd.getHeader().getSystem_type());//GetNuclideLines(nuclides);
|
||||||
gammaFileUtil.AnalyseSpectrum(phd, nuclideLinesMap);
|
gammaFileUtil.AnalyseSpectrum(phd, nuclideLinesMap);
|
||||||
// 重新分析各峰值对应的核素信息
|
// 重新分析各峰值对应的核素信息
|
||||||
// gammaFileUtil.NuclidesIdent(phd, nuclideLinesMap);
|
gammaFileUtil.NuclidesIdent(phd, nuclideLinesMap);
|
||||||
Map<String, Object> map = new HashMap<>();
|
Map<String, Object> map = new HashMap<>();
|
||||||
gammaFileUtil.UpdateChart(phd, map, colorMap);
|
gammaFileUtil.UpdateChart(phd, map, colorMap);
|
||||||
// 更新 ‘QC Flags’ 状态
|
// 更新 ‘QC Flags’ 状态
|
||||||
|
phd.getQcItems().clear();
|
||||||
List<String> qcstate = gammaFileUtil.Qcstate(phd);
|
List<String> qcstate = gammaFileUtil.Qcstate(phd);
|
||||||
map.put("QCFlag", qcstate);
|
map.put("QCFlag", qcstate);
|
||||||
map.put("bAnalyed", phd.isBAnalyed());
|
map.put("bAnalyed", phd.isBAnalyed());
|
||||||
|
@ -1111,6 +1121,55 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
|
||||||
return mapLines;
|
return mapLines;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public Map<String, NuclideLines> GetNuclideLinesLocal(List<String> nuclideList) {
|
||||||
|
Map<String, NuclideLines> mapLines = new HashMap<>();
|
||||||
|
if (nuclideList.size() < 1) {
|
||||||
|
return mapLines;
|
||||||
|
}
|
||||||
|
//需要查询的两个表的名称
|
||||||
|
String table_lines = "lines";
|
||||||
|
String table_lib = "libdata";
|
||||||
|
// 建立数据库连接
|
||||||
|
Connection conn = dbUtil.openDB();
|
||||||
|
Statement statement = null;
|
||||||
|
try {
|
||||||
|
statement = conn.createStatement();
|
||||||
|
for (String name : nuclideList) {
|
||||||
|
NuclideLines nlines = new NuclideLines();
|
||||||
|
//根据核素名称查询相关核素信息
|
||||||
|
String sql1 = "select fullname, energy, energy_uncert, yield, yield_uncert, key_flag from "+table_lines+" where name = '"+name+"' Order by energy";
|
||||||
|
ResultSet resultSet = statement.executeQuery(sql1);
|
||||||
|
int j=0;
|
||||||
|
while (resultSet.next()) {
|
||||||
|
nlines.getFullNames().add(resultSet.getString("fullname"));
|
||||||
|
nlines.getVenergy().add(resultSet.getDouble("energy"));
|
||||||
|
nlines.getVuncertE().add(resultSet.getDouble("energy_uncert"));
|
||||||
|
nlines.getVyield().add(resultSet.getDouble("yield") / 100);
|
||||||
|
nlines.getVuncertY().add(resultSet.getDouble("yield_uncert"));
|
||||||
|
if (Objects.nonNull(resultSet.getInt("key_flag")) && resultSet.getInt("key_flag") > 0) {
|
||||||
|
nlines.key_flag = j;
|
||||||
|
nlines.maxYeildIdx = j;
|
||||||
|
}
|
||||||
|
++j;
|
||||||
|
}
|
||||||
|
mapLines.put(name, nlines);
|
||||||
|
}
|
||||||
|
String names = "'" + String.join("','", nuclideList) + "'";
|
||||||
|
String sql2 = "select name, halflife from "+table_lib+" where name in ("+names+")";
|
||||||
|
ResultSet executeQuery = statement.executeQuery(sql2);
|
||||||
|
while (executeQuery.next()) {
|
||||||
|
NuclideLines nuclideLines = mapLines.get(executeQuery.getString("name"));
|
||||||
|
nuclideLines.setHalflife(Double.valueOf(executeQuery.getDouble("halflife")) == null ? 0 : executeQuery.getDouble("halflife") * 86400);// 将天转换成秒
|
||||||
|
mapLines.put(executeQuery.getString("name"), nuclideLines);
|
||||||
|
}
|
||||||
|
} catch (SQLException e) {
|
||||||
|
throw new RuntimeException(e);
|
||||||
|
} finally {
|
||||||
|
dbUtil.close(statement, conn);
|
||||||
|
}
|
||||||
|
return mapLines;
|
||||||
|
}
|
||||||
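GetNuclideLinesLocal above builds its SQL by concatenating the nuclide name into the statement text. That works for trusted, fixed library names, but a parameterised query avoids quoting problems; a minimal sketch of the same per-nuclide lookup with a PreparedStatement is shown here (table and column names copied from the method above, connection handling and the remaining columns elided).

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;

// Sketch: the per-nuclide "lines" query with a bound parameter instead of
// string concatenation. The caller supplies an open Connection.
public class NuclideLineQuerySketch {

    static List<Double> energiesFor(Connection conn, String nuclideName) throws SQLException {
        String sql = "select energy from lines where name = ? order by energy";
        List<Double> energies = new ArrayList<>();
        try (PreparedStatement ps = conn.prepareStatement(sql)) {
            ps.setString(1, nuclideName);
            try (ResultSet rs = ps.executeQuery()) {
                while (rs.next()) {
                    energies.add(rs.getDouble("energy"));
                }
            }
        }
        return energies;
    }
}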
|
|
||||||
@Override
|
@Override
|
||||||
public Result InteractiveTool(Integer sampleId, String fileName, HttpServletRequest request) {
|
public Result InteractiveTool(Integer sampleId, String fileName, HttpServletRequest request) {
|
||||||
Result result = new Result();
|
Result result = new Result();
|
||||||
|
@ -3677,24 +3736,11 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
|
||||||
if (StringUtils.isBlank(reportPath)) {
|
if (StringUtils.isBlank(reportPath)) {
|
||||||
throw new RuntimeException("The automatic handler generated report does not exist!");
|
throw new RuntimeException("The automatic handler generated report does not exist!");
|
||||||
}
|
}
|
||||||
String pathName = ftpUtil.getFtpRootPath() + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + reportPath.substring(0, reportPath.lastIndexOf(StringPool.SLASH));
|
String pathFileName = ftpUtil.getFtpRootPath() + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + reportPath + ".txt";
|
||||||
String fileName = reportPath.substring(reportPath.lastIndexOf(StringPool.SLASH) + 1) + ".txt";
|
|
||||||
// 连接ftp
|
|
||||||
FTPClient ftpClient = ftpUtil.LoginFTP();
|
|
||||||
if (Objects.isNull(ftpClient)) {
|
|
||||||
throw new RuntimeException("ftp connection failed");
|
|
||||||
}
|
|
||||||
InputStream inputStream = null;
|
InputStream inputStream = null;
|
||||||
ServletOutputStream outputStream = null;
|
ServletOutputStream outputStream = null;
|
||||||
try {
|
try {
|
||||||
// 切换被动模式
|
inputStream = ftpUtil.downloadFileStream(pathFileName);
|
||||||
ftpClient.enterLocalPassiveMode();
|
|
||||||
ftpClient.setFileType(FTPClient.BINARY_FILE_TYPE);
|
|
||||||
// 设置编码,当文件中存在中文且上传后文件乱码时可使用此配置项
|
|
||||||
ftpClient.setControlEncoding("UTF-8");
|
|
||||||
ftpClient.setFileTransferMode(FTPClient.STREAM_TRANSFER_MODE);
|
|
||||||
ftpClient.changeWorkingDirectory(pathName);
|
|
||||||
inputStream = ftpClient.retrieveFileStream(fileName);
|
|
||||||
if (Objects.nonNull(inputStream)) {
|
if (Objects.nonNull(inputStream)) {
|
||||||
outputStream = response.getOutputStream();
|
outputStream = response.getOutputStream();
|
||||||
byte[] buffer = new byte[1024];
|
byte[] buffer = new byte[1024];
|
||||||
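The report and log endpoints in this file now delegate the FTP handling to ftpUtil.downloadFileStream(pathFileName) instead of driving an FTPClient by hand. The surrounding stream copy can also be written with try-with-resources so both streams are closed even on error; a generic sketch of that copy follows (the input stream can come from any source, this is not a guaranteed ftpUtil signature).

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

// Sketch: copy a downloaded stream to the HTTP response output stream.
// Both streams are closed by try-with-resources, including on exceptions.
public class StreamCopySketch {

    static void copy(InputStream in, OutputStream out) throws IOException {
        try (InputStream source = in; OutputStream target = out) {
            byte[] buffer = new byte[1024];
            int read;
            while ((read = source.read(buffer)) != -1) {
                target.write(buffer, 0, read);
            }
            target.flush();
        }
    }
}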
|
@ -3708,9 +3754,6 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
|
||||||
throw new RuntimeException(e);
|
throw new RuntimeException(e);
|
||||||
} finally {
|
} finally {
|
||||||
try {
|
try {
|
||||||
if (Objects.nonNull(ftpClient)) {
|
|
||||||
ftpClient.disconnect();
|
|
||||||
}
|
|
||||||
if (ObjectUtil.isNotNull(inputStream)) {
|
if (ObjectUtil.isNotNull(inputStream)) {
|
||||||
inputStream.close();
|
inputStream.close();
|
||||||
}
|
}
|
||||||
|
@ -3727,24 +3770,11 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
|
||||||
public void exportARR(Integer sampleId, HttpServletResponse response) {
|
public void exportARR(Integer sampleId, HttpServletResponse response) {
|
||||||
// 获取自动处理生成的报告地址
|
// 获取自动处理生成的报告地址
|
||||||
String reportPath = spectrumAnalysisMapper.viewARR(sampleId);
|
String reportPath = spectrumAnalysisMapper.viewARR(sampleId);
|
||||||
String pathName = ftpUtil.getFtpRootPath() + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + reportPath.substring(0, reportPath.lastIndexOf(StringPool.SLASH));
|
String pathFileName = ftpUtil.getFtpRootPath() + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + reportPath + ".txt";
|
||||||
String fileName = reportPath.substring(reportPath.lastIndexOf(StringPool.SLASH) + 1) + ".txt";
|
|
||||||
// 连接ftp
|
|
||||||
FTPClient ftpClient = ftpUtil.LoginFTP();
|
|
||||||
if (Objects.isNull(ftpClient)) {
|
|
||||||
throw new RuntimeException("ftp connection failed");
|
|
||||||
}
|
|
||||||
InputStream inputStream = null;
|
InputStream inputStream = null;
|
||||||
ServletOutputStream outputStream = null;
|
ServletOutputStream outputStream = null;
|
||||||
try {
|
try {
|
||||||
// 切换被动模式
|
inputStream = ftpUtil.downloadFileStream(pathFileName);
|
||||||
ftpClient.enterLocalPassiveMode();
|
|
||||||
ftpClient.setFileType(FTPClient.BINARY_FILE_TYPE);
|
|
||||||
// 设置编码,当文件中存在中文且上传后文件乱码时可使用此配置项
|
|
||||||
ftpClient.setControlEncoding("UTF-8");
|
|
||||||
ftpClient.setFileTransferMode(FTPClient.STREAM_TRANSFER_MODE);
|
|
||||||
ftpClient.changeWorkingDirectory(pathName);
|
|
||||||
inputStream = ftpClient.retrieveFileStream(fileName);
|
|
||||||
if (Objects.nonNull(inputStream)) {
|
if (Objects.nonNull(inputStream)) {
|
||||||
// 设置响应类型
|
// 设置响应类型
|
||||||
response.setContentType("application/octet-stream");
|
response.setContentType("application/octet-stream");
|
||||||
|
@ -3762,9 +3792,6 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
|
||||||
throw new RuntimeException(e);
|
throw new RuntimeException(e);
|
||||||
} finally {
|
} finally {
|
||||||
try {
|
try {
|
||||||
if (Objects.nonNull(ftpClient)) {
|
|
||||||
ftpClient.disconnect();
|
|
||||||
}
|
|
||||||
if (ObjectUtil.isNotNull(inputStream)) {
|
if (ObjectUtil.isNotNull(inputStream)) {
|
||||||
inputStream.close();
|
inputStream.close();
|
||||||
}
|
}
|
||||||
|
@ -4266,24 +4293,11 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
|
||||||
if (StringUtils.isBlank(logPath)) {
|
if (StringUtils.isBlank(logPath)) {
|
||||||
throw new RuntimeException("The log generated by the automatic processor does not exist!");
|
throw new RuntimeException("The log generated by the automatic processor does not exist!");
|
||||||
}
|
}
|
||||||
String pathName = ftpUtil.getFtpRootPath() + spectrumPathProperties.getLogPath() + StringPool.SLASH + logPath.substring(0, logPath.lastIndexOf(StringPool.SLASH));
|
String pathFileName = ftpUtil.getFtpRootPath() + spectrumPathProperties.getLogPath() + StringPool.SLASH + logPath;
|
||||||
String fileName = logPath.substring(logPath.lastIndexOf(StringPool.SLASH) + 1);
|
|
||||||
// 连接ftp
|
|
||||||
FTPClient ftpClient = ftpUtil.LoginFTP();
|
|
||||||
if (Objects.isNull(ftpClient)) {
|
|
||||||
throw new RuntimeException("ftp connection failed");
|
|
||||||
}
|
|
||||||
InputStream inputStream = null;
|
InputStream inputStream = null;
|
||||||
ServletOutputStream outputStream = null;
|
ServletOutputStream outputStream = null;
|
||||||
try {
|
try {
|
||||||
// 切换被动模式
|
inputStream = ftpUtil.downloadFileStream(pathFileName);
|
||||||
ftpClient.enterLocalPassiveMode();
|
|
||||||
ftpClient.setFileType(FTPClient.BINARY_FILE_TYPE);
|
|
||||||
// 设置编码,当文件中存在中文且上传后文件乱码时可使用此配置项
|
|
||||||
ftpClient.setControlEncoding("UTF-8");
|
|
||||||
ftpClient.setFileTransferMode(FTPClient.STREAM_TRANSFER_MODE);
|
|
||||||
ftpClient.changeWorkingDirectory(pathName);
|
|
||||||
inputStream = ftpClient.retrieveFileStream(fileName);
|
|
||||||
if (Objects.nonNull(inputStream)) {
|
if (Objects.nonNull(inputStream)) {
|
||||||
outputStream = response.getOutputStream();
|
outputStream = response.getOutputStream();
|
||||||
byte[] buffer = new byte[1024];
|
byte[] buffer = new byte[1024];
|
||||||
|
@ -4297,9 +4311,6 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
|
||||||
throw new RuntimeException(e);
|
throw new RuntimeException(e);
|
||||||
} finally {
|
} finally {
|
||||||
try {
|
try {
|
||||||
if (Objects.nonNull(ftpClient)) {
|
|
||||||
ftpClient.disconnect();
|
|
||||||
}
|
|
||||||
if (ObjectUtil.isNotNull(inputStream)) {
|
if (ObjectUtil.isNotNull(inputStream)) {
|
||||||
inputStream.close();
|
inputStream.close();
|
||||||
}
|
}
|
||||||
|
|