fix: gamma 自动处理

This commit is contained in:
orgin 2023-09-19 08:55:05 +08:00
parent a794d8f357
commit 9e745b6d84
18 changed files with 5533 additions and 27 deletions

View File

@ -510,4 +510,14 @@ public interface CommonConstant {
String BETA = "Beta";
String GAMMA = "Gamma";
/**
* File-name prefix of automatically processed (RNAUTO) Gamma reports
*/
String REPORT_PREFIX_AUTO = "RNAUTO_";
/**
* File-name suffix of automatically processed reports
*/
String REPORT_SUFFIX_AUTO = "_rpt";
}

View File

@ -26,7 +26,13 @@ public enum FileTypeEnum {
flv(".flv","video","视频"),
mp4(".mp4","video","视频"),
zip(".zip","zip","压缩包"),
pdf(".pdf","pdf","pdf");
pdf(".pdf","pdf","pdf"),
baseline(".baseline","baseline","基线数据"),
lc(".lc","lc","lc数据基线"),
scac(".scac","scac","scac数据"),
log(".log","log","日志"),
arr(".txt","arr","自动处理报告"),
rrr(".txt","rrr","人工交互分析报告");
private String type;
private String value;

View File

@ -0,0 +1,22 @@
package org.jeecg.common.constant.enums;
/**
* 能谱 系统类型
* @author: xiao
*/
public enum SpectrumSystemType {
/**
* P: particulate systems
*/
P,
/**
* G: all other gas systems (high-resolution
* γ-spectrometry or 2-D β-γ coincidence detection)
*/
G,
/**
* B: gas systems with 3-D β-γ coincidence detection
*/
B;
}

View File

@ -1,10 +1,13 @@
package org.jeecg.common.properties;
import lombok.Data;
import org.jeecg.common.constant.StringConstant;
import org.jeecg.common.constant.enums.FileTypeEnum;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.stereotype.Component;
import java.io.Serializable;
import java.time.LocalDate;
import java.util.Map;
/**
@ -55,4 +58,35 @@ public class SpectrumPathProperties implements Serializable {
* 能谱文件存储路径以能谱系统类型/能谱类型为key以存储路径为value
*/
private Map<String,String> filePathMap;
/**
 * Builds the storage directory for a spectrum-related file.
 * Layout: rootPath/[arrPath/]systemTypeDir/dataTypeDir/year/month/ with a trailing slash,
 * e.g. Spectrum/Particulates/Samplephd/2023/06/
 *      Spectrum/Xenon/Spalax/Samplephd/2023/09/
 *
 * @param fileType   file-type key (see FileTypeEnum values, e.g. "arr", "rrr")
 * @param systemType system-type key used to look up the directory in filePathMap
 * @param dataType   data-type key used to look up the directory in filePathMap
 * @return the assembled path, ending with a slash
 */
public String getSavePath(String fileType, String systemType, String dataType) {
    StringBuilder path = new StringBuilder();
    // Capture "now" once: the original called LocalDate.now() twice, so year and
    // month could disagree when invoked across a month/year boundary.
    final LocalDate today = LocalDate.now();
    final int year = today.getYear();
    final int month = today.getMonthValue();
    path.append(this.getRootPath()).append(StringConstant.SLASH);
    // Automatically processed report (arr) gets its own sub-directory
    if (fileType.equals(FileTypeEnum.arr.getValue())) {
        path.append(this.getArrPath()).append(StringConstant.SLASH);
    }
    // TODO path for interactively analysed (reviewed) reports not implemented yet
    if (fileType.equals(FileTypeEnum.rrr.getValue())) {
    }
    // system-type directory
    path.append(this.getFilePathMap().get(systemType)).append(StringConstant.SLASH);
    // data-type directory
    path.append(this.getFilePathMap().get(dataType)).append(StringConstant.SLASH);
    // year
    path.append(year).append(StringConstant.SLASH);
    // month, zero-padded to two digits (same output as the original ternary)
    path.append(String.format("%02d", month)).append(StringConstant.SLASH);
    return path.toString();
}
}

View File

@ -304,6 +304,11 @@ public class MyLogFormatUtil {
if(data.size() > 0) {
List<List<String>> tempData = new LinkedList<>();
//
for(int row = 0; row < data.size(); row++) {
List<String> columns = new LinkedList<>();
columns.add(data.get(row).getAttribute());
tempData.add(columns);
}
for(int row = 0; row < data.get(0).getContext().size(); row++) {
List<String> columns = new LinkedList<>();
tempData.add(columns);

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,196 @@
package org.jeecg.common;
import com.baomidou.mybatisplus.core.toolkit.StringPool;
import org.jeecg.common.util.DateUtils;
import org.jeecg.modules.base.enums.DataType;
import org.jeecg.modules.base.enums.SystemType;
import org.jeecg.modules.entity.vo.PHDFile;
import org.springframework.stereotype.Component;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.text.ParseException;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
@Component
public class NameStandUtil {
public String GetSysTemSubdir(String systemType) {
StringBuffer path = new StringBuffer();
if(systemType.contains(SystemType.BETA.getType())) {
path.append(StringPool.SLASH+"Spectrum");
path.append(StringPool.SLASH+"Xenon");
path.append(StringPool.SLASH+"Sauna");
} else if(systemType.contains(SystemType.GAMMA.getType())) {
path.append(StringPool.SLASH+"Spectrum");
path.append(StringPool.SLASH+"Xenon");
path.append(StringPool.SLASH+"Spalax");
} else if(systemType.contains(SystemType.PARTICULATE.getType())) {
path.append(StringPool.SLASH+"Spectrum");
path.append(StringPool.SLASH+"Particulates");
}
return path.toString();
}
public String GetDateTypeSubdir(String dataType){
StringBuffer path = new StringBuffer();
if(dataType.contains(DataType.SAMPLEPHD.getType()))
{
path.append(StringPool.SLASH+"Samplephd");
}
else if(dataType.contains(DataType.QCPHD.getType()))
{
path.append(StringPool.SLASH+"Qcphd");
}
else if(dataType.contains(DataType.DETBKPHD.getType()))
{
path.append(StringPool.SLASH+"Detbkphd");
}
else if(dataType.contains(DataType.GASBKPHD.getType()))
{
path.append(StringPool.SLASH+"Gasbkphd");
}
else if(dataType.contains(DataType.SOH.getType()))
{
path.append(StringPool.SLASH+"Soh");
}
else if(dataType.contains(DataType.MET.getType()))
{
path.append(StringPool.SLASH+"Met");
}
else if(dataType.contains(DataType.ALERT_FLOW.getType())||
dataType.contains(DataType.ALERT_SYSTEM.getType())||
dataType.contains(DataType.ALERT_TEMP.getType())||
dataType.contains(DataType.ALERT_UPS.getType()))
{
path.append(StringPool.SLASH+"Alert");
}
else if(dataType.contains(DataType.ERROR.getType()))
{
path.append(StringPool.SLASH+"Error");
}
else
{
path.append(StringPool.SLASH+"Other");
}
return path.toString();
}
public Map<String, String> NameStandard(String path, PHDFile fileAnlyse) {
String suffix = GetSuffix(fileAnlyse.getMsgInfo().getData_type(),fileAnlyse.getHeader().getSystem_type(),fileAnlyse.getHeader().getSpectrum_quantity(),String.valueOf(fileAnlyse.getAcq().getAcquisition_live_time()));
Map<String, String> fileNames = NameStandardByName(path, fileAnlyse.getFilename(), fileAnlyse.getHeader().getMeasurement_id(),suffix);
return fileNames;
}
public String GetSuffix(String dataType,String sysType,String Fulltype,String LT) {
String rData = "";
BigDecimal bd = new BigDecimal(LT);
if(dataType.contains(DataType.SAMPLEPHD.getType())) {
bd = bd.setScale(1, RoundingMode.HALF_UP);
rData = "_S_"+Fulltype+"_"+bd+".PHD";
} else if(dataType.contains(DataType.GASBKPHD.getType())){
bd = bd.setScale(1, RoundingMode.HALF_UP);
rData = "_G_"+Fulltype+"_"+bd+".PHD";
}else if(dataType.contains(DataType.DETBKPHD.getType())){
bd = bd.setScale(0, RoundingMode.HALF_UP);
rData = "_D_"+Fulltype+"_"+bd+".PHD";
}else if(dataType.contains(DataType.QCPHD.getType())){
bd = bd.setScale(2, RoundingMode.HALF_UP);
rData = "_Q_"+Fulltype+"_"+bd+".PHD";
}
return rData;
}
public Map<String, String> NameStandardByName(String path, String fileName, String dateTimeFormat, String suffix) {
Map<String, String> map = new HashMap<>();
String StandardFileName="";
String measurementName = GetFileNameFromDateTime(dateTimeFormat, suffix);
String fileDir = path + StringPool.SLASH;
if(measurementName != fileName) {
StandardFileName = fileDir+measurementName;
}
String fileSuffix = "PHD";
String m_lcFileName = StandardFileName;
String m_baseLineFileName = StandardFileName;
String m_scacFileName = StandardFileName;
String m_logfileName = StandardFileName;
String m_reportFileName = StandardFileName;
m_lcFileName.replace(fileSuffix,"lc");
m_baseLineFileName.replace(fileSuffix,"baseline");
m_scacFileName.replace(fileSuffix,"scac");
m_logfileName.replace(fileSuffix,"log");
m_reportFileName.replace("."+fileSuffix,"_rpt");
String m_saveFileName = StandardFileName;
map.put("lc", m_lcFileName);
map.put("baseline", m_baseLineFileName);
map.put("scac", m_scacFileName);
map.put("log", m_logfileName);
map.put("rpt", m_reportFileName);
map.put("saveFile", m_saveFileName);
return map;
}
public String GetFileNameFromDateTime(String dateTimeFormat, String suffix){
String rData = "";
int pos = dateTimeFormat.indexOf("-");
if(-1 != pos) {
String dateTime = dateTimeFormat;
if (pos+17>dateTime.length()){
dateTime = dateTime.substring(pos+1);
}else {
dateTime = dateTime.substring(pos+1, pos+17);
}
dateTime = dateTime.replace(" ","-");
String fileHeader = dateTimeFormat.substring(0, pos+1);
String temp = DateTimeStandardToFileFormat(dateTime);
rData = fileHeader+ temp + suffix;
}
return rData;
}
public String DateTimeStandardToFileFormat(String data) {
String dateTime = "";
try {
if ( data.indexOf("-") > 0 ){
dateTime = DateUtils.formatDate(DateUtils.parseDate(data,"yyyy/MM/dd-HH:mm"), "yyyyMMdd_HHmm");
} else if( data.indexOf(" ") > 0 ) {
dateTime = DateUtils.formatDate(DateUtils.parseDate(data,"yyyy/MM/dd HH:mm"), "yyyyMMdd_HHmm");
} else if( data.indexOf("-")<0 && data.indexOf(" ") < 0) {
dateTime = DateUtils.formatDate(DateUtils.parseDate(data,"yyyy/MM/dd"), "yyyyMMdd");
}
} catch (ParseException e) {
throw new RuntimeException(e);
}
return dateTime;
}
public String SetFileDir(String path, String saveFileName) {
String input_file_name = path;
//添加文件名日期
List<String> dateSub = GetSubDirByFileName(saveFileName);
for(int pos=0;pos<dateSub.size();pos++) {
input_file_name=input_file_name+dateSub.get(pos)+StringPool.SLASH;
}
return input_file_name;
}
public List<String> GetSubDirByFileName(String fileName) {
List<String> rData = new LinkedList<>();
int pos = fileName.indexOf('-');
if(-1 == pos) {
//
} else if(fileName.length()>=pos+7) {
rData.add(fileName.substring(pos+1, pos+5));
rData.add(fileName.substring(pos+5, pos+7));
}
return rData;
}
}

View File

@ -0,0 +1,149 @@
package org.jeecg.modules.eneity;
import com.fasterxml.jackson.annotation.JsonFormat;
import lombok.Data;
import org.springframework.format.annotation.DateTimeFormat;
import java.io.Serializable;
import java.util.Date;
@Data
public class GardsAnalysesSpectrum implements Serializable {
/**
* Analysis ID
*/
private Integer idAnalysis;
/**
* Sample ID
*/
private Integer sampleId;
/**
* Analysis start time
*/
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss")
@DateTimeFormat(pattern = "yyyy-MM-dd HH:mm:ss")
private Date analysisBegin;
/**
* Analysis start time as a string
*/
private String analysisBeginStr;
/**
* Analysis end time
*/
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss")
@DateTimeFormat(pattern = "yyyy-MM-dd HH:mm:ss")
private Date analysisEnd;
/**
* Analysis end time as a string
*/
private String analysisEndStr;
/**
* "Reviewed": interactive analysis, "auto": automatic analysis
*/
private String type;
/**
* Name of the software used
*/
private String software;
/**
* Software version
*/
private String swVersion;
/**
* Analyst name
*/
private String analyst;
/**
* Description of the baseline counting method
*/
private String baselineMethod;
/**
* Description of the peak-search method
*/
private String peaksMethod;
/**
* Description of the nuclide identification method
*/
private String nuclideMethod;
/**
* Description of the uncertainty calculation
*/
private String uncCalcMethod;
/**
* Description of the Lc calculation method
*/
private String lcMethod;
/**
* Peak-search start channel
*/
private Integer searchStartChannel;
/**
* Peak-search end channel
*/
private Integer searchEndChannel;
/**
* Peak-search threshold
*/
private Double searchThreshold;
/**
* Number of peaks
*/
private Integer numberOfPeaks;
/**
* Total counts
*/
private Float totalCounts;
/**
* Categorization result
*/
private Integer category;
/**
* Comments
*/
private String comments;
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss")
@DateTimeFormat(pattern = "yyyy-MM-dd HH:mm:ss")
private Date moddate;
private String usedgasphd;
private String useddetphd;
private Integer usedgasphdId;
private Integer useddetphdId;
private String baselinePath;
private String lcPath;
private String scacPath;
private String logPath;
private String reportPath;
}

View File

@ -0,0 +1,60 @@
package org.jeecg.modules.eneity;
import com.fasterxml.jackson.annotation.JsonFormat;
import lombok.Data;
import org.springframework.format.annotation.DateTimeFormat;
import java.io.Serializable;
import java.util.Date;
@Data
public class GardsCalibrationPairsSpectrum implements Serializable {
/**
* Sample ID
*/
private Integer sampleId;
/**
* Analysis ID
*/
private Integer idAnalysis;
/**
* Detector the data belongs to: "#g_" for the gamma detector, "#b_" for the beta detector
*/
private String sampleType;
/**
* Calibration type:
* "energy" - energy calibration,
* "efficiency" - efficiency calibration,
* "Resolution" - resolution calibration
*/
private String caltype;
/**
* "PHD" means the data comes from the PHD file; "External" means it comes from
* an external source (calibration tool, other files, etc.)
*/
private String input;
/**
* Calibration point ID
*/
private Integer idCalPoint;
// x value of the calibration pair
private Double xValue;
// y value of the calibration pair
private Double yValue;
private String decayMode;
/**
* Uncertainty of the y value
*/
private String uncYValue;
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss")
@DateTimeFormat(pattern = "yyyy-MM-dd HH:mm:ss")
private Date moddate;
}

View File

@ -0,0 +1,106 @@
package org.jeecg.modules.eneity;
import com.fasterxml.jackson.annotation.JsonFormat;
import lombok.Data;
import org.springframework.format.annotation.DateTimeFormat;
import java.io.Serializable;
import java.util.Date;
@Data
public class GardsCalibrationSpectrum implements Serializable {
/**
* Sample ID
*/
private Integer sampleId;
/**
* Analysis ID
*/
private Integer idAnalysis;
/**
* Detector the data belongs to: "#g_" for the gamma detector, "#b_" for the beta detector
*/
private String sampleType;
/**
* Calibration type:
* "energy" - energy calibration,
* "efficiency" - efficiency calibration,
* "Resolution" - resolution calibration
*/
private String calType;
/**
* Fit-equation ID (uniform definition)
*/
private Integer function;
/**
* Fit-equation description
*/
private String functionDef;
/**
* Start value of the fitted range
*/
private Integer startOfRange;
/**
* End value of the fitted range
*/
private Integer endOfRange;
/**
* Fit coefficient 1
*/
private Double coeff1;
/**
* Fit coefficient 2
*/
private Double coeff2;
/**
* Fit coefficient 3
*/
private Double coeff3;
/**
* Fit coefficient 4
*/
private Double coeff4;
/**
* Fit coefficient 5
*/
private Double coeff5;
/**
* Fit coefficient 6
*/
private Double coeff6;
/**
* Fit coefficient 7
*/
private Double coeff7;
/**
* Fit coefficient 8
*/
private Double coeff8;
/**
* Fit coefficient 9
*/
private Double coeff9;
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss")
@DateTimeFormat(pattern = "yyyy-MM-dd HH:mm:ss")
private Date moddate;
private String coeffString;
}

View File

@ -0,0 +1,103 @@
package org.jeecg.modules.eneity;
import com.fasterxml.jackson.annotation.JsonFormat;
import lombok.Data;
import org.springframework.format.annotation.DateTimeFormat;
import java.io.Serializable;
import java.util.Date;
@Data
public class GardsNuclIdedSpectrum implements Serializable {
/**
* Sample ID
*/
private Integer sampleId;
/**
* Analysis ID
*/
private Integer idAnalysis;
/**
* Nuclide name
*/
private String nuclideName;
/**
* Nuclide type
*/
private String type;
/**
* Nuclide half-life
*/
private String halflife;
/**
* Average activity
*/
private String aveActiv;
/**
* Uncertainty of the average activity
*/
private Double aveActivErr;
/**
* Activity computed from the key line
*/
private Double activKey;
/**
* Uncertainty of the key-line activity
*/
private Double activKeyErr;
/**
* Minimum detectable activity (MDA) of the nuclide
*/
private String mda;
/**
* Uncertainty of the MDA
*/
private Double mdaErr;
/**
* Nuclide identification flag
*/
private Integer nidFlag;
private Double activDecay;
private Double activDecayErr;
/**
* Coincidence-summing correction factor (set to 1 when not applicable)
*/
private Double cscRatio;
/**
* Uncertainty of the coincidence-summing correction factor (set to 0 when not applicable)
*/
private Double cscRatioErr;
/**
* Whether the activity has been coincidence-summing corrected
*/
private Integer cscModFlag;
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
@DateTimeFormat(pattern = "yyyy-MM-dd HH:mm:ss")
private Date moddate;
private String mdc;
private String concentration;
private String keyEnergy;
private String keyYield;
}

View File

@ -0,0 +1,108 @@
package org.jeecg.modules.eneity;
import com.fasterxml.jackson.annotation.JsonFormat;
import lombok.Data;
import org.springframework.format.annotation.DateTimeFormat;
import java.io.Serializable;
import java.util.Date;
@Data
public class GardsNuclLinesIdedSpectrum implements Serializable {
/**
* Sample ID
*/
private Integer sampleId;
/**
* Peak index
*/
private Integer idPeak;
/**
* Analysis ID
*/
private Integer idAnalysis;
/**
* Nuclide name
*/
private String nuclideName;
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss")
@DateTimeFormat(pattern = "yyyy-MM-dd HH:mm:ss")
private Date moddate;
/**
* Energy (keV) of the nuclide's line taken from the nuclide library
*/
private Double energy;
/**
* Uncertainty (keV) of the library line energy
*/
private Double uncEnergy;
/**
* Emission probability of the library line
*/
private Double abundance;
/**
* Uncertainty of the emission probability
*/
private Double uncAbundance;
/**
* Activity computed from this peak
*/
private String activity;
/**
* Uncertainty of the activity computed from this peak
*/
private Double uncActivity;
/**
* Detection efficiency at this peak
*/
private Double effic;
/**
* Uncertainty of the detection efficiency at this peak
*/
private Double unEffic;
/**
* Minimum detectable activity computed from this peak
*/
private Double mda;
/**
* Key-line flag (0/1)
*/
private Double keyFlag;
/**
* Coincidence-summing correction factor (set to 1 when not applicable)
*/
private Double cscRatio;
/**
* Uncertainty of the coincidence-summing correction factor (set to 0 when not applicable)
*/
private Double cscRatioErr;
/**
* Whether the activity has been coincidence-summing corrected
*/
private Double cscModFlag;
private String nuclidefullname;
private String mdc;
private String concentration;
}

View File

@ -0,0 +1,131 @@
package org.jeecg.modules.eneity;
import com.fasterxml.jackson.annotation.JsonFormat;
import lombok.Data;
import org.springframework.format.annotation.DateTimeFormat;
import java.io.Serializable;
import java.util.Date;
@Data
public class GardsPeaksSpectrum implements Serializable {
/**
* Sample ID
*/
private Integer sampleId;
/**
* Peak index
*/
private Integer idPeak;
/**
* Analysis ID
*/
private Integer idAnalysis;
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss")
@DateTimeFormat(pattern = "yyyy-MM-dd HH:mm:ss")
private Date moddate;
/**
* Peak centroid (channel)
*/
private Double centroidChannel;
/**
* Uncertainty of the peak centroid (channel)
*/
private Double uncCentroidChannel;
/**
* Peak centroid energy (keV)
*/
private Double energy;
/**
* Uncertainty of the peak centroid energy (keV)
*/
private Double uncEnergy;
/**
* Peak area (counts); the baseline area is already subtracted, but
* blank-sample counts and detector-background counts are not
*/
private Double area;
/**
* Uncertainty of the peak area (counts)
*/
private Double uncArea;
/**
* Net count rate of the peak (1/s) = peak area / live time
*/
private Double netCountRate;
/**
* Uncertainty of the net count rate (1/s)
*/
private Double uncNetCountRate;
/**
* Absolute efficiency of the measurement system at the peak energy
*/
private Double efficiency;
/**
* Uncertainty of the absolute efficiency at the peak energy
*/
private Double uncefficiency;
/**
* Full width at half maximum of the peak
*/
private Double fwhm;
/**
* Full width at tenth maximum of the peak
*/
private Double fwtm;
/**
* Significance factor of the peak
*/
private Double significance;
/**
* Detection limit (Lc) of the peak
*/
private Double lc;
/**
* Start channel of the peak's region of interest
*/
private Double roiStart;
/**
* End channel of the peak's region of interest
*/
private Double roiEnd;
private Double mulitiIndex;
private Double tail;
private Double tailAlpha;
private Double upperTail;
private Double upperTailAlpha;
private Double bwwidthchan;
private Double recoildeltachan;
private Double stepraio;
private Double ld;
private Double sensitivity;
private Double backgroundarea;
private Double meanbackcount;
private Double recoilbetachan;
private String peakcomments;
}

View File

@ -0,0 +1,29 @@
package org.jeecg.modules.eneity;
import com.fasterxml.jackson.annotation.JsonFormat;
import lombok.Data;
import org.springframework.format.annotation.DateTimeFormat;
import java.io.Serializable;
import java.util.Date;
@Data
public class GardsQcCheckSpectrum implements Serializable {
// Sample ID
private Integer sampleId;
// Analysis ID
private Integer idanalysis;
// Name of the QC item
private String qcName;
// Measured value of the QC item
private Double qcValue;
// QC acceptance criterion — presumably a textual standard/range; confirm against the QC check logic
private String qcStandard;
// QC check result flag
private Integer qcResult;
@DateTimeFormat(pattern = "yyyy-MM-dd HH:mm:ss")
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
private Date moddate;
}

View File

@ -0,0 +1,38 @@
package org.jeecg.modules.eneity;
import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableName;
import lombok.Data;
import org.jeecg.modules.base.entity.original.GardsSampleData;
import org.jeecgframework.poi.excel.annotation.Excel;
@Data
@TableName("ORIGINAL.GARDS_SAMPLE_DATA")
public class GardsSampleDataSpectrum extends GardsSampleData {
/**
* Station name
*/
@TableField(exist = false)
@Excel(name = "STATION" ,orderNum = "2")
private String stationName;
/**
* Detector name
*/
@TableField(exist = false)
private String detectorsName;
// Calibration reports column for Excel export; not persisted
@TableField(exist = false)
@Excel(name = "CALIB REPORTS" ,orderNum = "7")
private String calibReports;
// Source database name; not persisted
@TableField(exist = false)
private String dbName;
// Row number column for Excel export; not persisted
@TableField(exist = false)
@Excel(name = "NO" ,orderNum = "1")
private Integer no;
}

View File

@ -0,0 +1,51 @@
package org.jeecg.modules.mapper;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import org.jeecg.modules.base.entity.configuration.GardsNuclLib;
import org.jeecg.modules.base.entity.configuration.GardsNuclLinesLib;
import org.jeecg.modules.base.entity.rnman.GardsAnalySetting;
import org.jeecg.modules.eneity.*;
import org.jeecg.modules.entity.vo.*;
import java.util.List;
@Mapper
/**
 * MyBatis mapper with the queries used by spectrum analysis.
 * SQL lives in the corresponding mapper XML; "dbName" parameters select the
 * source schema (e.g. auto vs. reviewed results).
 */
public interface SpectrumAnalysisMapper {
// Processing status of a sample
String getStatus(@Param(value = "sampleId") Integer sampleId);
// Analysis row for a sample in the given schema
GardsAnalysesSpectrum getAnalysis(@Param(value = "dbName") String dbName, @Param(value = "sampleId") Integer sampleId);
// Peaks found by the given analysis
List<GardsPeaksSpectrum> getPeaks(@Param(value = "dbName") String dbName, @Param(value = "idAnalysis") Integer idAnalysis);
// Calibration point pairs of the given analysis
List<GardsCalibrationPairsSpectrum> getCalibrationPairs(@Param(value = "dbName") String dbName, @Param(value = "idAnalysis") Integer idAnalysis);
// Calibration fit parameters of the given analysis
List<GardsCalibrationSpectrum> getPara(@Param(value = "dbName") String dbName, @Param(value = "idAnalysis") Integer idAnalysis);
// Identified nuclide lines of the given analysis
List<GardsNuclLinesIdedSpectrum> getNuclLinesIded(@Param(value = "dbName") String dbName, @Param(value = "idAnalysis") Integer idAnalysis);
// Identified nuclides of the given analysis
List<GardsNuclIdedSpectrum> getNuclIded(@Param(value = "dbName") String dbName, @Param(value = "idAnalysis") Integer idAnalysis);
// QC check rows of the given analysis
List<GardsQcCheckSpectrum> getQcCheck(@Param(value = "dbName") String dbName, @Param(value = "idAnalysis") Integer idAnalysis);
// Analysis settings of the given analysis
GardsAnalySetting getAnalySetting(@Param(value = "idAnalysis") Integer idAnalysis);
// Library lines of a nuclide by name
List<NuclideLine> getNuclideLines(@Param(value = "name") String name);
// Half-life data for the given nuclide names
List<HalfData> getHalf(@Param(value = "names") List<String> names);
// Library lines of a nuclide within an energy window
List<GardsNuclLinesLib> getNuclideLine(@Param(value = "min") Double min, @Param(value = "max") Double max, @Param(value = "name") String name);
// Library record of a nuclide by name
GardsNuclLib getNuclideInfo(@Param(value = "name") String name);
// Parent/daughter relation record of a nuclide
GardsNuclLib getParentAndDaughter(@Param(value = "name") String name);
// Candidate nuclides within an energy window, restricted to the given list
List<String> findNuclideList(@Param(value = "min") Double min, @Param(value = "max") Double max, @Param(value = "nuclides") List<String> nuclides);
// Library lines of a nuclide within the given half-life span
List<GardsNuclLinesLib> getNuclideTable( @Param(value = "name") String name, @Param(value = "span") Long span);
// Sample ID by input file path
Integer getSampleId(@Param(value = "filePathName") String filePathName);
}

View File

@ -0,0 +1,13 @@
package org.jeecg.modules.native_jni;
import java.util.List;
/**
 * Result holder returned by the native calibration-value computation.
 */
public class CalValuesOut {
// Number of rows in the result
public int rowNum;
// Number of columns in the result
public int colNum;
// Flattened list of computed values — presumably rowNum x colNum, row-major; confirm against the native code
public List<Double> counts;
}

View File

@ -6,9 +6,16 @@ import cn.hutool.core.date.DateUtil;
import cn.hutool.core.map.MapUtil;
import cn.hutool.core.util.NumberUtil;
import cn.hutool.core.util.StrUtil;
import com.google.common.collect.Maps;
import lombok.Data;
import org.jeecg.common.constant.DateConstant;
import org.jeecg.common.constant.Setting;
import org.apache.commons.lang3.StringUtils;
import org.jeecg.common.GammaFileUtil;
import org.jeecg.common.constant.*;
import org.jeecg.common.constant.enums.SpectrumSystemType;
import org.jeecg.common.properties.SpectrumPathProperties;
import org.jeecg.common.util.RedisUtil;
import org.jeecg.modules.base.bizVo.AttributeItemVo;
import org.jeecg.common.util.MyLogFormatUtil;
import org.jeecg.modules.base.dto.*;
import org.jeecg.modules.base.entity.original.GardsSampleData;
import org.jeecg.modules.base.entity.rnauto.*;
@ -31,11 +38,58 @@ public class Sample_G_Analysis {
// Parsed Sample spectrum structure
private EnergySpectrumStruct energySpectrumStruct;
// Spectrum file storage path properties
private SpectrumPathProperties spectrumPathProperties;
private RedisUtil redisUtil;
/**
* System type
*/
private String systemType;
/**
* Data type
*/
private String dataType;
/**
* Full path of the sample spectrum file
*/
private String sampleInputFilename;
/**
* File name of the sample spectrum
*/
private String sampleFilename;
/**
* Log file path
*/
private String logFilePath;
/**
* Log file name
*/
private String logFileName;
/**
* Automatic report file path
*/
private String arrFilePath;
/**
* Automatic report file name
*/
private String arrFileName;
/**
 * Builds the analysis context from the parsed spectrum, shared service
 * references and the sample's database row.
 */
public Sample_G_Analysis(EnergySpectrumStruct energySpectrumStruct, SpectrumServiceQuotes serviceQuotes,
                         GardsSampleData sampleData) {
    this.sampleData = sampleData;
    this.serviceQuotes = serviceQuotes;
    this.energySpectrumStruct = energySpectrumStruct;
    this.systemType = energySpectrumStruct.system_type;
    this.dataType = energySpectrumStruct.data_type;
    this.sampleInputFilename = sampleData.getInputFileName();
    // Fix: misplaced parenthesis — the original computed lastIndexOf(SLASH + 1),
    // i.e. searched for the literal string "/1". Intended: everything after the
    // last path separator (the bare file name).
    this.sampleFilename = StringUtils.substring(sampleData.getInputFileName(),
            sampleData.getInputFileName().lastIndexOf(StringConstant.SLASH) + 1);
}
public void analysis(){
@ -44,6 +98,24 @@ public class Sample_G_Analysis {
GStoreMiddleProcessData middleData = new GStoreMiddleProcessData();
Integer sampleId = sampleData.getSampleId();
GammaFileUtil gammaFileUtil = new GammaFileUtil();
PHDFile phdFile = new PHDFile();
// todo 获取数据库 Gamma 默认参数
// todo 文件路径
middleData.setAnalyses_save_filePath(this.sampleInputFilename);
// todo 读取文件内容并附值
this.setPHDFile(phdFile, this.energySpectrumStruct);
// todo 根据系统类型传入不同的核素参数
Map<String, NuclideLines> nuclideLibs = new HashMap<>();
if (this.systemType.equals(SpectrumSystemType.P.name())) {
nuclideLibs = this.getNuclideLinesP();
}
if (this.systemType.equals(SpectrumSystemType.G.name())) {
nuclideLibs = this.getNuclideLinesG();
}
gammaFileUtil.GetMiddleData(phdFile, CommonConstant.REPORT_PREFIX_AUTO, nuclideLibs, middleData, "1");
// 保存分析结果 ==> INSERT INTO RNAUTO.GARDS_ANALYSES
saveAnalysis(middleData,sampleId);
// 获取分析结果ID ==> SELECT IDANALYSIS
@ -72,6 +144,312 @@ public class Sample_G_Analysis {
}
}
private void writeLog(String logFilePath, GStoreMiddleProcessData middleData) {
String sampleId = middleData.getSample_id();
MyLogFormatUtil myLogFormatUtil = new MyLogFormatUtil();
List<String> writes = new LinkedList<>();
// 自动处理分析开始
String analyseBegin = MyLogFormatUtil.analyseResultsBegin.replace("%1", "date");
writes.add(MyLogFormatUtil.getTitleFormat(analyseBegin, MyLogFormatUtil.FILE_TITLE_FLAG));
/* Read calibration data */
writes.add(MyLogFormatUtil.getTitleFormat(MyLogFormatUtil.titleCalibration));
List<AttributeItemVo> data = new ArrayList<>();
writes.add(MyLogFormatUtil.getHeaderFormat(MyLogFormatUtil.SetSampleGEnergyChannel.replace("%1", sampleId)));
AttributeItemVo item = null;
item = new AttributeItemVo("Channel", MyLogFormatUtil.getValuePoint(middleData.getCalibration_pairs_S_E_xValue(), 3));
data.add(item);
item = new AttributeItemVo("Energy", MyLogFormatUtil.getValuePoint(middleData.getCalibration_pairs_S_E_yValue(),3));
data.add(item);
item = new AttributeItemVo("Error", MyLogFormatUtil.getValuePoint(middleData.getCalibration_pairs_S_E_uncYValue(), 3));
data.add(item);
writes.addAll(MyLogFormatUtil.getBlockContext(data));
// Reading gamma Efficiency pairs(sampleID: %1)
writes.add(MyLogFormatUtil.getHeaderFormat(MyLogFormatUtil.GSetSampleEfficiencyChannel.replace("%1", sampleId)));
item = new AttributeItemVo("Channel", MyLogFormatUtil.getValuePoint(middleData.getCalibration_pairs_S_EF_xValue(), 3));
data.add(item);
item = new AttributeItemVo("Energy", MyLogFormatUtil.getValuePoint(middleData.getCalibration_pairs_S_EF_xValue(), 3));
data.add(item);
item = new AttributeItemVo("Error", MyLogFormatUtil.getValuePoint(middleData.getCalibration_pairs_S_EF_xValue(), 3));
data.add(item);
writes.addAll(MyLogFormatUtil.getBlockContext(data));
// Reading gamma Resolution pairs(sampleID: %1)
writes.add(MyLogFormatUtil.getHeaderFormat(MyLogFormatUtil.GSetSampleResolutionChannel.replace("%1", sampleId)));
item = new AttributeItemVo("Channel", MyLogFormatUtil.getValuePoint(middleData.getCalibration_pairs_S_EF_xValue(), 3));
data.add(item);
item = new AttributeItemVo("Energy", MyLogFormatUtil.getValuePoint(middleData.getCalibration_pairs_S_R_yValue(),3));
data.add(item);
item = new AttributeItemVo("Error", MyLogFormatUtil.getValuePoint(middleData.getCalibration_pairs_S_R_uncYValue(),3));
data.add(item);
writes.addAll(MyLogFormatUtil.getBlockContext(data));
// Reading gamma TotalEfficiency pairs(sampleID: %1)
writes.add(MyLogFormatUtil.getHeaderFormat(MyLogFormatUtil.GSetSampleTotalEfficiencyChannel.replace("%1", sampleId)));
item = new AttributeItemVo("Channel", middleData.getCalibration_pairs_S_T_xValue());
data.add(item);
item = new AttributeItemVo("Energy", MyLogFormatUtil.getValuePoint(middleData.getCalibration_pairs_S_T_yValue(), 3));
data.add(item);
item = new AttributeItemVo("Error", MyLogFormatUtil.getValuePoint(middleData.getCalibration_pairs_S_T_uncYValue(), 3));
data.add(item);
writes.addAll(MyLogFormatUtil.getBlockContext(data));
// Reading sample information
writes.add(MyLogFormatUtil.getHeaderFormat(MyLogFormatUtil.sampleInfo));
Map<String, Object> infoMap = new LinkedHashMap<>();
infoMap.put("Collection Start", middleData.sample_collection_start);
infoMap.put("Collection Stop", middleData.sample_collection_stop);
infoMap.put("Sampling Time[h]", middleData.sample_time);
infoMap.put("Sample Quantity[m3]", middleData.sample_quantity);
infoMap.put("Decay Time[h]", middleData.sample_decay_time);
infoMap.put("Acquisition Start", middleData.sample_acquisiton_start);
infoMap.put("Acquisition Stop", middleData.sample_acquistion_stop);
infoMap.put("Acquisition Time[s]", middleData.sample_acquistion_time);
writes.addAll(MyLogFormatUtil.getBlockContext(infoMap));
// Read calibration finished
writes.add(MyLogFormatUtil.getTitleFormat(MyLogFormatUtil.titleCalibrationIdEnd));
/* Starting Calibration */
writes.add(MyLogFormatUtil.getTitleFormat(MyLogFormatUtil.GTitleCalibration));
// Energy Calibration [%1 ]
if(middleData.calibration_pairs_E_idCalPoint.size()>0) {
infoMap = new LinkedHashMap<>();
infoMap.put("Energy Calibration [%1 ]".replace("%1", middleData.calibration_pairs_R_Input), "");
infoMap.put("Function", middleData.calibration_E_functionTypeDef);
infoMap.put("E", middleData.calibration_E_functionDef);
infoMap.put("P", middleData.calibration_E_coeff_string);
infoMap.put("Err", middleData.calibration_E_uncoeff_string);
infoMap.put("Data", middleData.calibration_pairs_E_Input);
writes.addAll(MyLogFormatUtil.getBlockContext(infoMap));
}
// Resolution Calibration [%1 ]
if(middleData.calibration_pairs_R_idCalPoint.size()>0) {
infoMap = new LinkedHashMap<>();
infoMap.put("Resolution Calibration [%1 ]".replace("%1", middleData.calibration_pairs_R_Input) , "");
infoMap.put("Function", middleData.calibration_R_functionTypeDef);
infoMap.put("E", middleData.calibration_R_functionDef);
infoMap.put("P", middleData.calibration_R_coeff_string);
infoMap.put("Err", middleData.calibration_R_uncoeff_string);
infoMap.put("Data", middleData.calibration_pairs_R_Input);
writes.addAll(MyLogFormatUtil.getBlockContext(infoMap));
}
// Efficiency Calibration [PHD ]
if(middleData.calibration_pairs_EF_idCalPoint.size()>0) {
infoMap = new LinkedHashMap<>();
infoMap.put("Efficiency Calibration [%1 ]".replace("%1", middleData.calibration_pairs_EF_Input), "");
infoMap.put("Function", middleData.calibration_pairs_EF_Input);
infoMap.put("E", middleData.calibration_EF_functionTypeDef);
infoMap.put("P", middleData.calibration_EF_functionDef);
infoMap.put("Err", middleData.calibration_EF_coeff_string);
infoMap.put("Data", middleData.calibration_EF_uncoeff_string);
writes.addAll(MyLogFormatUtil.getBlockContext(infoMap));
}
if(middleData.calibration_pairs_T_idCalPoint.size()>0) {
infoMap = new LinkedHashMap<>();
infoMap.put("Tot_efficiency Calibration [%1 ]".replace("%1", middleData.calibration_pairs_T_Input), "");
infoMap.put("Function", middleData.calibration_pairs_T_Input);
infoMap.put("E", middleData.calibration_T_functionTypeDef);
infoMap.put("P", middleData.calibration_T_functionDef);
infoMap.put("Err", middleData.calibration_T_coeff_string);
infoMap.put("Data", middleData.calibration_T_uncoeff_string);
writes.addAll(MyLogFormatUtil.getBlockContext(infoMap));
}
// Calibration Finished
writes.add(MyLogFormatUtil.getTitleFormat(MyLogFormatUtil.GTitleCalibrationEnd));
/* Starting Spectrum Analysis */
writes.add(MyLogFormatUtil.getTitleFormat(MyLogFormatUtil.GTitleSpectrum));
// PROCESSING PARAMETERS.....
writes.add(MyLogFormatUtil.getHeaderFormat(MyLogFormatUtil.GGetPROCESSING));
infoMap = new LinkedHashMap<>();
infoMap.put("ECutAnalysis_Low", middleData.setting_specSetup.getECutAnalysis_Low());
infoMap.put("ECutAnalysis_High", middleData.setting_specSetup.getECutAnalysis_High());
infoMap.put("EnergyTolerance", middleData.setting_specSetup.getEnergyTolerance());
infoMap.put("BaseImprovePSS", middleData.setting_specSetup.getBaseImprovePSS());
infoMap.put("PSS_low", middleData.setting_specSetup.getPSS_low());
infoMap.put("k_back", middleData.setting_specSetup.getK_back());
infoMap.put("k_alpha", middleData.setting_specSetup.getK_alpha());
infoMap.put("k_beta", middleData.setting_specSetup.getK_beta());
infoMap.put("RiskLevelK", middleData.setting_specSetup.getRiskLevelK());
infoMap.put("refTime_act", middleData.setting_specSetup.getRefTime_act());
infoMap.put("refTime_conc", middleData.setting_specSetup.getRefTime_conc());
writes.addAll(MyLogFormatUtil.getBlockContext(infoMap));
// CALIBRATION PARAMETERS.....
writes.add(MyLogFormatUtil.getHeaderFormat(MyLogFormatUtil.GGetCALIBRATION));
infoMap = new LinkedHashMap<>();
infoMap.put("CalibrationPSS_low", middleData.setting_specSetup.getCalibrationPSS_low());
infoMap.put("CalibrationPSS_high", middleData.setting_specSetup.getCalibrationPSS_high());
infoMap.put("bUpdateCal", middleData.setting_specSetup.isBUpdateCal());
infoMap.put("KeepCalPeakSearchPeaks", middleData.setting_specSetup.isKeepCalPeakSearchPeaks());
writes.addAll(MyLogFormatUtil.getBlockContext(infoMap));
// Nuclide Identified.....
writes.add(MyLogFormatUtil.getHeaderFormat(MyLogFormatUtil.GGetPeakSearchResult));
infoMap = new LinkedHashMap<>();
int idPeakSize = middleData.peaks_idPeak.size();
double peaksUsed = 0;
for(int m=0;m<middleData.peaks_Nuclide_name.size();m++)
{
if(!middleData.peaks_Nuclide_name.get(m).isEmpty())
{
peaksUsed++;
}
}
infoMap.put("%1 peaks reported".replace("%1", idPeakSize + ""), "");
infoMap.put("%1 peaks with ID (%2%)"
.replace("%1", peaksUsed+"")
.replace("%2", (peaksUsed / idPeakSize * 100) + ""), "");
infoMap.put("%1 peaks without ID(%2%)"
.replace("%1", (idPeakSize - peaksUsed) + "")
.replace("%2", ((idPeakSize - peaksUsed) / (idPeakSize * 100)) + ""), "");
writes.addAll(MyLogFormatUtil.getBlockContext(infoMap));
data = new ArrayList<>();
item = new AttributeItemVo("PeakID", middleData.getPeaks_idPeak());
data.add(item);
item = new AttributeItemVo("Energy", MyLogFormatUtil.getValuePoint(middleData.getPeaks_Energy(), 3));
data.add(item);
item = new AttributeItemVo("Centroid", MyLogFormatUtil.getValuePoint(middleData.getPeaks_peakCentroid(), 3));
data.add(item);
item = new AttributeItemVo("FWHM", MyLogFormatUtil.getValuePoint(middleData.getPeaks_Fwhm(), 3));
data.add(item);
item = new AttributeItemVo("NetArea", MyLogFormatUtil.getValuePoint(middleData.getPeaks_Area(), 3));
data.add(item);
List<String> percentData = MyLogFormatUtil.getPercent(middleData.getPeaks_areaErr(), middleData.getPeaks_Area());
item = new AttributeItemVo("NAErr%", MyLogFormatUtil.getValuePoint(percentData, 3));
data.add(item);
item = new AttributeItemVo("Signif", MyLogFormatUtil.getValuePoint(middleData.getPeaks_Significance(), 3));
data.add(item);
item = new AttributeItemVo("Sensit", MyLogFormatUtil.getValuePoint(middleData.getPeaks_Sensitivity(), 3));
data.add(item);
item = new AttributeItemVo("Nuclide", middleData.getPeaks_Nuclide_name());
data.add(item);
writes.addAll(MyLogFormatUtil.getBlock(MyLogFormatUtil.GSetSampleEfficiencyChannel, sampleId, data));
// Nuclide Identified.....
writes.add(MyLogFormatUtil.getHeaderFormat(MyLogFormatUtil.GGetPeakSearchResult));
StringBuilder qsNuclidesName = new StringBuilder();
for(int m = 0; m < middleData.nucl_ided_Nuclidename.size() -1; m++)
{
qsNuclidesName.append(middleData.nucl_ided_Nuclidename.get(m)).append(",");
}
if(middleData.nucl_ided_Nuclidename.size() > 1)
{
qsNuclidesName.append(middleData.nucl_ided_Nuclidename.get(middleData.nucl_ided_Nuclidename.size() - 1));
}
infoMap = new LinkedHashMap<>();
infoMap.put("Nuclides Identified", qsNuclidesName.toString());
infoMap.put("Keyline Activities for Nuclides with defined Reference Line", "");
infoMap.put("Activity Reference Time", middleData.getSample_acquisiton_start());
infoMap.put("Concentration Reference Time", middleData.getSample_collection_start());
writes.addAll(MyLogFormatUtil.getBlockContext(infoMap));
data = new ArrayList<>();
item = new AttributeItemVo("Nuclide", middleData.getNucl_ided_Nuclidename());
data.add(item);
item = new AttributeItemVo("Halflife", middleData.getNucl_ided_Halflife());
data.add(item);
item = new AttributeItemVo("Activity",
MyLogFormatUtil.getValuePoint(middleData.getNucl_ided_activ_key(), 3), "bq");
data.add(item);
List<String> ideaPercent = MyLogFormatUtil.getPercent(middleData.getNucl_ided_activ_key_err(), middleData.getNucl_ided_activ_key());
item = new AttributeItemVo("RelErr", ideaPercent, "%");
data.add(item);
item = new AttributeItemVo("Conc", middleData.getNucl_ided_Concentration(), "uBq/m^3");
data.add(item);
item = new AttributeItemVo("MDC", middleData.getNucl_ided_MDC(), "uBq/m^3");
data.add(item);
writes.addAll(MyLogFormatUtil.getBlockContext(infoMap));
// Starting Data Quality.....
writes.add(MyLogFormatUtil.getHeaderFormat(MyLogFormatUtil.GGetDataQuality));
data = new ArrayList<>();
item = new AttributeItemVo("Name", middleData.getQC_CHECK_QC_NAME());
data.add(item);
item = new AttributeItemVo("Pass/Fail", MyLogFormatUtil.getPass(middleData.getQC_CHECK_QC_RESULT()));
data.add(item);
item = new AttributeItemVo("Value", MyLogFormatUtil.getValuePoint(middleData.getQC_CHECK_QC_VALUE(), 3));
data.add(item);
item = new AttributeItemVo("Test", middleData.getQC_CHECK_QC_STANDARD());
data.add(item);
writes.addAll(MyLogFormatUtil.getBlockContext(infoMap));
// Spectrum Analysis Finished
writes.add(MyLogFormatUtil.getTitleFormat(MyLogFormatUtil.GTitleSpectrumEnd));
// todo Data store successfully .....
// todo Instance status successfully set to: P.....
// todo Error info: data has NULL INF.....
// Sample Analyse Successfully at %1
String analyseEnd = MyLogFormatUtil.analyseResultsEnd.replace("%1", "date");
writes.add(MyLogFormatUtil.getTitleFormat(analyseEnd));
}
/**
 * 将 Channel/Energy/Error 三列数据按列宽对齐, 格式化为日志行列表.
 * 三个 List 必须等长, 每个下标 i 对应一行; 每行以 "\n" 结尾, 同时回显到标准输出.
 *
 * 访问级别由 private 放宽为包级, 便于单元测试; 对外部调用方无影响.
 *
 * @param channel     Channel 列数据
 * @param channelUnit Channel 列单位 (可为空串)
 * @param energys     Energy 列数据, 与 channel 等长
 * @param energyUnit  Energy 列单位
 * @param errors      Error 列数据, 与 channel 等长
 * @param errorUnit   Error 列单位
 * @return 按列对齐后的日志行; 输入为空时返回空列表
 */
static List<String> getAttribute(List<String> channel, String channelUnit, List<String> energys, String energyUnit,
        List<String> errors, String errorUnit){
    List<String> result = new ArrayList<>();
    // 空输入直接返回, 避免后面 data.get(0) 下标越界
    if (channel.isEmpty()) {
        return result;
    }
    List<List<String>> data = new ArrayList<>();
    // 初始化数据: 每行三列 "名称 : 值 单位"
    for (int i = 0; i < channel.size(); i++) {
        data.add(Arrays.asList(
                "Channel : " + channel.get(i) + " " + channelUnit,
                "Energy : " + energys.get(i) + " " + energyUnit,
                "Error : " + errors.get(i) + " " + errorUnit)
        );
    }
    // 计算每列数据中最长的字符串长度, 作为该列的对齐宽度
    int[] columnWidths = new int[data.get(0).size()];
    for (List<String> row : data) {
        for (int i = 0; i < row.size(); i++) {
            columnWidths[i] = Math.max(columnWidths[i], row.get(i).length());
        }
    }
    // 构造格式化字符串: 每列左对齐, 列间额外留 4 个空格
    StringBuilder formatBuilder = new StringBuilder();
    for (int i = 0; i < columnWidths.length; i++) {
        formatBuilder.append("%-" + (columnWidths[i] + 4) + "s");
    }
    formatBuilder.append("\n");
    String format = formatBuilder.toString();
    // 格式化输出日志 (同时打印到控制台, 保留原有行为)
    for (List<String> row : data) {
        String log = String.format(format, row.toArray());
        result.add(log);
        System.out.print(log);
    }
    return result;
}
/**
 * 本地演示入口: 用一组示例刻度数据检查 getAttribute 的列对齐输出效果.
 */
public static void main(String[] args) {
    // 示例三列数据: 通道 / 能量 / 误差, 三者等长
    List<String> demoChannels = Arrays.asList("59.541", "88.034", "122.061", "165.857", "391.698", "661.657", "834.838", "898.036", "1115.540", "1173.230", "1332.490", "1836.050");
    List<String> demoEnergies = Arrays.asList("0.168", "0.176", "0.174", "0.155", "0.092", "0.059", "0.051", "0.040", "0.040", "0.031", "0.028", "0.022");
    List<String> demoErrors = Arrays.asList("0.003", "0.004", "0.003", "0.003", "0.002", "0.001", "0.001", "0.001", "0.001", "0.001", "0.001", "0.000");
    // 单位参数传空串, 仅验证对齐格式
    getAttribute(demoChannels, "", demoEnergies, "", demoErrors, "");
}
public void saveAnalysis(GStoreMiddleProcessData middleData,Integer sampleId){
GardsAnalyses analysis = toAnalysis(middleData);
analysis.setSampleId(sampleId);
@ -303,12 +681,14 @@ public class Sample_G_Analysis {
}
}
public void getNuclideLinesG(){
public Map<String, NuclideLines> getNuclideLinesG() {
Object nuclideLibs = redisUtil.get(RedisConstant.NUCLIDE_LINES_LIB + "G");
return Objects.isNull(nuclideLibs) ? Maps.newHashMap() : (Map<String, NuclideLines>) nuclideLibs;
}
public void getNuclideLinesP(){
public Map<String, NuclideLines> getNuclideLinesP(){
Object nuclideLibs = redisUtil.get(RedisConstant.NUCLIDE_LINES_LIB + "P");
return Objects.isNull(nuclideLibs) ? Maps.newHashMap() : (Map<String, NuclideLines>) nuclideLibs;
}
public Map<String,String> fieldMap(){
@ -407,7 +787,7 @@ public class Sample_G_Analysis {
setting.setECutAnalysis_High(Double.POSITIVE_INFINITY);
}
String systemType = energySpectrumStruct.system_type;
if (StrUtil.equals(systemType,"P")){
if (StrUtil.equals(systemType, SpectrumSystemType.P.name())){
String ECutAnalysis_Low = mapSetting.get(Setting.ECUTANALYSIS_LOW_P);
if (StrUtil.isNotBlank(ECutAnalysis_Low))
setting.setECutAnalysis_Low(Double.parseDouble(ECutAnalysis_Low));
@ -524,4 +904,43 @@ public class Sample_G_Analysis {
return new ArrayList<>();
}
}
/**
 * 将能谱解析结果结构体拆分填充到 PHDFile 的各个数据块中.
 * 每个数据块均通过属性名匹配 (BeanUtil.copyProperties) 从 spectrumStruct 复制同名字段.
 *
 * @param phdFile        待填充的 PHD 文件对象
 * @param spectrumStruct 能谱文件解析得到的结构体
 */
private void setPHDFile(PHDFile phdFile, EnergySpectrumStruct spectrumStruct) {
    phdFile.setHeader(copyStruct(spectrumStruct, new HeaderBlock()));
    phdFile.setCollect(copyStruct(spectrumStruct, new CollectionBlock()));
    phdFile.setMsgInfo(copyStruct(spectrumStruct, new MessageInfo()));
    phdFile.setAcq(copyStruct(spectrumStruct, new AcquisitionBlock()));
    phdFile.setSpec(copyStruct(spectrumStruct, new GSpectrumBlock()));
    phdFile.setProcess(copyStruct(spectrumStruct, new ProcessingBlock()));
    phdFile.setCalibration(copyStruct(spectrumStruct, new CalibrationBlock()));
    phdFile.setSampleBlock(copyStruct(spectrumStruct, new SampleBlock()));
    phdFile.setCertificate(copyStruct(spectrumStruct, new CertificateBlock()));
}

/**
 * 按属性名从能谱结构体复制字段到目标数据块, 并返回该数据块 (便于链式调用).
 *
 * @param source 能谱解析结构体
 * @param block  新建的目标数据块
 * @return 填充后的 block 本身
 */
private static <T> T copyStruct(EnergySpectrumStruct source, T block) {
    BeanUtil.copyProperties(source, block);
    return block;
}
}