Merge remote-tracking branch 'origin/station' into station

Commit: 24910d27f4
@@ -26,6 +26,11 @@ public class SpectrumPathProperties implements Serializable {
     */
    private String rootPath;

    /**
     * Log file storage path
     */
    private String logPath;

    /**
     * Spectrum file storage paths, keyed by spectrum system type/spectrum type, with the storage path as the value
     */
@@ -0,0 +1,44 @@
package org.jeecg.modules.base.entity.configuration;

import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableName;
import com.fasterxml.jackson.annotation.JsonFormat;
import lombok.Data;
import org.springframework.format.annotation.DateTimeFormat;

import java.io.Serializable;
import java.util.Date;

@Data
@TableName(value = "CONFIGURATION.GARDS_NUCL_LINES_LIB")
public class GardsNuclLinesLib implements Serializable {

    @TableField(value = "NAME")
    private String name;

    @TableField(value = "ENERGY")
    private Integer energy;

    @TableField(value = "ENERGY_UNCERT")
    private Integer energyUncert;

    @TableField(value = "YIELD")
    private Integer yield;

    @TableField(value = "YIELD_UNCERT")
    private Integer yieldUncert;

    @TableField(value = "KEY_FLAG")
    private Integer keyFlag;

    @TableField(value = "NUCLIDE_ID")
    private Integer nuclideId;

    @TableField(value = "FULLNAME")
    private String fullName;

    @TableField(value = "MODDATE")
    @DateTimeFormat(pattern = "yyyy-MM-dd HH:mm:ss")
    @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
    private Date MODDATE;

}
@@ -29,6 +29,12 @@
            <artifactId>ejml-simple</artifactId>
            <version>0.39</version>
        </dependency>

        <dependency>
            <groupId>com.google.guava</groupId>
            <artifactId>guava</artifactId>
            <version>23.0</version>
        </dependency>
    </dependencies>

</project>
jeecg-module-spectrum-analysis/src/main/java/org/jeecg/common/cache/LocalCache.java (new file, 33 lines)
@@ -0,0 +1,33 @@
package org.jeecg.common.cache;

import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import org.jeecg.modules.entity.vo.PHDFile;
import org.springframework.stereotype.Component;

import java.util.concurrent.TimeUnit;

@Component
public class LocalCache {

    private Cache<String, PHDFile> phdCache = CacheBuilder.newBuilder()
            // Initial cache size; set it reasonably, the cache grows as needed
            .initialCapacity(10)
            // Maximum number of entries
            .maximumSize(100)
            // Concurrency level
            .concurrencyLevel(5)
            // Expiration: entries expire 5 hours after they are written
            .expireAfterWrite(5, TimeUnit.HOURS)
            // Record cache hit-rate statistics
            .recordStats()
            .build();

    public Cache<String, PHDFile> getPHDCache() {
        return phdCache;
    }

    public void setPHDCache(Cache<String, PHDFile> phdCache) {
        this.phdCache = phdCache;
    }

}
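The class above backs the move away from Redis for parsed spectra (see the GammaServiceImpl changes further down). A minimal usage sketch, assuming the LocalCache bean is injected the same way GammaServiceImpl injects it; the sample ID value is a placeholder:

    // Sketch only: localCache is an injected LocalCache bean, "1001" is a placeholder sample ID.
    Cache<String, PHDFile> phdCache = localCache.getPHDCache();
    PHDFile phd = new PHDFile();
    phdCache.put("1001", phd);                       // cache the parsed spectrum under its sample ID
    PHDFile cached = phdCache.getIfPresent("1001");  // returns null once the 5-hour expireAfterWrite window has passed
    if (cached == null) {
        // re-parse the spectrum (e.g. via gammaFileUtil.loadFile(...)) and put it back into the cache
    }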
(File diff suppressed because it is too large.)
@@ -1,16 +1,17 @@
package org.jeecg.modules.controller;

import org.jeecg.common.api.vo.Result;
import org.jeecg.modules.entity.vo.CoeffData;
import org.jeecg.modules.entity.vo.InputData;
import org.jeecg.modules.service.IDataService;
import org.jeecg.modules.service.IGammaService;
import org.jeecg.modules.service.ISpectrumAnalysisService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.bind.annotation.*;

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.util.List;

@RestController
@RequestMapping("gamma")
@@ -36,10 +37,113 @@ public class GammaController {
        return gammaService.configure(sampleId);
    }

    @GetMapping("Reprocessing")
    @PostMapping("Reprocessing")
    public Result Reprocessing(@RequestParam Integer sampleId){
        return gammaService.Reprocessing(sampleId);
    }

    @GetMapping("InteractiveTool")
    public Result InteractiveTool(@RequestParam Integer sampleId){
        return gammaService.InteractiveTool(sampleId);
    }

    @GetMapping("ZeroTime")
    public Result ZeroTime(@RequestParam Integer sampleId){
        return gammaService.ZeroTime(sampleId);
    }

    @GetMapping("Korsum")
    public Result Korsum(){
        return gammaService.Korsum();
    }

    @PostMapping("KorSumInput")
    public Result KorSumInput(@RequestBody CoeffData coeffData){
        Result result = new Result();
        List<InputData> input = gammaService.KorSumInput(coeffData);
        result.setSuccess(true);
        result.setResult(input);
        return result;
    }

    @GetMapping("energyCalibration")
    public Result energyCalibration(@RequestParam Integer sampleId){
        return gammaService.energyCalibration(sampleId);
    }

    @GetMapping("resolutionCalibration")
    public Result resolutionCalibration(@RequestParam Integer sampleId){
        return gammaService.resolutionCalibration(sampleId);
    }

    @GetMapping("EfficiencyCalibration")
    public Result EfficiencyCalibration(@RequestParam Integer sampleId){
        return gammaService.EfficiencyCalibration(sampleId);
    }

    @GetMapping("NuclideLibrary")
    public Result NuclideLibrary(String editEnergy, double err, String libraryName, String nuclideName, HttpServletRequest request){
        return gammaService.NuclideLibrary(editEnergy, err, libraryName, nuclideName, request);
    }

    @GetMapping("configUserLibrary")
    public Result configUserLibrary(HttpServletRequest request){
        return gammaService.configUserLibrary(request);
    }

    @GetMapping("viewComment")
    public Result viewComment(@RequestParam Integer sampleId){
        return gammaService.viewComment(sampleId);
    }

    @GetMapping("peakInformation")
    public Result peakInformation(@RequestParam Integer sampleId){
        return gammaService.peakInformation(sampleId);
    }

    @GetMapping("viewARR")
    public Result viewARR(Integer sampleId, HttpServletResponse response){
        return gammaService.viewARR(sampleId, response);
    }

    @GetMapping("viewRRR")
    public Result viewRRR(Integer sampleId){
        return gammaService.viewRRR(sampleId);
    }

    @GetMapping("radionuclideActivity")
    public Result radionuclideActivity(Integer sampleId){
        return gammaService.radionuclideActivity(sampleId);
    }

    @GetMapping("Spectrum")
    public Result Spectrum(Integer sampleId){
        return gammaService.Spectrum(sampleId);
    }

    @GetMapping("sampleInformation")
    public Result sampleInformation(Integer sampleId){
        return gammaService.sampleInformation(sampleId);
    }

    @GetMapping("viewQCResult")
    public Result viewQCResult(Integer sampleId){
        return gammaService.viewQCResult(sampleId);
    }

    @GetMapping("viewRLR")
    public Result viewRLR(Integer sampleId){
        return gammaService.viewRLR(sampleId);
    }

    @GetMapping("viewAutomaticAnalysisLog")
    public Result viewAutomaticAnalysisLog(Integer sampleId, HttpServletResponse response){
        return gammaService.viewAutomaticAnalysisLog(sampleId, response);
    }

    @GetMapping("viewGammaviewerLog")
    public Result viewGammaviewerLog(Integer sampleId){
        return gammaService.viewGammaviewerLog(sampleId);
    }

}
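In this hunk the Reprocessing endpoint moves from @GetMapping to @PostMapping, and KorSumInput takes its CoeffData as a JSON request body. A minimal client-side sketch of the new calling convention, using Spring's RestTemplate; the base URL, sample ID, and energy values are placeholders, not part of the diff:

    import java.util.Arrays;
    import org.jeecg.common.api.vo.Result;
    import org.jeecg.modules.entity.vo.CoeffData;
    import org.springframework.web.client.RestTemplate;

    public class GammaClientSketch {
        public static void main(String[] args) {
            RestTemplate rest = new RestTemplate();

            // Reprocessing is now invoked with POST; sampleId stays a query parameter.
            Result reprocessed = rest.postForObject(
                    "http://localhost:8080/gamma/Reprocessing?sampleId={id}", null, Result.class, 1001);

            // KorSumInput expects a CoeffData JSON body (only the energy list is filled here).
            CoeffData coeff = new CoeffData();
            coeff.setEnergys(Arrays.asList(121.78, 344.28, 1408.01));
            Result korsum = rest.postForObject("http://localhost:8080/gamma/KorSumInput", coeff, Result.class);

            System.out.println(reprocessed + " / " + korsum);
        }
    }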
@@ -73,7 +73,7 @@ public class GardsNuclLinesIdedSpectrum implements Serializable {
    /**
     * Uncertainty of the detection efficiency at this peak
     */
    private Double uncEffic;
    private Double unEffic;

    /**
     * Minimum detectable activity computed from this peak
@@ -0,0 +1,37 @@
package org.jeecg.modules.entity.vo;

import lombok.Data;

import java.io.Serializable;
import java.util.List;

@Data
public class CoeffData implements Serializable {

    public Double totalEf1;

    public Double totalEf2;

    public Double totalEf3;

    public Double totalEf4;

    public Double totalEf5;

    public Double totalEf6;

    public Double Effciency1;

    public Double Effciency2;

    public Double Effciency3;

    public Double Effciency4;

    public Double Effciency5;

    public Double Effciency6;

    public List<Double> energys;

}
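The two coefficient sets carried by CoeffData feed the empirical efficiency fit evaluated in GammaServiceImpl.KorSumInput further down in this commit. The helper below is only a restatement of that computation (energies converted from keV to MeV), not additional functionality:

    // Restates the fit used in KorSumInput:
    //   eff(E) = exp(c1*E + c2 + c3/E + c4/E^2 + c5/E^3 + c6/E^4), with E in MeV.
    // (c1..c6) = Effciency1..Effciency6 for the peak efficiency, totalEf1..totalEf6 for the total efficiency.
    static double evalFit(double energyKeV, double c1, double c2, double c3, double c4, double c5, double c6) {
        double e = energyKeV / 1000.0;   // keV -> MeV, as in KorSumInput
        return Math.exp(c1 * e + c2 + c3 / e + c4 / Math.pow(e, 2) + c5 / Math.pow(e, 3) + c6 / Math.pow(e, 4));
    }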
@@ -0,0 +1,9 @@
package org.jeecg.modules.entity.vo;

import lombok.Data;

import java.io.Serializable;

@Data
public class ConfigureData implements Serializable {
}
@@ -0,0 +1,18 @@
package org.jeecg.modules.entity.vo;

import lombok.Data;

import java.io.Serializable;

@Data
public class EfficiencyData implements Serializable {

    private String Energy;

    private String Efficiency;

    private String Fit;

    private String Delta;

}
@@ -0,0 +1,18 @@
package org.jeecg.modules.entity.vo;

import lombok.Data;

import java.io.Serializable;

@Data
public class EnergyData implements Serializable {

    public String channel;

    public String Energy;

    public String Fit;

    public String Delta;

}
@ -0,0 +1,423 @@
|
|||
package org.jeecg.modules.entity.vo;
|
||||
|
||||
import lombok.Data;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.util.HashMap;
|
||||
import java.util.LinkedList;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
@Data
|
||||
public class GStoreMiddleProcessData implements Serializable {
|
||||
|
||||
public boolean dbWriteFlag; //数据库存储标示 例如
|
||||
public String dbWriteStatusFlag; //数据写入状态标示 例如 P U F 等
|
||||
public String ErrorInfo; //错误信息
|
||||
//gards_analyses数据表数据
|
||||
public String IdAnalysis; //分析ID号
|
||||
public String sample_id; //样品ID号
|
||||
public String analyses_analysisBegin; //分析开始时
|
||||
public String analyses_analysisEnd; //分析的结束时间
|
||||
public String analyses_type; //Reviewed:交互 Auto:自动
|
||||
public String analyses_software; //使用的软件
|
||||
public String analyses_swVersion; //软件版本号
|
||||
public String analyses_analyst; //分析员名称
|
||||
public String analyses_baseline_filePath; //基线数据路径
|
||||
public String analyses_lc_filePath; //lc数据基线路径
|
||||
public String analyses_scac_filePath; //scac数据路径
|
||||
public String analyses_save_filePath; //原始文件存储文件名
|
||||
public String analyses_baselineMethod; //基线计数方
|
||||
public String analyses_peaksMethod; //寻峰方法描
|
||||
public String analyses_nuclideMethod; //核素识别方
|
||||
public String analyses_uncCalcMethod; //不确定度计
|
||||
public String analyses_lcMethod; //Lc计算方法
|
||||
public String analyses_LogPath; //日志路径
|
||||
public String analyses_ReportPath; //报告路径
|
||||
|
||||
public String analyses_baseline_absolute_filePath; //基线数据绝对路径
|
||||
public String analyses_lc_absolute_filePath; //lc数据基线绝对路径
|
||||
public String analyses_scac_absolute_filePath; //scac数据绝对路径
|
||||
public String analyses_save_absolute_filePath; //原始文件存储绝对文件名
|
||||
public String analyses_absolute_LogPath; //日志绝对路径
|
||||
public String analyses_absolute_ReportPath; //报告绝对路径
|
||||
|
||||
public double analyses_searchStartChannel; //寻峰起始道
|
||||
public double analyses_searchEndChannel; //寻峰结束道
|
||||
public double analyses_searchThreshold; //寻峰阈值
|
||||
public double analyses_numberOfPeaks; //峰数目
|
||||
public double analyses_totalCounts; //总计数 未知
|
||||
public double analyses_category; //分级结果
|
||||
public String analyses_comments; //注释
|
||||
|
||||
//gards_ calibration_pairs数据表
|
||||
public String calibration_pairs_sample_type; //G:gamma探测器的数据,#g_;B:beta探测器的数据,#b_
|
||||
|
||||
public String calibration_pairs_E_Caltype; //energy:能量刻度
|
||||
public String calibration_pairs_E_Input; //PHD:代表数据来自PHD文件;External:代表数据来自外部,如刻度工具、其它文件等
|
||||
public List<String> calibration_pairs_E_idCalPoint; //刻度点ID号
|
||||
public List<String> calibration_pairs_E_xValue; //
|
||||
public List<String> calibration_pairs_E_yValue; //
|
||||
public List<String> calibration_pairs_E_uncYValue; //
|
||||
|
||||
public String calibration_pairs_EF_Caltype; //Efficiency:效率刻度;
|
||||
public String calibration_pairs_EF_Input; //PHD:代表数据来自PHD文件;External:代表数据来自外部,如刻度工具、其它文件等
|
||||
public List<String> calibration_pairs_EF_idCalPoint; //刻度点ID号
|
||||
public List<String> calibration_pairs_EF_xValue; //
|
||||
public List<String> calibration_pairs_EF_yValue; //
|
||||
public List<String> calibration_pairs_EF_uncYValue; //
|
||||
|
||||
public String calibration_pairs_R_Caltype; //Resolution:分辨率刻度;
|
||||
public String calibration_pairs_R_Input; //PHD:代表数据来自PHD文件;External:代表数据来自外部,如刻度工具、其它文件等
|
||||
public List<String> calibration_pairs_R_idCalPoint; //刻度点ID号
|
||||
public List<String> calibration_pairs_R_xValue; //
|
||||
public List<String> calibration_pairs_R_yValue; //
|
||||
public List<String> calibration_pairs_R_uncYValue; //
|
||||
|
||||
public String calibration_pairs_T_Caltype; //TotalEfficiency:总效率刻度;
|
||||
public String calibration_pairs_T_Input; //PHD:代表数据来自PHD文件;External:代表数据来自外部,如刻度工具、其它文件等
|
||||
public List<String> calibration_pairs_T_idCalPoint; //刻度点ID号
|
||||
public List<String> calibration_pairs_T_xValue; //
|
||||
public List<String> calibration_pairs_T_yValue; //
|
||||
public List<String> calibration_pairs_T_uncYValue; //
|
||||
|
||||
//原始谱 数据
|
||||
public String calibration_pairs_S_E_Caltype; //energy:能量刻度
|
||||
public String calibration_pairs_S_E_Input; //PHD:代表数据来自PHD文件;External:代表数据来自外部,如刻度工具、其它文件等
|
||||
public List<String> calibration_pairs_S_E_idCalPoint; //刻度点ID号
|
||||
public List<String> calibration_pairs_S_E_xValue; //
|
||||
public List<String> calibration_pairs_S_E_yValue; //
|
||||
public List<String> calibration_pairs_S_E_uncYValue; //
|
||||
|
||||
public String calibration_pairs_S_EF_Caltype; //Efficiency:效率刻度;
|
||||
public String calibration_pairs_S_EF_Input; //PHD:代表数据来自PHD文件;External:代表数据来自外部,如刻度工具、其它文件等
|
||||
public List<String> calibration_pairs_S_EF_idCalPoint; //刻度点ID号
|
||||
public List<String> calibration_pairs_S_EF_xValue; //
|
||||
public List<String> calibration_pairs_S_EF_yValue; //
|
||||
public List<String> calibration_pairs_S_EF_uncYValue; //
|
||||
|
||||
public String calibration_pairs_S_R_Caltype; //Resolution:分辨率刻度;
|
||||
public String calibration_pairs_S_R_Input; //PHD:代表数据来自PHD文件;External:代表数据来自外部,如刻度工具、其它文件等
|
||||
public List<String> calibration_pairs_S_R_idCalPoint; //刻度点ID号
|
||||
public List<String> calibration_pairs_S_R_xValue; //
|
||||
public List<String> calibration_pairs_S_R_yValue; //
|
||||
public List<String> calibration_pairs_S_R_uncYValue; //
|
||||
|
||||
public String calibration_pairs_S_T_Caltype; //TotalEfficiency:总效率刻度;
|
||||
public String calibration_pairs_S_T_Input; //PHD:代表数据来自PHD文件;External:代表数据来自外部,如刻度工具、其它文件等
|
||||
public List<String> calibration_pairs_S_T_idCalPoint; //刻度点ID号
|
||||
public List<String> calibration_pairs_S_T_xValue; //
|
||||
public List<String> calibration_pairs_S_T_yValue; //
|
||||
public List<String> calibration_pairs_S_T_uncYValue; //
|
||||
|
||||
//gards_ calibration 数据表
|
||||
public String calibration_sample_type; //G:gamma探测器的数据,#g_;B:beta探测器的数据,#b_
|
||||
public String calibration_E_Caltype; //energy:能量刻度
|
||||
public double calibration_E_function; //拟合方程ID号(统一定义)
|
||||
public String calibration_E_functionTypeDef; //函数类型描述
|
||||
public String calibration_E_functionDef; //拟合方程描述
|
||||
public double calibration_E_startOfRange; //拟合的起始值
|
||||
public double calibration_E_endOfRange; //拟合的结束值
|
||||
public String calibration_E_coeff_string; //拟合系数
|
||||
public String calibration_E_uncoeff_string; //拟合系数1不确定度
|
||||
|
||||
public String calibration_EF_Caltype; //Efficiency:效率刻度;
|
||||
public double calibration_EF_function; //拟合方程ID号(统一定义)
|
||||
public String calibration_EF_functionTypeDef; //函数类型描述
|
||||
public String calibration_EF_functionDef; //拟合方程描述
|
||||
public double calibration_EF_startOfRange; //拟合的起始值
|
||||
public double calibration_EF_endOfRange; //拟合的结束值
|
||||
public String calibration_EF_coeff_string; //拟合系数
|
||||
public String calibration_EF_uncoeff_string; //拟合系数1不确定度
|
||||
|
||||
public String calibration_R_Caltype; //Resolution:分辨率刻度;
|
||||
public double calibration_R_function; //拟合方程ID号(统一定义)
|
||||
public String calibration_R_functionTypeDef; //函数类型描述
|
||||
public String calibration_R_functionDef; //拟合方程描述
|
||||
public double calibration_R_startOfRange; //拟合的起始值
|
||||
public double calibration_R_endOfRange; //拟合的结束值
|
||||
public String calibration_R_coeff_string; //拟合系数
|
||||
public String calibration_R_uncoeff_string; //拟合系数1不确定度
|
||||
|
||||
public String calibration_T_Caltype; //TotalEfficiency:总效率刻度;
|
||||
public double calibration_T_function; //拟合方程ID号(统一定义)
|
||||
public String calibration_T_functionTypeDef; //函数类型描述
|
||||
public String calibration_T_functionDef; //拟合方程描述
|
||||
public double calibration_T_startOfRange; //拟合的起始值
|
||||
public double calibration_T_endOfRange; //拟合的结束值
|
||||
public String calibration_T_coeff_string; //拟合系数
|
||||
public String calibration_T_uncoeff_string; //拟合系数1不确定度
|
||||
|
||||
//gards_ peaks数据表
|
||||
public List<String> peaks_idPeak; //峰序号
|
||||
public List<String> peaks_peakCentroid; //峰中心道(道址)
|
||||
public List<String> peaks_uncpeakCentroid; //峰中心道不确定度(道址)
|
||||
public List<String> peaks_Energy; //峰中心道能量(keV)
|
||||
public List<String> peaks_uncEnergy; //峰中心道能量不确定度(keV)
|
||||
public List<String> peaks_Area; //峰面积(计数)。已扣除基线面积,但未扣除空白样品计数和探测器本底计数
|
||||
public List<String> peaks_areaErr; //峰面积不确定度(以计数为单位)
|
||||
public List<String> peaks_netCountRate; //峰的净计数率(1/s)=峰面积/活时间 未知
|
||||
public List<String> peaks_uncNetCountRate; //峰的净计数率的不确定度(1/s) 未知
|
||||
public List<String> peaks_Efficiency; //测量系统在峰能量处的绝对效率
|
||||
public List<String> peaks_Uncefficiency; //测量系统在峰能量处的绝对效率的不确定度
|
||||
public List<String> peaks_Fwhm; //峰的半高宽(keV)
|
||||
public List<String> peaks_Fwhmc; //峰的半高宽(Channel)
|
||||
public List<String> peaks_Significance; //峰的灵敏度
|
||||
public List<String> peaks_Sensitivity; //重峰序号
|
||||
public List<String> peaks_multiIndex; //峰的感兴趣区的起始道left
|
||||
public List<String> peaks_ROIstart; //峰的感兴趣区的结束道right
|
||||
public List<String> peaks_ROIend; //峰序号
|
||||
public List<String> peaks_tail; //
|
||||
public List<String> peaks_tailAlpha; //
|
||||
public List<String> peaks_upperTail; //
|
||||
public List<String> peaks_upperTailAlpha; //
|
||||
public List<String> peaks_BWWidthChan; //
|
||||
public List<String> peaks_recoilBetaChan;
|
||||
public List<String> peaks_recoilDeltaChan; //
|
||||
public List<String> peaks_stepRatio; //
|
||||
public List<String> peaks_backgroundArea; //在峰区域下的基线面积(计数)
|
||||
public List<String> peaks_meanBackCount; //基线面积/道数(计数)
|
||||
public List<String> peaks_Lc; //峰的可探测线Lc
|
||||
public List<String> peaks_Ld; //峰的可探测线Ld
|
||||
public List<String> peaks_Nuclide_name; //文件名称
|
||||
public List<String> peaks_comments;
|
||||
// gards_ nucl_lines_ided数据表
|
||||
private Map<String,GStoreMiddleProcessDataNuclLinesIded> nucl_lines_ided_data;
|
||||
//gards_ nucl_ided数据表
|
||||
public List<String> nucl_ided_Nuclidename; // 核素名称
|
||||
public List<String> nucl_ided_Type; //核素类型 未知
|
||||
public List<String> nucl_ided_Halflife; //核素半衰期
|
||||
public List<String> nucl_ided_ave_activ; //平均活度值NULL
|
||||
public List<String> nucl_ided_ave_activ_err; //平均活度值不确定度NULL
|
||||
public List<String> nucl_ided_activ_key; //主射线活度值 未知
|
||||
public List<String> nucl_ided_activ_key_err; //主射线活度值不确定度
|
||||
public List<String> nucl_ided_mda; //核素的最小可探测活度
|
||||
public List<String> nucl_ided_mda_err; //核素的最小可探测活度不确定度
|
||||
public List<String> nucl_ided_nid_flag; //核素识别标志未知
|
||||
public List<String> nucl_ided_csc_ratio; //符合相加校正因子(无设为1)
|
||||
public List<String> nucl_ided_csc_ratio_err; //符合相加校正因子不确定度(无设为0)
|
||||
public List<String> nucl_ided_csc_mod_flag; //活度是否经过符合相加校正0
|
||||
public List<String> nucl_ided_MDC;
|
||||
public List<String> nucl_ided_Concentration;
|
||||
public List<String> nucl_ided_Key_Energy; //
|
||||
public List<String> nucl_ided_Key_Yield; //
|
||||
|
||||
//GARDS_QC_CHECK
|
||||
public List<String> QC_CHECK_QC_NAME;
|
||||
public List<String> QC_CHECK_QC_VALUE;
|
||||
public List<String> QC_CHECK_QC_STANDARD;
|
||||
public List<String> QC_CHECK_QC_RESULT;
|
||||
|
||||
//sample information
|
||||
public String sample_collection_start;
|
||||
public String sample_collection_stop;
|
||||
public String sample_time;
|
||||
public String sample_quantity;
|
||||
public String sample_decay_time;
|
||||
public String sample_acquisiton_start;
|
||||
public String sample_acquistion_stop;
|
||||
|
||||
public String sample_acquistion_time;
|
||||
public String sample_Geometry;
|
||||
public String sample_Avg_Flow_Rate;
|
||||
public String sample_stationID; //
|
||||
public String sample_detectID; //
|
||||
public String sample_Type; //
|
||||
|
||||
public String Collection_Station_Comments;
|
||||
public String NDC_Analysis_General_Comments;
|
||||
|
||||
//SpecSetup
|
||||
public SpecSetup setting_specSetup;
|
||||
|
||||
public GStoreMiddleProcessData(){
|
||||
dbWriteFlag = false;
|
||||
dbWriteStatusFlag = "";
|
||||
ErrorInfo = "";
|
||||
IdAnalysis = "";
|
||||
sample_id = "";
|
||||
analyses_analysisBegin = "";
|
||||
analyses_analysisEnd = "";
|
||||
analyses_type = "";
|
||||
analyses_software = "";
|
||||
analyses_swVersion = "";
|
||||
analyses_analyst = "";
|
||||
analyses_baseline_filePath = "";
|
||||
analyses_lc_filePath = "";
|
||||
analyses_scac_filePath = "";
|
||||
analyses_save_filePath = "";
|
||||
analyses_baselineMethod = "";
|
||||
analyses_peaksMethod = "";
|
||||
analyses_nuclideMethod = "";
|
||||
analyses_uncCalcMethod = "";
|
||||
analyses_lcMethod = "";
|
||||
analyses_LogPath = "";
|
||||
analyses_ReportPath = "";
|
||||
analyses_baseline_absolute_filePath = "";
|
||||
analyses_lc_absolute_filePath = "";
|
||||
analyses_scac_absolute_filePath = "";
|
||||
analyses_save_absolute_filePath = "";
|
||||
analyses_absolute_LogPath = "";
|
||||
analyses_absolute_ReportPath = "";
|
||||
analyses_searchStartChannel = 0;
|
||||
analyses_searchEndChannel = 0;
|
||||
analyses_searchThreshold = 0;
|
||||
analyses_numberOfPeaks = 0;
|
||||
analyses_totalCounts = 0;
|
||||
analyses_category = 0;
|
||||
analyses_comments = "";
|
||||
calibration_pairs_sample_type = "";
|
||||
calibration_pairs_E_Caltype = "";
|
||||
calibration_pairs_E_Input = "";
|
||||
calibration_pairs_E_idCalPoint = new LinkedList<>();
|
||||
calibration_pairs_E_xValue = new LinkedList<>();
|
||||
calibration_pairs_E_yValue = new LinkedList<>();
|
||||
calibration_pairs_E_uncYValue = new LinkedList<>();
|
||||
calibration_pairs_EF_Caltype = "";
|
||||
calibration_pairs_EF_Input = "";
|
||||
calibration_pairs_EF_idCalPoint = new LinkedList<>();
|
||||
calibration_pairs_EF_xValue = new LinkedList<>();
|
||||
calibration_pairs_EF_yValue = new LinkedList<>();
|
||||
calibration_pairs_EF_uncYValue = new LinkedList<>();
|
||||
calibration_pairs_R_Caltype = "";
|
||||
calibration_pairs_R_Input = "";
|
||||
calibration_pairs_R_idCalPoint = new LinkedList<>();
|
||||
calibration_pairs_R_xValue = new LinkedList<>();
|
||||
calibration_pairs_R_yValue = new LinkedList<>();
|
||||
calibration_pairs_R_uncYValue = new LinkedList<>();
|
||||
calibration_pairs_T_Caltype = "";
|
||||
calibration_pairs_T_Input = "";
|
||||
calibration_pairs_T_idCalPoint = new LinkedList<>();
|
||||
calibration_pairs_T_xValue = new LinkedList<>();
|
||||
calibration_pairs_T_yValue = new LinkedList<>();
|
||||
calibration_pairs_T_uncYValue = new LinkedList<>();
|
||||
calibration_pairs_S_E_Caltype = "";
|
||||
calibration_pairs_S_E_Input = "";
|
||||
calibration_pairs_S_E_idCalPoint = new LinkedList<>();
|
||||
calibration_pairs_S_E_xValue = new LinkedList<>();
|
||||
calibration_pairs_S_E_yValue = new LinkedList<>();
|
||||
calibration_pairs_S_E_uncYValue = new LinkedList<>();
|
||||
calibration_pairs_S_EF_Caltype = "";
|
||||
calibration_pairs_S_EF_Input = "";
|
||||
calibration_pairs_S_EF_idCalPoint = new LinkedList<>();
|
||||
calibration_pairs_S_EF_xValue = new LinkedList<>();
|
||||
calibration_pairs_S_EF_yValue = new LinkedList<>();
|
||||
calibration_pairs_S_EF_uncYValue = new LinkedList<>();
|
||||
calibration_pairs_S_R_Caltype = "";
|
||||
calibration_pairs_S_R_Input = "";
|
||||
calibration_pairs_S_R_idCalPoint = new LinkedList<>();
|
||||
calibration_pairs_S_R_xValue = new LinkedList<>();
|
||||
calibration_pairs_S_R_yValue = new LinkedList<>();
|
||||
calibration_pairs_S_R_uncYValue = new LinkedList<>();
|
||||
calibration_pairs_S_T_Caltype = "";
|
||||
calibration_pairs_S_T_Input = "";
|
||||
calibration_pairs_S_T_idCalPoint = new LinkedList<>();
|
||||
calibration_pairs_S_T_xValue = new LinkedList<>();
|
||||
calibration_pairs_S_T_yValue = new LinkedList<>();
|
||||
calibration_pairs_S_T_uncYValue = new LinkedList<>();
|
||||
calibration_sample_type = "";
|
||||
calibration_E_Caltype = "";
|
||||
calibration_E_function = 0;
|
||||
calibration_E_functionTypeDef = "";
|
||||
calibration_E_functionDef = "";
|
||||
calibration_E_startOfRange = 0;
|
||||
calibration_E_endOfRange = 0;
|
||||
calibration_E_coeff_string = "";
|
||||
calibration_E_uncoeff_string = "";
|
||||
calibration_EF_Caltype = "";
|
||||
calibration_EF_function = 0;
|
||||
calibration_EF_functionTypeDef = "";
|
||||
calibration_EF_functionDef = "";
|
||||
calibration_EF_startOfRange = 0;
|
||||
calibration_EF_endOfRange = 0;
|
||||
calibration_EF_coeff_string = "";
|
||||
calibration_EF_uncoeff_string = "";
|
||||
calibration_R_Caltype = "";
|
||||
calibration_R_function = 0;
|
||||
calibration_R_functionTypeDef = "";
|
||||
calibration_R_functionDef = "";
|
||||
calibration_R_startOfRange = 0;
|
||||
calibration_R_endOfRange = 0;
|
||||
calibration_R_coeff_string = "";
|
||||
calibration_R_uncoeff_string = "";
|
||||
calibration_T_Caltype = "";
|
||||
calibration_T_function = 0;
|
||||
calibration_T_functionTypeDef = "";
|
||||
calibration_T_functionDef = "";
|
||||
calibration_T_startOfRange = 0;
|
||||
calibration_T_endOfRange = 0;
|
||||
calibration_T_coeff_string = "";
|
||||
calibration_T_uncoeff_string = "";
|
||||
peaks_idPeak = new LinkedList<>();
|
||||
peaks_peakCentroid = new LinkedList<>();
|
||||
peaks_uncpeakCentroid = new LinkedList<>();
|
||||
peaks_Energy = new LinkedList<>();
|
||||
peaks_uncEnergy = new LinkedList<>();
|
||||
peaks_Area = new LinkedList<>();
|
||||
peaks_areaErr = new LinkedList<>();
|
||||
peaks_netCountRate = new LinkedList<>();
|
||||
peaks_uncNetCountRate = new LinkedList<>();
|
||||
peaks_Efficiency = new LinkedList<>();
|
||||
peaks_Uncefficiency = new LinkedList<>();
|
||||
peaks_Fwhm = new LinkedList<>();
|
||||
peaks_Fwhmc = new LinkedList<>();
|
||||
peaks_Significance = new LinkedList<>();
|
||||
peaks_Sensitivity = new LinkedList<>();
|
||||
peaks_multiIndex = new LinkedList<>();
|
||||
peaks_ROIstart = new LinkedList<>();
|
||||
peaks_ROIend = new LinkedList<>();
|
||||
peaks_tail = new LinkedList<>();
|
||||
peaks_tailAlpha = new LinkedList<>();
|
||||
peaks_upperTail = new LinkedList<>();
|
||||
peaks_upperTailAlpha = new LinkedList<>();
|
||||
peaks_BWWidthChan = new LinkedList<>();
|
||||
peaks_recoilBetaChan = new LinkedList<>();
|
||||
peaks_recoilDeltaChan = new LinkedList<>();
|
||||
peaks_stepRatio = new LinkedList<>();
|
||||
peaks_backgroundArea = new LinkedList<>();
|
||||
peaks_meanBackCount = new LinkedList<>();
|
||||
peaks_Lc = new LinkedList<>();
|
||||
peaks_Ld = new LinkedList<>();
|
||||
peaks_Nuclide_name = new LinkedList<>();
|
||||
peaks_comments = new LinkedList<>();
|
||||
nucl_lines_ided_data = new HashMap<>();
|
||||
nucl_ided_Nuclidename = new LinkedList<>();
|
||||
nucl_ided_Type = new LinkedList<>();
|
||||
nucl_ided_Halflife = new LinkedList<>();
|
||||
nucl_ided_ave_activ = new LinkedList<>();
|
||||
nucl_ided_ave_activ_err = new LinkedList<>();
|
||||
nucl_ided_activ_key = new LinkedList<>();
|
||||
nucl_ided_activ_key_err = new LinkedList<>();
|
||||
nucl_ided_mda = new LinkedList<>();
|
||||
nucl_ided_mda_err = new LinkedList<>();
|
||||
nucl_ided_nid_flag = new LinkedList<>();
|
||||
nucl_ided_csc_ratio = new LinkedList<>();
|
||||
nucl_ided_csc_ratio_err = new LinkedList<>();
|
||||
nucl_ided_csc_mod_flag = new LinkedList<>();
|
||||
nucl_ided_MDC = new LinkedList<>();
|
||||
nucl_ided_Concentration = new LinkedList<>();
|
||||
nucl_ided_Key_Energy = new LinkedList<>();
|
||||
nucl_ided_Key_Yield = new LinkedList<>();
|
||||
QC_CHECK_QC_NAME = new LinkedList<>();
|
||||
QC_CHECK_QC_VALUE = new LinkedList<>();
|
||||
QC_CHECK_QC_STANDARD = new LinkedList<>();
|
||||
QC_CHECK_QC_RESULT = new LinkedList<>();
|
||||
sample_collection_start = "";
|
||||
sample_collection_stop = "";
|
||||
sample_time = "";
|
||||
sample_quantity = "";
|
||||
sample_decay_time = "";
|
||||
sample_acquisiton_start = "";
|
||||
sample_acquistion_stop = "";
|
||||
sample_acquistion_time = "";
|
||||
sample_Geometry = "";
|
||||
sample_Avg_Flow_Rate = "";
|
||||
sample_stationID = "";
|
||||
sample_detectID = "";
|
||||
sample_Type = "";
|
||||
Collection_Station_Comments = "";
|
||||
NDC_Analysis_General_Comments = "";
|
||||
setting_specSetup = new SpecSetup();
|
||||
}
|
||||
|
||||
}
|
|
@@ -0,0 +1,51 @@
package org.jeecg.modules.entity.vo;

import lombok.Data;

import java.io.Serializable;
import java.util.LinkedList;
import java.util.List;

@Data
public class GStoreMiddleProcessDataNuclLinesIded implements Serializable {

    // gards_nucl_lines_ided table
    public List<String> nuclideFullname;   // FullName
    public List<String> idPeak;            // Peak index
    public List<String> Energy;            // Energy of the library line for this nuclide (keV)
    public List<String> uncEnergy;         // Energy uncertainty of the library line (keV)
    public List<String> Abundance;         // Emission probability of the library line
    public List<String> uncAbundance;      // Uncertainty of the emission probability
    public List<String> Activity;          // Activity computed from this peak
    public List<String> uncActivity;       // Uncertainty of the activity computed from this peak
    public List<String> Effic;             // Detection efficiency at this peak
    public List<String> uncEffic;          // Uncertainty of the detection efficiency at this peak
    public List<String> Mda;               // Minimum detectable activity computed from this peak
    public List<String> key_flag;          // Key-line flag: 0 - no; 1 - yes
    public List<String> csc_ratio;         // Coincidence-summing correction factor (1 if none)
    public List<String> csc_ratio_err;     // Uncertainty of the coincidence-summing correction factor (0 if none)
    public List<String> csc_mod_flag;      // Whether the activity was coincidence-summing corrected, 0; unknown
    public List<String> MDC;
    public List<String> Concentration;

    public GStoreMiddleProcessDataNuclLinesIded(){
        nuclideFullname = new LinkedList<>();
        idPeak = new LinkedList<>();
        Energy = new LinkedList<>();
        uncEnergy = new LinkedList<>();
        Abundance = new LinkedList<>();
        uncAbundance = new LinkedList<>();
        Activity = new LinkedList<>();
        uncActivity = new LinkedList<>();
        Effic = new LinkedList<>();
        uncEffic = new LinkedList<>();
        Mda = new LinkedList<>();
        key_flag = new LinkedList<>();
        csc_ratio = new LinkedList<>();
        csc_ratio_err = new LinkedList<>();
        csc_mod_flag = new LinkedList<>();
        MDC = new LinkedList<>();
        Concentration = new LinkedList<>();
    }

}
@@ -0,0 +1,18 @@
package org.jeecg.modules.entity.vo;

import lombok.Data;

import java.io.Serializable;

@Data
public class InputData implements Serializable {

    private Double energy;

    private Double TotalEffi;

    private Double PeakEffi;

    private Double uncertain;

}
@@ -9,26 +9,42 @@ import java.util.List;
@Data
public class NuclideActMda implements Serializable {
    private boolean bCalculateMDA;

    private int calculateIdx;

    private int key_flag; // The key_flag-th line is the key line, counted from 0; key_flag < 0 means there is no key line

    private double halflife;

    private double activity;

    private double act_err;

    private double mda;

    private double mdc;

    private double efficiency;

    private double effi_err;

    private double concentration;

    private List<String> fullNames;

    private List<Double> vEnergy; // Energies of the matched gamma lines

    private List<Double> vUncertE;

    private List<Double> vYield; // Branching ratios of the matched gamma lines

    private List<Double> vUncertY;

    private List<Integer> vPeakIdx; // Indices of the matched peaks, starting from 1

    public NuclideActMda() {
        bCalculateMDA = false;
        calculateIdx = -1;
        calculateIdx = 0;
        key_flag = -1;
        halflife = 0.0;
        activity = 0.0;
@@ -1,5 +1,6 @@
package org.jeecg.modules.entity.vo;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import lombok.Data;

import java.io.Serializable;
@ -9,92 +10,140 @@ import java.util.List;
|
|||
import java.util.Map;
|
||||
|
||||
@Data
|
||||
@JsonIgnoreProperties(ignoreUnknown = true)
|
||||
public class PHDFile implements Serializable {
|
||||
|
||||
private boolean isValid; // 是否有效谱
|
||||
|
||||
private boolean bAnalyed; // 记录是否被分析
|
||||
|
||||
private String analy_start_time;
|
||||
|
||||
private String filename; // 谱文件名称
|
||||
|
||||
private String filepath; // 谱文件路径
|
||||
|
||||
private String log_path;
|
||||
|
||||
private String report_path;
|
||||
|
||||
private String baseline_path;
|
||||
|
||||
private String lc_path;
|
||||
|
||||
private String scac_path;
|
||||
|
||||
private String totalCmt; // 谱文件总注释
|
||||
|
||||
private String oriTotalCmt;// 原始谱总注释
|
||||
|
||||
SpecSetup usedSetting; // 当前使用的分析设置
|
||||
private SpecSetup usedSetting; // 当前使用的分析设置
|
||||
|
||||
SpecSetup setting; // 新修改的分析设置
|
||||
private SpecSetup setting; // 新修改的分析设置
|
||||
|
||||
// 分析结果
|
||||
private List<PeakInfo> vPeak;
|
||||
|
||||
private List<Double> vEnergy;
|
||||
|
||||
private List<Double> vBase;
|
||||
|
||||
private List<Double> vLc;
|
||||
|
||||
private List<Double> vScac;
|
||||
//stdvec vFwhm;
|
||||
//stdvec vFwhmc;
|
||||
BaseControls baseCtrls;
|
||||
|
||||
private BaseControls baseCtrls;
|
||||
|
||||
// 当前修改的刻度名称
|
||||
private String newEner;
|
||||
|
||||
private String newReso;
|
||||
|
||||
private String newEffi;
|
||||
|
||||
private String newTotE;
|
||||
|
||||
// 当前寻峰结果所用的刻度名称
|
||||
private String usedEner;
|
||||
|
||||
private String usedReso;
|
||||
|
||||
private String usedEffi;
|
||||
|
||||
private String usedTotE;
|
||||
|
||||
// 当前寻峰结果所用的刻度数据
|
||||
private GEnergyBlock usedEnerKD;
|
||||
|
||||
private GResolutionBlock usedResoKD;
|
||||
|
||||
private GEfficiencyBlock usedEffiKD;
|
||||
|
||||
private TotaleffBlock usedTotEKD;
|
||||
|
||||
// 存储所有的刻度数据
|
||||
private Map<String, GEnergyBlock> mapEnerKD; // 能量刻度
|
||||
|
||||
private Map<String, GResolutionBlock> mapResoKD; // 分辨率刻度
|
||||
|
||||
private Map<String, GEfficiencyBlock> mapEffiKD; // 效率刻度
|
||||
|
||||
private Map<String, TotaleffBlock> mapTotEKD; // 总效率刻度
|
||||
|
||||
// 当前寻峰结果所用的刻度参数
|
||||
private ParameterInfo usedEnerPara;
|
||||
|
||||
private ParameterInfo usedResoPara;
|
||||
|
||||
private ParameterInfo usedEffiPara;
|
||||
|
||||
private ParameterInfo usedTotEPara;
|
||||
|
||||
// 存储所有的刻度参数
|
||||
private Map<String, ParameterInfo> mapEnerPara;
|
||||
|
||||
private Map<String, ParameterInfo> mapResoPara;
|
||||
|
||||
private Map<String, ParameterInfo> mapEffiPara;
|
||||
|
||||
private Map<String, ParameterInfo> mapTotEPara;
|
||||
|
||||
// 其他参数,目前存储的是默认值
|
||||
private ParameterInfo para_stepRatio;
|
||||
|
||||
private ParameterInfo para_tail;
|
||||
|
||||
private ParameterInfo para_tailAlpha;
|
||||
|
||||
private ParameterInfo para_tailRight;
|
||||
|
||||
private ParameterInfo para_tailRightAlpha;
|
||||
|
||||
// 谱基本信息
|
||||
private String id_sample;
|
||||
|
||||
private String id_analysis;
|
||||
|
||||
private String status;
|
||||
|
||||
private String category;
|
||||
|
||||
private HeaderBlock header;
|
||||
|
||||
private MessageInfo msgInfo;
|
||||
|
||||
private GSpectrumBlock Spec;
|
||||
|
||||
private AcquisitionBlock acq;
|
||||
|
||||
private CollectionBlock collect;
|
||||
|
||||
private ProcessingBlock process;
|
||||
|
||||
private CalibrationBlock calibration;
|
||||
|
||||
private SampleBlock sampleBlock;
|
||||
|
||||
private CertificateBlock certificate;
|
||||
|
||||
// QC Check
|
||||
|
|
|
@@ -53,9 +53,9 @@ public class PeakInfo implements Serializable {

    public double BWWidthChan;

    public double recoilBetaChan;
    public String recoilBetaChan;

    public double recoilDeltaChan;
    public String recoilDeltaChan;

    public String comments;

@@ -0,0 +1,18 @@
package org.jeecg.modules.entity.vo;

import lombok.Data;

import java.io.Serializable;

@Data
public class ResolutionData implements Serializable {

    private String Energy;

    private String FWHM;

    private String Fit;

    private String Delta;

}
@@ -1,6 +1,8 @@
package org.jeecg.modules.entity.vo;

import com.fasterxml.jackson.annotation.JsonFormat;
import lombok.Data;
import org.springframework.format.annotation.DateTimeFormat;

import java.io.Serializable;
import java.util.Date;
@@ -34,8 +36,12 @@

    private boolean KeepCalPeakSearchPeaks;

    @DateTimeFormat(pattern = "yyyy-MM-dd HH:mm:ss")
    @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
    private Date refTime_act; // Activity reference time; defaults to AcqStartTime

    @DateTimeFormat(pattern = "yyyy-MM-dd HH:mm:ss")
    @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
    private Date refTime_conc; // Defaults to CollectStartTime

    public SpecSetup(){
@@ -0,0 +1,14 @@
package org.jeecg.modules.entity.vo;

import lombok.Data;

import java.io.Serializable;

@Data
public class TableAssociation implements Serializable {

    private String exLevel;

    private String identified;

}
@@ -0,0 +1,16 @@
package org.jeecg.modules.entity.vo;

import lombok.Data;

import java.io.Serializable;

@Data
public class TableDaughter implements Serializable {

    private String daughters;

    private String branchingratios;

    private String daughtersstable;

}
@@ -0,0 +1,30 @@
package org.jeecg.modules.entity.vo;

import lombok.Data;

import java.io.Serializable;

@Data
public class TableNuclideActivity implements Serializable {

    private String nuclide;

    private String halfLife;

    private String energy;

    private String yield;

    private String efficiency;

    private String activity;

    private String actErr;

    private String mda;

    private String conc;

    private String mdc;

}
@@ -0,0 +1,28 @@
package org.jeecg.modules.entity.vo;

import lombok.Data;

import java.io.Serializable;

@Data
public class TablePeak implements Serializable {

    private String energy;

    private String centroid;

    private String multiplet;

    private String FWHM;

    private String netArea;

    private String areaErr;

    private String significant;

    private String sensitivity;

    private String indentify;

}
@@ -0,0 +1,26 @@
package org.jeecg.modules.entity.vo;

import lombok.Data;

import java.io.Serializable;

@Data
public class TablePeakFit implements Serializable {

    private String energy;

    private String energyErr;

    private String netArea;

    private String areaErr;

    private String netCountRate;

    private String ncRateErr;

    private String lc;

    private String significance;

}
@@ -0,0 +1,18 @@
package org.jeecg.modules.entity.vo;

import lombok.Data;

import java.io.Serializable;

@Data
public class TableQCResult implements Serializable {

    private String name;

    private String flag;

    private double value;

    private String standard;

}
@@ -0,0 +1,28 @@
package org.jeecg.modules.entity.vo;

import lombok.Data;

import java.io.Serializable;

@Data
public class TableResult implements Serializable {

    private String nuclide;

    private String activity;

    private String actErr;

    private String factor1;

    private String confidence1;

    private String conc;

    private String concErr;

    private String factor2;

    private String confidence2;

}
@@ -3,6 +3,8 @@ package org.jeecg.modules.mapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import org.apache.ibatis.annotations.Param;
import org.jeecg.modules.base.entity.configuration.GardsNuclLib;
import org.jeecg.modules.base.entity.configuration.GardsNuclLinesLib;
import org.jeecg.modules.base.entity.rnman.GardsAnalySetting;
import org.jeecg.modules.entity.*;
import org.jeecg.modules.entity.vo.*;
@@ -82,4 +84,19 @@ public interface SpectrumAnalysisMapper {
    List<GardsROIResultsSpectrum> ReadROIResults(@Param(value = "dbName") String dbName, @Param(value = "idAnalysis") Integer idAnalysis, @Param(value = "sampleId") Integer sampleId);

    List<GardsXeResultsSpectrum> ReadXeResults(@Param(value = "dbName") String dbName, @Param(value = "idAnalysis") Integer idAnalysis, @Param(value = "sampleId") Integer sampleId);

    List<String> getFULLNuclideNames(@Param(value = "min") Double min, @Param(value = "max") Double max);

    List<String> getRelevantNuclideNames(@Param(value = "min") Double min, @Param(value = "max") Double max);

    List<String> getNuclideNames(@Param(value = "dbName") String dbName);

    List<GardsNuclLinesLib> getNuclideLine(@Param(value = "min") Double min, @Param(value = "max") Double max, @Param(value = "name") String name);

    GardsNuclLib getNuclideInfo(@Param(value = "name") String name);

    GardsNuclLib getParentAndDaughter(@Param(value = "name") String name);

    String findAutomaticLogPath(@Param(value = "sampleId") Integer sampleId);

}
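These new mapper methods back the NuclideLibrary features added to GammaService. A sketch of a typical call, assuming the mapper is injected as in GammaServiceImpl; the energy window (keV) and nuclide name below are placeholder values:

    // Sketch only: spectrumAnalysisMapper is the injected SpectrumAnalysisMapper.
    Double min = 661.0, max = 662.5;                  // placeholder energy window in keV
    List<String> candidates = spectrumAnalysisMapper.getFULLNuclideNames(min, max);
    List<GardsNuclLinesLib> lines = spectrumAnalysisMapper.getNuclideLine(min, max, "CS137");  // placeholder name
    GardsNuclLib info = spectrumAnalysisMapper.getNuclideInfo("CS137");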
@@ -0,0 +1,13 @@
package org.jeecg.modules.mapper;

import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import org.apache.ibatis.annotations.Param;
import org.jeecg.modules.base.entity.postgre.SysDefaultNuclide;

import java.util.List;

public interface SysDefaultNuclideSpectrumMapper extends BaseMapper<SysDefaultNuclide> {

    List<String> findNuclidesByUserName(@Param(value = "userName") String userName);

}
@@ -585,4 +585,82 @@
        AND XE_RESULTS.SAMPLE_ID = #{sampleId}
    </select>

    <select id="getFULLNuclideNames" resultType="java.lang.String">
        select
            distinct a.name
        from
            CONFIGURATION.GARDS_NUCL_LINES_LIB a
        where a.energy > #{min}
        and a.energy < #{max}
    </select>

    <select id="getRelevantNuclideNames" resultType="java.lang.String">
        select
            distinct a.name
        from
            CONFIGURATION.GARDS_NUCL_LINES_LIB a,
            CONFIGURATION.GARDS_RELEVANT_NUCLIDE b
        where a.name = b.name
        and a.energy > #{min}
        and a.energy < #{max}
    </select>

    <select id="getNuclideNames" resultType="java.lang.String">
        select a.name from ${dbName} a
    </select>

    <select id="getNuclideLine" resultType="org.jeecg.modules.base.entity.configuration.GardsNuclLinesLib">
        select
            a.fullname,
            a.energy,
            a.energy_uncert,
            a.yield,
            a.yield_uncert,
            a.key_flag
        from
            CONFIGURATION.GARDS_NUCL_LINES_LIB a
        where a.name = #{name}
        <if test=" min != null and max != null ">
            and a.energy > #{min}
            and a.energy < #{max}
        </if>
        order by a.yield
    </select>

    <select id="getNuclideInfo" resultType="org.jeecg.modules.base.entity.configuration.GardsNuclLib">
        select a.num_lines, a.halflife, a.halflife_err from CONFIGURATION.GARDS_NUCL_LIB a where a.name = #{name}
    </select>

    <select id="getParentAndDaughter" resultType="org.jeecg.modules.base.entity.configuration.GardsNuclLib">
        select
            a.parents_1,
            a.parents_2,
            a.parents_3,
            a.parents_4,
            a.parents_5,
            a.parents_6,
            a.daughters_1,
            a.branchingratios_1,
            a.daughtersstable_1,
            a.daughters_2,
            a.branchingratios_2,
            a.daughtersstable_2,
            a.daughters_3,
            a.branchingratios_3,
            a.daughtersstable_3,
            a.daughters_4,
            a.branchingratios_4,
            a.daughtersstable_4,
            a.daughters_5,
            a.branchingratios_5,
            a.daughtersstable_5
        from
            CONFIGURATION.GARDS_NUCL_LIB a
        where a.name = #{name}
    </select>

    <select id="findAutomaticLogPath" resultType="java.lang.String">
        SELECT LOG_PATH FROM RNAUTO.GARDS_ANALYSES WHERE SAMPLE_ID = #{sampleId}
    </select>

</mapper>
@@ -0,0 +1,16 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="org.jeecg.modules.mapper.SysDefaultNuclideSpectrumMapper">

    <select id="findNuclidesByUserName" resultType="java.lang.String">
        select
            nuclide_name
        from
            sys_default_nuclide
        where use_type = 3
        and nuclide_type = 'G'
        and create_by = #{userName}
    </select>

</mapper>
@@ -1,8 +1,12 @@
package org.jeecg.modules.service;

import org.jeecg.common.api.vo.Result;
import org.jeecg.modules.entity.vo.CoeffData;
import org.jeecg.modules.entity.vo.InputData;

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.util.List;

public interface IGammaService{
@@ -12,4 +16,44 @@ public interface IGammaService{

    Result Reprocessing(Integer sampleId);

    Result InteractiveTool(Integer sampleId);

    Result ZeroTime(Integer sampleId);

    Result Korsum();

    List<InputData> KorSumInput(CoeffData coeffData);

    Result energyCalibration(Integer sampleId);

    Result resolutionCalibration(Integer sampleId);

    Result EfficiencyCalibration(Integer sampleId);

    Result NuclideLibrary(String editEnergy, double err, String libraryName, String nuclideName, HttpServletRequest request);

    Result configUserLibrary(HttpServletRequest request);

    Result viewComment(Integer sampleId);

    Result peakInformation(Integer sampleId);

    Result viewARR(Integer sampleId, HttpServletResponse response);

    Result viewRRR(Integer sampleId);

    Result radionuclideActivity(Integer sampleId);

    Result Spectrum(Integer sampleId);

    Result sampleInformation(Integer sampleId);

    Result viewQCResult(Integer sampleId);

    Result viewRLR(Integer sampleId);

    Result viewAutomaticAnalysisLog(Integer sampleId, HttpServletResponse response);

    Result viewGammaviewerLog(Integer sampleId);

}
@@ -0,0 +1,12 @@
package org.jeecg.modules.service;

import com.baomidou.mybatisplus.extension.service.IService;
import org.jeecg.modules.base.entity.postgre.SysDefaultNuclide;

import java.util.List;

public interface ISysDefaultNuclideSpectrumService extends IService<SysDefaultNuclide> {

    List<String> findNuclidesByUserName(String userName);

}
@@ -1,40 +1,70 @@
package org.jeecg.modules.service.impl;

import cn.hutool.core.util.ObjectUtil;
import com.baomidou.dynamic.datasource.annotation.DS;
import com.baomidou.mybatisplus.core.toolkit.CollectionUtils;
import com.baomidou.mybatisplus.core.toolkit.StringPool;
import com.baomidou.mybatisplus.core.toolkit.StringUtils;
import com.google.common.cache.Cache;
import org.apache.commons.net.ftp.FTPClient;
import org.apache.commons.net.ftp.FTPFile;
import org.jeecg.common.api.vo.Result;
import org.jeecg.common.cache.LocalCache;
import org.jeecg.common.properties.SpectrumPathProperties;
import org.jeecg.common.system.util.JwtUtil;
import org.jeecg.common.util.DateUtils;
import org.jeecg.common.util.FTPUtil;
import org.jeecg.common.util.GammaFileUtil;
import org.jeecg.common.util.RedisUtil;
import org.jeecg.modules.entity.vo.NuclideLines;
import org.jeecg.modules.entity.vo.PHDFile;
import org.jeecg.modules.entity.vo.SpecSetup;
import org.jeecg.modules.base.entity.configuration.GardsNuclLinesLib;
import org.jeecg.modules.entity.vo.*;
import org.jeecg.modules.mapper.SpectrumAnalysisMapper;
import org.jeecg.modules.native_jni.CalValuesHandler;
import org.jeecg.modules.service.IGammaService;
import org.jeecg.modules.service.ISysDefaultNuclideSpectrumService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;

import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServletRequest;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.io.InputStream;
import java.text.ParseException;
import java.time.LocalDateTime;
import java.util.*;
import java.util.stream.Collectors;

@Service(value = "gammaService")
@DS("ora")
public class GammaServiceImpl implements IGammaService {

    @Autowired
    private RedisUtil redisUtil;
    private LocalCache localCache;
    @Autowired
    private GammaFileUtil gammaFileUtil;
    @Autowired
    private ISysDefaultNuclideSpectrumService defaultNuclideSpectrumService;
    @Autowired
    private SpectrumAnalysisMapper spectrumAnalysisMapper;
    @Autowired
    private SpectrumPathProperties spectrumPathProperties;
    @Autowired
    private FTPUtil ftpUtil;
    @Value("${ZeroTime}")
    private String ZeroTimeStr;

    @Override
    public Result gammaByDB(String dbName, Integer sampleId) {
        Result result = new Result();
        Map<String, Object> map = new HashMap<>();
        Cache<String, PHDFile> phdCache = localCache.getPHDCache();
        PHDFile phd = new PHDFile();
        // Read the file contents
        gammaFileUtil.loadFile(sampleId, phd, result);
        // Initialize the basic array information
        gammaFileUtil.SetBaseInfo(phd, sampleId);
        // Read the related information from the database -- this part of the code is not finished yet
        gammaFileUtil.getResultFromDB(dbName, sampleId, phd, result);
        // Update the 'QC Flags' state
@@ -50,7 +80,8 @@ public class GammaServiceImpl implements IGammaService {
        double deadTime = (phd.getAcq().getAcquisition_real_time() - phd.getAcq().getAcquisition_live_time()) / phd.getAcq().getAcquisition_real_time();
        map.put("dead_time", String.format("%.2f", deadTime*100));
        gammaFileUtil.UpdateChart(phd, map);
        redisUtil.set("Gamma : Gamma_"+sampleId, phd);
        phdCache.put(sampleId.toString(), phd);
        localCache.setPHDCache(phdCache);
        result.setSuccess(true);
        result.setResult(map);
        return result;
@@ -59,7 +90,12 @@
    @Override
    public Result configure(Integer sampleId) {
        Result result = new Result();
        PHDFile phd = (PHDFile) redisUtil.get("Gamma_" + sampleId);
        Cache<String, PHDFile> phdCache = localCache.getPHDCache();
        PHDFile phd = phdCache.getIfPresent(sampleId.toString());
        if (Objects.isNull(phd)){
            result.error500("请先选择解析文件!");
            return result;
        }
        Map<String, Object> map = new HashMap<>();
        // Initialize the UI controls with the settings used by the current spectrum
        SpecSetup setup = phd.getUsedSetting();
@ -94,4 +130,877 @@ public class GammaServiceImpl implements IGammaService {
|
|||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Result InteractiveTool(Integer sampleId) {
|
||||
Result result = new Result();
|
||||
Map<String, Object> map = new HashMap<>();
|
||||
Cache<String, PHDFile> phdCache = localCache.getPHDCache();
|
||||
PHDFile phd = phdCache.getIfPresent(sampleId.toString());
|
||||
if (Objects.isNull(phd)){
|
||||
result.error500("请先选择解析文件!");
|
||||
return result;
|
||||
}
|
||||
//表单
|
||||
List<PeakInfo> vPeak = phd.getVPeak();
|
||||
map.put("table", vPeak);
|
||||
//Chart 折线图
|
||||
List<Long> m_vCount = new LinkedList<>();
|
||||
long m_nCount = phd.getSpec().getNum_g_channel();
|
||||
long m_nSChan = phd.getSpec().getBegin_channel();
|
||||
// 确保绘制曲线时所有谱都是从1道开始
|
||||
int i = 0;
|
||||
if(m_nSChan == 0){
|
||||
i = 1;
|
||||
}
|
||||
for(; i<m_nCount; ++i) {
|
||||
m_vCount.add(phd.getSpec().getCounts().get(i));
|
||||
}
|
||||
if(m_nSChan == 0) {
|
||||
m_vCount.add(0L);
|
||||
}
|
||||
ChartData channelCountChart = gammaFileUtil.Channel_Count(phd, m_vCount, m_nCount);
|
||||
ChartData channelBaseLineChart = gammaFileUtil.Channel_BaseLine(phd, m_vCount, m_nCount);
|
||||
List<ChartData> channelPeakChart = gammaFileUtil.Channel_Peak(phd, m_nCount);
|
||||
List<ShapeData> channelBaseCPChart = gammaFileUtil.Channel_BaseCP(phd);
|
||||
map.put("channelCountChart", channelCountChart);
|
||||
map.put("channelBaseLineChart", channelBaseLineChart);
|
||||
map.put("channelPeakChart", channelPeakChart);
|
||||
map.put("channelBaseCPChart", channelBaseCPChart);
|
||||
//Bar Chart 柱状图
|
||||
List<SeriseData> differance = gammaFileUtil.Differance(phd, vPeak, m_vCount, m_nCount);
|
||||
map.put("barChart", differance);
|
||||
|
||||
result.setSuccess(true);
|
||||
result.setResult(map);
|
||||
return result;
|
||||
}
@Override
public Result ZeroTime(Integer sampleId) {
Result result = new Result();
Map<String, Object> map = new HashMap<>();
List<String> zeroTimeList = new LinkedList<>();
if (StringUtils.isNotBlank(ZeroTimeStr)){
zeroTimeList = Arrays.asList(ZeroTimeStr.split(StringPool.COMMA));
}
map.put("list_fission1", zeroTimeList);
map.put("list_fission2", zeroTimeList);
LocalDateTime now = LocalDateTime.now();
map.put("Date", now.getYear()+"-"+now.getMonthValue()+"-"+now.getDayOfMonth());
map.put("Time", now.getHour()+":"+now.getMinute()+":"+now.getSecond());
result.setSuccess(true);
result.setResult(map);
return result;
}
@Override
public Result Korsum() {
Result result = new Result();
Map<String, Object> map = new HashMap<>();
// Read the data file to obtain the Energy and Nuclide lists
List<Double> m_vEnergy = new LinkedList<>();
List<String> m_vNuclide = new LinkedList<>();
gammaFileUtil.ReadData(m_vEnergy, m_vNuclide);
CoeffData coeffData = new CoeffData();
coeffData.setTotalEf1(-0.024326);
coeffData.setTotalEf2(-1.857587);
coeffData.setTotalEf3(0.111096);
coeffData.setTotalEf4(-0.003896);
coeffData.setTotalEf5(-0.000345);
coeffData.setTotalEf6(0.000017);
coeffData.setEffciency1(-0.329812);
coeffData.setEffciency2(-3.493192);
coeffData.setEffciency3(0.583265);
coeffData.setEffciency4(-0.065884);
coeffData.setEffciency5(0.003255);
coeffData.setEffciency6(-0.000059);
coeffData.setEnergys(m_vEnergy);
List<InputData> input = this.KorSumInput(coeffData);
map.put("Energy", input);
map.put("Nuclide", m_vNuclide);
result.setSuccess(true);
result.setResult(map);
return result;
}
@Override
public List<InputData> KorSumInput(CoeffData coeffData) {
double ener, effi, totE;
List<Double> vTotE = new LinkedList<>();
vTotE.add(0.005);
vTotE.add(0.01);
vTotE.add(0.02);
List<Double> vEffi = new LinkedList<>();
vEffi.add(0.002);
vEffi.add(0.0045);
vEffi.add(0.008);
List<Double> energys = coeffData.getEnergys();
List<InputData> inputDataList = new LinkedList<>();
for(int i=0; i< energys.size(); ++i) {
InputData data = new InputData();
// Convert keV to MeV
ener = energys.get(i)/1000;
// If the energy is below 34 keV, skip the calculation
if(ener < 0.034) {
if(i < 3) {
data.setTotalEffi(vTotE.get(i));
data.setPeakEffi(vEffi.get(i));
} else {
data.setTotalEffi(null);
data.setPeakEffi(null);
}
}else {
effi = Math.exp( coeffData.getEffciency1()*ener + coeffData.getEffciency2() + coeffData.getEffciency3()/ener + coeffData.getEffciency4()/Math.pow(ener,2) + coeffData.getEffciency5()/Math.pow(ener, 3) + coeffData.getEffciency6()/Math.pow(ener, 4) );
totE = Math.exp( coeffData.getTotalEf1()*ener + coeffData.getTotalEf2() + coeffData.getTotalEf3()/ener + coeffData.getTotalEf4()/Math.pow(ener,2) + coeffData.getTotalEf5()/Math.pow(ener, 3) + coeffData.getTotalEf6()/Math.pow(ener, 4) );
data.setTotalEffi(totE);
data.setPeakEffi(effi);
}
data.setEnergy(energys.get(i));
data.setUncertain(10.0);
inputDataList.add(data);
}
return inputDataList;
}
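/*
 * The peak and total efficiencies computed above follow an empirical log-polynomial
 * form in the energy E (MeV), with the six coefficients taken from CoeffData:
 *
 *   eff(E) = exp(c1*E + c2 + c3/E + c4/E^2 + c5/E^3 + c6/E^4)
 *
 * Below 34 keV the model is not evaluated; the first three rows fall back to the
 * tabulated defaults in vTotE/vEffi and later rows are left empty.
 */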
@Override
public Result energyCalibration(Integer sampleId) {
Result result = new Result();
Map<String, Object> map = new HashMap<>();
System.loadLibrary("GammaAnaly");
Cache<String, PHDFile> phdCache = localCache.getPHDCache();
PHDFile phd = phdCache.getIfPresent(sampleId.toString());
if (Objects.isNull(phd)){
result.error500("请先选择解析文件!");
return result;
}
String usedEner = phd.getUsedEner();
List<String> dataSourceList = phd.getMapEnerKD().keySet().stream().collect(Collectors.toList());
map.put("list_dataSource", dataSourceList);
List<Double> m_vCurCentroid = phd.getMapEnerKD().get(usedEner).getCentroid_channel();
List<Double> m_vCurEnergy = phd.getMapEnerKD().get(usedEner).getG_energy();
ParameterInfo m_curParam = phd.getMapEnerPara().get(usedEner);

int num = m_vCurEnergy.size();
if(num < 1){
return result;
}

List<Double> vFit = CalValuesHandler.calFcnEval(m_vCurCentroid, m_curParam.getP()).counts;
List<EnergyData> energyDataList = new LinkedList<>();
for(int i=0; i<num; ++i) {
EnergyData energyData = new EnergyData();
double delta = (m_vCurEnergy.get(i) - vFit.get(i)) / m_vCurEnergy.get(i) * 100;
energyData.setChannel(String.format("%.3f", m_vCurCentroid.get(i)));
energyData.setEnergy(String.format("%.3f", m_vCurEnergy.get(i)));
energyData.setFit(String.format("%.3f", vFit.get(i)));
energyData.setDelta(String.format("%.3f", delta));
energyDataList.add(energyData);
}
map.put("table", energyDataList);
String equation = gammaFileUtil.UpdateEquationEnergy(m_curParam);
map.put("equation", equation);
gammaFileUtil.UpdateChartEnergy(m_vCurEnergy, m_curParam, m_vCurCentroid, phd, map);
result.setSuccess(true);
result.setResult(map);
return result;
}
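/*
 * The "delta" column filled in above is the relative fit residual in percent,
 *   delta_i = (E_i - fit_i) / E_i * 100
 * e.g. E = 661.660 keV and fit = 661.900 keV give delta of about -0.036 %.
 */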
@Override
public Result resolutionCalibration(Integer sampleId) {
Result result = new Result();
Map<String, Object> map = new HashMap<>();
System.loadLibrary("GammaAnaly");
Cache<String, PHDFile> phdCache = localCache.getPHDCache();
PHDFile phd = phdCache.getIfPresent(sampleId.toString());
if (Objects.isNull(phd)){
result.error500("请先选择解析文件!");
return result;
}
String usedReso = phd.getUsedReso();
List<String> dataSourceList = phd.getMapResoKD().keySet().stream().collect(Collectors.toList());
map.put("list_dataSource", dataSourceList);
List<Double> m_vCurReso = phd.getMapResoKD().get(usedReso).getFWHM();
List<Double> m_vCurEnergy = phd.getMapResoKD().get(usedReso).getG_energy();
ParameterInfo m_curParam = phd.getMapResoPara().get(usedReso);

int num = m_vCurEnergy.size();
if(num < 1){
return result;
}

List<Double> vFit = CalValuesHandler.calFcnEval(m_vCurEnergy, m_curParam.getP()).counts;
List<ResolutionData> resolutionDataList = new LinkedList<>();
for(int i=0; i<num; ++i) {
ResolutionData resolutionData = new ResolutionData();
double delta = (m_vCurReso.get(i) - vFit.get(i)) / m_vCurReso.get(i) * 100;
resolutionData.setEnergy(String.format("%.3f", m_vCurEnergy.get(i)));
resolutionData.setFWHM(String.format("%.3f", m_vCurReso.get(i)));
resolutionData.setFit(String.format("%.3f", vFit.get(i)));
resolutionData.setDelta(String.format("%.3f", delta));
resolutionDataList.add(resolutionData);
}
map.put("table", resolutionDataList);
String equation = gammaFileUtil.UpdateEquationResolution(m_curParam);
map.put("equation", equation);
gammaFileUtil.UpdateChartResolution(m_vCurEnergy, m_curParam, m_vCurReso, phd, map);
result.setSuccess(true);
result.setResult(map);
return result;
}
@Override
public Result EfficiencyCalibration(Integer sampleId) {
Result result = new Result();
Map<String, Object> map = new HashMap<>();
List<String> m_vFuncName = new LinkedList<>();
m_vFuncName.add("Interpolation"); // 1
m_vFuncName.add("HT Efficiency"); // 5
m_vFuncName.add("Log Polynomial"); // 6
m_vFuncName.add("Invlog Polynomial"); // 8
m_vFuncName.add("HAE Efficiency(1-3)"); // 93
m_vFuncName.add("HAE Efficiency(1-2)"); // 94
m_vFuncName.add("HAE Efficiency(1-2-3)"); // 95
map.put("function", m_vFuncName);
System.loadLibrary("GammaAnaly");
Cache<String, PHDFile> phdCache = localCache.getPHDCache();
PHDFile phd = phdCache.getIfPresent(sampleId.toString());
if (Objects.isNull(phd)){
result.error500("请先选择解析文件!");
return result;
}
String usedEffi = phd.getUsedEffi();
List<String> dataSourceList = phd.getMapEffiKD().keySet().stream().collect(Collectors.toList());
map.put("list_dataSource", dataSourceList);
List<Double> m_vCurEffi = phd.getMapEffiKD().get(usedEffi).getEfficiency();
List<Double> m_vCurEnergy = phd.getMapEffiKD().get(usedEffi).getG_energy();
ParameterInfo m_curParam = phd.getMapEffiPara().get(usedEffi);

int num = m_vCurEnergy.size();
if(num < 1){
return result;
}

List<Double> vFit = CalValuesHandler.calFcnEval(m_vCurEnergy, m_curParam.getP()).counts;
List<EfficiencyData> efficiencyDataList = new LinkedList<>();
for(int i=0; i<num; ++i) {
double delta = (m_vCurEffi.get(i) - vFit.get(i)) / m_vCurEffi.get(i) * 100;
EfficiencyData efficiencyData = new EfficiencyData();
efficiencyData.setEnergy(String.format("%.3f", m_vCurEnergy.get(i)));
efficiencyData.setEfficiency(String.format("%.3f", m_vCurEffi.get(i)));
efficiencyData.setFit(String.format("%.3f", vFit.get(i)));
efficiencyData.setDelta(String.format("%.3f", delta));
efficiencyDataList.add(efficiencyData);
}
map.put("table", efficiencyDataList);
String equation = gammaFileUtil.UpdateEquationEfficiency(m_vCurEnergy, m_curParam);
map.put("equation", equation);
gammaFileUtil.UpdateChartEfficiency(m_vCurEnergy, m_curParam, m_vCurEffi, phd, map);
result.setSuccess(true);
result.setResult(map);
return result;
}
@Override
public Result NuclideLibrary(String editEnergy, double err, String libraryName, String nuclideName, HttpServletRequest request) {
Result result = new Result();
Map<String, Object> map = new HashMap<>();
String userName = JwtUtil.getUserNameByToken(request);
List<String> nuclides = new LinkedList<>();
// Check whether the search parameters were supplied
if (StringUtils.isNotBlank(editEnergy) && Objects.nonNull(err)){
double editEnergyDou = Double.valueOf(editEnergy);
double min = editEnergyDou - err;
double max = editEnergyDou + err;
if (libraryName.equals("UserLibrary")){
// Read the nuclides followed by the current user from PostgreSQL; fall back to the admin defaults when the user has none
nuclides = defaultNuclideSpectrumService.findNuclidesByUserName(userName);
if (CollectionUtils.isEmpty(nuclides)){
nuclides = defaultNuclideSpectrumService.findNuclidesByUserName("admin");
}
}else if (libraryName.equals("FULLLibrary")){
nuclides = spectrumAnalysisMapper.getFULLNuclideNames(min, max);
}else if (libraryName.equals("RelevantLibrary")){
nuclides = spectrumAnalysisMapper.getRelevantNuclideNames(min, max);
}
}else {
if(libraryName.equals("UserLibrary")) {
// Read the nuclides followed by the current user from PostgreSQL; fall back to the admin defaults when the user has none
nuclides = defaultNuclideSpectrumService.findNuclidesByUserName(userName);
if (CollectionUtils.isEmpty(nuclides)){
nuclides = defaultNuclideSpectrumService.findNuclidesByUserName("admin");
}
} else if (libraryName.equals("FULLLibrary")){
nuclides = spectrumAnalysisMapper.getNuclideNames("CONFIGURATION.GARDS_NUCL_LIB");
} else if (libraryName.equals("RelevantLibrary")){
nuclides = spectrumAnalysisMapper.getNuclideNames("CONFIGURATION.GARDS_RELEVANT_NUCLIDE");
}
}
map.put("nuclides", nuclides);
if (StringUtils.isBlank(nuclideName)){
nuclideName = nuclides.get(0);
}
List<GardsNuclLinesLib> nuclLinesLibs = gammaFileUtil.InitNuclideLine(editEnergy, err, nuclideName);
map.put("nuclLinesLibs", nuclLinesLibs);
Map<String, String> nuclideInfo = gammaFileUtil.InitNuclideInfo(nuclideName);
map.put("nuclideInfo", nuclideInfo);
Map<String, Object> daughter = gammaFileUtil.InitParentAndDaughter(nuclideName);
map.put("daughter", daughter);
result.setSuccess(true);
result.setResult(map);
return result;
}
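/*
 * Search-window sketch (values illustrative only): when editEnergy is supplied the
 * library is filtered to [editEnergy - err, editEnergy + err], e.g.
 *   editEnergy = "661.66", err = 0.5  ->  window [661.16, 662.16] keV
 * With a blank editEnergy the full nuclide name list of the chosen library is returned.
 */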
@Override
public Result configUserLibrary(HttpServletRequest request) {
Result result = new Result();
Map<String, List<String>> map = new HashMap<>();
String userName = JwtUtil.getUserNameByToken(request);
List<String> nuclides = spectrumAnalysisMapper.getNuclideNames("CONFIGURATION.GARDS_NUCL_LIB");
List<String> userNuclides = defaultNuclideSpectrumService.findNuclidesByUserName(userName);
if (CollectionUtils.isEmpty(userNuclides)){
userNuclides = defaultNuclideSpectrumService.findNuclidesByUserName("admin");
}
map.put("AllNuclides", nuclides);
map.put("UserNuclides", userNuclides);
result.setSuccess(true);
result.setResult(map);
return result;
}
@Override
public Result viewComment(Integer sampleId) {
Result result = new Result();
String comments = "";
Cache<String, PHDFile> phdCache = localCache.getPHDCache();
PHDFile phd = phdCache.getIfPresent(sampleId.toString());
if (Objects.isNull(phd)){
result.error500("请先选择解析文件!");
return result;
}
String temp = phd.getOriTotalCmt().trim();
if(StringUtils.isNotBlank(temp)) {
comments += "Comments From Original Spectrum:\n" + temp;
}
if(Objects.nonNull(sampleId)) {
CommentData commentData = spectrumAnalysisMapper.viewComment(sampleId);
if(Objects.nonNull(commentData)) {
temp = commentData.getComment()==null?"":commentData.getComment().trim();
if(StringUtils.isNotBlank(temp)) {
comments += "\n\nComments From "+commentData.getAnalyst()+"\n:"+temp;
}
}
}
result.setSuccess(true);
result.setResult(comments);
return result;
}
public Result peakInformation(Integer sampleId){
Result result = new Result();
Cache<String, PHDFile> phdCache = localCache.getPHDCache();
PHDFile phd = phdCache.getIfPresent(sampleId.toString());
if (Objects.isNull(phd)){
result.error500("请先选择解析文件!");
return result;
}
List<PeakInfo> vPeak = phd.getVPeak();
List<TablePeak> tablePeaks = new LinkedList<>();
if (CollectionUtils.isNotEmpty(vPeak)){
for (PeakInfo peak: vPeak){
TablePeak tablePeak = new TablePeak();
tablePeak.setEnergy(String.format("%.3f", peak.energy));
tablePeak.setCentroid(String.format("%.3f", peak.peakCentroid));
tablePeak.setMultiplet(String.valueOf(peak.multiIndex));
tablePeak.setFWHM(String.format("%.3f", peak.fwhm));
tablePeak.setNetArea(String.format("%.3f", peak.area));
tablePeak.setAreaErr(peak.area>0?String.format("%.3f",(peak.areaErr/peak.area)*100):"0");
tablePeak.setSignificant(String.format("%.3f", peak.significance));
tablePeak.setSensitivity(String.format("%.3f", peak.sensitivity));
List<String> nuclides = peak.nuclides;
String nuclide = "";
if (CollectionUtils.isNotEmpty(nuclides)){
for (String name:nuclides) {
nuclide+=name+StringPool.COMMA;
}
nuclide = nuclide.substring(0, nuclide.length()-1);
}
tablePeak.setIndentify(nuclide);
tablePeaks.add(tablePeak);
}
}
result.setSuccess(true);
result.setResult(tablePeaks);
return result;
}
@Override
public Result viewARR(Integer sampleId, HttpServletResponse response) {
Result result = new Result();
if(Objects.isNull(sampleId)) {
result.error500("The file isn't existed.");
return result;
}
// Get the path of the report generated by the automatic processing
String reportPath = spectrumAnalysisMapper.viewARR(sampleId);
if(StringUtils.isBlank(reportPath.trim())) {
result.error500("The file isn't existed.");
return result;
}
String pathName = reportPath.substring(0, reportPath.lastIndexOf(StringPool.SLASH));
String fileName = reportPath.substring(reportPath.lastIndexOf(StringPool.SLASH)+1)+".txt";
// Connect to the FTP server
FTPClient ftpClient = ftpUtil.LoginFTP();
if (Objects.isNull(ftpClient)){
throw new RuntimeException("ftp连接失败");
}
InputStream inputStream = null;
ServletOutputStream outputStream = null;
try {
// Switch to passive mode
ftpClient.enterLocalPassiveMode();
ftpClient.setFileType(FTPClient.BINARY_FILE_TYPE);
// Set the control encoding; useful when file names contain Chinese characters that would otherwise be garbled
ftpClient.setControlEncoding("UTF-8");
ftpClient.setFileTransferMode(FTPClient.STREAM_TRANSFER_MODE);
pathName=StringPool.SLASH + spectrumPathProperties.getRootPath() + StringPool.SLASH + pathName;
ftpClient.changeWorkingDirectory(pathName);
List<FTPFile> ftpFiles = Arrays.asList(ftpClient.listFiles());
ftpFiles=ftpFiles.stream().filter(item -> item.getName().equals(fileName)).collect(Collectors.toList());
if (CollectionUtils.isEmpty(ftpFiles)){
throw new RuntimeException("ftp下对应的报告文件不存在");
}
FTPFile ftpFile = ftpFiles.get(0);
if (Objects.nonNull(ftpFile)){
inputStream = ftpClient.retrieveFileStream(ftpFile.getName());
outputStream = response.getOutputStream();
byte[] buffer = new byte[1024];
int bytesRead;
// Copy the file input stream to the servlet output stream
while ((bytesRead = inputStream.read(buffer)) != -1) {
outputStream.write(buffer, 0, bytesRead);
}
}
} catch (IOException e) {
throw new RuntimeException(e);
} finally {
try {
if (Objects.nonNull(ftpClient)){
ftpClient.disconnect();
}
if (ObjectUtil.isNotNull(inputStream)){
inputStream.close();
}
if (ObjectUtil.isNotNull(outputStream)){
outputStream.close();
}
} catch (IOException e) {
throw new RuntimeException(e);
}
}
result.setSuccess(true);
return result;
}
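/*
 * Both report downloads use the same Apache Commons Net pattern; a condensed sketch
 * with the same names as above (error handling omitted):
 *
 *   FTPClient ftp = ftpUtil.LoginFTP();
 *   ftp.enterLocalPassiveMode();                       // client opens the data connection
 *   ftp.setFileType(FTPClient.BINARY_FILE_TYPE);       // no CR/LF translation
 *   ftp.changeWorkingDirectory(pathName);
 *   InputStream in = ftp.retrieveFileStream(fileName);
 *   // copy to the servlet output stream, then close the streams and disconnect in finally
 */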
@Override
public Result viewRRR(Integer sampleId) {
Result result = new Result();
Cache<String, PHDFile> phdCache = localCache.getPHDCache();
PHDFile phd = phdCache.getIfPresent(sampleId.toString());
if (Objects.isNull(phd)){
result.error500("请先选择解析文件!");
return result;
}
String reportContent = gammaFileUtil.GetReportContent(phd, false);
result.setSuccess(true);
result.setResult(reportContent);
return result;
}
@Override
public Result radionuclideActivity(Integer sampleId) {
Result result = new Result();
Cache<String, PHDFile> phdCache = localCache.getPHDCache();
PHDFile phd = phdCache.getIfPresent(sampleId.toString());
if (Objects.isNull(phd)){
result.error500("请先选择解析文件!");
return result;
}
Map<String, Object> map = gammaFileUtil.UpdateDatasNuclideActivity(phd.getMapNucActMda(), phd.getUsedSetting().getRefTime_act(), phd.getUsedSetting().getRefTime_conc());
result.setSuccess(true);
result.setResult(map);
return result;
}
@Override
public Result Spectrum(Integer sampleId) {
Result result = new Result();
Cache<String, PHDFile> phdCache = localCache.getPHDCache();
PHDFile phd = phdCache.getIfPresent(sampleId.toString());
if (Objects.isNull(phd)){
result.error500("请先选择解析文件!");
return result;
}
String spectrum = gammaFileUtil.MakeUpSpectrum(phd);
result.setSuccess(true);
result.setResult(spectrum);
return result;
}
@Override
public Result sampleInformation(Integer sampleId) {
Result result = new Result();
Cache<String, PHDFile> phdCache = localCache.getPHDCache();
PHDFile phd = phdCache.getIfPresent(sampleId.toString());
if (Objects.isNull(phd)){
result.error500("请先选择解析文件!");
return result;
}
Map<String, String> map = new HashMap<>();
try {
String collect_start = phd.getCollect().getCollection_start_date() + " " + phd.getCollect().getCollection_start_time();
String collect_stop = phd.getCollect().getCollection_stop_date() + " " + phd.getCollect().getCollection_stop_time();
String acq_start = phd.getAcq().getAcquisition_start_date() + " " + phd.getAcq().getAcquisition_start_time();
Date collect_stop_dt = DateUtils.parseDate(collect_stop.substring(0, collect_stop.indexOf(StringPool.DOT)), "yyyy/MM/dd HH:mm:ss");
Date collect_start_dt = DateUtils.parseDate(collect_start.substring(0, collect_start.indexOf(StringPool.DOT)), "yyyy/MM/dd HH:mm:ss");
// Sampling time: collection stop minus collection start, in hours
double Sampling_Time = (collect_stop_dt.getTime()/1000 - collect_start_dt.getTime()/1000) / 3600.0;

Date acq_start_dt = DateUtils.parseDate(acq_start.substring(0, acq_start.indexOf(StringPool.DOT)), "yyyy/MM/dd HH:mm:ss");
// Decay time: acquisition start minus collection stop, in hours
double Decay_Time = (acq_start_dt.getTime()/1000 - collect_stop_dt.getTime()/1000) / 3600.0;

// Acquisition stop: acquisition start plus the live time (seconds converted to milliseconds)
String acq_stop = DateUtils.formatDate(new Date(acq_start_dt.getTime() + (long) (phd.getAcq().getAcquisition_live_time()*1000)), "yyyy/MM/dd HH:mm:ss");

map.put("stationId", phd.getHeader().getSite_code());
map.put("sampleId", phd.getId_sample());
map.put("sampleQuantity", String.format("%.2f", phd.getCollect().getAir_volume())+" m3");
map.put("collectStart", collect_start);
map.put("collectStop", collect_stop);
map.put("acquisitionStart", acq_start);
map.put("acquisitionStop", acq_stop);

map.put("detectorId", phd.getHeader().getDetector_code());
map.put("sampleGeometry", phd.getHeader().getSample_geometry());
map.put("sampleType", phd.getHeader().getSystem_type());
map.put("samplingTime", String.format("%.2f", Sampling_Time)+" hours");
map.put("decayTime", String.format("%.2f", Decay_Time)+" hours");
map.put("acquisitionTime", String.format("%.2f", phd.getAcq().getAcquisition_real_time() / 3600.0)+" hours");
map.put("avgFlowRate", String.format("%.2f", phd.getCollect().getAir_volume() / Sampling_Time)+" m3/hour");
} catch (ParseException e) {
throw new RuntimeException(e);
}
result.setSuccess(true);
result.setResult(map);
return result;
}
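/*
 * Derived quantities reported above (all in hours):
 *   samplingTime = collection_stop   - collection_start
 *   decayTime    = acquisition_start - collection_stop
 *   avgFlowRate  = air_volume / samplingTime   [m3/hour]
 */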
@Override
public Result viewQCResult(Integer sampleId) {
Result result = new Result();
Cache<String, PHDFile> phdCache = localCache.getPHDCache();
PHDFile phd = phdCache.getIfPresent(sampleId.toString());
if (Objects.isNull(phd)){
result.error500("请先选择解析文件!");
return result;
}
Map<String, QcCheckItem> m_mapQC = phd.getQcItems();
int size_map = m_mapQC.size();
if(size_map < 1){
return result;
}
List<TableQCResult> qcResultList = new LinkedList<>();
for(Map.Entry<String, QcCheckItem> iter: m_mapQC.entrySet()){
TableQCResult item = new TableQCResult();
String name = iter.getKey();
if(name.equals("Ba140-MDC")){
name += " (uBq/m3)";
} else if(name.equals("Be7-FWHM")){
name += " (keV)";
} else if(name.equals("Xe133-MDC")){
name += " (uBq/m3)";
} else if(name.equals("acq_time")){
name += " (h)";
} else if(name.equals("airFlow")){
name += " (m3/h)";
} else if(name.equals("col_time")){
name += " (h)";
} else if(name.equals("decay_time")){
name += " (h)";
} else if(name.equals("samp_vol")){
name += " (m3)";
}
item.setName(name);

item.setFlag(iter.getValue().isBPass() ? "PASS" : "FAIL");

item.setValue(iter.getValue().getValue());

String standard="";
List<String> strList = Arrays.asList(iter.getValue().getStandard().split(StringPool.COMMA));
for (String str : strList) {
if(str.contains("-")) {
if(str.contains("(") || str.contains("[")){
standard += iter.getKey();
}
} else if(str.contains("(")) {
standard += str.replace("(", "") + " < " + iter.getKey();
} else if(str.contains(")")) {
standard += " < " + str.replace(")", "");
} else if(str.contains("[")) {
standard += str.replace("[", "") + " <= " + iter.getKey();
} else if(str.contains("]")) {
standard += " <= " + str.replace("]", "");
}
}
item.setStandard(standard);
qcResultList.add(item);
}
result.setSuccess(true);
result.setResult(qcResultList);
return result;
}
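/*
 * The QC "standard" strings are interval expressions split on commas; the loop above
 * renders them as inequalities on the item name, e.g. for a hypothetical standard
 * "(1000,2000]" on acq_time:
 *   "(1000"  ->  "1000 < acq_time"
 *   "2000]"  ->  " <= 2000"
 * so the displayed rule reads "1000 < acq_time <= 2000".
 */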
@Override
public Result viewRLR(Integer sampleId) {
Result result = new Result();
Cache<String, PHDFile> phdCache = localCache.getPHDCache();
PHDFile phd = phdCache.getIfPresent(sampleId.toString());
if (Objects.isNull(phd)){
result.error500("请先选择解析文件!");
return result;
}
Map<String, Object> map = new HashMap<>();
Date curTime = new Date();
String responsible_person = "wang shilian";
String software = "GammaAnalyser,GammaAnalyALG";
int num = phd.getVPeak().size();

// Init #Header
if(StringUtils.isNotBlank(phd.getMsgInfo().getMsg_id())){
// 1: MSG_ID
map.put("header_msg_id", phd.getMsgInfo().getMsg_id());
}
map.put("header_data_type", "MINIRLR"); // 2: DataType
map.put("header_priority_level", "Routine"); // 3: Priority Level
if(StringUtils.isNotBlank(phd.getHeader().getSite_code())){
// 4: Station code
map.put("header_station_code", phd.getHeader().getSite_code());
}
if(StringUtils.isNotBlank(phd.getHeader().getSample_ref_id())){
// 5: SRID
map.put("header_srid", phd.getHeader().getSample_ref_id());
}
map.put("header_lab_code", "CNL06"); // 6: Laboratory Code
map.put("header_lab_detector", "CNL06_001"); // 7: Laboratory Detector Code
map.put("header_report_type", "FIN"); // 8: Report Type
map.put("header_report_number", "1"); // 9: Report Number
map.put("header_sample_category", "Category C"); // 10:Sample Category
map.put("header_transmission", curTime); // 11:Message Transmission Time
// Init #Objective
map.put("Obj_purpose", "The purpose of this analysis is proficiency test.");
map.put("Obj_authorized", "High-resolution non-destructive gamma spectroscopy.");
map.put("Obj_instruction", "`0");
// Init #Collection
String col_start = phd.getCollect().getCollection_start_date() + " " + phd.getCollect().getCollection_start_time();
String col_stop = phd.getCollect().getCollection_stop_date() + " " + phd.getCollect().getCollection_stop_time();
map.put("collect_start", col_start);
map.put("collect_stop", col_stop);
map.put("collect_airVolume", phd.getCollect().getAir_volume());
// Init #SampleReceipt
if(StringUtils.isNotBlank(phd.getHeader().getSample_ref_id())){
map.put("Receipt_srid", phd.getHeader().getSample_ref_id());
}
map.put("Receipt_sealNum", "0");
map.put("Receipt_sample", curTime);
map.put("Receipt_package", "Parcel.");
map.put("Receipt_seal", "Good.");
map.put("Receipt_sample", "Good.");
// Init #Test
map.put("Test_type", "Analysis of a reference sample.");
map.put("Test_completion", curTime);
map.put("Test_person", responsible_person);
map.put("Test_purpose", "The purpose of this analysis is proficiency test exercise.");
// Init #PeaksMethod
map.put("PeakMethod_software", software);
map.put("PeakMethod_location", "0");
// Init #PeakFit
List<TablePeakFit> peakFitList = new LinkedList<>();
String energy_uncert = "-9999";
double live_time = phd.getAcq().getAcquisition_live_time();
for(int i=0; i<num; ++i) {
TablePeakFit tablePeak = new TablePeakFit();
PeakInfo peak = phd.getVPeak().get(i);
tablePeak.setEnergy(String.valueOf(peak.energy));
tablePeak.setEnergyErr(energy_uncert);
tablePeak.setNetArea(String.valueOf(peak.area));
String area_err = peak.area > 0 ? String.valueOf(peak.areaErr/peak.area*100) : "0";
tablePeak.setAreaErr(area_err);
String rate = live_time > 0 ? String.valueOf(peak.area/live_time) : "0";
tablePeak.setNetCountRate(rate);
tablePeak.setNcRateErr(area_err);
tablePeak.setLc(String.valueOf(peak.lc));
tablePeak.setSignificance(String.valueOf(peak.significance));
peakFitList.add(tablePeak);
}
map.put("peakFit", peakFitList);
// Init #g_AnalysisMethods
map.put("AnalyMethod_software", software);
map.put("AnalyMethod_nuclide", "Interactive");
map.put("AnalyMethod_baseline", "GammaAnalyALG");
map.put("AnalyMethod_lc", "CTBT/PTS/INF.96/Rev.6");
map.put("AnalyMethod_calib", "0");
// Init #PeakAssociation
String Explanation_Level = "100.000";
List<TableAssociation> associationList = new LinkedList<>();
for(int i=0; i<num; ++i) {
TableAssociation tableAssociation = new TableAssociation();
tableAssociation.setExLevel(Explanation_Level);
List<String> nuclides = phd.getVPeak().get(i).nuclides;
String iden = "";
for (String str:nuclides) {
iden+=str+",";
}
iden = iden.length()>0?iden.substring(0, iden.length()-1):"";
tableAssociation.setIdentified(iden);
associationList.add(tableAssociation);
}
map.put("Association", associationList);
// Init #References
if(StringUtils.isNotBlank(phd.getHeader().getMeasurement_id())) {
map.put("Reference_samplePHD", phd.getHeader().getMeasurement_id());
map.put("Reference_CalibPHD", phd.getHeader().getMeasurement_id());
}
map.put("Reference_physical", "Table of Isotopes CD ROM Edition, Version 1.0, March,1998");
// Init #Results
String coverage_factor = "2.00";
String level_confidence = "95.00";

map.put("Result_act_ref", phd.getUsedSetting().getRefTime_act());
map.put("Result_conc_ref", phd.getUsedSetting().getRefTime_conc());
List<TableResult> tableResultList = new LinkedList<>();
for(Map.Entry<String, NuclideActMda> it : phd.getMapNucActMda().entrySet()){
if(it.getValue().isBCalculateMDA()) {
TableResult tableResult = new TableResult();
NuclideActMda nuc = it.getValue();
tableResult.setNuclide(it.getKey());
tableResult.setActivity(String.valueOf(nuc.getActivity()));
String act_err = nuc.getActivity() > 0 ? String.valueOf(nuc.getAct_err()/nuc.getActivity()*100) : "0";
tableResult.setActErr(act_err);
tableResult.setFactor1(coverage_factor);
tableResult.setConfidence1(level_confidence);
tableResult.setConc(String.valueOf(nuc.getConcentration()/1000));
tableResult.setConcErr(act_err);
tableResult.setFactor2(coverage_factor);
tableResult.setConfidence2(level_confidence);
tableResultList.add(tableResult);
}
}
map.put("Result", tableResultList);
// Init #NuclideRatios
// Init #g_CoincidenceCorrection
// Init #MDA
// Init #Conclusions
map.put("conclusion_person", responsible_person);
map.put("Conclusion_IDC", "0");
List<String> listNuc = phd.getMapNucActMda().keySet().stream().collect(Collectors.toList());
String nuc = "";
for (String str:listNuc) {
nuc+=str+",";
}
map.put("Conclusion_Lab", "The nuclides "+nuc.substring(0, nuc.length()-1)+" are identified in the reference sample definitely.");
map.put("Conclusion_Res", "0");
// Init #Comment
map.put("Comment", "0");

result.setSuccess(true);
result.setResult(map);
return result;
}
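/*
 * The fixed coverage factor of 2.00 corresponds to the stated 95 % confidence level
 * under the usual Gaussian assumption; activity and concentration uncertainties are
 * reported as relative errors in percent (act_err / activity * 100), and the
 * concentration is divided by 1000 when copied from NuclideActMda, presumably a unit
 * rescaling.
 */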
@Override
public Result viewAutomaticAnalysisLog(Integer sampleId, HttpServletResponse response) {
Result result = new Result();
if (Objects.isNull(sampleId)){
result.error500("The file isn't existed.");
return result;
}
String logPath = spectrumAnalysisMapper.findAutomaticLogPath(sampleId);
if (StringUtils.isBlank(logPath)){
result.error500("The file isn't existed.");
return result;
}
String pathName = logPath.substring(0, logPath.lastIndexOf(StringPool.SLASH));
String fileName = logPath.substring(logPath.lastIndexOf(StringPool.SLASH) + 1);
// Connect to the FTP server
FTPClient ftpClient = ftpUtil.LoginFTP();
if (Objects.isNull(ftpClient)){
throw new RuntimeException("ftp连接失败");
}
InputStream inputStream = null;
ServletOutputStream outputStream = null;
try {
// Switch to passive mode
ftpClient.enterLocalPassiveMode();
ftpClient.setFileType(FTPClient.BINARY_FILE_TYPE);
// Set the control encoding; useful when file names contain Chinese characters that would otherwise be garbled
ftpClient.setControlEncoding("UTF-8");
ftpClient.setFileTransferMode(FTPClient.STREAM_TRANSFER_MODE);
pathName=StringPool.SLASH + spectrumPathProperties.getLogPath() + StringPool.SLASH + pathName;
ftpClient.changeWorkingDirectory(pathName);
List<FTPFile> ftpFiles = Arrays.asList(ftpClient.listFiles());
ftpFiles=ftpFiles.stream().filter(item -> item.getName().equals(fileName)).collect(Collectors.toList());
if (CollectionUtils.isEmpty(ftpFiles)){
throw new RuntimeException("ftp下对应的报告文件不存在");
}
FTPFile ftpFile = ftpFiles.get(0);
if (Objects.nonNull(ftpFile)){
inputStream = ftpClient.retrieveFileStream(ftpFile.getName());
outputStream = response.getOutputStream();
byte[] buffer = new byte[1024];
int bytesRead;
// Copy the file input stream to the servlet output stream
while ((bytesRead = inputStream.read(buffer)) != -1) {
outputStream.write(buffer, 0, bytesRead);
}
}
} catch (IOException e) {
throw new RuntimeException(e);
} finally {
try {
if (Objects.nonNull(ftpClient)){
ftpClient.disconnect();
}
if (ObjectUtil.isNotNull(inputStream)){
inputStream.close();
}
if (ObjectUtil.isNotNull(outputStream)){
outputStream.close();
}
} catch (IOException e) {
throw new RuntimeException(e);
}
}
result.setSuccess(true);
return result;
}
@Override
public Result viewGammaviewerLog(Integer sampleId) {
Result result = new Result();
Cache<String, PHDFile> phdCache = localCache.getPHDCache();
PHDFile phd = phdCache.getIfPresent(sampleId.toString());
if (Objects.isNull(phd)){
result.error500("请先选择解析文件!");
return result;
}
String reportContent = gammaFileUtil.GetReportContent(phd, true);
result.setSuccess(true);
result.setResult(reportContent);
return result;
}
}
@ -0,0 +1,30 @@
package org.jeecg.modules.service.impl;

import com.baomidou.dynamic.datasource.annotation.DS;
import com.baomidou.mybatisplus.core.toolkit.CollectionUtils;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import org.jeecg.modules.base.entity.postgre.SysDefaultNuclide;
import org.jeecg.modules.mapper.SysDefaultNuclideSpectrumMapper;
import org.jeecg.modules.service.ISysDefaultNuclideSpectrumService;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;

import java.util.List;

@Service("defaultNuclideSpectrumService")
@DS("master")
public class SysDefaultNuclideSpectrumServiceImpl extends ServiceImpl<SysDefaultNuclideSpectrumMapper, SysDefaultNuclide> implements ISysDefaultNuclideSpectrumService {

@Override
@Transactional(propagation = Propagation.REQUIRES_NEW)
public List<String> findNuclidesByUserName(String userName) {
// Query the nuclide list configured for the current user
List<String> nuclides = this.baseMapper.findNuclidesByUserName(userName);
if (CollectionUtils.isEmpty(nuclides)){
nuclides = this.baseMapper.findNuclidesByUserName("admin");
}
return nuclides;
}

}
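/*
 * Usage sketch (illustrative): callers obtain the per-user nuclide list from this
 * service, and the service already falls back to the "admin" defaults when the user
 * has none, so the extra admin fallback at the call sites in GammaServiceImpl is
 * defensive only.
 *
 *   List<String> nuclides = defaultNuclideSpectrumService.findNuclidesByUserName(userName);
 */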