In the gamma part of both automatic processing and interactive analysis, loading the PHD spectrum file content now takes a system-source field that tells whether the caller is the automatic-processing function or the interactive function, and the file is loaded in the corresponding way.

The automatic-processing gamma part no longer deletes the file.
When the automatic-processing gamma part creates the analysis object, it now additionally passes in the entire parent handler object as a parameter.
qiaoqinzheng 2024-01-03 14:31:58 +08:00
parent a5412c684e
commit 792c6b411b
7 changed files with 168 additions and 166 deletions
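
The core change across these files is the new sysSource argument on GammaFileUtil.loadFile: "AUTO" marks a call from automatic processing and reads the spectrum file directly from the local disk, while "BetaGamma" marks the interactive module and still downloads the file over FTP. A condensed sketch of that dispatch, assembled from the diffs below (the parsing that follows is omitted):

public boolean loadFile(String pathName, String fileName, PHDFile phd, String sysSource, Result result) {
    phd.setFilepath(pathName);
    phd.setFilename(fileName);
    File file = null;
    if (sysSource.equals("BetaGamma")) {
        // interactive module: fetch the PHD file from the FTP server
        file = ftpUtil.downloadFile(pathName + StringPool.SLASH + fileName, "betaGamma");
        if (Objects.isNull(file)) {
            result.error500("ftp file can't find");
            return false;
        }
    } else if (sysSource.equals("AUTO")) {
        // automatic processing: the PHD file is already on the local disk
        file = new File(pathName + StringPool.SLASH + fileName);
    }
    phd.setTmpFilePath(file.getAbsolutePath());
    // ... read and parse the file content as before ...
    return true;
}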

View File

@@ -57,6 +57,8 @@ public class Sample_G_Analysis {
private final Map<String,String> fieldMap = fieldMap();
private AbstractS_D_Q_G_SpectrumHandler spectrumHandler;
// Sample谱原始数据
private GardsSampleData sampleData;
@@ -112,8 +114,9 @@ public class Sample_G_Analysis {
*/
private String arrFileName;
public Sample_G_Analysis(EnergySpectrumStruct energySpectrumStruct,SpectrumServiceQuotes serviceQuotes,
public Sample_G_Analysis(AbstractS_D_Q_G_SpectrumHandler spectrumHandler, EnergySpectrumStruct energySpectrumStruct,SpectrumServiceQuotes serviceQuotes,
GardsSampleData sampleData) {
this.spectrumHandler = spectrumHandler;
this.sampleData = sampleData;
this.serviceQuotes = serviceQuotes;
this.energySpectrumStruct = energySpectrumStruct;
@@ -140,16 +143,16 @@ public class Sample_G_Analysis {
// 解析PHD文件
spectrumPathProperties = SpringContextUtils.getBean(SpectrumPathProperties.class);
ftpUtil = SpringContextUtils.getBean(FTPUtil.class);
String sampleFilePath = sampleData.getInputFileName();
String pathName = ftpUtil.getFtpRootPath() + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH +
sampleFilePath.substring(0, sampleFilePath.lastIndexOf(StringPool.SLASH));
String fileName = sampleFilePath.substring(sampleFilePath.lastIndexOf(StringPool.SLASH)+1);
//获取PHD文件的路径以及文件名称信息
String sampleFilePathName = spectrumHandler.spectrumFile.getAbsolutePath().replace("\\", StringPool.SLASH);
String pathName = sampleFilePathName.substring(0, sampleFilePathName.lastIndexOf(StringPool.SLASH));
String fileName = sampleFilePathName.substring(sampleFilePathName.lastIndexOf(StringPool.SLASH)+1);
// 获取数据库 Gamma 默认参数
getSettingFromDB(phdFile);
// 解析能谱文件
boolean flag = gammaFileUtil.loadFile(pathName, fileName, phdFile, new Result());
boolean flag = gammaFileUtil.loadFile(pathName, fileName, phdFile, "AUTO", new Result());
// 文件路径
middleData.setAnalyses_save_filePath(this.sampleInputFilename);
@@ -192,14 +195,6 @@ public class Sample_G_Analysis {
e.printStackTrace();
log.error("Sample_G_Analysis", e);
throw new GAnalyseException("Sample Analyse Error at "+DateUtils.formatDate(new Date(),"yyyy-MM-dd HH:mm:ss"));
} finally {
//删除临时文件
if (StringUtils.isNotBlank(phdFile.getTmpFilePath())) {
File tmpFile = new File(phdFile.getTmpFilePath());
if (Objects.nonNull(tmpFile)) {
tmpFile.delete();
}
}
}
log.info("Gamma自动处理分析--End");
}
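
In the automatic-processing branch, the PHD path and file name are now derived by splitting the handler's absolute spectrum-file path instead of rebuilding an FTP path from the database record. A self-contained illustration of that substring logic (class name and path are hypothetical; "/" stands in for StringPool.SLASH):

public class PathSplitExample {
    public static void main(String[] args) {
        String slash = "/"; // stands in for StringPool.SLASH
        // hypothetical absolute path of the sample spectrum file on the local disk
        String sampleFilePathName = "D:\\savefile\\Samplephd\\ABC01_001-20240103_1200_S_FULL_40184.5.PHD"
                .replace("\\", slash);
        String pathName = sampleFilePathName.substring(0, sampleFilePathName.lastIndexOf(slash));
        String fileName = sampleFilePathName.substring(sampleFilePathName.lastIndexOf(slash) + 1);
        System.out.println(pathName); // D:/savefile/Samplephd
        System.out.println(fileName); // ABC01_001-20240103_1200_S_FULL_40184.5.PHD
    }
}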

View File

@@ -82,7 +82,7 @@ public class SamplephdSpectrum extends AbstractS_D_Q_G_SpectrumHandler {
bAnalysis.analysis();
}
if (this.sourceData.system_type.equals(SystemType.PARTICULATE.getType()) || this.sourceData.system_type.equals(SystemType.GAMMA.getType())) {
Sample_G_Analysis sample_g_analysis = new Sample_G_Analysis(super.sourceData, super.spectrumServiceQuotes, super.sampleData);
Sample_G_Analysis sample_g_analysis = new Sample_G_Analysis(this, super.sourceData, super.spectrumServiceQuotes, super.sampleData);
sample_g_analysis.analysis();
}
}

View File

@@ -60,15 +60,21 @@ public class GammaFileUtil extends AbstractLogOrReport {
@Autowired
private AnalysisProcess analysisProcess;
public boolean loadFile(String pathName, String fileName, PHDFile phd, Result result) {
public boolean loadFile(String pathName, String fileName, PHDFile phd, String sysSource, Result result) {
phd.setFilepath(pathName);
phd.setFilename(fileName);
String fromPath = pathName + StringPool.SLASH + fileName;
File file = ftpUtil.downloadFile(fromPath, "betaGamma");
//如果功能是人工交互模块则从ftp获取文件内容
File file = null;
if (sysSource.equals("BetaGamma")) {
file = ftpUtil.downloadFile(fromPath, "betaGamma");
if (Objects.isNull(file)) {
result.error500("ftp file can't find");
return false;
}
} else if (sysSource.equals("AUTO")) {//如果是自动处理则从本地文件中获取文件内容
file = new File(pathName + StringPool.SLASH + fileName);
}
phd.setTmpFilePath(file.getAbsolutePath());
try {
//读取文件信息

View File

@@ -538,16 +538,16 @@ public class GammaController {
gammaService.saveToPHD(fileName, request, response);
}
// @GetMapping("peakComparison")
// @ApiOperation(value = "Peak Information页面查看Comparison数据", notes = "Peak Information页面查看Comparison数据")
// public Result peakComparison(String fileName, HttpServletRequest request) {
// return gammaService.peakComparison(fileName, request);
// }
//
// @GetMapping("nuclComparison")
// @ApiOperation(value = "Radionuclide Activity页面查看Comparison数据", notes = "Radionuclide Activity页面查看Comparison数据")
// public Result nuclComparison(String fileName, HttpServletRequest request) {
// return gammaService.nuclComparison(fileName, request);
// }
@GetMapping("peakComparison")
@ApiOperation(value = "Peak Information页面查看Comparison数据", notes = "Peak Information页面查看Comparison数据")
public Result peakComparison(String fileName, HttpServletRequest request) {
return gammaService.peakComparison(fileName, request);
}
@GetMapping("nuclComparison")
@ApiOperation(value = "Radionuclide Activity页面查看Comparison数据", notes = "Radionuclide Activity页面查看Comparison数据")
public Result nuclComparison(String fileName, HttpServletRequest request) {
return gammaService.nuclComparison(fileName, request);
}
}

View File

@@ -203,8 +203,8 @@ public interface IGammaService{
void readMDCParameter();
// Result peakComparison(String fileName, HttpServletRequest request);
//
// Result nuclComparison(String fileName, HttpServletRequest request);
Result peakComparison(String fileName, HttpServletRequest request);
Result nuclComparison(String fileName, HttpServletRequest request);
}

View File

@@ -149,8 +149,8 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
private RedisStreamUtil redisStreamUtil;
@Autowired
private ISysDictSpectrumService sysDictService;
// @Autowired
// private IDataService dataService;
@Autowired
private IDataService dataService;
@Override
public Result initValue(Integer sampleId, String dbName, String analyst, String samfileName, HttpServletRequest request) {
@@ -172,7 +172,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
String fileName = sampleFilePath.substring(sampleFilePath.lastIndexOf(StringPool.SLASH) + 1);
// 读取文件内容
// 调用加载文件的方法 传入文件路径文件名称全局变量phd响应结果result
boolean flag = gammaFileUtil.loadFile(pathName, fileName, phd, result);
boolean flag = gammaFileUtil.loadFile(pathName, fileName, phd, "BetaGamma", result);
// 如果文件加载失败 返回失败原因
if (!flag) {
return result;
@@ -213,7 +213,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
String pathName = ftpUtil.getFtpRootPath() + spectrumPathProperties.getUploadPath() + StringPool.SLASH + userName;
String fileName = samfileName;
// 加载文件内容
boolean bRet = gammaFileUtil.loadFile(pathName, fileName, phd, result);
boolean bRet = gammaFileUtil.loadFile(pathName, fileName, phd, "BetaGamma", result);
if (!bRet) {
return result;
}
@@ -482,7 +482,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
phd = new PHDFile();
// 读取文件内容
// 调用加载文件的方法 传入文件路径文件名称全局变量phd响应结果result
boolean flag = gammaFileUtil.loadFile(pathName, fileName, phd, result);
boolean flag = gammaFileUtil.loadFile(pathName, fileName, phd, "BetaGamma", result);
// 如果文件加载失败 返回失败原因
if (!flag) {
return result;
@@ -873,7 +873,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
if (Objects.isNull(phd)) {
phd = new PHDFile();
// 加载文件内容
boolean bRet = gammaFileUtil.loadFile(path, fileName, phd, result);
boolean bRet = gammaFileUtil.loadFile(path, fileName, phd, "BetaGamma", result);
if (!bRet) {
return result;
}
@@ -5914,128 +5914,128 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
}
}
// @Override
// public Result peakComparison(String fileName, HttpServletRequest request) {
// Result result = new Result();
// Connection conn = null;
// List<PeakInfo> peakInfoList = new LinkedList<>();
// //获取用户名
// String userName = JwtUtil.getUserNameByToken(request);
// //读取缓存内容
// Cache<String, PHDFile> phdCache = localCache.getPHDCache();
// PHDFile phd = phdCache.getIfPresent(fileName + StringPool.DASH + userName);
// if (Objects.isNull(phd)) {
// result.error500("Please select the parse file first");
// return result;
// }
// Map<String, Integer> idcStationMap = (Map<String, Integer>) redisUtil.get("idcStationMap");
// try {
// String collectStart = "";
// if (phd.getCollect().getCollection_start_time().indexOf(StringPool.DOT) > 0) {
// collectStart = phd.getCollect().getCollection_start_date() + " " + phd.getCollect().getCollection_start_time().substring(0, phd.getCollect().getCollection_start_time().indexOf(StringPool.DOT));
// } else {
// collectStart = phd.getCollect().getCollection_start_date() + " " + phd.getCollect().getCollection_start_time();
// }
// //获取采集开始时间
// Date acqStart = DateUtils.parseDate(phd.getAcq().getAcquisition_start_date() + " " + phd.getAcq().getAcquisition_start_time());
// //计算得到采集结束时间对应的毫秒数
// long stopTime = (long) (acqStart.getTime() + (phd.getAcq().getAcquisition_real_time() * 1000));
// //根据毫秒数得到采集结束时间
// Date acquisitionStopDate = new Date(stopTime);
// //格式化得到采集结束时间的字符串
// String acquisitionStop = DateUtils.formatDate(acquisitionStopDate, "yyyy-MM-dd HH:mm:ss");
// //从缓存的idc台站信息中获取当前台站对应的台站id
// Integer stationId = null;
// if (CollectionUtils.isNotEmpty(idcStationMap)) {
// stationId = idcStationMap.get(phd.getHeader().getSite_code());
// }
// //连接本地同步的idc数据库
// conn = dataService.connectInland();
// //连接对象为空 则连接idc国际库
// if (Objects.isNull(conn)) {
// conn = dataService.connectOverSea();
// }
// //判断是否连接成功
// if (Objects.nonNull(conn)) {
// //查询获取Peaks表的对比数据内容
// peakInfoList = dataService.viewPeaks(phd.getHeader().getDetector_code(), phd.getHeader().getSpectrum_quantity(), stationId, collectStart, acquisitionStop, conn);
// }
// result.setSuccess(true);
// result.setResult(peakInfoList);
// } catch (ParseException e) {
// throw new RuntimeException(e);
// } finally {
// try {
// if (Objects.nonNull(conn)) {
// conn.close();
// }
// } catch (SQLException e) {
// throw new RuntimeException(e);
// }
// }
// return result;
// }
//
// @Override
// public Result nuclComparison(String fileName, HttpServletRequest request) {
// Result result = new Result();
// Connection conn = null;
// List<TableNuclideActivity> nuclideActivityList = new LinkedList<>();
// //获取用户名
// String userName = JwtUtil.getUserNameByToken(request);
// //读取缓存内容
// Cache<String, PHDFile> phdCache = localCache.getPHDCache();
// PHDFile phd = phdCache.getIfPresent(fileName + StringPool.DASH + userName);
// if (Objects.isNull(phd)) {
// result.error500("Please select the parse file first");
// return result;
// }
// Map<String, Integer> idcStationMap = (Map<String, Integer>) redisUtil.get("idcStationMap");
// try {
// String collectStart = "";
// if (phd.getCollect().getCollection_start_time().indexOf(StringPool.DOT) > 0) {
// collectStart = phd.getCollect().getCollection_start_date() + " " + phd.getCollect().getCollection_start_time().substring(0, phd.getCollect().getCollection_start_time().indexOf(StringPool.DOT));
// } else {
// collectStart = phd.getCollect().getCollection_start_date() + " " + phd.getCollect().getCollection_start_time();
// }
// //获取采集开始时间
// Date acqStart = DateUtils.parseDate(phd.getAcq().getAcquisition_start_date() + " " + phd.getAcq().getAcquisition_start_time());
// //计算得到采集结束时间对应的毫秒数
// long stopTime = (long) (acqStart.getTime() + (phd.getAcq().getAcquisition_real_time() * 1000));
// //根据毫秒数得到采集结束时间
// Date acquisitionStopDate = new Date(stopTime);
// //格式化得到采集结束时间的字符串
// String acquisitionStop = DateUtils.formatDate(acquisitionStopDate, "yyyy-MM-dd HH:mm:ss");
// //从缓存的idc台站信息中获取当前台站对应的台站id
// Integer stationId = null;
// if (CollectionUtils.isNotEmpty(idcStationMap)) {
// stationId = idcStationMap.get(phd.getHeader().getSite_code());
// }
// //连接本地同步的idc数据库
// conn = dataService.connectInland();
// //连接对象为空 则连接idc国际库
// if (Objects.isNull(conn)) {
// conn = dataService.connectOverSea();
// }
// //判断是否连接成功
// if (Objects.nonNull(conn)) {
// //查询获取Peaks表的对比数据内容
// nuclideActivityList = dataService.viewNucl(phd.getHeader().getDetector_code(), phd.getHeader().getSpectrum_quantity(), stationId, collectStart, acquisitionStop, conn);
// }
// result.setSuccess(true);
// result.setResult(nuclideActivityList);
// } catch (ParseException e) {
// throw new RuntimeException(e);
// } finally {
// try {
// if (Objects.nonNull(conn)) {
// conn.close();
// }
// } catch (SQLException e) {
// throw new RuntimeException(e);
// }
// }
// return result;
// }
@Override
public Result peakComparison(String fileName, HttpServletRequest request) {
Result result = new Result();
Connection conn = null;
List<PeakInfo> peakInfoList = new LinkedList<>();
//获取用户名
String userName = JwtUtil.getUserNameByToken(request);
//读取缓存内容
Cache<String, PHDFile> phdCache = localCache.getPHDCache();
PHDFile phd = phdCache.getIfPresent(fileName + StringPool.DASH + userName);
if (Objects.isNull(phd)) {
result.error500("Please select the parse file first");
return result;
}
Map<String, Integer> idcStationMap = (Map<String, Integer>) redisUtil.get("idcStationMap");
try {
String collectStart = "";
if (phd.getCollect().getCollection_start_time().indexOf(StringPool.DOT) > 0) {
collectStart = phd.getCollect().getCollection_start_date() + " " + phd.getCollect().getCollection_start_time().substring(0, phd.getCollect().getCollection_start_time().indexOf(StringPool.DOT));
} else {
collectStart = phd.getCollect().getCollection_start_date() + " " + phd.getCollect().getCollection_start_time();
}
//获取采集开始时间
Date acqStart = DateUtils.parseDate(phd.getAcq().getAcquisition_start_date() + " " + phd.getAcq().getAcquisition_start_time());
//计算得到采集结束时间对应的毫秒数
long stopTime = (long) (acqStart.getTime() + (phd.getAcq().getAcquisition_real_time() * 1000));
//根据毫秒数得到采集结束时间
Date acquisitionStopDate = new Date(stopTime);
//格式化得到采集结束时间的字符串
String acquisitionStop = DateUtils.formatDate(acquisitionStopDate, "yyyy-MM-dd HH:mm:ss");
//从缓存的idc台站信息中获取当前台站对应的台站id
Integer stationId = null;
if (CollectionUtils.isNotEmpty(idcStationMap)) {
stationId = idcStationMap.get(phd.getHeader().getSite_code());
}
//连接本地同步的idc数据库
conn = dataService.connectInland();
//连接对象为空 则连接idc国际库
if (Objects.isNull(conn)) {
conn = dataService.connectOverSea();
}
//判断是否连接成功
if (Objects.nonNull(conn)) {
//查询获取Peaks表的对比数据内容
peakInfoList = dataService.viewPeaks(phd.getHeader().getDetector_code(), phd.getHeader().getSpectrum_quantity(), stationId, collectStart, acquisitionStop, conn);
}
result.setSuccess(true);
result.setResult(peakInfoList);
} catch (ParseException e) {
throw new RuntimeException(e);
} finally {
try {
if (Objects.nonNull(conn)) {
conn.close();
}
} catch (SQLException e) {
throw new RuntimeException(e);
}
}
return result;
}
@Override
public Result nuclComparison(String fileName, HttpServletRequest request) {
Result result = new Result();
Connection conn = null;
List<TableNuclideActivity> nuclideActivityList = new LinkedList<>();
//获取用户名
String userName = JwtUtil.getUserNameByToken(request);
//读取缓存内容
Cache<String, PHDFile> phdCache = localCache.getPHDCache();
PHDFile phd = phdCache.getIfPresent(fileName + StringPool.DASH + userName);
if (Objects.isNull(phd)) {
result.error500("Please select the parse file first");
return result;
}
Map<String, Integer> idcStationMap = (Map<String, Integer>) redisUtil.get("idcStationMap");
try {
String collectStart = "";
if (phd.getCollect().getCollection_start_time().indexOf(StringPool.DOT) > 0) {
collectStart = phd.getCollect().getCollection_start_date() + " " + phd.getCollect().getCollection_start_time().substring(0, phd.getCollect().getCollection_start_time().indexOf(StringPool.DOT));
} else {
collectStart = phd.getCollect().getCollection_start_date() + " " + phd.getCollect().getCollection_start_time();
}
//获取采集开始时间
Date acqStart = DateUtils.parseDate(phd.getAcq().getAcquisition_start_date() + " " + phd.getAcq().getAcquisition_start_time());
//计算得到采集结束时间对应的毫秒数
long stopTime = (long) (acqStart.getTime() + (phd.getAcq().getAcquisition_real_time() * 1000));
//根据毫秒数得到采集结束时间
Date acquisitionStopDate = new Date(stopTime);
//格式化得到采集结束时间的字符串
String acquisitionStop = DateUtils.formatDate(acquisitionStopDate, "yyyy-MM-dd HH:mm:ss");
//从缓存的idc台站信息中获取当前台站对应的台站id
Integer stationId = null;
if (CollectionUtils.isNotEmpty(idcStationMap)) {
stationId = idcStationMap.get(phd.getHeader().getSite_code());
}
//连接本地同步的idc数据库
conn = dataService.connectInland();
//连接对象为空 则连接idc国际库
if (Objects.isNull(conn)) {
conn = dataService.connectOverSea();
}
//判断是否连接成功
if (Objects.nonNull(conn)) {
//查询获取Peaks表的对比数据内容
nuclideActivityList = dataService.viewNucl(phd.getHeader().getDetector_code(), phd.getHeader().getSpectrum_quantity(), stationId, collectStart, acquisitionStop, conn);
}
result.setSuccess(true);
result.setResult(nuclideActivityList);
} catch (ParseException e) {
throw new RuntimeException(e);
} finally {
try {
if (Objects.nonNull(conn)) {
conn.close();
}
} catch (SQLException e) {
throw new RuntimeException(e);
}
}
return result;
}
}
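
Both re-enabled comparison methods compute the acquisition stop time the same way: the acquisition real time (seconds) is added to the acquisition start as milliseconds, and the result is formatted back to a string. A self-contained worked example of that arithmetic (class name, date and duration are made up):

import java.text.SimpleDateFormat;
import java.util.Date;

public class AcquisitionStopExample {
    public static void main(String[] args) throws Exception {
        SimpleDateFormat fmt = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        Date acqStart = fmt.parse("2024-01-03 12:00:00"); // acquisition_start_date + acquisition_start_time
        double acquisitionRealTime = 3600.0;              // acquisition_real_time in seconds
        long stopTime = (long) (acqStart.getTime() + acquisitionRealTime * 1000);
        String acquisitionStop = fmt.format(new Date(stopTime));
        System.out.println(acquisitionStop); // prints 2024-01-03 13:00:00
    }
}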

View File

@@ -4,6 +4,7 @@ import lombok.extern.slf4j.Slf4j;
import org.jeecg.common.cache.BetaCache;
import org.jeecg.common.cache.LocalCache;
import org.jeecg.common.util.oConvertUtils;
import org.jeecg.modules.service.IDataService;
import org.jeecg.modules.service.IGammaService;
import org.jeecg.modules.service.IGardsNuclCoincidenceSumSpectrumService;
import org.springframework.beans.factory.annotation.Autowired;
@@ -33,8 +34,8 @@ public class JeecgSpectrumAnalysisApplication extends SpringBootServletInitializ
private IGammaService gammaService;
@Autowired
private IGardsNuclCoincidenceSumSpectrumService nuclCoincidenceSumSpectrumService;
// @Autowired
// private IDataService dataService;
@Autowired
private IDataService dataService;
@Override
@@ -66,6 +67,6 @@ public class JeecgSpectrumAnalysisApplication extends SpringBootServletInitializ
localCache.initCache();
gammaService.readMDCParameter();
nuclCoincidenceSumSpectrumService.getNuclCoincidenceMap();
//dataService.viewStations();
dataService.viewStations();
}
}