Merge remote-tracking branch 'origin/station' into station

This commit is contained in:
xiongzheng 2023-11-06 08:58:50 +08:00
commit eb99b95bd7
31 changed files with 2191 additions and 1403 deletions

View File

@ -1,7 +1,11 @@
package org.jeecg.common.util;
import cn.hutool.core.collection.CollUtil;
import cn.hutool.core.util.ObjectUtil;
import cn.hutool.core.util.StrUtil;
import com.baomidou.mybatisplus.core.toolkit.CollectionUtils;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.net.ftp.FTP;
import org.apache.commons.net.ftp.FTPClient;
@ -13,10 +17,12 @@ import org.springframework.stereotype.Component;
import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServletResponse;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
@ -289,6 +295,34 @@ public class FTPUtil {
}
}
/**
 * Batch-delete files from the FTP server.
 *
 * @param paths FTP paths of the files to delete; blank entries are skipped
 * @return the paths that could not be deleted (empty when everything succeeded)
 */
public List<String> removeFiles(List<String> paths){
    List<String> failList = new ArrayList<>();
    if (CollUtil.isEmpty(paths))
        return failList;
    // Connect to the FTP server
    final FTPClient ftpClient = this.LoginFTP();
    // If the login failed, none of the requested files can be deleted.
    // (Without this check a null client caused an uncaught NPE on deleteFile.)
    if (ObjectUtil.isNull(ftpClient)) {
        failList.addAll(paths);
        return failList;
    }
    for (String path : paths) {
        try {
            if (StrUtil.isBlank(path)) continue;
            boolean success = ftpClient.deleteFile(path);
            if (!success) failList.add(path);
        } catch (IOException e) {
            // Record the failure and keep going with the remaining paths
            failList.add(path);
            e.printStackTrace();
        }
    }
    // Close the FTP connection
    try {
        ftpClient.disconnect();
    } catch (IOException e) {
        e.printStackTrace();
    }
    return failList;
}
public static boolean createDirs(FTPClient ftp , String path) throws IOException {
/* 该部分为逐级创建 */
String[] split = path.split(SymbolConstant.SINGLE_SLASH);
@ -303,4 +337,66 @@ public class FTPUtil {
}
return true;
}
/**
 * Download the file at the given FTP path into a local temporary file.
 *
 * @param fromPath absolute FTP path of the remote file
 * @param toPath   prefix used for the temporary file name
 *                 (NOTE(review): File.createTempFile requires a prefix of at
 *                 least 3 characters — callers currently pass e.g. "betaGamma")
 * @return the temporary file, or {@code null} when the login fails, the remote
 *         file does not exist, or an I/O error occurs
 */
public File downloadFile(String fromPath, String toPath) {
    FTPClient ftpClient = null;
    InputStream inputStream = null;
    try {
        ftpClient = LoginFTP();
        if (ObjectUtil.isNull(ftpClient))
            return null; // login failed
        // Passive mode: the server opens the data port (works behind client firewalls)
        ftpClient.enterLocalPassiveMode();
        ftpClient.setFileType(FTPClient.BINARY_FILE_TYPE);
        // Encoding for paths containing non-ASCII (e.g. Chinese) characters
        ftpClient.setControlEncoding("UTF-8");
        ftpClient.setFileTransferMode(FTPClient.STREAM_TRANSFER_MODE);
        inputStream = ftpClient.retrieveFileStream(fromPath);
        // retrieveFileStream returns null when the remote file cannot be opened;
        // previously this fell through to an NPE inside copyInputStreamToFile.
        if (ObjectUtil.isNull(inputStream))
            return null;
        // Create a temporary file and copy the FTP stream into it
        File tempFile = File.createTempFile(toPath, null);
        FileUtils.copyInputStreamToFile(inputStream, tempFile);
        return tempFile;
    } catch (IOException e) {
        e.printStackTrace();
        return null;
    } finally {
        // Close the data stream before dropping the control connection
        try {
            if (ObjectUtil.isNotNull(inputStream))
                inputStream.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
        try {
            if (ObjectUtil.isNotNull(ftpClient))
                ftpClient.disconnect();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
/**
 * Fetch the file at the given FTP path and return its contents as a stream.
 *
 * @param fromPath absolute FTP path of the remote file
 * @return an in-memory stream over the file contents, or {@code null} when the
 *         login fails, the remote file does not exist, or an I/O error occurs
 */
public InputStream downloadFileStream(String fromPath) {
    FTPClient ftpClient = null;
    InputStream inputStream = null;
    try {
        ftpClient = LoginFTP();
        if (ObjectUtil.isNull(ftpClient))
            return null; // login failed
        // Passive mode: the server opens the data port (works behind client firewalls)
        ftpClient.enterLocalPassiveMode();
        ftpClient.setFileType(FTPClient.BINARY_FILE_TYPE);
        // Encoding for paths containing non-ASCII (e.g. Chinese) characters
        ftpClient.setControlEncoding("UTF-8");
        ftpClient.setFileTransferMode(FTPClient.STREAM_TRANSFER_MODE);
        inputStream = ftpClient.retrieveFileStream(fromPath);
        if (ObjectUtil.isNull(inputStream))
            return null; // remote file not found
        // Buffer the whole file in memory before returning: the finally block
        // disconnects the FTP client, which would otherwise invalidate the raw
        // stream handed back to the caller (the original returned a dead stream).
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        byte[] chunk = new byte[8192];
        int read;
        while ((read = inputStream.read(chunk)) != -1) {
            buffer.write(chunk, 0, read);
        }
        return new ByteArrayInputStream(buffer.toByteArray());
    } catch (IOException e) {
        e.printStackTrace();
        return null;
    } finally {
        try {
            if (ObjectUtil.isNotNull(inputStream))
                inputStream.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
        try {
            if (ObjectUtil.isNotNull(ftpClient))
                ftpClient.disconnect();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
}

View File

@ -0,0 +1,25 @@
package org.jeecg.config.mybatis;
import com.baomidou.dynamic.datasource.toolkit.DynamicDataSourceContextHolder;
/**
 * Thread-local switcher for the dynamic MyBatis datasource.
 * Push a named datasource before running mappers against it, then
 * call {@link #clear()} to restore the default routing.
 */
public class DSSwitcher {
    /** Key of the Oracle datasource registered with dynamic-datasource. */
    private final static String ORACLE = "ora";
    /** Key of the default (PostgreSQL / master) datasource. */
    private final static String PGSQL = "master";

    /** Route subsequent mapper calls on this thread to the Oracle datasource. */
    public static void switchToOracle() {
        DynamicDataSourceContextHolder.push(ORACLE);
    }

    /** Route subsequent mapper calls on this thread to the PostgreSQL datasource. */
    public static void switchToPGSql() {
        DynamicDataSourceContextHolder.push(PGSQL);
    }

    /** Pop the thread-local datasource stack, restoring the default routing. */
    public static void clear() {
        DynamicDataSourceContextHolder.clear();
    }
}

View File

@ -0,0 +1,17 @@
package org.jeecg.modules.base.dto;
import lombok.Data;
@Data
public class DBInfo {
    // Table name as reported by the database catalog
    private String tableName;
    // Estimated number of rows in the table
    private Integer numRow;
    // Data segment size in MB (queries divide bytes by 1024*1024 and round to 2 dp)
    private Double dataSize;
    // Index segment size in MB
    private Double indexSize;
    // Percentage of the segment's maximum size in use (Oracle query: bytes / max_size * 100)
    private Double used;
}

View File

@ -0,0 +1,11 @@
package org.jeecg.modules.base.dto;
import lombok.Data;
@Data
public class OwnerDto {
    // Schema/owner name — presumably maps to Oracle ALL_TABLES.OWNER; confirm against the mapper queries
    private String owner;
    // Table name within that schema
    private String tableName;
}

View File

@ -18,7 +18,7 @@ public class FittingBody implements Serializable {
private Integer count;
private String qcFileName;
private String sampleFileName;
private String tabName;

View File

@ -31,8 +31,16 @@ public class QCResult implements Serializable {
private String xe133MDCStatus;
private String gasBgEvaluationMetrics;
private String gasBgValue;
private boolean gasBgValueAndStatus;
private String detBgEvaluationMetrics;
private String detBgValue;
private boolean detBgValueAndStatus;
}

View File

@ -63,4 +63,17 @@ public class SysDatabaseController {
List<SourceDto> sourceDtos = sysDatabaseService.listAll();
return Result.OK(sourceDtos);
}
/**
 * List database (schema) names for the given database type.
 *
 * @param dbType database type key (see DataBaseConstant values used by the service)
 * @return OK result wrapping the list of names; empty for unsupported types
 */
@GetMapping("dbNames")
@ApiOperation(value = "数据库名列表",notes = "数据库名列表")
public Result<?> dbNames(@RequestParam String dbType){
return Result.OK(sysDatabaseService.dbNames(dbType));
}
/**
 * Per-table detail information (row counts, data/index sizes) for one database.
 *
 * @param dbType   database type key (currently only Oracle is wired up in the service)
 * @param dataBase database/schema name whose tables are inspected
 * @return OK result wrapping a list of DBInfo entries
 */
@GetMapping("dbInfo")
@ApiOperation(value = "数据库表详情信息",notes = "数据库表详情信息")
public Result<?> dbInfo(@RequestParam String dbType,
@RequestParam String dataBase) {
return Result.OK(sysDatabaseService.dbInfo(dbType, dataBase));
}
}

View File

@ -1,6 +1,8 @@
package org.jeecg.modules.mapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import org.apache.ibatis.annotations.Param;
import org.jeecg.modules.base.dto.DBInfo;
import org.jeecg.modules.base.dto.DatabaseDto;
import org.jeecg.modules.base.entity.postgre.SysDatabase;
import org.jeecg.modules.entity.AlarmHistory;
@ -13,4 +15,16 @@ public interface SysDatabaseMapper extends BaseMapper<SysDatabase> {
List<AlarmHistory> findAlarmHistory(Map<String,Object> params);
List<DatabaseDto> findPage(Map<String,Object> params);
List<String> dbNamesPG();
List<String> dbNamesMY();
List<String> dbNamesOR();
List<DBInfo> dbInfoOR(@Param("owner") String dataBase);
List<DBInfo> dbIndexOR(@Param("owner") String dataBase);
List<DBInfo> dbInfoMY(@Param("owner") String dataBase);
}

View File

@ -18,10 +18,4 @@ public interface SysServerMapper extends BaseMapper<SysServer> {
List<SourceDto> pageAll(String itemName);
List<AlarmInfo> alarmInfo(String sourceId);
List<String> dbNamesPG();
List<String> dbNamesMY();
List<String> dbNamesOR();
}

View File

@ -72,14 +72,4 @@
WHERE r.source_id = #{sourceId}
ORDER BY l.alarm_start_date DESC
</select>
<select id="dbNamesPG" resultType="java.lang.String">
SELECT datname FROM pg_database WHERE datistemplate = false;
</select>
<select id="dbNamesMY" resultType="java.lang.String">
SHOW DATABASES;
</select>
<select id="dbNamesOR" resultType="java.lang.String">
SELECT username FROM all_users;
</select>
</mapper>

View File

@ -50,5 +50,51 @@
LIMIT #{pageSize} OFFSET #{pageStart}
</if>
</select>
<select id="dbNamesPG" resultType="java.lang.String">
SELECT datname FROM pg_database WHERE datistemplate = false;
</select>
<select id="dbNamesMY" resultType="java.lang.String">
SHOW DATABASES;
</select>
<select id="dbNamesOR" resultType="java.lang.String">
SELECT username FROM all_users;
</select>
<select id="dbInfoOR" resultType="org.jeecg.modules.base.dto.DBInfo">
    <!-- Per-table size details for an Oracle schema (sizes in MB, usage in %).
         NOTE(review): indexSize here is computed from the same data-segment
         bytes as dataSize; the service layer overwrites it afterwards with the
         real value from the dbIndexOR query. -->
    SELECT
    a.table_name AS tableName,
    a.num_rows AS numRow,
    COALESCE(ROUND((d.bytes / (1024 * 1024)), 2), 0) AS dataSize,
    COALESCE(ROUND((d.bytes / (1024 * 1024)), 2), 0) AS indexSize,
    COALESCE(ROUND((d.bytes / d.max_size) * 100, 3), 0) AS used
    FROM
    all_tables a
    LEFT JOIN dba_segments d ON a.owner = d.owner AND a.table_name = d.segment_name AND d.segment_type = 'TABLE'
    WHERE
    a.owner = #{owner}
    ORDER BY a.table_name
</select>
<select id="dbIndexOR" resultType="org.jeecg.modules.base.dto.DBInfo">
    <!-- Index-segment size (MB) per table of an Oracle schema; only
         tableName and indexSize are populated on the resulting DBInfo. -->
    SELECT
    a.table_name AS tableName,
    COALESCE(ROUND((d.bytes / (1024 * 1024)), 2), 0) AS indexSize
    FROM
    all_tables a
    LEFT JOIN dba_segments d ON a.owner = d.owner AND a.table_name = d.segment_name AND d.segment_type = 'INDEX'
    WHERE
    a.owner = #{owner}
    ORDER BY a.table_name
</select>
<select id="dbInfoMY" resultType="org.jeecg.modules.base.dto.DBInfo">
    <!-- Table name, row estimate and data/index sizes (MB) for every table in
         the given MySQL schema. Fixed: a stray ';' after the WHERE clause
         terminated the statement before ORDER BY, making the SQL invalid. -->
    SELECT
    TABLE_NAME AS tableName,
    TABLE_ROWS AS numRow,
    round((data_length / 1024 / 1024), 2) AS dataSize,
    round((index_length / 1024 / 1024), 2) AS indexSize
    FROM
    information_schema.tables
    WHERE
    table_schema = #{owner}
    ORDER BY TABLE_NAME
</select>
</mapper>

View File

@ -3,6 +3,7 @@ package org.jeecg.modules.service;
import com.baomidou.mybatisplus.extension.service.IService;
import org.jeecg.common.api.QueryRequest;
import org.jeecg.common.api.vo.Result;
import org.jeecg.modules.base.dto.DBInfo;
import org.jeecg.modules.base.dto.SourceDto;
import org.jeecg.modules.base.entity.postgre.SysDatabase;
import org.jeecg.modules.base.bizVo.SourceVo;
@ -24,4 +25,8 @@ public interface ISysDatabaseService extends IService<SysDatabase> {
Result findAlarmHistory(SourceVo sourceVo);
List<SourceDto> listAll();
List<String> dbNames(String dbType);
List<DBInfo> dbInfo(String dbType, String dataBase);
}

View File

@ -33,6 +33,4 @@ public interface ISysServerService extends IService<SysServer> {
Result<?> details_BasicInfo(String hostId);
Result<?> details_AlarmInfo(String sourceId, Integer pageNo, Integer pageSize);
List<String> dbNames();
}

View File

@ -9,10 +9,13 @@ import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import org.jeecg.common.api.QueryRequest;
import org.jeecg.common.api.vo.Result;
import org.jeecg.common.constant.DataBaseConstant;
import org.jeecg.common.constant.DateConstant;
import org.jeecg.common.constant.DictConstant;
import org.jeecg.common.constant.Prompt;
import org.jeecg.common.system.vo.DictModel;
import org.jeecg.config.mybatis.DSSwitcher;
import org.jeecg.modules.base.dto.DBInfo;
import org.jeecg.modules.base.dto.DatabaseDto;
import org.jeecg.modules.base.dto.SourceDto;
import org.jeecg.modules.base.entity.postgre.SysDatabase;
@ -178,4 +181,87 @@ public class SysDatabaseServiceImpl extends ServiceImpl<SysDatabaseMapper, SysDa
return sourceDtos;
}
@Override
public List<String> dbNames(String dbType) {
    // Dispatch on database type. NB: like the original switch statement,
    // a null dbType raises a NullPointerException.
    List<String> names = new ArrayList<>();
    if (dbType.equals(DataBaseConstant.DB_TYPE_ORACLE)) {
        names = dbNamesOR();
    } else if (dbType.equals(DataBaseConstant.DB_TYPE_POSTGRESQL)) {
        names = dbNamesPG();
    }
    // MySQL (and any other type) is not wired up yet and yields an empty list.
    return names;
}
@Override
public List<DBInfo> dbInfo(String dbType, String dataBase) {
    // Table-level details, dispatched by database type. Only Oracle is
    // implemented; other types (PostgreSQL/MySQL) yield an empty list.
    // NB: like the original switch statement, a null dbType raises an NPE.
    List<DBInfo> result = new ArrayList<>();
    if (dbType.equals(DataBaseConstant.DB_TYPE_ORACLE)) {
        result = dbInfoOR(dataBase);
    }
    return result;
}
/**
 * List Oracle "database" names (schema owners) via the Oracle datasource.
 */
private List<String> dbNamesOR(){
    // Route this thread to Oracle; the finally block guarantees the pushed
    // datasource is popped even when the query throws (the original skipped
    // clear() on failure, leaking the thread-local datasource).
    DSSwitcher.switchToOracle();
    try {
        return baseMapper.dbNamesOR();
    } finally {
        DSSwitcher.clear();
    }
}
/*
 * List PostgreSQL database names. Runs on the current (default) datasource,
 * so no datasource switch is performed here.
 * */
private List<String> dbNamesPG(){
return baseMapper.dbNamesPG();
}
/*
 * List MySQL database names.
 * NOTE(review): the "switch/clear datasource" comments below are placeholders —
 * no datasource switch actually happens, so this statement runs against the
 * current (default) datasource. Wire up a MySQL datasource before calling.
 * */
private List<String> dbNamesMY(){
// Switch datasource (placeholder — not implemented)
return baseMapper.dbNamesMY();
// Clear datasource (placeholder — not implemented)
}
/**
 * Per-table details for an Oracle schema.
 *
 * @param dataBase Oracle schema (owner) name
 * @return DBInfo list with indexSize replaced by the dedicated index query's value
 */
private List<DBInfo> dbInfoOR(String dataBase){
    // Route this thread to Oracle; finally guarantees the pushed datasource
    // is popped even when a query throws (the original skipped clear() on failure).
    DSSwitcher.switchToOracle();
    try {
        List<DBInfo> dbInfos = baseMapper.dbInfoOR(dataBase);
        // dbInfoOR reports the data-segment size in indexSize as well;
        // overwrite it with the real index-segment size from dbIndexOR.
        Map<String, Double> indexSize = baseMapper.dbIndexOR(dataBase).stream()
                .collect(Collectors.toMap(DBInfo::getTableName, DBInfo::getIndexSize));
        for (DBInfo dbInfo : dbInfos) {
            dbInfo.setIndexSize(indexSize.get(dbInfo.getTableName()));
        }
        return dbInfos;
    } finally {
        DSSwitcher.clear();
    }
}
/**
 * Per-table details for a PostgreSQL database — not implemented yet.
 * Returns an empty list instead of null so future callers can iterate safely.
 */
private List<DBInfo> dbInfoPG(String dataBase){
    return new ArrayList<>();
}
/*
 * Per-table details for a MySQL schema.
 * NOTE(review): the "switch/clear datasource" comments below are placeholders —
 * no datasource switch actually happens, so the query runs against the current
 * (default) datasource. Wire up a MySQL datasource before calling.
 * */
private List<DBInfo> dbInfoMY(String dataBase){
// Switch datasource (placeholder — not implemented)
List<DBInfo> dbInfos = baseMapper.dbInfoMY(dataBase);
// Clear datasource (placeholder — not implemented)
return dbInfos;
}
}

View File

@ -304,10 +304,4 @@ public class SysServerServiceImpl extends ServiceImpl<SysServerMapper, SysServer
page.setRecords(records);
return Result.OK(page);
}
@Override
public List<String> dbNames() {
return null;
}
}

View File

@ -62,7 +62,7 @@ public class GardsSampleDataServiceImpl extends ServiceImpl<GardsSampleDataMappe
*/
@Override
public GardsSampleData getSampleIdAndInputFileName(String measurementId, String dataType, String systemType) {
String detectorId = measurementId.substring(0, 8);
String detectorId = measurementId.substring(0, 9);
final List<GardsSampleData> sampleDatas = this.baseMapper.getSampleIdAndInputFileName(measurementId, dataType, systemType, detectorId);
if(!CollectionUtils.isEmpty(sampleDatas)){
//如果查询出多条则需要根据inputFileName字段降序排序后返回第一个

View File

@ -18,11 +18,8 @@ import org.jeecg.common.constant.*;
import org.jeecg.common.constant.enums.SpectrumSystemType;
import org.jeecg.common.properties.ParameterProperties;
import org.jeecg.common.properties.SpectrumPathProperties;
import org.jeecg.common.util.DateUtils;
import org.jeecg.common.util.GammaFileUtil;
import org.jeecg.common.util.RedisUtil;
import org.jeecg.common.util.*;
import org.jeecg.modules.base.bizVo.AttributeItemVo;
import org.jeecg.common.util.MyLogFormatUtil;
import org.jeecg.modules.base.dto.*;
import org.jeecg.modules.base.entity.original.GardsSampleData;
import org.jeecg.modules.base.entity.rnauto.*;
@ -61,6 +58,8 @@ public class Sample_G_Analysis {
private RedisUtil redisUtil;
private FTPUtil ftpUtil;
/**
* 系统类型
*/
@ -124,9 +123,10 @@ public class Sample_G_Analysis {
PHDFile phdFile = new PHDFile();
phdFile.setXmlFilePath(parameterProperties.getFilePath());
// 解析PHD文件
spectrumPathProperties = ApplicationContextUtil.getContext().getBean(SpectrumPathProperties.class);
spectrumPathProperties = SpringContextUtils.getBean(SpectrumPathProperties.class);
ftpUtil = SpringContextUtils.getBean(FTPUtil.class);
String sampleFilePath = sampleData.getInputFileName();
String pathName = File.separator + spectrumPathProperties.getSaveFilePath() + File.separator +
String pathName = ftpUtil.getFtpRootPath() + spectrumPathProperties.getSaveFilePath() + File.separator +
sampleFilePath.substring(0, sampleFilePath.lastIndexOf(StringPool.SLASH));
String fileName = sampleFilePath.substring(sampleFilePath.lastIndexOf(StringPool.SLASH)+1);

View File

@ -1,5 +1,6 @@
package org.jeecg.common.util;
import cn.hutool.core.io.FileUtil;
import cn.hutool.core.util.ObjectUtil;
import com.alibaba.fastjson.JSON;
import com.baomidou.mybatisplus.core.toolkit.CollectionUtils;
@ -11,7 +12,6 @@ import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.net.ftp.FTP;
import org.apache.commons.net.ftp.FTPClient;
import org.checkerframework.checker.units.qual.N;
import org.ejml.simple.SimpleMatrix;
import org.jeecg.common.api.vo.Result;
import org.jeecg.common.properties.ParameterProperties;
@ -30,7 +30,6 @@ import org.jeecg.modules.native_jni.struct.EnergySpectrumStruct;
import org.jeecgframework.core.util.ApplicationContextUtil;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import org.w3c.dom.*;
import org.xml.sax.SAXException;
@ -42,9 +41,9 @@ import java.io.*;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.text.DecimalFormat;
import java.text.NumberFormat;
import java.text.ParseException;
import java.util.*;
import java.util.stream.Collectors;
@Component
@Slf4j
@ -64,28 +63,13 @@ public class GammaFileUtil extends AbstractLogOrReport {
public boolean loadFile(String pathName, String fileName, PHDFile phd, Result result) {
phd.setFilepath(pathName);
phd.setFilename(fileName);
//连接ftp
FTPClient ftpClient = ftpUtil.LoginFTP();
if (Objects.isNull(ftpClient)){
result.error500("ftp connection failed");
String fromPath = pathName + StringPool.SLASH + fileName;
File file = ftpUtil.downloadFile(fromPath, "betaGamma");
if (Objects.isNull(file)) {
result.error500("ftp file can't find");
return false;
}
InputStream inputStream = null;
File file = null;
try {
//切换被动模式
ftpClient.enterLocalPassiveMode();
ftpClient.setFileType(FTPClient.BINARY_FILE_TYPE);
// 设置编码当文件中存在中文且上传后文件乱码时可使用此配置项
ftpClient.setControlEncoding("UTF-8");
ftpClient.setFileTransferMode(FTPClient.STREAM_TRANSFER_MODE);
ftpClient.changeWorkingDirectory(pathName);
inputStream = ftpClient.retrieveFileStream(fileName);
if (Objects.nonNull(inputStream)) {
//声明一个临时文件
file = File.createTempFile("betaGamma", null);
//将ftp文件的输入流复制给临时文件
FileUtils.copyInputStreamToFile(inputStream, file);
//读取文件信息
EnergySpectrumStruct struct = EnergySpectrumHandler.getSourceData(file.getAbsolutePath());
//MsgInfo
@ -248,166 +232,65 @@ public class GammaFileUtil extends AbstractLogOrReport {
phd.setBAnalyed(false);
phd.setAnaly_start_time(DateUtils.formatDate(new Date(), "yyyy/MM/dd HH:mm:ss"));
}
}catch (IOException e) {
throw new RuntimeException(e);
} catch (ParseException e) {
}catch (ParseException e) {
throw new RuntimeException(e);
} finally {
try {
if (Objects.nonNull(ftpClient)){
ftpClient.disconnect();
}
if (Objects.nonNull(inputStream)){
inputStream.close();
}
if (Objects.nonNull(file)) {
if (Objects.nonNull(file))
file.delete();
}
} catch (IOException e) {
throw new RuntimeException(e);
}
}
return true;
}
public void SetBaseInfo(PHDFile phd) {
public void SetBaseInfo(PHDFile phd, String prefixName) {
String pathName = phd.getFilepath();
String fileName = phd.getFilename();
//文件名称需要加上自动处理的前缀以及修改不同的文件后缀名
String subFileName = fileName.substring(0, fileName.lastIndexOf(StringPool.DOT));
if(StringUtils.isNotBlank(subFileName)){
//连接ftp
FTPClient ftpClient = ftpUtil.LoginFTP();
if (Objects.isNull(ftpClient)){
String warning = "ftp connection failed";
}
InputStream inputStream = null;
File file = null;
File lcFile = null;
File scacFile = null;
InputStream inputStreamBase = null;
try {
//切换被动模式
ftpClient.enterLocalPassiveMode();
ftpClient.setFileType(FTPClient.BINARY_FILE_TYPE);
// 设置编码当文件中存在中文且上传后文件乱码时可使用此配置项
ftpClient.setControlEncoding("UTF-8");
ftpClient.setFileTransferMode(FTPClient.STREAM_TRANSFER_MODE);
ftpClient.changeWorkingDirectory(pathName);
String lcFileName = "RNAUTO_"+subFileName + ".lc";
inputStream = ftpClient.retrieveFileStream(lcFileName);
if (Objects.nonNull(inputStream)){
//声明一个临时文件
file = File.createTempFile("betaGamma", null);
//将ftp文件的输入流复制给临时文件
FileUtils.copyInputStreamToFile(inputStream, file);
//调用FileUtils的readLines方法获取文件的所有行数据
List<String> readLines = FileUtils.readLines(file, "UTF-8");
String lcFileName = prefixName+"_"+subFileName + ".lc";
String fromPathLc = pathName + StringPool.SLASH + lcFileName;
lcFile = ftpUtil.downloadFile(fromPathLc, "betaGamma");
List<String> readLinesLc = FileUtils.readLines(lcFile, "UTF-8");
//得到行数据处理后的数据结果
List<Double> vData = ReadLcScacInfo(readLines);
List<Double> vDataLc = ReadLcScacInfo(readLinesLc);
//将数据结果赋值给 phdFile的vLc
phd.setVLc(vData);
}
} catch (IOException e) {
throw new RuntimeException(e);
} finally {
try {
if (Objects.nonNull(ftpClient)) {
ftpClient.disconnect();
}
if (Objects.nonNull(inputStream)) {
inputStream.close();
}
if (Objects.nonNull(file)) {
file.delete();
}
} catch (IOException e) {
throw new RuntimeException(e);
}
}
}
if(StringUtils.isNotBlank(subFileName)){
//连接ftp
FTPClient ftpClient = ftpUtil.LoginFTP();
if (Objects.isNull(ftpClient)){
String warning = "ftp connection failed";
}
InputStream inputStream = null;
File file = null;
try {
//切换被动模式
ftpClient.enterLocalPassiveMode();
ftpClient.setFileType(FTPClient.BINARY_FILE_TYPE);
// 设置编码当文件中存在中文且上传后文件乱码时可使用此配置项
ftpClient.setControlEncoding("UTF-8");
ftpClient.setFileTransferMode(FTPClient.STREAM_TRANSFER_MODE);
ftpClient.changeWorkingDirectory(pathName);
String scacFileName = "RNAUTO_"+subFileName + ".scac";
inputStream = ftpClient.retrieveFileStream(scacFileName);
if (Objects.nonNull(inputStream)){
//声明一个临时文件
file = File.createTempFile("betaGamma", null);
//将ftp文件的输入流复制给临时文件
FileUtils.copyInputStreamToFile(inputStream, file);
//调用FileUtils的readLines方法获取文件的所有行数据
List<String> readLines = FileUtils.readLines(file, "UTF-8");
phd.setVLc(vDataLc);
String scacFileName = prefixName+"_"+subFileName + ".scac";
String fromPathScac = pathName + StringPool.SLASH + scacFileName;
scacFile = ftpUtil.downloadFile(fromPathScac, "betaGamma");
List<String> readLinesScac = FileUtils.readLines(scacFile, "UTF-8");
//得到行数据处理后的数据结果
List<Double> vData = ReadLcScacInfo(readLines);
//将数据结果赋值给 phdFile的vScac
phd.setVScac(vData);
}
} catch (IOException e) {
throw new RuntimeException(e);
} finally {
try {
if (Objects.nonNull(ftpClient)) {
ftpClient.disconnect();
}
if (Objects.nonNull(inputStream)) {
inputStream.close();
}
if (Objects.nonNull(file)) {
file.delete();
}
} catch (IOException e) {
throw new RuntimeException(e);
}
}
}
if(StringUtils.isNotBlank(subFileName)){
//连接ftp
FTPClient ftpClient = ftpUtil.LoginFTP();
if (Objects.isNull(ftpClient)){
String warning = "ftp connection failed";
}
InputStream inputStream = null;
try {
//切换被动模式
ftpClient.enterLocalPassiveMode();
ftpClient.setFileType(FTPClient.BINARY_FILE_TYPE);
// 设置编码当文件中存在中文且上传后文件乱码时可使用此配置项
ftpClient.setControlEncoding("UTF-8");
ftpClient.setFileTransferMode(FTPClient.STREAM_TRANSFER_MODE);
ftpClient.changeWorkingDirectory(pathName);
String baselineFileName = "RNAUTO_"+subFileName + ".baseline";
//获取ftp的文件流数据
inputStream = ftpClient.retrieveFileStream(baselineFileName);
if (Objects.nonNull(inputStream)){
//调用处理BaseCtrl的方法
ReadBaseCtrlInfo(phd, inputStream);
//将phdFile的BaseCtrls的BaseLine部分数据 赋值给 phdFile的vBase
List<Double> vDataScac = ReadLcScacInfo(readLinesScac);
//将数据结果赋值给 phdFile的vLc
phd.setVScac(vDataScac);
String baselineFileName = prefixName+"_"+subFileName + ".baseline";
String fromPathBase = pathName + StringPool.SLASH + baselineFileName;
inputStreamBase = ftpUtil.downloadFileStream(fromPathBase);
// 调用处理BaseCtrl的方法
ReadBaseCtrlInfo(phd, inputStreamBase);
// 将phdFile的BaseCtrls的BaseLine部分数据 赋值给 phdFile的vBase
phd.setVBase(phd.getBaseCtrls().getBaseline());
}
} catch (IOException e) {
throw new RuntimeException(e);
}catch (IOException e){
e.printStackTrace();
} finally {
try {
if (Objects.nonNull(ftpClient)) {
ftpClient.disconnect();
}
if (Objects.nonNull(inputStream)) {
inputStream.close();
}
if (ObjectUtil.isNotNull(inputStreamBase))
inputStreamBase.close();
if (ObjectUtil.isNotNull(lcFile))
lcFile.delete();
if (ObjectUtil.isNotNull(scacFile))
scacFile.delete();
} catch (IOException e) {
throw new RuntimeException(e);
e.printStackTrace();
}
}
}
@ -982,7 +865,7 @@ public class GammaFileUtil extends AbstractLogOrReport {
return true;
}
public List<String> DetailedInfo(Integer sampleId, PHDFile phd) {
public List<String> DetailedInfo(String sampleId, PHDFile phd) {
try {
// Sample_Id, Station_Code, Detector_Code, System_Type, Data_Type, Spectral_Qualifier,
// SRID, Sample_Status, Collect_Start, Sampling_Time, Quantity, Flow_Rate,
@ -993,7 +876,7 @@ public class GammaFileUtil extends AbstractLogOrReport {
detailInfo.add("");
}
//读取phdFile的数据 修改detailInfo的数据
detailInfo.set(0, Objects.nonNull(sampleId)?sampleId.toString():""); // Sample_Id
detailInfo.set(0, StringUtils.isNotBlank(sampleId)?sampleId:""); // Sample_Id
detailInfo.set(1, phd.getHeader().getSite_code()); // Station_Code
detailInfo.set(2, phd.getHeader().getDetector_code()); // Detector_Code
detailInfo.set(3, phd.getHeader().getSystem_type().toUpperCase()); // System_Type
@ -1026,6 +909,19 @@ public class GammaFileUtil extends AbstractLogOrReport {
}
/**
 * Populate the chart payload map with spectrum curves, the combined data
 * series, and the baseline control-point shapes for the given PHD file.
 */
public void UpdateChart(PHDFile phd, Map<String, Object> map, Map<String, String> colorMap) {
    // Spectrum line charts in both energy- and channel-domain
    String specColor = colorMap.get("Color_Spec");
    map.put("shadowEnergyChart", Energy_Count(phd, specColor));
    map.put("shadowChannelChart", Channel_Count(phd, specColor));
    // Combined data set (counts, Lc/Scac/baseline, peaks)
    map.put("allData", AllData(false, phd, colorMap));
    // Baseline control-point shapes
    map.put("shapeEnergyData", Energy_BaseCP(phd));
    map.put("shapeChannelData", Channel_BaseCP(phd));
}
public ChartData Energy_Count(PHDFile phd, String color) {
//声明一个数组
List<Long> m_vCount = new LinkedList<>();
//获取 phdFile的Spec的num_g_channel
@ -1042,19 +938,7 @@ public class GammaFileUtil extends AbstractLogOrReport {
if(m_nSChan == 0) {
m_vCount.add(0L);
}
ChartData shadowEnergyChart = Energy_Count(phd, m_vCount, m_nCount, colorMap.get("Color_Spec"));
ChartData shadowChannelChart = Channel_Count(m_vCount, m_nCount, colorMap.get("Color_Spec"));
map.put("shadowEnergyChart", shadowEnergyChart);
map.put("shadowChannelChart", shadowChannelChart);
List<ChartData> allData = AllData(false, phd, m_vCount, m_nCount, colorMap);
map.put("allData", allData);
List<ShapeData> shapeEnergyData = Energy_BaseCP(phd);
map.put("shapeEnergyData", shapeEnergyData);
List<ShapeData> shapeChannelData = Channel_BaseCP(phd);
map.put("shapeChannelData", shapeChannelData);
}
public ChartData Energy_Count(PHDFile phd, List<Long> m_vCount, long m_nCount, String color) {
int start = 0;
long end = -1;
//Energy
@ -1066,10 +950,10 @@ public class GammaFileUtil extends AbstractLogOrReport {
if(end < 1 || end > m_nCount){
end = m_nCount;
}
for(int i=start-1; i<end; i++) {
for(int j=start-1; j<end; j++) {
SeriseData seriseData = new SeriseData();
seriseData.setX(phd.getVEnergy().get(i));
seriseData.setY(m_vCount.get(i));
seriseData.setX(phd.getVEnergy().get(j));
seriseData.setY(m_vCount.get(j));
data1.getPointlist().add(seriseData);
}
data1.setName("Energy");
@ -1079,7 +963,22 @@ public class GammaFileUtil extends AbstractLogOrReport {
return data1;
}
public ChartData Channel_Count(List<Long> m_vCount, long m_nCount, String color) {
public ChartData Channel_Count(PHDFile phd, String color) {
// Chart 折线图
List<Long> m_vCount = new LinkedList<>();
long m_nCount = phd.getSpec().getNum_g_channel();
long m_nSChan = phd.getSpec().getBegin_channel();
// 确保绘制曲线时所有谱都是从1道开始
int i = 0;
if (m_nSChan == 0) {
i = 1;
}
for (; i < m_nCount; i++) {
m_vCount.add(phd.getSpec().getCounts().get(i));
}
if (m_nSChan == 0) {
m_vCount.add(0L);
}
int start = 1;
long end = -1;
ChartData data1 = new ChartData();
@ -1090,10 +989,10 @@ public class GammaFileUtil extends AbstractLogOrReport {
if(end < 1 || end > m_nCount){
end = m_nCount;
}
for(int i=start-1; i<end; i++) {
for(int j=start-1; j<end; j++) {
SeriseData seriseData = new SeriseData();
seriseData.setX(i+1);
seriseData.setY(m_vCount.get(i));
seriseData.setX(j+1);
seriseData.setY(m_vCount.get(j));
data1.getPointlist().add(seriseData);
}
data1.setName("Count");
@ -1103,7 +1002,8 @@ public class GammaFileUtil extends AbstractLogOrReport {
return data1;
}
public List<ChartData> Lc_Scac_base(boolean showLc, PHDFile phd, long m_nCount, Map<String, String> colorMap) {
public List<ChartData> Lc_Scac_base(boolean showLc, PHDFile phd, Map<String, String> colorMap) {
long m_nCount = phd.getSpec().getNum_g_channel();
List<ChartData> datalist = new LinkedList<>();
int start = 0;
long end = -1;
@ -1216,12 +1116,12 @@ public class GammaFileUtil extends AbstractLogOrReport {
return datalist;
}
public List<ChartData> Channel_Peak(PHDFile phd, long m_nCount, String color) {
return PeakSet(phd.getVPeak(), phd.getVBase(), color, m_nCount, null, false);
public List<ChartData> Channel_Peak(PHDFile phd, String color) {
return PeakSet(phd.getVPeak(), phd.getVBase(), color, phd.getSpec().getNum_g_channel(), null, false);
}
public List<ChartData> Energy_Peak(PHDFile phd, long m_nCount, String color) {
return PeakSet(phd.getVPeak(), phd.getVBase(), color, m_nCount, phd.getUsedEnerPara().getP(), true);
public List<ChartData> Energy_Peak(PHDFile phd, String color) {
return PeakSet(phd.getVPeak(), phd.getVBase(), color, phd.getSpec().getNum_g_channel(), phd.getUsedEnerPara().getP(), true);
}
public List<ChartData> PeakSet(List<PeakInfo> vPeak, List<Double> vBase, String color, long m_nCount, List<Double> p, boolean bEnergy) {
@ -1257,7 +1157,6 @@ public class GammaFileUtil extends AbstractLogOrReport {
for(int i=startPeak; i<=endPeak; i++) {
PeakInfo peak = vPeak.get(i);
ChartData cData = new ChartData();
cData.setColor(color);
cData.setName("Peak");
@ -1292,7 +1191,7 @@ public class GammaFileUtil extends AbstractLogOrReport {
}
SeriseData seriseData = new SeriseData();
seriseData.setX(regChan.get(ii));
seriseData.setY(regPeak.get(ii));
seriseData.setY(regPeak.get(ii).isNaN()?0:regPeak.get(ii));
cData.getPointlist().add(seriseData);
}
}
@ -1336,13 +1235,13 @@ public class GammaFileUtil extends AbstractLogOrReport {
return shapes;
}
public List<ChartData> AllData(boolean showLc, PHDFile phd, List<Long> m_vCount, long m_nCount, Map<String, String> colorMap) {
public List<ChartData> AllData(boolean showLc, PHDFile phd, Map<String, String> colorMap) {
List<ChartData> datalist = new LinkedList<>();
datalist.add(Channel_Count(m_vCount, m_nCount, colorMap.get("Color_Spec")));
datalist.add(Energy_Count(phd, m_vCount, m_nCount, colorMap.get("Color_Spec")));
datalist.addAll(Lc_Scac_base(showLc, phd, m_nCount, colorMap));
datalist.addAll(Channel_Peak(phd, m_nCount, colorMap.get("Color_Peak")));
datalist.addAll(Energy_Peak(phd, m_nCount, colorMap.get("Color_Peak")));
datalist.add(Channel_Count(phd, colorMap.get("Color_Spec")));
datalist.add(Energy_Count(phd, colorMap.get("Color_Spec")));
datalist.addAll(Lc_Scac_base(showLc, phd, colorMap));
datalist.addAll(Channel_Peak(phd, colorMap.get("Color_Peak")));
datalist.addAll(Energy_Peak(phd, colorMap.get("Color_Peak")));
return datalist;
}
@ -1673,7 +1572,7 @@ public class GammaFileUtil extends AbstractLogOrReport {
double sum_found = 0; // 所有匹配的γ射线能量处效率乘以分支比的和
int mainPeakIdx = -1; // 记录核素主γ峰的索引下标
for (int i=0, j=0; i<vEnergy.size(); i++) {
for(; j<peakNum; ++j) {
for(; j<peakNum; j++) {
if(phd.getVPeak().get(j).energy >= 510 && phd.getVPeak().get(j).energy <= 512) {
continue; // 峰中心道能量为511的峰不进行核素识别
}
@ -1711,11 +1610,8 @@ public class GammaFileUtil extends AbstractLogOrReport {
}
if(mainPeakIdx < 0) continue;
}
if (Objects.isNull(phd.getMapNucActMda().get(iter.getKey()))) {
Map<String, NuclideActMda> mapNucActMda = phd.getMapNucActMda();
mapNucActMda.put(iter.getKey(), new NuclideActMda());
}
NuclideActMda ActMda = phd.getMapNucActMda().get(iter.getKey());
NuclideActMda ActMda = new NuclideActMda();
boolean bActBigger = CalculateMDCs(phd, ActMda, mainPeakIdx, lambda, maxFoundYield, 1.0);
if(rate > 0.8 || bActBigger) {
@ -1899,7 +1795,8 @@ public class GammaFileUtil extends AbstractLogOrReport {
}
}
public ChartData Channel_BaseLine(PHDFile phd, long m_nCount, String color) {
public ChartData Channel_BaseLine(PHDFile phd, String color) {
long m_nCount = phd.getSpec().getNum_g_channel();
ChartData cData = new ChartData();
int start =0;
long end = -1;
@ -1925,7 +1822,22 @@ public class GammaFileUtil extends AbstractLogOrReport {
return cData;
}
public List<SeriseData> Differance(PHDFile phd, List<PeakInfo> vecPeak, List<Long> m_vCount, long m_nCount) {
public List<SeriseData> Differance(PHDFile phd, List<PeakInfo> vecPeak) {
// Chart 折线图
List<Long> m_vCount = new LinkedList<>();
long m_nCount = phd.getSpec().getNum_g_channel();
long m_nSChan = phd.getSpec().getBegin_channel();
// 确保绘制曲线时所有谱都是从1道开始
int i = 0;
if (m_nSChan == 0) {
i = 1;
}
for (; i < m_nCount; i++) {
m_vCount.add(phd.getSpec().getCounts().get(i));
}
if (m_nSChan == 0) {
m_vCount.add(0L);
}
List<SeriseData> pointlist = new LinkedList<>();
int start =0;
long end = -1;
@ -1961,8 +1873,8 @@ public class GammaFileUtil extends AbstractLogOrReport {
List<Double> regPeak = new LinkedList<>();
int channel = 1;
for(int i=start-1; i<end; i++) {
channel = i + 1;
for(int j=start-1; j<end; j++) {
channel = j + 1;
PeakInfo peak = vecPeak.get(startPeak);
if(channel >= peak.left && channel <= peak.right) {
if(CollectionUtils.isEmpty(regPeak)) {
@ -1977,12 +1889,12 @@ public class GammaFileUtil extends AbstractLogOrReport {
// 差值 = 原谱值 - 峰面积
SeriseData seriseData = new SeriseData();
seriseData.setX(channel);
seriseData.setY(m_vCount.get(i) - regPeak.get(channel-peak.left));
seriseData.setY(m_vCount.get(j) - regPeak.get(channel-peak.left));
pointlist.add(seriseData);
} else if(m_vCount.get(i) > 0 && phd.getVBase().get(i) > 0) {
} else if(m_vCount.get(j) > 0 && phd.getVBase().get(j) > 0) {
SeriseData seriseData = new SeriseData();
seriseData.setX(channel);
seriseData.setY(m_vCount.get(i) - phd.getVBase().get(i));
seriseData.setY(m_vCount.get(j) - phd.getVBase().get(j));
pointlist.add(seriseData);
} else{
SeriseData seriseData = new SeriseData();
@ -1992,21 +1904,22 @@ public class GammaFileUtil extends AbstractLogOrReport {
}
if(vecPeak.get(startPeak).right < channel) {
++startPeak; regPeak.clear();
++startPeak;
regPeak.clear();
}
if(startPeak > endPeak){
break;
}
}
for(int i=channel; i<end; i++) {
if(m_vCount.get(i) > 0 && phd.getVBase().get(i) > 0) {
for(int j=channel; j<end; j++) {
if(m_vCount.get(j) > 0 && phd.getVBase().get(j) > 0) {
SeriseData seriseData = new SeriseData();
seriseData.setX(i+1);
seriseData.setY(m_vCount.get(i) - phd.getVBase().get(i));
seriseData.setX(j+1);
seriseData.setY(m_vCount.get(j) - phd.getVBase().get(j));
pointlist.add(seriseData);
} else {
SeriseData seriseData = new SeriseData();
seriseData.setX(i+1);
seriseData.setX(j+1);
seriseData.setY(0);
pointlist.add(seriseData);
}
@ -3111,8 +3024,20 @@ public class GammaFileUtil extends AbstractLogOrReport {
anylseEnd = DateUtils.formatDate(new Date(), "yyyy/MM/dd HH:mm:ss");
middleData.analyses_analysisBegin = anylseBegin;
middleData.analyses_analysisEnd = anylseEnd;
} else if (type.equals(MiddleDataType.InterAction.getType())){ //如果是人工交互分析 则先上传phd对应文件信息
//上传本次文件到ftp人工交互存储路径下
File sampleTmp = null;
try {
sampleTmp = analyzeFile(StringPool.SLASH + ftpUtil.getFtpRootPath() + fileAnlyse.getFilepath(), fileAnlyse.getFilename());
if (Objects.nonNull(sampleTmp)) {
bRet = ftpUtil.saveFile(StringPool.SLASH + ftpUtil.getFtpRootPath() + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + middleData.analyses_save_filePath.substring(0, middleData.analyses_save_filePath.lastIndexOf(StringPool.SLASH)), middleData.analyses_save_filePath.substring(middleData.analyses_save_filePath.lastIndexOf(StringPool.SLASH)+1), new FileInputStream(sampleTmp));
}
} catch (FileNotFoundException e) {
throw new RuntimeException(e);
} finally {
sampleTmp.delete();
}
}
//gards_analyses
String qsBaseLinePath = path+StringPool.SLASH+middleData.analyses_analyst+"_"+baselineName;
@ -3700,29 +3625,36 @@ public class GammaFileUtil extends AbstractLogOrReport {
halflife /= 3600;
units = 'H';
}
tableNuclideActivity.setHalfLife(halflife+StringPool.SPACE+units);
tableNuclideActivity.setEnergy(nuc.getVEnergy().get(nuc.getCalculateIdx()).toString());
tableNuclideActivity.setYield(String.valueOf(nuc.getVYield().get(nuc.getCalculateIdx())*100));
tableNuclideActivity.setHalfLife(NumberFormatUtil.numberFormat(String.valueOf(halflife))+StringPool.SPACE+units);
tableNuclideActivity.setEnergy(NumberFormatUtil.numberFormat(String.valueOf(nuc.getVEnergy().get(nuc.getCalculateIdx()))));
tableNuclideActivity.setYield(NumberFormatUtil.numberFormat(String.valueOf(nuc.getVYield().get(nuc.getCalculateIdx())*100)));
String str_effi = (nuc.getEfficiency() <= 0 ? "null" : String.valueOf(nuc.getEfficiency()));
String str_effi = (nuc.getEfficiency() <= 0 ? "null" : NumberFormatUtil.numberFormat(String.valueOf(nuc.getEfficiency())));
tableNuclideActivity.setEfficiency(str_effi);
String str_act = (nuc.getActivity() <= 0 ? "null" : String.valueOf(nuc.getActivity()));
String str_act = (nuc.getActivity() <= 0 ? "null" : NumberFormatUtil.numberFormat(String.valueOf(nuc.getActivity())));
tableNuclideActivity.setActivity(str_act);
String str_act_err = (nuc.getActivity() <= 0 ? "null" : String.valueOf(nuc.getAct_err() / nuc.getActivity() * 100));
String str_act_err = (nuc.getActivity() <= 0 ? "null" : NumberFormatUtil.numberFormat(String.valueOf(nuc.getAct_err() / nuc.getActivity() * 100)));
tableNuclideActivity.setActErr(str_act_err);
String str_mda = (nuc.getMda() <= 0 ? "null" : String.valueOf(nuc.getMda()));
String str_mda = (nuc.getMda() <= 0 ? "null" : NumberFormatUtil.numberFormat(String.valueOf(nuc.getMda())));
tableNuclideActivity.setMda(str_mda);
String str_con = (nuc.getConcentration() <= 0 ? "null" : String.valueOf(nuc.getConcentration()));
if (nuc.getConcentration() > 1000000) {
DecimalFormat decimalFormat = new DecimalFormat("0.###E0");
nuc.setConcentration(Double.valueOf(decimalFormat.format(nuc.getConcentration())));
}
String str_con = (nuc.getConcentration() <= 0 ? "null" : NumberFormatUtil.numberFormat(String.valueOf(nuc.getConcentration())));
tableNuclideActivity.setConc(str_con);
String str_mdc = (nuc.getMdc() <= 0 ? "null" : String.valueOf(nuc.getMdc()));
if (nuc.getMdc() > 1000000) {
DecimalFormat decimalFormat = new DecimalFormat("0.###E0");
nuc.setConcentration(Double.valueOf(decimalFormat.format(nuc.getMdc())));
}
String str_mdc = (nuc.getMdc() <= 0 ? "null" : NumberFormatUtil.numberFormat(String.valueOf(nuc.getMdc())));
tableNuclideActivity.setMdc(str_mdc);
nuclideActivityList.add(tableNuclideActivity);
}
nuclideActivityList = nuclideActivityList.stream().sorted(Comparator.comparing(TableNuclideActivity::getNuclide)).collect(Collectors.toList());
map.put("table", nuclideActivityList);
return map;
}
@ -3915,8 +3847,8 @@ public class GammaFileUtil extends AbstractLogOrReport {
int j = i;
double temp = channel - peak.peakCentroid;
while(++j < peakNum && vPeak.get(j).multiIndex == peak.multiIndex) {
if(Math.abs(vPeak.get(j).peakCentroid - channel) < temp) // 找出重峰中峰中心道离 channel 最近的峰
{
// 找出重峰中峰中心道离 channel 最近的峰
if(Math.abs(vPeak.get(j).peakCentroid - channel) < temp) {
temp = Math.abs(vPeak.get(j).peakCentroid - channel);
i = j;
}
@ -3943,6 +3875,8 @@ public class GammaFileUtil extends AbstractLogOrReport {
for(int i=0; i<vPeak.size(); i++) {
PeakInfo peakInfo = new PeakInfo();
peakInfo.index = i+1;
peakInfo.left = vPeak.get(i).left;
peakInfo.right = vPeak.get(i).right;
BigDecimal energy = new BigDecimal(vPeak.get(i).energy);
energy = energy.setScale(3, RoundingMode.HALF_UP);
peakInfo.energy = energy.doubleValue();
@ -3985,7 +3919,7 @@ public class GammaFileUtil extends AbstractLogOrReport {
NuclideActMda ActMda = new NuclideActMda();
ActMda.setHalflife(Objects.isNull(mapHalflife.get(nucName))?lines.halflife : mapHalflife.get(nucName));
for (int i=0, j=0; i<vEnergy.size(); i++) {
for(; j<vPeakIdx.size(); ++j) {
for(; j<vPeakIdx.size(); j++) {
double energy = phd.getVPeak().get(vPeakIdx.get(j)).energy;
if(vEnergy.get(i) < energy - energyWidth) {
break;
@ -4038,18 +3972,22 @@ public class GammaFileUtil extends AbstractLogOrReport {
// 否则删除该核素与该峰所有关联的射线信息
boolean needReCalc = false;
for(int i=nuc.getVPeakIdx().size()-1; i>=0; i--) {
//判断当前下标对应的核素下标是否是需要删除的下标
if(nuc.getVPeakIdx().get(i) == removePeakIdx) {
nuc.getVEnergy().remove(i);
nuc.getVUncertE().remove(i);
nuc.getVYield().remove(i);
nuc.getVUncertY().remove(i);
nuc.getFullNames().remove(i);
//判断主峰核素下标是否与当前下标一致 BCalculateMDA是true
if(nuc.getCalculateIdx() == i && nuc.isBCalculateMDA()) {
needReCalc = true;
}
nuc.setCalculateIdx(nuc.getVEnergy().size()-1);
nuc.getVPeakIdx().remove(i);
}
}
nuc.getVPeakIdx().remove(removePeakIdx);
// 如果该核素只与一个峰对应则直接删除该核素的 MDA 信息
if(nuc.getVPeakIdx().size() < 1) {
phd.getMapNucActMda().remove(nuclide);
@ -4059,7 +3997,7 @@ public class GammaFileUtil extends AbstractLogOrReport {
if(needReCalc) {
double maxYield = 0;
int mainPeakIdx = -1;
for(int j=0; j<nuc.getVYield().size(); ++j) {
for(int j=0; j<nuc.getVYield().size(); j++) {
if(nuc.getVYield().get(j) > maxYield) {
nuc.setCalculateIdx(j);
maxYield = nuc.getVYield().get(j);
@ -4075,29 +4013,9 @@ public class GammaFileUtil extends AbstractLogOrReport {
public List<Long> loadCompareData(String compareFileName, String userName, long m_nCount, Result result) {
List<Long> m_vecCompare = new LinkedList<>();
String compareFilePath = StringPool.SLASH + spectrumPathProperties.getUploadPath() + StringPool.SLASH + userName;
//连接ftp
FTPClient ftpClient = ftpUtil.LoginFTP();
if (Objects.isNull(ftpClient)){
result.error500("ftp connection failed");
return m_vecCompare;
}
InputStream inputStream = null;
File file = null;
try {
//切换被动模式
ftpClient.enterLocalPassiveMode();
ftpClient.setFileType(FTPClient.BINARY_FILE_TYPE);
// 设置编码当文件中存在中文且上传后文件乱码时可使用此配置项
ftpClient.setControlEncoding("UTF-8");
ftpClient.setFileTransferMode(FTPClient.STREAM_TRANSFER_MODE);
ftpClient.changeWorkingDirectory(compareFilePath);
inputStream = ftpClient.retrieveFileStream(compareFileName);
if (Objects.nonNull(inputStream)) {
//声明一个临时文件
file = File.createTempFile("betaGamma", null);
//将ftp文件的输入流复制给临时文件
FileUtils.copyInputStreamToFile(inputStream, file);
String compareFilePath = ftpUtil.getFtpRootPath() + spectrumPathProperties.getUploadPath() + StringPool.SLASH + userName;
String fromPath = compareFilePath + StringPool.SLASH + compareFileName;
File file = ftpUtil.downloadFile(fromPath, "betaGamma");
//读取文件信息
EnergySpectrumStruct struct = EnergySpectrumHandler.getSourceData(file.getAbsolutePath());
//判断当前用来进行比较的文件的#g_Spectrum数量是否与原文件的大小一致
@ -4110,24 +4028,8 @@ public class GammaFileUtil extends AbstractLogOrReport {
m_vecCompare.add(0L);
m_vecCompare.remove(0);
}
}
}catch (IOException e) {
throw new RuntimeException(e);
} finally {
try {
if (Objects.nonNull(ftpClient)){
ftpClient.disconnect();
}
if (Objects.nonNull(inputStream)){
inputStream.close();
}
if (Objects.nonNull(file)) {
if (ObjectUtil.isNotNull(file))
file.delete();
}
} catch (IOException e) {
throw new RuntimeException(e);
}
}
return m_vecCompare;
}
@ -4201,41 +4103,8 @@ public class GammaFileUtil extends AbstractLogOrReport {
public File analyzeFile(String path, String fileName) {
path = path.replace("\\", "/");
//连接ftp
FTPClient ftpClient = ftpUtil.LoginFTP();
InputStream inputStream = null;
File file = null;
try {
//被动模式
ftpClient.enterLocalPassiveMode();
//设置文件类型--二进制文件
ftpClient.setFileType(FTP.BINARY_FILE_TYPE);
//
ftpClient.setControlEncoding("UTF-8");
ftpClient.setFileTransferMode(FTPClient.STREAM_TRANSFER_MODE);
//切换文件路径
ftpClient.changeWorkingDirectory(path);
inputStream = ftpClient.retrieveFileStream(fileName);
if (Objects.nonNull(inputStream)){
file = File.createTempFile("betaGamma", null);
//将ftp文件的输入流复制给临时文件
FileUtils.copyInputStreamToFile(inputStream, file);
}
} catch (IOException e) {
throw new RuntimeException(e);
} finally {
try {
if (Objects.nonNull(ftpClient)){
ftpClient.disconnect();
}
if (Objects.nonNull(inputStream)){
inputStream.close();
}
} catch (IOException e) {
throw new RuntimeException(e);
}
}
return file;
String fromPath = path + StringPool.SLASH + fileName;
return ftpUtil.downloadFile(fromPath, "betaGamma");
}
public String makeUpSpectrum(PHDFile phd) {
@ -4427,7 +4296,7 @@ public class GammaFileUtil extends AbstractLogOrReport {
for(int i=0; i<g_ener.getG_energy().size(); i++) {
String g_energy = RightFill(String.format("%.9f", g_ener.getG_energy().get(i)), 16);
String centroid_channel = RightFill(String.format("%.9f", g_ener.getCentroid_channel().get(i)), 16);
String uncertainty = RightFill(String.format("%.9f", g_ener.getUncertainty().get(i).isNaN() ? 0.5 : g_ener.getUncertainty().get(i)), 16);
String uncertainty = RightFill(String.format("%.9f", Objects.nonNull(g_ener.getUncertainty().get(i)) ? (g_ener.getUncertainty().get(i).isNaN()?0.5 : g_ener.getUncertainty().get(i)):0.5), 16);
String energy = "%s %s %s";
spectrum.append(rowFormat(energy, g_energy, centroid_channel, uncertainty));
//换行
@ -4444,7 +4313,7 @@ public class GammaFileUtil extends AbstractLogOrReport {
for(int i=0; i<g_reso.getFWHM().size(); i++) {
String g_energy = RightFill(String.format("%.9f", g_reso.getG_energy().get(i)), 16);
String fwhm = RightFill(String.format("%.9f", g_reso.getFWHM().get(i)), 16);
String uncertainty = RightFill(String.format("%.9f", g_reso.getUncertainty().get(i).isNaN() ? 0.5 : g_reso.getUncertainty().get(i)), 16);
String uncertainty = RightFill(String.format("%.9f", Objects.nonNull(g_reso.getUncertainty().get(i))? (g_reso.getUncertainty().get(i).isNaN() ? 0.5 : g_reso.getUncertainty().get(i)):0.5), 16);
String reso = "%s %s %s";
spectrum.append(rowFormat(reso, g_energy, fwhm, uncertainty));
//换行
@ -4461,7 +4330,7 @@ public class GammaFileUtil extends AbstractLogOrReport {
for(int i=0; i<g_effi.getEfficiency().size(); i++) {
String g_energy = RightFill(String.format("%.9f", g_effi.getG_energy().get(i)), 16);
String g_efficiency = RightFill(String.format("%.9f", g_effi.getEfficiency().get(i)), 16);
String uncertainty = RightFill(String.format("%.9f", g_effi.getUncertainty().get(i).isNaN() ? 0.5 : g_effi.getUncertainty().get(i)), 16);
String uncertainty = RightFill(String.format("%.9f", Objects.nonNull(g_effi.getUncertainty().get(i))?(g_effi.getUncertainty().get(i).isNaN() ? 0.5 : g_effi.getUncertainty().get(i)):0.5), 16);
String efficiency = "%s %s %s";
spectrum.append(rowFormat(efficiency, g_energy, g_efficiency, uncertainty));
spectrum.append(System.lineSeparator());
@ -4478,7 +4347,7 @@ public class GammaFileUtil extends AbstractLogOrReport {
for(int i=0; i<g_totE.getRecord_count(); i++) {
String g_energy = RightFill(String.format("%.9f", g_totE.getG_energy().get(i)), 16);
String total_efficiency = RightFill(String.format("%.9f", g_totE.getTotal_efficiency().get(i)), 16);
String uncertainty = RightFill(String.format("%.9f", g_totE.getUncertainty().get(i).isNaN() ? 0.5 : g_totE.getUncertainty().get(i)), 16);
String uncertainty = RightFill(String.format("%.9f", Objects.nonNull(g_totE.getUncertainty().get(i))?(g_totE.getUncertainty().get(i).isNaN() ? 0.5 : g_totE.getUncertainty().get(i)):0.5), 16);
String totalEff = "%s %s %s";
spectrum.append(rowFormat(totalEff, g_energy, total_efficiency, uncertainty));
//换行
@ -4519,7 +4388,7 @@ public class GammaFileUtil extends AbstractLogOrReport {
String number1 = RightFill(String.valueOf(j), 5);
String number2 = RightFill(String.valueOf(phd.getSpec().getCounts().get(i)), 10);
spectrum.append(rowFormat(spectrumData3, number1, number2));
for(i = i+1; i < phd.getSpec().getNum_g_channel(); ++i) {
for(i = i+1; i < phd.getSpec().getNum_g_channel(); i++) {
String spectrumData4 = " %s";
String number3 = RightFill(String.valueOf(phd.getSpec().getCounts().get(i)), 10);
spectrum.append(rowFormat(spectrumData4, number3));
@ -4536,7 +4405,7 @@ public class GammaFileUtil extends AbstractLogOrReport {
ChartData cData = new ChartData();
cData.setColor(color);
cData.setName(name);
for(int i=0; i<m_baseCtrl.getBaseline().size(); ++i) {
for(int i=0; i<m_baseCtrl.getBaseline().size(); i++) {
SeriseData seriseData = new SeriseData();
seriseData.setX(i+1);
seriseData.setY(m_baseCtrl.getBaseline().get(i));
@ -4547,7 +4416,7 @@ public class GammaFileUtil extends AbstractLogOrReport {
public List<ShapeData> CreateShapeCP(BaseControls baseControls) {
List<ShapeData> shapes = new LinkedList<>();
for(int i=0; i<baseControls.getXCtrl().size(); ++i) {
for(int i=0; i<baseControls.getXCtrl().size(); i++) {
ShapeData shape = new ShapeData();
shape.setType("Shape_Round");
shape.setName("tmpCP"+i);

View File

@ -1,5 +1,6 @@
package org.jeecg.common.util;
import cn.hutool.core.util.ObjectUtil;
import com.baomidou.mybatisplus.core.toolkit.CollectionUtils;
import com.baomidou.mybatisplus.core.toolkit.StringPool;
import com.baomidou.mybatisplus.core.toolkit.StringUtils;
@ -262,52 +263,19 @@ public class PHDFileUtil extends AbstractLogOrReport {
public List<String> readLine(String filePath) {
String parameterFilePath = filePath.substring(0, filePath.lastIndexOf(StringPool.SLASH));
String fileName = filePath.substring(filePath.lastIndexOf(StringPool.SLASH) + 1);
//连接ftp
FTPClient ftpClient = ftpUtil.LoginFTP();
//判断ftp是否连接成功
if (Objects.isNull(ftpClient)){
throw new RuntimeException("ftp connection failed!");
}
InputStream iStream= null;
File file = null;
List<String> allLines = new ArrayList<>();
try {
//被动模式
ftpClient.enterLocalPassiveMode();
//设置文件类型--二进制文件
ftpClient.setFileType(FTP.BINARY_FILE_TYPE);
//
ftpClient.setControlEncoding("UTF-8");
ftpClient.setFileTransferMode(FTPClient.STREAM_TRANSFER_MODE);
//在当前工作路径下读取文件
ftpClient.changeWorkingDirectory(parameterFilePath);
//读取ftp文件的输入流
iStream=ftpClient.retrieveFileStream(fileName);
if (Objects.nonNull(iStream)) {
//声明一个临时文件
file = File.createTempFile("betaGamma", null);
//将ftp文件的输入流复制给临时文件
FileUtils.copyInputStreamToFile(iStream, file);
List<String> allLines = FileUtils.readLines(file, ftpUtil.getEncoding());
file = ftpUtil.downloadFile(filePath, "betaGamma");
return FileUtils.readLines(file, ftpUtil.getEncoding());
}catch (IOException e){
e.printStackTrace();
return allLines;
}
} catch (IOException e) {
throw new RuntimeException(e);
} finally {
try {
if (Objects.nonNull(ftpClient)){
ftpClient.disconnect();
}
if (Objects.nonNull(iStream)){
iStream.close();
}
if (Objects.nonNull(file)) {
if (ObjectUtil.isNotNull(file))
file.delete();
}
} catch (IOException e) {
throw new RuntimeException(e);
}
}
return Collections.emptyList();
}
public void getLightColor(Map<String, Object> sampleMap, Map<String, Object> gasBgMap, Map<String, Object> detBgMap, Map<String, Object> qcMap) {
@ -451,29 +419,10 @@ public class PHDFileUtil extends AbstractLogOrReport {
public Map<String, String> getFileData(String filePath, String sampleFileName) {
Map<String, String> map = new HashMap<>();
//连接ftp 获取ftp文件数据
FTPClient ftpClient = ftpUtil.LoginFTP();
if (Objects.isNull(ftpClient)){
return map;
}
InputStream inputStream = null;
File file = null;
try {
//切换被动模式
ftpClient.enterLocalPassiveMode();
ftpClient.setFileType(FTPClient.BINARY_FILE_TYPE);
// 设置编码当文件中存在中文且上传后文件乱码时可使用此配置项
ftpClient.setControlEncoding("UTF-8");
ftpClient.setFileTransferMode(FTPClient.STREAM_TRANSFER_MODE);
//切换工作路径
ftpClient.changeWorkingDirectory(filePath);
//解析sampleFile
inputStream = ftpClient.retrieveFileStream(sampleFileName);
if (Objects.nonNull(inputStream)) {
//声明一个临时文件
file = File.createTempFile("betaGamma", null);
//将ftp文件的输入流复制给临时文件
FileUtils.copyInputStreamToFile(inputStream, file);
String fromPath = filePath + StringPool.SLASH + sampleFileName;
file = ftpUtil.downloadFile(fromPath, "betaGamma");
//加载sampleFile内容
EnergySpectrumStruct struct = EnergySpectrumHandler.getSourceData(file.getAbsolutePath());
//获取所需要的数据
@ -493,48 +442,23 @@ public class PHDFileUtil extends AbstractLogOrReport {
map.put("gasFileName", gasFileName);
map.put("detaFileName", detaFileName);
map.put("sampleSystemType", systemType);
}
} catch (IOException e) {
throw new RuntimeException(e);
} finally {
try {
if (ftpClient!=null){
ftpClient.disconnect();
}
if (inputStream!=null){
inputStream.close();
}
if (Objects.nonNull(file)) {
return map;
}catch (Exception e){
e.printStackTrace();
return map;
}finally {
if (ObjectUtil.isNotNull(file))
file.delete();
}
} catch (IOException e) {
throw new RuntimeException(e);
}
}
return map;
}
public String NameStandardBy(String filePath, String fileName) {
//连接ftp
FTPClient ftpClient = ftpUtil.LoginFTP();
InputStream inputStream = null;
File file = null;
StringBuffer path = new StringBuffer();
File file = null;
try {
//被动模式
ftpClient.enterLocalPassiveMode();
//设置文件类型--二进制文件
ftpClient.setFileType(FTP.BINARY_FILE_TYPE);
//
ftpClient.setControlEncoding("UTF-8");
ftpClient.setFileTransferMode(FTPClient.STREAM_TRANSFER_MODE);
//切换文件路径
ftpClient.changeWorkingDirectory(filePath);
inputStream = ftpClient.retrieveFileStream(fileName);
if (Objects.nonNull(inputStream)){
file = File.createTempFile("betaGamma", null);
//将ftp文件的输入流复制给临时文件
FileUtils.copyInputStreamToFile(inputStream, file);
String fromPath = ftpUtil.getFtpRootPath() + filePath +
StringPool.SLASH + fileName;
file = ftpUtil.downloadFile(fromPath, "betaGamma");
EnergySpectrumStruct sourceData = EnergySpectrumHandler.getSourceData(file.getAbsolutePath());
String systemType = sourceData.system_type;
String dataType = sourceData.data_type;
@ -564,25 +488,14 @@ public class PHDFileUtil extends AbstractLogOrReport {
path.append(StringPool.SLASH+fileName.substring(pos+5,pos+7));
}
path.append(StringPool.SLASH+fileName);
}
} catch (IOException e) {
throw new RuntimeException(e);
} finally {
try {
if (Objects.nonNull(ftpClient)){
ftpClient.disconnect();
}
if (inputStream != null){
inputStream.close();
}
if (Objects.nonNull(file)) {
return path.toString();
}catch (Exception e){
e.printStackTrace();
return path.toString();
}finally {
if (ObjectUtil.isNotNull(file))
file.delete();
}
} catch (IOException e) {
throw new RuntimeException(e);
}
}
return path.toString();
}
public List<String> FileNameByStandardForm(String filePath, String sampleFileName) {
@ -692,7 +605,8 @@ public class PHDFileUtil extends AbstractLogOrReport {
return file;
}
public void analyzeSpectrum(File sampleTmp, File gasTmp, File detTmp, BgCalibratePara BgCalPara, Map<String, Object> map) {
public boolean analyzeSpectrum(File sampleTmp, File gasTmp, File detTmp, BgCalibratePara BgCalPara, Map<String, Object> map) {
boolean bRet = true;
//调用动态库解析文件
BgAnalyseResult analyseResult = null;
if (Objects.isNull(BgCalPara)) {
@ -700,6 +614,10 @@ public class PHDFileUtil extends AbstractLogOrReport {
} else {
analyseResult = EnergySpectrumHandler.bgReAnalyse(sampleTmp.getAbsolutePath(), gasTmp.getAbsolutePath(), detTmp.getAbsolutePath(), BgCalPara);
}
if (StringUtils.isNotBlank(analyseResult.error_log) && !analyseResult.error_log.equalsIgnoreCase("no error.")) {
bRet = false;
return bRet;
} else {
EnergySpectrumStruct sample = analyzeFileSourceData(sampleTmp);
EnergySpectrumStruct gas = analyzeFileSourceData(gasTmp);
EnergySpectrumStruct det = analyzeFileSourceData(detTmp);
@ -779,6 +697,8 @@ public class PHDFileUtil extends AbstractLogOrReport {
}
map.put("DetBoundary", boundaryList);
}
return bRet;
}
}
public EnergySpectrumStruct analyzeFileSourceData(String filePath, String fileName) {
@ -1326,11 +1246,27 @@ public class PHDFileUtil extends AbstractLogOrReport {
//换行
out.append(System.lineSeparator());
//第三十六行数据
if (bgAnalyseResult.s_b_fitting_c_e != null && CollectionUtils.isNotEmpty(bgAnalyseResult.s_b_fitting_c_e)) {
if (bgAnalyseResult.s_b_fitting_c_e.size() > 0) {
out.append(rowFormat("energy to channel equation: CH(x) = (%s)+(%s)*x+(%s)x*x", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.s_b_fitting_c_e.get(0))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.s_b_fitting_c_e.get(1))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.s_b_fitting_c_e.get(2)))));
} else {
out.append(rowFormat("energy to channel equation: CH(x) = (%s)+(%s)*x+(%s)x*x", "?", "?", "?"));
}
} else {
out.append(rowFormat("energy to channel equation: CH(x) = (%s)+(%s)*x+(%s)x*x", "?", "?", "?"));
}
//换行
out.append(System.lineSeparator());
//第三十七行数据
if (bgAnalyseResult.s_b_fitting_e_c != null && CollectionUtils.isNotEmpty(bgAnalyseResult.s_b_fitting_e_c)) {
if (bgAnalyseResult.s_b_fitting_e_c.size() > 0) {
out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.s_b_fitting_e_c.get(0))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.s_b_fitting_e_c.get(1))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.s_b_fitting_e_c.get(2)))));
} else {
out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", "?", "?", "?"));
}
} else {
out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", "?", "?", "?"));
}
//换行
out.append(System.lineSeparator());
//换行
@ -1340,11 +1276,27 @@ public class PHDFileUtil extends AbstractLogOrReport {
//换行
out.append(System.lineSeparator());
//第三十六行数据
if (bgAnalyseResult.s_g_fitting_c_e !=null && CollectionUtils.isNotEmpty(bgAnalyseResult.s_g_fitting_c_e)) {
if (bgAnalyseResult.s_g_fitting_c_e.size() > 0) {
out.append(rowFormat("energy to channel equation: CH(x) = (%s)+(%s)*x+(%s)x*x", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.s_g_fitting_c_e.get(0))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.s_g_fitting_c_e.get(1))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.s_g_fitting_c_e.get(2)))));
} else {
out.append(rowFormat("energy to channel equation: CH(x) = (%s)+(%s)*x+(%s)x*x", "?", "?", "?"));
}
} else {
out.append(rowFormat("energy to channel equation: CH(x) = (%s)+(%s)*x+(%s)x*x", "?", "?", "?"));
}
//换行
out.append(System.lineSeparator());
//第三十七行数据
if (bgAnalyseResult.s_g_fitting_e_c != null && CollectionUtils.isNotEmpty(bgAnalyseResult.s_g_fitting_e_c)) {
if (bgAnalyseResult.s_g_fitting_e_c.size() > 0) {
out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.s_g_fitting_e_c.get(0))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.s_g_fitting_e_c.get(1))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.s_g_fitting_e_c.get(2)))));
} else {
out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", "?", "?", "?"));
}
} else {
out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", "?", "?", "?"));
}
//换行
out.append(System.lineSeparator());
//换行
@ -1394,11 +1346,28 @@ public class PHDFileUtil extends AbstractLogOrReport {
//换行
out.append(System.lineSeparator());
//第四十四行数据
if (bgAnalyseResult.d_b_fitting_c_e != null && CollectionUtils.isNotEmpty(bgAnalyseResult.d_b_fitting_c_e)) {
if (bgAnalyseResult.d_b_fitting_c_e.size() > 0) {
out.append(rowFormat("energy to channel equation: CH(x) = (%s)+(%s)*x+(%s)x*x", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.d_b_fitting_c_e.get(0))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.d_b_fitting_c_e.get(1))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.d_b_fitting_c_e.get(2)))));
} else {
out.append(rowFormat("energy to channel equation: CH(x) = (%s)+(%s)*x+(%s)x*x", "?", "?", "?"));
}
} else {
out.append(rowFormat("energy to channel equation: CH(x) = (%s)+(%s)*x+(%s)x*x", "?", "?", "?"));
}
//换行
out.append(System.lineSeparator());
//第四十五行数据
if (bgAnalyseResult.d_b_fitting_e_c != null && CollectionUtils.isNotEmpty(bgAnalyseResult.d_b_fitting_e_c)) {
if (bgAnalyseResult.d_b_fitting_e_c.size() > 0) {
out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.d_b_fitting_e_c.get(0))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.d_b_fitting_e_c.get(1))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.d_b_fitting_e_c.get(2)))));
} else {
out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", "?", "?", "?"));
}
} else {
out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", "?", "?", "?"));
}
//换行
out.append(System.lineSeparator());
//换行
@ -1408,11 +1377,27 @@ public class PHDFileUtil extends AbstractLogOrReport {
//换行
out.append(System.lineSeparator());
//第四十四行数据
if (bgAnalyseResult.d_g_fitting_c_e != null && CollectionUtils.isNotEmpty(bgAnalyseResult.d_g_fitting_c_e)) {
if (bgAnalyseResult.d_g_fitting_c_e.size() > 0) {
out.append(rowFormat("energy to channel equation: CH(x) = (%s)+(%s)*x+(%s)x*x", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.d_g_fitting_c_e.get(0))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.d_g_fitting_c_e.get(1))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.d_g_fitting_c_e.get(2)))));
} else {
out.append(rowFormat("energy to channel equation: CH(x) = (%s)+(%s)*x+(%s)x*x", "?", "?", "?"));
}
} else {
out.append(rowFormat("energy to channel equation: CH(x) = (%s)+(%s)*x+(%s)x*x", "?", "?", "?"));
}
//换行
out.append(System.lineSeparator());
//第四十五行数据
if (bgAnalyseResult.d_g_fitting_e_c != null && CollectionUtils.isNotEmpty(bgAnalyseResult.d_g_fitting_e_c)) {
if (bgAnalyseResult.d_g_fitting_e_c.size() > 0) {
out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.d_g_fitting_e_c.get(0))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.d_g_fitting_e_c.get(1))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.d_g_fitting_e_c.get(2)))));
} else {
out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", "?", "?", "?"));
}
} else {
out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", "?", "?", "?"));
}
//换行
out.append(System.lineSeparator());
//换行
@ -1457,11 +1442,27 @@ public class PHDFileUtil extends AbstractLogOrReport {
//换行
out.append(System.lineSeparator());
//第五十一行数据
if (bgAnalyseResult.g_b_fitting_c_e != null && CollectionUtils.isNotEmpty(bgAnalyseResult.g_b_fitting_c_e)) {
if (bgAnalyseResult.g_b_fitting_c_e.size() > 0) {
out.append(rowFormat("energy to channel equation: CH(x) = (%s)+(%s)*x+(%s)x*x", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.g_b_fitting_c_e.get(0))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.g_b_fitting_c_e.get(1))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.g_b_fitting_c_e.get(2)))));
} else {
out.append(rowFormat("energy to channel equation: CH(x) = (%s)+(%s)*x+(%s)x*x", "?", "?", "?"));
}
} else {
out.append(rowFormat("energy to channel equation: CH(x) = (%s)+(%s)*x+(%s)x*x", "?", "?", "?"));
}
//换行
out.append(System.lineSeparator());
//第五十二行数据
if (bgAnalyseResult.g_b_fitting_e_c != null && CollectionUtils.isNotEmpty(bgAnalyseResult.g_b_fitting_e_c)) {
if (bgAnalyseResult.g_b_fitting_e_c.size() > 0) {
out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.g_b_fitting_e_c.get(0))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.g_b_fitting_e_c.get(1))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.g_b_fitting_e_c.get(2)))));
} else {
out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", "?", "?", "?"));
}
} else {
out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", "?", "?", "?"));
}
//换行
out.append(System.lineSeparator());
//换行
@ -1471,11 +1472,27 @@ public class PHDFileUtil extends AbstractLogOrReport {
//换行
out.append(System.lineSeparator());
//第五十四行数据
if (bgAnalyseResult.g_g_fitting_c_e != null && CollectionUtils.isNotEmpty(bgAnalyseResult.g_g_fitting_c_e)) {
if (bgAnalyseResult.g_g_fitting_c_e.size() > 0) {
out.append(rowFormat("energy to channel equation: CH(x) = (%s)+(%s)*x+(%s)x*x", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.g_g_fitting_c_e.get(0))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.g_g_fitting_c_e.get(1))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.g_g_fitting_c_e.get(2)))));
} else {
out.append(rowFormat("energy to channel equation: CH(x) = (%s)+(%s)*x+(%s)x*x", "?", "?", "?"));
}
} else {
out.append(rowFormat("energy to channel equation: CH(x) = (%s)+(%s)*x+(%s)x*x", "?", "?", "?"));
}
//换行
out.append(System.lineSeparator());
//第五十五行数据
if (bgAnalyseResult.g_g_fitting_e_c != null && CollectionUtils.isNotEmpty(bgAnalyseResult.g_g_fitting_e_c)) {
if (bgAnalyseResult.g_g_fitting_e_c.size() > 0) {
out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.g_g_fitting_e_c.get(0))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.g_g_fitting_e_c.get(1))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.g_g_fitting_e_c.get(2)))));
} else {
out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", "?", "?", "?"));
}
} else {
out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", "?", "?", "?"));
}
//换行
out.append(System.lineSeparator());
//换行

View File

@ -123,14 +123,14 @@ public class SpectrumAnalysesController {
@GetMapping("viewGammaDetectorCalibration")
@ApiOperation(value = "查询GammaDetectorCalibration数据", notes = "查询GammaDetectorCalibration数据")
public Result viewGammaDetectorCalibration(Integer sampleId, String qcFileName, HttpServletRequest request) {
return spectrumAnalysisService.viewGammaDetectorCalibration(sampleId, qcFileName, request);
public Result viewGammaDetectorCalibration(Integer sampleId, String sampleFileName, String qcFileName, HttpServletRequest request) {
return spectrumAnalysisService.viewGammaDetectorCalibration(sampleId, sampleFileName, qcFileName, request);
}
@GetMapping("viewBetaDetectorCalibration")
@ApiOperation(value = "查询BetaDetectorCalibration数据", notes = "查询BetaDetectorCalibration数据")
public Result viewBetaDetectorCalibration(Integer sampleId, String qcFileName, HttpServletRequest request) {
return spectrumAnalysisService.viewBetaDetectorCalibration(sampleId, qcFileName, request);
public Result viewBetaDetectorCalibration(Integer sampleId, String sampleFileName, String qcFileName, HttpServletRequest request) {
return spectrumAnalysisService.viewBetaDetectorCalibration(sampleId, sampleFileName, qcFileName, request);
}
@GetMapping("viewExtrapolation")
@ -176,7 +176,7 @@ public class SpectrumAnalysesController {
@PostMapping("fitting")
@ApiOperation(value = "公式计算新的曲线", notes = "公式计算新的曲线")
public Result fitting(@RequestBody FittingBody fittingBody, HttpServletRequest request) {
return spectrumAnalysisService.fitting(fittingBody.getParamA(), fittingBody.getParamB(), fittingBody.getParamC(), fittingBody.getTempPoints(), fittingBody.getCount(), fittingBody.getQcFileName(), fittingBody.getTabName(), request);
return spectrumAnalysisService.fitting(fittingBody.getParamA(), fittingBody.getParamB(), fittingBody.getParamC(), fittingBody.getTempPoints(), fittingBody.getCount(), fittingBody.getSampleFileName(), fittingBody.getTabName(), request);
}
@GetMapping("getGammaGated")

View File

@ -22,7 +22,7 @@ public class BgDataAnlyseResultIn implements Serializable {
/**
* 标识本次操作是否进行过解析
*/
private boolean bProcessed;
private boolean savedAnalysisResult;
/**
* 分析员名称
@ -266,7 +266,7 @@ public class BgDataAnlyseResultIn implements Serializable {
public BgDataAnlyseResultIn() {
comment = "";
bProcessed = false;
savedAnalysisResult = false;
userName = "";
stationName = "";
dbName = "";

View File

@ -46,9 +46,9 @@ public interface ISpectrumAnalysisService {
void exportRLR(BetaRLR betaRLR, HttpServletResponse response);
Result viewGammaDetectorCalibration(Integer sampleId, String qcFileName, HttpServletRequest request);
Result viewGammaDetectorCalibration(Integer sampleId, String sampleFileName, String qcFileName, HttpServletRequest request);
Result viewBetaDetectorCalibration(Integer sampleId, String qcFileName, HttpServletRequest request);
Result viewBetaDetectorCalibration(Integer sampleId, String sampleFileName, String qcFileName, HttpServletRequest request);
Result viewExtrapolation(Integer sampleId, String sampleFileName, HttpServletRequest request);
@ -64,7 +64,7 @@ public interface ISpectrumAnalysisService {
Result statisticsQueryBtn(Integer detectorId, String detectorName, Integer stationId, String statisticsType, Date startTime, Date endTime);
Result fitting(Double paramA, Double paramB, Double paramC, List<SeriseData> tempPointsArray, Integer count, String qcFileName, String tabName, HttpServletRequest request);
Result fitting(Double paramA, Double paramB, Double paramC, List<SeriseData> tempPointsArray, Integer count, String sampleFileName, String tabName, HttpServletRequest request);
Result getGammaGated(Integer chartHeight, Integer channelWidth, Integer gammaChannel, Integer sampleId, String qcFileName, HttpServletRequest request);

View File

@ -85,10 +85,14 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
@Autowired
private FileFtransitUtil fileFtransitUtil;
@Autowired
private NameStandUtil nameStandUtil;
@Autowired
private DBUtil dbUtil;
@Autowired
private UserTaskUtil userTaskUtil;
@Autowired
private RedisUtil redisUtil;
@Autowired
private ISysDefaultNuclideSpectrumService defaultNuclideSpectrumService;
@Autowired
private ISysUserColorService sysUserColorService;
@ -138,12 +142,12 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
@Override
public Result initValue(Integer sampleId, String dbName, String samfileName, HttpServletRequest request) {
Result result = new Result();
//
//获取用户名
String userName = JwtUtil.getUserNameByToken(request);
Cache<String, PHDFile> phdCache = localCache.getPHDCache();
PHDFile phd = new PHDFile();
// 读取文件内容
String lastName = "";
String key = "";
if (Objects.nonNull(sampleId) && StringUtils.isNotBlank(dbName)) {
// 根据sampleId获取sample文件路径
String sampleFilePath = spectrumAnalysisMapper.getSampleFilePath(sampleId);
@ -151,30 +155,56 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
result.error500("Sample file does not exist!");
return result;
}
String pathName = StringPool.SLASH + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + sampleFilePath.substring(0, sampleFilePath.lastIndexOf(StringPool.SLASH));
String pathName = ftpUtil.getFtpRootPath() + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + sampleFilePath.substring(0, sampleFilePath.lastIndexOf(StringPool.SLASH));
String fileName = sampleFilePath.substring(sampleFilePath.lastIndexOf(StringPool.SLASH) + 1);
boolean flag = gammaFileUtil.loadFile(pathName, fileName, phd, result);
if (!flag) {
return result;
}
// 声明基础数组信息
gammaFileUtil.SetBaseInfo(phd);
if (dbName.equals("auto")) {
gammaFileUtil.SetBaseInfo(phd, "RNAUTO");
} else if (dbName.equals("man")) {
gammaFileUtil.SetBaseInfo(phd, userName);
}
// 从数据库中读取相关信息
boolean bRet = getResultFromDB(dbName, userName, sampleId, phd, result);
if (!redisUtil.hasKey(fileName+"-"+userName)) {
// 查询当前用户关联的核素信息
List<String> userLib = new LinkedList<>();
// 从postgreSql中获取当前用户关注的核素信息 如果当前用户没有 则返回管理员的
userLib = defaultNuclideSpectrumService.findNuclidesByUserName(userName, phd.getHeader().getSystem_type().toUpperCase());
if (CollectionUtils.isEmpty(userLib)) {
userLib = defaultNuclideSpectrumService.findNuclidesByUserName("admin", phd.getHeader().getSystem_type().toUpperCase());
}
Map<String, NuclideLines> nuclideMap = GetNuclideLines(userLib);
redisUtil.set(fileName+"-"+userName, nuclideMap);
}
if (!bRet) {
return result;
}
lastName = fileName;
key = fileName + "-" + userName;
} else {
String pathName = StringPool.SLASH + spectrumPathProperties.getUploadPath() + StringPool.SLASH + userName;
String pathName = ftpUtil.getFtpRootPath() + spectrumPathProperties.getUploadPath() + StringPool.SLASH + userName;
String fileName = samfileName;
boolean flag = gammaFileUtil.loadFile(pathName, fileName, phd, result);
if (!redisUtil.hasKey(fileName+"-"+userName)) {
// 查询当前用户关联的核素信息
List<String> userLib = new LinkedList<>();
// 从postgreSql中获取当前用户关注的核素信息 如果当前用户没有 则返回管理员的
userLib = defaultNuclideSpectrumService.findNuclidesByUserName(userName, phd.getHeader().getSystem_type().toUpperCase());
if (CollectionUtils.isEmpty(userLib)) {
userLib = defaultNuclideSpectrumService.findNuclidesByUserName("admin", phd.getHeader().getSystem_type().toUpperCase());
}
Map<String, NuclideLines> nuclideMap = GetNuclideLines(userLib);
redisUtil.set(fileName+"-"+userName, nuclideMap);
}
if (!flag) {
return result;
}
lastName = fileName;
key = fileName + "-" + userName;
}
phdCache.put(lastName, phd);
phdCache.put(key, phd);
localCache.setPHDCache(phdCache);
result.setSuccess(true);
result.setResult(phd);
@ -194,16 +224,9 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
result.error500("Please select the parse file first");
return result;
}
// 查询当前用户关联的核素信息
List<String> nuclides = new LinkedList<>();
// 从postgreSql中获取当前用户关注的核素信息 如果当前用户没有 则返回管理员的
nuclides = defaultNuclideSpectrumService.findNuclidesByUserName(userName, systemType);
if (CollectionUtils.isEmpty(nuclides)) {
nuclides = defaultNuclideSpectrumService.findNuclidesByUserName("admin", systemType);
}
Map<String, NuclideLines> nuclideLinesMap = GetNuclideLines(nuclides);
Map<String, NuclideLines> nuclideLinesMap = (Map<String, NuclideLines>) redisUtil.get(fileName+"-"+userName);//GetNuclideLines(nuclides);
// 解析获取临时文件信息
File tmpFile = gammaFileUtil.analyzeFile(StringPool.SLASH + spectrumPathProperties.getUploadPath() + StringPool.SLASH + userName, fileName);
File tmpFile = gammaFileUtil.analyzeFile(ftpUtil.getFtpRootPath() + spectrumPathProperties.getUploadPath() + StringPool.SLASH + userName, fileName);
ObjectMapper mapper = new ObjectMapper();
try {
String phdStr = mapper.writeValueAsString(phd);
@ -375,16 +398,16 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
ParameterInfo value = JSON.parseObject(JSON.toJSONString(entry.getValue()), ParameterInfo.class);
phd.setUsedTotEPara(value);
}
if (entry.getKey().equalsIgnoreCase("mapNucActMda")) {
HashMap<String, Object> jsonMap = JSON.parseObject(JSON.toJSONString(entry.getValue()), HashMap.class);
Map<String, NuclideActMda> value = new HashMap<>();
for (Map.Entry<String, Object> objectEntry : jsonMap.entrySet()) {
String key = objectEntry.getKey();
NuclideActMda entryValue = JSON.parseObject(JSON.toJSONString(objectEntry.getValue()), NuclideActMda.class);
value.put(key, entryValue);
}
phd.setMapNucActMda(value);
}
// if (entry.getKey().equalsIgnoreCase("mapNucActMda")) {
// HashMap<String, Object> jsonMap = JSON.parseObject(JSON.toJSONString(entry.getValue()), HashMap.class);
// Map<String, NuclideActMda> value = new HashMap<>();
// for (Map.Entry<String, Object> objectEntry : jsonMap.entrySet()) {
// String key = objectEntry.getKey();
// NuclideActMda entryValue = JSON.parseObject(JSON.toJSONString(objectEntry.getValue()), NuclideActMda.class);
// value.put(key, entryValue);
// }
// phd.setMapNucActMda(value);
// }
}
BeanUtils.copyProperties(phd.getSetting(), phd.getUsedSetting());
@ -396,6 +419,8 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
peak.recoilDeltaChan = "1";
}
}
// 重新分析各峰值对应的核素信息
gammaFileUtil.NuclidesIdent(phd, nuclideLinesMap);
gammaFileUtil.RunQC(phd);
result.setResult(phd);
} catch (JsonProcessingException e) {
@ -425,7 +450,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
return result;
}
// 切割数据库存储的文件路径获取路径信息
String pathName = StringPool.SLASH + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + sampleFilePath.substring(0, sampleFilePath.lastIndexOf(StringPool.SLASH));
String pathName = ftpUtil.getFtpRootPath() + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + sampleFilePath.substring(0, sampleFilePath.lastIndexOf(StringPool.SLASH));
// 切割数据库存储的文件路径获取文件名称
String fileName = sampleFilePath.substring(sampleFilePath.lastIndexOf(StringPool.SLASH) + 1);
// 声明phd实体类
@ -440,9 +465,24 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
return result;
}
// 加载phd数据所需的lcscacbaseline数据
gammaFileUtil.SetBaseInfo(phd);
if (dbName.equals("auto")) {
gammaFileUtil.SetBaseInfo(phd, "RNAUTO");
} else if (dbName.equals("man")) {
gammaFileUtil.SetBaseInfo(phd, userName);
}
// 从数据库中读取phd其他相关信息
boolean bRet = getResultFromDB(dbName, userName, sampleId, phd, result);
if (!redisUtil.hasKey(fileName+"-"+userName)) {
// 查询当前用户关联的核素信息
List<String> userLib = new LinkedList<>();
// 从postgreSql中获取当前用户关注的核素信息 如果当前用户没有 则返回管理员的
userLib = defaultNuclideSpectrumService.findNuclidesByUserName(userName, phd.getHeader().getSystem_type().toUpperCase());
if (CollectionUtils.isEmpty(userLib)) {
userLib = defaultNuclideSpectrumService.findNuclidesByUserName("admin", phd.getHeader().getSystem_type().toUpperCase());
}
Map<String, NuclideLines> nuclideMap = GetNuclideLines(userLib);
redisUtil.set(fileName+"-"+userName, nuclideMap);
}
// 判断数据库信息是否读取正常
if (!bRet) {
return result;
@ -454,7 +494,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
List<String> qcstate = gammaFileUtil.Qcstate(phd);
map.put("QCFlag", qcstate);
// 更新 Detailed Information 信息
List<String> detailedInfo = gammaFileUtil.DetailedInfo(sampleId, phd);
List<String> detailedInfo = gammaFileUtil.DetailedInfo(String.valueOf(sampleId), phd);
map.put("DetailedInformation", detailedInfo);
// 更新 ACQ 模块中各时间信息
map.put("start_time", phd.getAcq().getAcquisition_start_date() + "\n" + phd.getAcq().getAcquisition_start_time());
@ -465,6 +505,9 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
map.put("checkBox_updateCal", phd.getSetting().isBUpdateCal());
map.put("bAnalyed", phd.isBAnalyed());
map.put("peak", phd.getVPeak());
map.put("BaseCtrls", phd.getBaseCtrls());
List<SeriseData> differance = gammaFileUtil.Differance(phd, phd.getVPeak());
map.put("barChart", differance);
// 更新页面折线图信息
gammaFileUtil.UpdateChart(phd, map, colorMap);
// 将当前加载的phd信息加入到缓存中 文件名称作为缓存信息的key
@ -477,7 +520,14 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
public boolean getResultFromDB(String dbName, String userName, Integer sampleId, PHDFile phd, Result result) {
// 判断连接的数据库
String T_analy, T_calib, T_peaks, T_param, T_nuc_line, T_nuc_act, T_qc, T_setting;
String T_analy = "";
String T_calib = "";
String T_peaks = "";
String T_param = "";
String T_nuc_line = "";
String T_nuc_act = "";
String T_qc = "";
String T_setting = "";
if (dbName.equals("auto")) {
T_analy = "RNAUTO.GARDS_ANALYSES";
T_calib = "RNAUTO.GARDS_CALIBRATION_PAIRS";
@ -487,7 +537,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
T_nuc_act = "RNAUTO.GARDS_NUCL_IDED";
T_qc = "RNAUTO.GARDS_QC_CHECK";
userName = "RNAUTO";
} else {
} else if (dbName.equals("man")) {
T_analy = "RNMAN.GARDS_ANALYSES";
T_calib = "RNMAN.GARDS_CALIBRATION_PAIRS";
T_peaks = "RNMAN.GARDS_PEAKS";
@ -776,7 +826,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
Map<String, Object> map = new HashMap<>();
Cache<String, PHDFile> phdCache = localCache.getPHDCache();
// 上传文件路径
String path = StringPool.SLASH + spectrumPathProperties.getUploadPath() + StringPool.SLASH + userName;
String path = ftpUtil.getFtpRootPath() + spectrumPathProperties.getUploadPath() + StringPool.SLASH + userName;
// 获取当前角色的颜色配置
Map<String, String> colorMap = sysUserColorService.initColor(userName);
PHDFile phd = phdCache.getIfPresent(fileName + "-" + userName);
@ -787,12 +837,23 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
if (!bRet) {
return result;
}
if (!redisUtil.hasKey(fileName+"-"+userName)) {
// 查询当前用户关联的核素信息
List<String> userLib = new LinkedList<>();
// 从postgreSql中获取当前用户关注的核素信息 如果当前用户没有 则返回管理员的
userLib = defaultNuclideSpectrumService.findNuclidesByUserName(userName, phd.getHeader().getSystem_type().toUpperCase());
if (CollectionUtils.isEmpty(userLib)) {
userLib = defaultNuclideSpectrumService.findNuclidesByUserName("admin", phd.getHeader().getSystem_type().toUpperCase());
}
Map<String, NuclideLines> nuclideMap = GetNuclideLines(userLib);
redisUtil.set(fileName+"-"+userName, nuclideMap);
}
}
// 更新 QC Flags 状态
List<String> qcstate = gammaFileUtil.Qcstate(phd);
map.put("QCFlag", qcstate);
// 更新 Detailed Information 信息
List<String> detailedInfo = gammaFileUtil.DetailedInfo(null, phd);
List<String> detailedInfo = gammaFileUtil.DetailedInfo(phd.getId_sample(), phd);
map.put("DetailedInformation", detailedInfo);
// 更新 ACQ 模块中各时间信息
map.put("start_time", phd.getAcq().getAcquisition_start_date() + "\n" + phd.getAcq().getAcquisition_start_time());
@ -803,6 +864,9 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
map.put("checkBox_updateCal", phd.getSetting().isBUpdateCal());
map.put("bAnalyed", phd.isBAnalyed());
map.put("peak", phd.getVPeak());
map.put("BaseCtrls", phd.getBaseCtrls());
List<SeriseData> differance = gammaFileUtil.Differance(phd, phd.getVPeak());
map.put("barChart", differance);
gammaFileUtil.UpdateChart(phd, map, colorMap);
phdCache.put(fileName + "-" + userName, phd);
localCache.setPHDCache(phdCache);
@ -818,6 +882,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
String key = fileName + StrUtil.DASHED + username;
// 删除指定key的Cache
localCache.deletePHDCache(key);
redisUtil.del(key);
}
@Override
@ -972,13 +1037,13 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
phd.setXmlFilePath(parameterProperties.getFilePath());
// 获取当前角色的颜色配置
Map<String, String> colorMap = sysUserColorService.initColor(userName);
// 查询当前用户关联的核素信息
List<String> nuclides = new LinkedList<>();
// 从postgreSql中获取当前用户关注的核素信息 如果当前用户没有 则返回管理员的
nuclides = defaultNuclideSpectrumService.findNuclidesByUserName(userName, phd.getHeader().getSystem_type().toUpperCase());
if (CollectionUtils.isEmpty(nuclides)) {
nuclides = defaultNuclideSpectrumService.findNuclidesByUserName("admin", phd.getHeader().getSystem_type().toUpperCase());
}
// // 查询当前用户关联的核素信息
// List<String> nuclides = new LinkedList<>();
// // 从postgreSql中获取当前用户关注的核素信息 如果当前用户没有 则返回管理员的
// nuclides = defaultNuclideSpectrumService.findNuclidesByUserName(userName, phd.getHeader().getSystem_type().toUpperCase());
// if (CollectionUtils.isEmpty(nuclides)) {
// nuclides = defaultNuclideSpectrumService.findNuclidesByUserName("admin", phd.getHeader().getSystem_type().toUpperCase());
// }
// 分析文件数据
int flag = gammaFileUtil.AnalyseData(phd);
if (flag == 0) {
@ -987,7 +1052,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
"2. You didn't change any setting or calibration.";
result.error500(warning);
} else if (flag == -1) {
Map<String, NuclideLines> nuclideLinesMap = GetNuclideLines(nuclides);
Map<String, NuclideLines> nuclideLinesMap = (Map<String, NuclideLines>) redisUtil.get(fileName+"-"+userName);//GetNuclideLines(nuclides);
gammaFileUtil.NuclidesIdent(phd, nuclideLinesMap);
gammaFileUtil.RunQC(phd);
String warning = "Finish three tasks:\n" +
@ -996,7 +1061,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
"\t3.Test QC again.";
result.error500(warning);
} else {
Map<String, NuclideLines> nuclideLinesMap = GetNuclideLines(nuclides);
Map<String, NuclideLines> nuclideLinesMap = (Map<String, NuclideLines>) redisUtil.get(fileName+"-"+userName);//GetNuclideLines(nuclides);
gammaFileUtil.AnalyseSpectrum(phd, nuclideLinesMap);
// 重新分析各峰值对应的核素信息
// gammaFileUtil.NuclidesIdent(phd, nuclideLinesMap);
@ -1007,6 +1072,10 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
map.put("QCFlag", qcstate);
map.put("bAnalyed", phd.isBAnalyed());
map.put("peak", phd.getVPeak());
map.put("BaseCtrls", phd.getBaseCtrls());
// Bar Chart 柱状图
List<SeriseData> differance = gammaFileUtil.Differance(phd, phd.getVPeak());
map.put("barChart", differance);
result.setSuccess(true);
result.setResult(map);
}
@ -1073,16 +1142,16 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
if (m_nSChan == 0) {
m_vCount.add(0L);
}
ChartData channelCountChart = gammaFileUtil.Channel_Count(m_vCount, m_nCount, colorMap.get("Color_Spec"));
ChartData channelBaseLineChart = gammaFileUtil.Channel_BaseLine(phd, m_nCount, colorMap.get("Color_Base"));
List<ChartData> channelPeakChart = gammaFileUtil.Channel_Peak(phd, m_nCount, colorMap.get("Color_Peak"));
ChartData channelCountChart = gammaFileUtil.Channel_Count(phd, colorMap.get("Color_Spec"));
ChartData channelBaseLineChart = gammaFileUtil.Channel_BaseLine(phd, colorMap.get("Color_Base"));
List<ChartData> channelPeakChart = gammaFileUtil.Channel_Peak(phd, colorMap.get("Color_Peak"));
List<ShapeData> channelBaseCPChart = gammaFileUtil.Channel_BaseCP(phd);
map.put("channelCountChart", channelCountChart);
map.put("channelBaseLineChart", channelBaseLineChart);
map.put("channelPeakChart", channelPeakChart);
map.put("channelBaseCPChart", channelBaseCPChart);
// Bar Chart 柱状图
List<SeriseData> differance = gammaFileUtil.Differance(phd, phd.getVPeak(), m_vCount, m_nCount);
List<SeriseData> differance = gammaFileUtil.Differance(phd, phd.getVPeak());
map.put("barChart", differance);
// 赋值energy
map.put("energy", phd.getVEnergy());
@ -1370,11 +1439,11 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
List<PeakInfo> vPeak = gammaFileUtil.InitPeakTable(phd.getVPeak());
map.put("table", vPeak);
List<ChartData> channelPeak = gammaFileUtil.Channel_Peak(phd, m_nCount, colorMap.get("Color_Peak"));
List<ChartData> channelPeak = gammaFileUtil.Channel_Peak(phd, colorMap.get("Color_Peak"));
map.put("channelPeakChart", channelPeak);
ChartData channelBaseLine = gammaFileUtil.Channel_BaseLine(phd, m_nCount, colorMap.get("Color_Base"));
ChartData channelBaseLine = gammaFileUtil.Channel_BaseLine(phd, colorMap.get("Color_Base"));
map.put("channelBaseLineChart", channelBaseLine);
List<SeriseData> differance = gammaFileUtil.Differance(phd, phd.getVPeak(), m_vCount, m_nCount);
List<SeriseData> differance = gammaFileUtil.Differance(phd, phd.getVPeak());
map.put("barChart", differance);
gammaFileUtil.UpdateChart(phd, map, colorMap);
} else {// 如果不保留 根据下标移除对应的vPeak数据
@ -1382,7 +1451,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
phd.getVPeak().clear();
phd.setVPeak(oldPeak);
map.put("table", phd.getVPeak());
List<ChartData> channelPeak = gammaFileUtil.Channel_Peak(phd, m_nCount, colorMap.get("Color_Peak"));
List<ChartData> channelPeak = gammaFileUtil.Channel_Peak(phd, colorMap.get("Color_Peak"));
map.put("channelPeakChart", channelPeak);
}
}
@ -1397,24 +1466,64 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
@Override
public Result deletePeak(String fileName, int curRow, HttpServletRequest request) {
Result result = new Result();
//获取用户名称
String userName = JwtUtil.getUserNameByToken(request);
//获取缓存的核素信息
Map<String, NuclideLines> nuclideMap = (Map<String, NuclideLines>) redisUtil.get(fileName + "-" + userName);
//获取PHDFile的缓存信息
Cache<String, PHDFile> phdCache = localCache.getPHDCache();
PHDFile phd = phdCache.getIfPresent(fileName + "-" + userName);
if (Objects.isNull(phd)) {
result.error500("Please select the parse file first");
return result;
}
//获取颜色信息
Map<String, String> colorMap = sysUserColorService.initColor(userName);
long m_nCount = phd.getSpec().getNum_g_channel();
//声明一个结果的map
HashMap<String, Object> map = new HashMap<>();
//获取峰的大小
int peakNum = phd.getVPeak().size();
if (peakNum < 1) {
result.error500("No peak to delete.");
return result;
}
//判断当前要操作的下标是否在范围内
if (curRow >= 0 && curRow < peakNum) {
//获取当前下标位置的峰值信息
PeakInfo info = phd.getVPeak().get(curRow);
//获取当前选中的峰值信息的能量值
double energy = info.energy;
if (CollectionUtils.isNotEmpty(info.nuclides)) {
//遍历核素信息
for (int i=0; i<info.nuclides.size(); i++) {
String nuclideName = info.nuclides.get(i);
//从缓存信息中获取核素名称
NuclideLines nuclideLines = nuclideMap.get(nuclideName);
//获取最大活度的核素位置
int maxYeildIdx = nuclideLines.maxYeildIdx;
//获取最大活度对应的核素能量值
Double maxEnergy = nuclideLines.getVenergy().get(maxYeildIdx);
//判断当前选中的峰值信息的能量值 是否在 最大活度对应的核素能量值公差范围内
if (energy >= maxEnergy-0.5 && energy <= maxEnergy+0.5) {
//则需要删除所有关联的核素信息并 从MapNucAct中移除相关核素内容
for (PeakInfo peakInfo: phd.getVPeak()) {
//如果峰的核素名称中包含当前删除的核素
if (peakInfo.nuclides.contains(nuclideName)) {
peakInfo.nuclides.remove(nuclideName);
}
}
//从核素相关map中移除核素信息
phd.getMapNucActMda().remove(nuclideName);
//移除核素信息
nuclideMap.remove(nuclideName);
}
}
}
//将当前行从峰数组中移除
phd.getVPeak().remove(curRow);
//重新计算核素活度浓度
gammaFileUtil.NuclidesIdent(phd, nuclideMap);
//重新分析数据
gammaFileUtil.PeaksChanged(phd);
for (int i = 0; i < phd.getVPeak().size(); i++) {
PeakInfo peakInfo = phd.getVPeak().get(i);
@ -1422,7 +1531,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
}
List<PeakInfo> vPeak = gammaFileUtil.InitPeakTable(phd.getVPeak());
map.put("table", vPeak);
List<ChartData> channelPeak = gammaFileUtil.Channel_Peak(phd, m_nCount, colorMap.get("Color_Peak"));
List<ChartData> channelPeak = gammaFileUtil.Channel_Peak(phd, colorMap.get("Color_Peak"));
map.put("channelPeakChart", channelPeak);
gammaFileUtil.UpdateChart(phd, map, colorMap);
}
@ -1542,14 +1651,14 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
// 根据要进行修改的列的数据下标 操作Vpeak数据
phd.getVPeak().get(curRow).nuclides.add(nuclideName);
// 查询当前用户所关心的核素名称
// 查询当前用户关联的核素信息
List<String> userLib = new LinkedList<>();
// 从postgreSql中获取当前用户关注的核素信息 如果当前用户没有 则返回管理员的
userLib = defaultNuclideSpectrumService.findNuclidesByUserName(userName, phd.getHeader().getSystem_type().toUpperCase());
if (CollectionUtils.isEmpty(userLib)) {
userLib = defaultNuclideSpectrumService.findNuclidesByUserName("admin", phd.getHeader().getSystem_type().toUpperCase());
}
Map<String, NuclideLines> mapNucLines = GetNuclideLines(userLib);
// // 查询当前用户关联的核素信息
// List<String> userLib = new LinkedList<>();
// // 从postgreSql中获取当前用户关注的核素信息 如果当前用户没有 则返回管理员的
// userLib = defaultNuclideSpectrumService.findNuclidesByUserName(userName, phd.getHeader().getSystem_type().toUpperCase());
// if (CollectionUtils.isEmpty(userLib)) {
// userLib = defaultNuclideSpectrumService.findNuclidesByUserName("admin", phd.getHeader().getSystem_type().toUpperCase());
// }
Map<String, NuclideLines> mapNucLines = (Map<String, NuclideLines>) redisUtil.get(fileName+"-"+userName);//GetNuclideLines(userLib);
// 查询出核素信息
NuclideLines it_line = mapNucLines.get(nuclideName);
// 如果核素信息不存在返回
@ -1576,20 +1685,54 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
@Override
public Result deleteNuclide(Integer curRow, String nuclideName, String fileName, List<String> list_identify, HttpServletRequest request) {
Result result = new Result();
//获取用户名
String userName = JwtUtil.getUserNameByToken(request);
Map<String, Object> map = new HashMap<>();
Map<String, NuclideLines> nuclideMap = (Map<String, NuclideLines>) redisUtil.get(fileName + "-" + userName);
//缓存中获取缓存信息
Cache<String, PHDFile> phdCache = localCache.getPHDCache();
PHDFile phd = phdCache.getIfPresent(fileName + "-" + userName);
if (Objects.isNull(phd)) {
result.error500("Please select the parse file first");
return result;
}
//判断当前选择的核素名称是否包含在当前Peak的核素列表中
int index = list_identify.indexOf(nuclideName);
if (index >= 0) {
// 如果所选的行下标小于0 或者 超出界限 则不进行处理
if (curRow < 0 || curRow >= phd.getVPeak().size()) {
return result;
}
//从缓存信息中获取核素名称
NuclideLines nuclideLines = nuclideMap.get(nuclideName);
//获取最大活度的核素位置
int maxYeildIdx = nuclideLines.maxYeildIdx;
//获取最大活度对应的核素能量值
Double maxEnergy = nuclideLines.getVenergy().get(maxYeildIdx);
//获取当前选中的峰值信息的能量值
double energy = phd.getVPeak().get(curRow).energy;
//判断当前选中的峰值信息的能量值 是否在 最大活度对应的核素能量值公差范围内
if (energy >= maxEnergy-0.5 && energy <= maxEnergy+0.5) {
//则需要删除所有关联的核素信息并 从MapNucAct中移除相关核素内容
for (PeakInfo peakInfo: phd.getVPeak()) {
//如果峰的核素名称中包含当前删除的核素
if (peakInfo.nuclides.contains(nuclideName)) {
peakInfo.nuclides.remove(nuclideName);
}
}
//从核素相关map中移除核素信息
phd.getMapNucActMda().remove(nuclideName);
//移除核素信息
nuclideMap.remove(nuclideName);
//重新计算核素活度浓度
gammaFileUtil.NuclidesIdent(phd, nuclideMap);
//从核素的选中列表中移除对应下标的核素信息
list_identify.remove(index);
//重新初始化峰列表信息
List<PeakInfo> vPeak = gammaFileUtil.InitPeakTable(phd.getVPeak());
map.put("identify", list_identify);
map.put("table", vPeak);
} else {
// 更新峰信息列表和表格
// 根据核素名称获取对应的下标并从list_identifyphd.getVPeak()移除
list_identify.remove(index);
@ -1600,6 +1743,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
gammaFileUtil.ReCalcMdaMdc(phd, nuclideName, curRow + 1);
map.put("identify", list_identify);
map.put("table", vPeak);
}
result.setSuccess(true);
result.setResult(map);
}
@ -1739,6 +1883,28 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
long span = phd.getSpec().getG_energy_span();
List<GardsNuclLinesLib> nuclideTableList = spectrumAnalysisMapper.getNuclideTable(name, span);
if (CollectionUtils.isNotEmpty(nuclideTableList)) {
nuclideTableList.stream().forEach(item-> {
if (Objects.nonNull(item.getEnergy())) {
item.setEnergy(Double.valueOf(String.format("%.3f", item.getEnergy())));
} else {
item.setEnergy(Double.valueOf(String.format("%.3f", 0.0)));
}
if (Objects.nonNull(item.getEnergyUncert())) {
item.setEnergyUncert(Double.valueOf(String.format("%.3f", item.getEnergyUncert())));
} else {
item.setEnergyUncert(Double.valueOf(String.format("%.3f", 0.0)));
}
if (Objects.nonNull(item.getYield())) {
item.setYield(Double.valueOf(String.format("%.3f", item.getYield())));
} else {
item.setYield(Double.valueOf(String.format("%.3f", 0.0)));
}
if (Objects.nonNull(item.getYieldUncert())) {
item.setYieldUncert(Double.valueOf(String.format("%.3f", item.getYieldUncert())));
} else {
item.setYieldUncert(Double.valueOf(String.format("%.3f", 0.0)));
}
});
map.put("table", nuclideTableList);
gammaFileUtil.InitChart(nuclideTableList, phd, map, colorMap);
}
@ -1909,30 +2075,17 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
phd.setBaseCtrls(m_baseCtrl);
phd.setVBase(m_baseCtrl.getBaseline());
// }
List<Long> m_vCount = new LinkedList<>();
long m_nCount = phd.getSpec().getNum_g_channel();
long m_nSChan = phd.getSpec().getBegin_channel();
// 获取当前角色的颜色配置
Map<String, String> colorMap = sysUserColorService.initColor(userName);
// 确保绘制曲线时所有谱都是从1道开始
int i = 0;
if (m_nSChan == 0) {
i = 1;
}
for (; i < m_nCount; ++i) {
m_vCount.add(phd.getSpec().getCounts().get(i));
}
if (m_nSChan == 0) {
m_vCount.add(0L);
}
List<SeriseData> differance = gammaFileUtil.Differance(phd, phd.getVPeak(), m_vCount, m_nCount);
List<SeriseData> differance = gammaFileUtil.Differance(phd, phd.getVPeak());
map.put("barChart", differance);
ChartData channelBaseLine = gammaFileUtil.Channel_BaseLine(phd, m_nCount, colorMap.get("Color_Base"));
ChartData channelBaseLine = gammaFileUtil.Channel_BaseLine(phd, colorMap.get("Color_Base"));
map.put("channelBaseLineChart", channelBaseLine);
List<ChartData> peakSet = gammaFileUtil.PeakSet(phd.getVPeak(), phd.getVBase(), colorMap.get("Color_Peak"), m_nCount, null, false);
List<ChartData> peakSet = gammaFileUtil.PeakSet(phd.getVPeak(), phd.getVBase(), colorMap.get("Color_Peak"), phd.getSpec().getNum_g_channel(), null, false);
map.put("peakSet", peakSet);
List<ShapeData> shapeData = gammaFileUtil.CreateShapeCP(phd.getBaseCtrls());
map.put("shapeData", shapeData);
map.put("peak", phd.getVPeak());
// 更新主界面的 Chart
gammaFileUtil.UpdateChart(phd, map, colorMap);
result.setSuccess(true);
@ -3432,6 +3585,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
result.error500("Please select the parse file first");
return result;
}
if (StringUtils.isNotBlank(phd.getOriTotalCmt())) {
String temp = phd.getOriTotalCmt().trim();
if (StringUtils.isNotBlank(temp)) {
comments += "Comments From Original Spectrum:\n" + temp;
@ -3445,6 +3599,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
}
}
}
}
result.setSuccess(true);
result.setResult(comments);
return result;
@ -3517,7 +3672,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
if (StringUtils.isBlank(reportPath)) {
throw new RuntimeException("The automatic handler generated report does not exist");
}
String pathName = StringPool.SLASH + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + reportPath.substring(0, reportPath.lastIndexOf(StringPool.SLASH));
String pathName = ftpUtil.getFtpRootPath() + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + reportPath.substring(0, reportPath.lastIndexOf(StringPool.SLASH));
String fileName = reportPath.substring(reportPath.lastIndexOf(StringPool.SLASH) + 1) + ".txt";
// 连接ftp
FTPClient ftpClient = ftpUtil.LoginFTP();
@ -3567,7 +3722,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
public void exportARR(Integer sampleId, HttpServletResponse response) {
// 获取自动处理生成的报告地址
String reportPath = spectrumAnalysisMapper.viewARR(sampleId);
String pathName = StringPool.SLASH + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + reportPath.substring(0, reportPath.lastIndexOf(StringPool.SLASH));
String pathName = ftpUtil.getFtpRootPath() + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + reportPath.substring(0, reportPath.lastIndexOf(StringPool.SLASH));
String fileName = reportPath.substring(reportPath.lastIndexOf(StringPool.SLASH) + 1) + ".txt";
// 连接ftp
FTPClient ftpClient = ftpUtil.LoginFTP();
@ -4105,7 +4260,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
if (StringUtils.isBlank(logPath)) {
throw new RuntimeException("The log generated by the automatic processor does not exist");
}
String pathName = StringPool.SLASH + spectrumPathProperties.getLogPath() + StringPool.SLASH + logPath.substring(0, logPath.lastIndexOf(StringPool.SLASH));
String pathName = ftpUtil.getFtpRootPath() + spectrumPathProperties.getLogPath() + StringPool.SLASH + logPath.substring(0, logPath.lastIndexOf(StringPool.SLASH));
String fileName = logPath.substring(logPath.lastIndexOf(StringPool.SLASH) + 1);
// 连接ftp
FTPClient ftpClient = ftpUtil.LoginFTP();
@ -4217,26 +4372,36 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
public Result saveToDB(String fileName, HttpServletRequest request) {
Result result = new Result();
String userName = JwtUtil.getUserNameByToken(request);
boolean bRet = true;
boolean bRet = false;
Cache<String, PHDFile> phdCache = localCache.getPHDCache();
PHDFile phd = phdCache.getIfPresent(fileName + "-" + userName);
if (Objects.isNull(phd)) {
result.error500("Please select the parse file first");
return result;
}
// 查看分析员是否有权限操作当前台站信息
// 判断当前分析员是否有过排班任务
if (!phd.isBAnalyed()) {
result.error500("Please first Reprocessing!");
return result;
}
//获取当前操作的台站的id
Integer stationId = spectrumAnalysisMapper.getStationId(phd.getHeader().getSite_code());
boolean bAnalysisResultWriteAuthority = userTaskUtil.CheckUserWriteAuthorityForStation(userName, stationId);
//判断当前分析员当天是否有对应台站的排班任务
List<String> userStation = userTaskUtil.findUserStation(userName);
boolean bAnalysisResultWriteAuthority = false;
if (userStation.contains(String.valueOf(stationId))) {
bAnalysisResultWriteAuthority = true;
}
//根据系统类型获取系统存储前缀
String sysTemSubdir = nameStandUtil.GetSysTemSubdir(phd.getHeader().getSystem_type());
//根据数据类型获取数据存储前缀
String dateTypeSubdir = nameStandUtil.GetDateTypeSubdir(phd.getMsgInfo().getData_type());
//格式化名称
Map<String, String> nameMap = nameStandUtil.NameStandard(phd);
String mSaveFileName = nameMap.get("saveFile");
//判断当前分析员是否有过历史分析当前文件
Integer isExist = spectrumAnalysisMapper.SampleIsExist(sysTemSubdir+ dateTypeSubdir + StringPool.SLASH + mSaveFileName, userName);
// 如果用户没有权限操作 则查看当前用户是否是高级分析员/管理员
if (!bAnalysisResultWriteAuthority) {
List<String> roleCodes = userTaskUtil.findRoleCodeByUserName(userName);
// 如果只是分析员 则无法保存数据库 返回信息
if (roleCodes.contains(RoleType.Role_Analyst) && !roleCodes.contains(RoleType.Role_SuperAnalyst) && !roleCodes.contains(RoleType.Role_Manager)) {
bRet = false;
}
}
if (!bRet) {
if (!bAnalysisResultWriteAuthority && Objects.isNull(isExist)) {
result.error500("You have no permission to save results to DB!");
return result;
}
@ -4255,7 +4420,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
phd.setStatus(sampleData.getStatus());
} else {
// 如果sampleData为空 存储数据
SaveSampleToDB(phd, middleData.analyses_save_filePath);
bRet = SaveSampleToDB(phd, middleData.analyses_save_filePath);
}
if (!bRet) {
result.error500("The Database hasn't this Spectrum(" + phd.getFilename() + ") and Insert it to Database failed.");
@ -4275,8 +4440,6 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
if (StringUtils.isBlank(idAnalysis)) {
// RNMAN.GARDS_ANALYSES 表插入数据
analysesSpectrumService.insertEntity(middleData, phd, userName, comments);
// 查询idAnalysis
idAnalysis = spectrumAnalysisMapper.getIdAnalysisByIdAnalyst(phd.getId_sample(), userName);
// 修改sample_data状态
spectrumAnalysisMapper.updateAnalysesStatus(middleData.analyses_save_filePath);
} else {
@ -4316,27 +4479,48 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
paths.add(middleData.analyses_lc_filePath);
paths.add(middleData.analyses_scac_filePath);
paths.add(middleData.analyses_baseline_filePath);
if (false) paths.add(middleData.analyses_save_filePath);
if (bRet) {
paths.add(middleData.analyses_save_filePath);
}
} else {
result.error500("Save analysis results to DB failed.");
return result;
}
Map<String, Object> map = new HashMap<>();
//更新detial Information
List<String> detailedInfo = gammaFileUtil.DetailedInfo(phd.getId_sample(), phd);
map.put("DetailedInformation", detailedInfo);
result.setSuccess(true);
result.setResult(map);
return result;
}
@Transactional
public void SaveSampleToDB(PHDFile phd, String input_file_name) {
public boolean SaveSampleToDB(PHDFile phd, String input_file_name) {
boolean bRet = false;
String sit_det_code = phd.getHeader().getSite_code();
String detect_code = phd.getHeader().getDetector_code();
// 查询 station_id detect_id
Integer station_id = spectrumAnalysisMapper.getStationId(sit_det_code);
Integer detect_id = spectrumAnalysisMapper.getDetectorId(detect_code);
// ORIGINAL.GARDS_SAMPLE_DATA 表数据
sampleDataSpectrumService.saveSampleDataGamma(phd, input_file_name, station_id, detect_id);
Integer saveSampleDataGamma = sampleDataSpectrumService.saveSampleDataGamma(phd, input_file_name, station_id, detect_id);
if (saveSampleDataGamma > 0) {
bRet = true;
} else {
bRet = false;
return bRet;
}
// 查询新增后的 sample_id 的值赋给变量 DECLARESAMPLEID
Integer sampleId = spectrumAnalysisMapper.getSampleId(input_file_name);
// ORIGINAL.GARDS_SAMPLE_AUX 表数据
sampleAuxSpectrumService.saveSampleAuxGamma(phd, sampleId);
Integer saveSampleAuxGamma = sampleAuxSpectrumService.saveSampleAuxGamma(phd, sampleId);
if (saveSampleAuxGamma > 0) {
bRet = true;
} else {
bRet = false;
return bRet;
}
// ORIGINAL.gards_sample_description 数据表
sampleDescriptionSpectrumService.saveSampleDescriptionGamma(phd, sampleId);
// ORIGINAL.GARDS_SAMPLE_CERT ORIGINAL.GARDS_SAMPLE_CERT_LINE 数据表
@ -4345,17 +4529,18 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
sampleCertLineSpectrumService.saveSampleCertLineGamma(phd, sampleId);
}
// gards_ calibration_pairs_orig数据表
calibrationPairsOrigSpectrumService.saveCalibrationPairsOrigGamma(phd, sampleId);
Integer saveCalibrationPairsOrigGamma = calibrationPairsOrigSpectrumService.saveCalibrationPairsOrigGamma(phd, sampleId);
// gards_total_efficiency _pairs数据表(IDC没有)
totalEfficiencyPairsSpectrumService.saveTotalEfficiencyPairsGamma(phd, sampleId);
Integer saveTotalEfficiencyPairsGamma = totalEfficiencyPairsSpectrumService.saveTotalEfficiencyPairsGamma(phd, sampleId);
// gards_spectrum数据表
spectrumService.saveSpectrumGamma(phd, sampleId, input_file_name);
Integer saveSpectrumGamma = spectrumService.saveSpectrumGamma(phd, sampleId, input_file_name);
// 根据文件名称获取sample基础数据信息
GardsSampleDataSpectrum samplData = spectrumAnalysisMapper.findSampleByFilePath(input_file_name);
if (Objects.nonNull(samplData)) {
phd.setId_sample(samplData.getSampleId().toString());
phd.setStatus(samplData.getStatus());
}
return bRet;
}
@Override
@ -4412,6 +4597,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
// 换行
strBuild.append(System.lineSeparator());
// 文本内容第三块数据
if (CollectionUtils.isNotEmpty(phd.getVPeak())) {
List<List<String>> peakNuclides = phd.getVPeak().stream().map(item -> item.nuclides).collect(Collectors.toList());
List<String> nuclides = new LinkedList<>();
for (int i = 0; i < peakNuclides.size(); i++) {
@ -4426,6 +4612,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
strBuild.append(nuclideStr);
// 换行
strBuild.append(System.lineSeparator());
}
// 换行
strBuild.append(System.lineSeparator());
// 文本内容第四块
@ -4458,6 +4645,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
strBuild.append(System.lineSeparator());
// 文本内容第五块数据
Map<String, NuclideActMda> mapNucActMda = phd.getMapNucActMda();
if (CollectionUtils.isNotEmpty(mapNucActMda)) {
for (Map.Entry<String, NuclideActMda> entry : mapNucActMda.entrySet()) {
String key = entry.getKey();
NuclideActMda nuc = entry.getValue();
@ -4496,6 +4684,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
strBuild.append(System.lineSeparator());
}
}
}
strBuild.append(System.lineSeparator());
String detectorCode = phd.getHeader().getDetector_code();
String date = phd.getAcq().getAcquisition_start_date().replace("/", "");
@ -4503,6 +4692,10 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
String dataType = phd.getMsgInfo().getData_type().substring(0, 1);
String format = ".txt";
String txtFileName = String.format("%s-%s_%s_%s_RESULT%s", detectorCode, date, time, dataType, format);
if (StrUtil.isNotBlank(fileName)){
if (StrUtil.contains(fileName, ".PHD"))
txtFileName = StrUtil.replace(fileName, ".PHD", ".txt");
}
// 导出数据内容到txt文本
OutputStream fos = null;
try {
@ -4641,6 +4834,10 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
String dataType = phd.getMsgInfo().getData_type().substring(0, 1);
String format = ".xls";
String xlsFileName = String.format("%s-%s_%s_%s_RESULT%s", detectorCode, date, time, dataType, format);
if (StrUtil.isNotBlank(fileName)){
if (StrUtil.contains(fileName, ".PHD"))
xlsFileName = StrUtil.replace(fileName, ".PHD", ".xls");
}
String template = SAVETOEXCEL_G.getName();
// 导出时使用默认文件名 file.xls
ExportUtil.exportXls(response, template, data, xlsFileName);

View File

@ -98,8 +98,8 @@ public class SpectrumFileServiceImpl implements ISpectrumFileService {
boolean created = FTPUtil.createDirs(ftpClient, filePath);
if (!created) return Result.error(Prompt.DIR_CREATE_FAIL + filePath);
// 上传所有文件
System.out.println("filelist>>>>"+fileList.size());
String rootPath = spectrumPathProperties.getRootPath();
List<String> failList = new ArrayList<>();
String rootPath = ftpUtil.getFtpRootPath();
for (File oneFile : fileList) {
String fileName = oneFile.getName();
// 判断能谱文件名称是否符合规则不符合则进行重命名
@ -109,10 +109,12 @@ public class SpectrumFileServiceImpl implements ISpectrumFileService {
fileName = oneFile.getName().substring(0, 23)+suffix;
}
String fullFilePath = rootPath + filePath + slash + fileName;
System.out.println("fullFilePath>>>>"+fullFilePath);
FileInputStream local = new FileInputStream(oneFile);
ftpClient.storeFile(fullFilePath, local);
boolean success = ftpClient.storeFile(fullFilePath, local);
if (!success) failList.add(fullFilePath);
}
if (CollUtil.isNotEmpty(failList))
return Result.error(Prompt.UPLOAD_ERR, failList);
return Result.OK(Prompt.UPLOAD_SUCC);
} catch (IOException e) {
e.printStackTrace();
@ -142,7 +144,7 @@ public class SpectrumFileServiceImpl implements ISpectrumFileService {
String username = user.getUsername();
String slash = SymbolConstant.SINGLE_SLASH;
String comma = SymbolConstant.COMMA;
String filePath = slash + spectrumPathProperties.getUploadPath() + slash + username;
String filePath = ftpUtil.getFtpRootPath() + spectrumPathProperties.getUploadPath() + slash + username;
FTPClient ftpClient = null;
List<FileDto> fileDtos = new ArrayList<>();
Page<FileDto> page = new Page<>(pageNo, pageSize);

View File

@ -9,10 +9,7 @@ import org.jeecg.common.util.RedisUtil;
import org.jeecg.modules.system.entity.GardsSampleDataSystem;
import org.jeecg.modules.system.service.IGardsSampleDataService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.bind.annotation.*;
import java.util.Map;
@ -49,8 +46,9 @@ public class GardsSampleDataController {
@DeleteMapping("deleteById")
@ApiOperation(value = "删除DATA_BASE数据", notes = "删除DATA_BASE数据")
public Result deleteById(Integer sampleId){
return gardsSampleDataService.deleteById(sampleId);
public Result<?> deleteById(@RequestParam Integer sampleId, boolean sampleData,
boolean rnAuto, boolean rnMan){
return gardsSampleDataService.deleteById(sampleId, sampleData, rnAuto, rnMan);
}
}

View File

@ -1,7 +1,20 @@
package org.jeecg.modules.system.mapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import org.apache.ibatis.annotations.Param;
import org.jeecg.modules.base.dto.OwnerDto;
import org.jeecg.modules.base.entity.rnauto.GardsAnalyses;
import org.jeecg.modules.system.entity.GardsSampleDataSystem;
import java.util.List;
/**
 * Mapper for sample-data maintenance: locating every table keyed by
 * SAMPLE_ID and bulk-deleting a sample's rows across schemas.
 */
public interface GardsSampleDataMapper extends BaseMapper<GardsSampleDataSystem> {

    /**
     * Lists owner/table pairs for every table containing the given column
     * (looked up in Oracle's DBA_TAB_COLUMNS).
     *
     * @param filed column name to search for, e.g. "SAMPLE_ID".
     *              NOTE(review): "filed" looks like a typo of "field" —
     *              kept as-is because the mapper XML binds this name.
     * @return matching schema owner / table name pairs
     */
    List<OwnerDto> containSampleId(String filed);

    /**
     * Deletes the rows belonging to one sample from every listed table.
     *
     * @param tableNames fully qualified table names ("OWNER.TABLE")
     * @param sampleId   sample whose rows are removed
     */
    void delTables(@Param("tableNames") List<String> tableNames,
                   @Param("sampleId") Integer sampleId);

    /**
     * Fetches the stored artifact paths (baseline/LC/SCAC/log/report) of a
     * sample's analysis row from {@code <owner>.GARDS_ANALYSES}.
     *
     * @param sampleId sample to look up
     * @param owner    schema owner, expected "RNAUTO" or "RNMAN"
     * @return the analysis row, or null when the sample has none
     */
    GardsAnalyses getAnalysis(@Param("sampleId") Integer sampleId,
                              @Param("owner") String owner);
}

View File

@ -0,0 +1,31 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="org.jeecg.modules.system.mapper.GardsSampleDataMapper">

    <!-- Delete one sample's rows from every listed table.
         Emits one semicolon-terminated DELETE per table, so the JDBC
         connection must support multi-statement execution.
         NOTE(review): ${tableName} is substituted as raw SQL (no escaping);
         callers must only pass names obtained from containSampleId, never
         user-supplied input. -->
    <delete id="delTables">
        <foreach collection = "tableNames" item = "tableName" index = "index">
            DELETE FROM ${tableName} WHERE SAMPLE_ID = #{sampleId};
        </foreach>
    </delete>

    <!-- Find every schema/table that has a column with the given name,
         via Oracle's DBA_TAB_COLUMNS dictionary view (requires DBA-level
         select privilege). -->
    <select id="containSampleId" resultType="org.jeecg.modules.base.dto.OwnerDto">
        SELECT
            OWNER,
            TABLE_NAME AS tableName
        FROM
            DBA_TAB_COLUMNS
        WHERE
            COLUMN_NAME = #{filed}
    </select>

    <!-- Fetch a sample's analysis artifact paths from ${owner}.GARDS_ANALYSES
         (owner is RNAUTO or RNMAN; raw substitution — trusted values only).
         NOTE(review): REPORT_PAHT is presumed to be the actual (misspelled)
         column name in the GARDS_ANALYSES schema — confirm before "fixing". -->
    <select id="getAnalysis" resultType="org.jeecg.modules.base.entity.rnauto.GardsAnalyses">
        SELECT
            BASELINE_PATH AS baselinePath,
            LC_PATH AS lcPath,
            SCAC_PATH AS scacPath,
            LOG_PATH AS logPath,
            REPORT_PAHT AS reportPath
        FROM
            ${owner}.GARDS_ANALYSES
        WHERE
            SAMPLE_ID = #{sampleId}
    </select>
</mapper>

View File

@ -4,6 +4,7 @@ import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.service.IService;
import org.jeecg.common.api.QueryRequest;
import org.jeecg.common.api.vo.Result;
import org.jeecg.modules.base.entity.rnauto.GardsAnalyses;
import org.jeecg.modules.system.entity.GardsSampleDataSystem;
public interface IGardsSampleDataService extends IService<GardsSampleDataSystem> {
@ -21,6 +22,7 @@ public interface IGardsSampleDataService extends IService<GardsSampleDataSystem>
* @param sampleId
* @return
*/
Result deleteById(Integer sampleId);
Result<?> deleteById(Integer sampleId, boolean sampleData, boolean rnAuto, boolean rnMan);
GardsSampleDataSystem getOne(Integer sampleId);
}

View File

@ -1,5 +1,10 @@
package org.jeecg.modules.system.service.impl;
import cn.hutool.core.collection.CollUtil;
import cn.hutool.core.collection.ListUtil;
import cn.hutool.core.date.DateUtil;
import cn.hutool.core.util.ObjectUtil;
import cn.hutool.core.util.StrUtil;
import com.baomidou.dynamic.datasource.annotation.DS;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
@ -9,23 +14,44 @@ import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import org.jeecg.common.api.QueryRequest;
import org.jeecg.common.api.vo.Result;
import org.jeecg.common.constant.enums.FileTypeEnum;
import org.jeecg.common.properties.SpectrumPathProperties;
import org.jeecg.common.util.FTPUtil;
import org.jeecg.common.util.RedisUtil;
import org.jeecg.modules.base.dto.OwnerDto;
import org.jeecg.modules.base.entity.rnauto.GardsAnalyses;
import org.jeecg.modules.system.entity.GardsSampleDataSystem;
import org.jeecg.modules.system.mapper.GardsSampleDataMapper;
import org.jeecg.modules.system.service.IGardsSampleDataService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.jdbc.datasource.DataSourceTransactionManager;
import org.springframework.stereotype.Service;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.TransactionDefinition;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.transaction.interceptor.TransactionAspectSupport;
import org.springframework.transaction.support.DefaultTransactionDefinition;
import java.util.HashMap;
import java.util.Objects;
import java.util.*;
import java.util.stream.Collectors;
@Service("gardsSampleDataService")
@DS("ora")
public class GardsSampleDataServiceImpl extends ServiceImpl<GardsSampleDataMapper, GardsSampleDataSystem> implements IGardsSampleDataService {
@Autowired
private RedisUtil redisUtil;
@Autowired
private FTPUtil ftpUtil;
@Autowired
private SpectrumPathProperties pathProperties;
@Autowired
private PlatformTransactionManager transactionManager;
@Override
public Result<IPage<GardsSampleDataSystem>> findPage(QueryRequest queryRequest, GardsSampleDataSystem gardsSampleData) {
//查询全部台站信息
@ -62,14 +88,115 @@ public class GardsSampleDataServiceImpl extends ServiceImpl<GardsSampleDataMappe
}
@Override
@Transactional
public Result deleteById(Integer sampleId) {
Result result = new Result();
LambdaQueryWrapper<GardsSampleDataSystem> queryWrapper = new LambdaQueryWrapper<>();
queryWrapper.eq(GardsSampleDataSystem::getSampleId, sampleId);
this.baseMapper.delete(queryWrapper);
result.success("Successfully deleted");
return result;
/**
 * Deletes a sample's database rows and the FTP files derived from them.
 * <p>
 * Scope flags: {@code sampleData} wipes everything (ORIGINAL + RNAUTO +
 * RNMAN); otherwise {@code rnAuto} / {@code rnMan} limit deletion to the
 * automatic-processing or manual-analysis schema respectively.
 * <p>
 * Order matters: the DB transaction is committed BEFORE any FTP deletion,
 * so file-removal failures never roll back the row deletes — they are
 * reported back in the Result instead.
 *
 * @param sampleId   sample whose data is removed
 * @param sampleData true to purge all schemas and all files
 * @param rnAuto     purge RNAUTO tables/files (when sampleData is false)
 * @param rnMan      purge RNMAN tables/files (when sampleData is false)
 * @return OK on success; error with the failed paths when only the file
 *         cleanup failed; error when the DB transaction was rolled back
 */
public Result<?> deleteById(Integer sampleId, boolean sampleData,
                            boolean rnAuto, boolean rnMan) {
    String ftpRootPath = ftpUtil.getFtpRootPath();
    String savePath = ftpRootPath + pathProperties.getSaveFilePath() + StrUtil.SLASH;
    String logPath = ftpRootPath + pathProperties.getLogPath() + StrUtil.SLASH;
    /* Delete the database rows. */
    // Keep only the three schemas this feature owns; DBA_TAB_COLUMNS may
    // report SAMPLE_ID columns in unrelated schemas.
    String ORIGINAL = "ORIGINAL";String RNAUTO = "RNAUTO";String RNMAN = "RNMAN";
    List<String> suitable = ListUtil.toList(ORIGINAL, RNAUTO, RNMAN);
    List<OwnerDto> ownerDtos = baseMapper.containSampleId("SAMPLE_ID").stream()
            .filter(owner -> CollUtil.contains(suitable, owner.getOwner()))
            .collect(Collectors.toList());
    String DOT = StrUtil.DOT;
    // Programmatic transaction control: commit must happen before the FTP
    // cleanup below, which a declarative @Transactional could not express.
    TransactionDefinition txDef = new DefaultTransactionDefinition();
    TransactionStatus txStatus = transactionManager.getTransaction(txDef);
    try {
        List<String> needDel = new ArrayList<>();
        if (sampleData){
            // Collect every table name across all three schemas.
            List<String> allTables = ownerDtos.stream()
                    .map(owner -> owner.getOwner() + DOT + owner.getTableName())
                    .collect(Collectors.toList());
            // Delete the sample's rows from those tables.
            if (CollUtil.isNotEmpty(allTables))
                baseMapper.delTables(allTables, sampleId);
            // Collect the file paths to remove afterwards.
            // samplePath may return null; blanks are filtered out below.
            needDel.add(samplePath(savePath, sampleId)); // original spectrum file
            needDel.addAll(manOrAutoPath(savePath, logPath, sampleId, RNMAN)); // manual-analysis files
            needDel.addAll(manOrAutoPath(savePath, logPath, sampleId, RNAUTO)); // auto-processing files
        }
        else {
            if (rnAuto){
                // Table names belonging to the automatic-processing schema.
                List<String> autoTables = ownerDtos.stream()
                        .filter(owner -> StrUtil.equals(owner.getOwner(), RNAUTO))
                        .map(owner -> owner.getOwner() + DOT + owner.getTableName())
                        .collect(Collectors.toList());
                // Delete the sample's rows from those tables.
                if (CollUtil.isNotEmpty(autoTables))
                    baseMapper.delTables(autoTables, sampleId);
                // Queue the auto-processing artifacts for file removal.
                needDel.addAll(manOrAutoPath(savePath, logPath, sampleId, RNAUTO)); // auto-processing files
            }
            if (rnMan){
                // Table names belonging to the manual-analysis schema.
                List<String> manTables = ownerDtos.stream()
                        .filter(owner -> StrUtil.equals(owner.getOwner(), RNMAN))
                        .map(owner -> owner.getOwner() + DOT + owner.getTableName())
                        .collect(Collectors.toList());
                // Delete the sample's rows from those tables.
                if (CollUtil.isNotEmpty(manTables))
                    baseMapper.delTables(manTables, sampleId);
                // Queue the manual-analysis artifacts for file removal.
                needDel.addAll(manOrAutoPath(savePath, logPath, sampleId, RNMAN)); // manual-analysis files
            }
        }
        transactionManager.commit(txStatus);
        // Drop null/blank entries (e.g. samples with no recorded file name).
        needDel = needDel.stream().filter(StrUtil::isNotBlank).collect(Collectors.toList());
        if (CollUtil.isEmpty(needDel))
            return Result.OK("Data cleaning is complete. No files need to be cleaned!");
        // Remove the FTP files; removeFiles returns the paths it failed on.
        List<String> failList = ftpUtil.removeFiles(needDel);
        if (CollUtil.isNotEmpty(failList))
            return Result.error("Data clearing is complete, but file clearing fails!", failList);
        return Result.OK("Data and file cleanup complete!");
    }catch (Exception e){
        // Any DB failure rolls everything back; no files were touched yet.
        transactionManager.rollback(txStatus);
        e.printStackTrace();
        return Result.error("Data deletion is abnormal, The file deletion operation has not been performed!");
    }
}
@Override
public GardsSampleDataSystem getOne(Integer sampleId) {
    // Look up the sample row by its id; fall back to an empty entity so
    // callers never have to null-check the result.
    LambdaQueryWrapper<GardsSampleDataSystem> query = new LambdaQueryWrapper<>();
    query.eq(GardsSampleDataSystem::getSampleId, sampleId);
    GardsSampleDataSystem found = getOne(query);
    return found != null ? found : new GardsSampleDataSystem();
}
// Resolves the FTP path of a sample's original spectrum file, or null when
// the sample has no recorded input file name (callers filter out blanks).
private String samplePath(String savePath, Integer sampleId){
    String inputFileName = getOne(sampleId).getInputFileName();
    return StrUtil.isBlank(inputFileName) ? null : savePath + inputFileName;
}
// Collects the FTP paths of the analysis artifacts (baseline, LC, SCAC,
// log, report) recorded for a sample in the given schema ("RNMAN" or
// "RNAUTO"). Returns an empty list when no analysis row exists; blank
// path columns are skipped.
private List<String> manOrAutoPath(String savePath, String logPath,
                                   Integer sampleId, String owner){
    List<String> result = new ArrayList<>();
    GardsAnalyses analysis = baseMapper.getAnalysis(sampleId, owner);
    if (ObjectUtil.isNull(analysis))
        return result;
    // Baseline/LC/SCAC artifacts all live under the save path.
    for (String relative : Arrays.asList(analysis.getBaselinePath(),
            analysis.getLcPath(), analysis.getScacPath())) {
        if (StrUtil.isNotBlank(relative))
            result.add(savePath + relative);
    }
    // The processing log lives under the log path.
    if (StrUtil.isNotBlank(analysis.getLogPath()))
        result.add(logPath + analysis.getLogPath());
    // The report is stored without an extension; append the txt suffix.
    String reportPath = analysis.getReportPath();
    if (StrUtil.isNotBlank(reportPath))
        result.add(savePath + reportPath + FileTypeEnum.txt.getType());
    return result;
}
}

View File

@ -48,7 +48,7 @@ public class ReadLineUtil {
try {
ftpClient.enterLocalPassiveMode();
String fileName = filePath.substring(filePath.lastIndexOf(StringPool.SLASH) + 1);
String parameterFilePath = StringPool.SLASH + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + filePath.substring(0, filePath.lastIndexOf(StringPool.SLASH));
String parameterFilePath = ftpUtil.getFtpRootPath() + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + filePath.substring(0, filePath.lastIndexOf(StringPool.SLASH));
//判断文件路径是否为空
if (StringUtils.isNotBlank(parameterFilePath)){
//在当前工作路径下读取文件
@ -191,7 +191,7 @@ public class ReadLineUtil {
OutputStream outputStream = null;
InputStream inputStream = null;
try {
filePath = StringPool.SLASH + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + filePath;
filePath = ftpUtil.getFtpRootPath() + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + filePath;
// 切换工作目录为 /
ftpClient.changeWorkingDirectory(SymbolConstant.SINGLE_SLASH);