Merge remote-tracking branch 'origin/station' into station

This commit is contained in:
xiongzheng 2023-11-06 08:58:50 +08:00
commit eb99b95bd7
31 changed files with 2191 additions and 1403 deletions

View File

@ -1,7 +1,11 @@
package org.jeecg.common.util;
import cn.hutool.core.collection.CollUtil;
import cn.hutool.core.util.ObjectUtil;
import cn.hutool.core.util.StrUtil;
import com.baomidou.mybatisplus.core.toolkit.CollectionUtils;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.net.ftp.FTP;
import org.apache.commons.net.ftp.FTPClient;
@ -13,10 +17,12 @@ import org.springframework.stereotype.Component;
import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServletResponse;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
@ -289,6 +295,34 @@ public class FTPUtil {
}
}
/*
* 批量删除FTP文件 返回删除失败的文件路径
* */
/*
 * Batch-delete FTP files; returns the paths that could not be deleted.
 * An empty input yields an empty result. If the FTP connection cannot be
 * established, every requested path is reported as failed.
 * */
public List<String> removeFiles(List<String> paths){
    List<String> failList = new ArrayList<>();
    if (CollUtil.isEmpty(paths))
        return failList;
    // Connect to the FTP server
    final FTPClient ftpClient = this.LoginFTP();
    // LoginFTP may return null on failure (sibling methods guard for this);
    // without this check deleteFile below would throw an NPE.
    if (ObjectUtil.isNull(ftpClient)) {
        failList.addAll(paths);
        return failList;
    }
    try {
        for (String path : paths) {
            try {
                // Blank entries are silently skipped (not counted as failures)
                if (StrUtil.isBlank(path)) continue;
                boolean success = ftpClient.deleteFile(path);
                if (!success) failList.add(path);
            } catch (IOException e) {
                failList.add(path);
                e.printStackTrace();
            }
        }
    } finally {
        // Always close the FTP connection, even if iteration throws
        try {
            ftpClient.disconnect();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
    return failList;
}
public static boolean createDirs(FTPClient ftp , String path) throws IOException {
/* 该部分为逐级创建 */
String[] split = path.split(SymbolConstant.SINGLE_SLASH);
@ -303,4 +337,66 @@ public class FTPUtil {
}
return true;
}
/*
* 将源FTP路径的文件保存为指定路径的临时文件
* */
/*
 * Download the file at the given FTP path into a local temporary file
 * (prefix taken from toPath). Returns null when the connection fails or
 * the remote file cannot be opened.
 * */
public File downloadFile(String fromPath, String toPath) {
    FTPClient ftpClient = null;
    InputStream inputStream = null;
    try {
        ftpClient = LoginFTP();
        // Passive mode: the server opens the data port (friendlier to client firewalls)
        ftpClient.enterLocalPassiveMode();
        ftpClient.setFileType(FTPClient.BINARY_FILE_TYPE);
        // UTF-8 avoids garbled names when paths contain non-ASCII characters
        ftpClient.setControlEncoding("UTF-8");
        ftpClient.setFileTransferMode(FTPClient.STREAM_TRANSFER_MODE);
        inputStream = ftpClient.retrieveFileStream(fromPath);
        // retrieveFileStream returns null when the remote file cannot be opened;
        // copyInputStreamToFile would NPE on a null stream.
        if (ObjectUtil.isNull(inputStream)) {
            return null;
        }
        // Declare a temporary file and copy the FTP stream into it
        File tempFile = File.createTempFile(toPath, null);
        FileUtils.copyInputStreamToFile(inputStream, tempFile);
        return tempFile;
    } catch (IOException e) {
        e.printStackTrace();
        return null;
    } finally {
        try {
            // Close the data stream before dropping the control connection
            if (ObjectUtil.isNotNull(inputStream))
                inputStream.close();
            if (ObjectUtil.isNotNull(ftpClient))
                ftpClient.disconnect();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
/*
* 将源FTP路径的文件转换为文件流
* */
/*
 * Download the file at the given FTP path and return its contents as a stream.
 * The remote file is fully buffered in memory before the connection is closed:
 * the original implementation returned the live FTP data stream and then
 * disconnected the client in finally, invalidating the stream for the caller.
 * Returns null on connection/transfer failure.
 * */
public InputStream downloadFileStream(String fromPath) {
    FTPClient ftpClient = null;
    InputStream inputStream = null;
    try {
        ftpClient = LoginFTP();
        // Passive mode: the server opens the data port
        ftpClient.enterLocalPassiveMode();
        ftpClient.setFileType(FTPClient.BINARY_FILE_TYPE);
        // UTF-8 avoids garbled names when paths contain non-ASCII characters
        ftpClient.setControlEncoding("UTF-8");
        ftpClient.setFileTransferMode(FTPClient.STREAM_TRANSFER_MODE);
        inputStream = ftpClient.retrieveFileStream(fromPath);
        // null means the remote file could not be opened
        if (ObjectUtil.isNull(inputStream)) {
            return null;
        }
        // Copy the remote file into memory so the returned stream stays valid
        // after the FTP connection is closed below.
        java.io.ByteArrayOutputStream buffer = new java.io.ByteArrayOutputStream();
        byte[] chunk = new byte[8192];
        int read;
        while ((read = inputStream.read(chunk)) != -1) {
            buffer.write(chunk, 0, read);
        }
        return new java.io.ByteArrayInputStream(buffer.toByteArray());
    } catch (IOException e) {
        e.printStackTrace();
        return null;
    } finally {
        try {
            if (ObjectUtil.isNotNull(inputStream))
                inputStream.close();
            if (ObjectUtil.isNotNull(ftpClient))
                ftpClient.disconnect();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
}

View File

@ -0,0 +1,25 @@
package org.jeecg.config.mybatis;
import com.baomidou.dynamic.datasource.toolkit.DynamicDataSourceContextHolder;
/**
 * Datasource switcher: routes subsequent MyBatis queries on the current
 * thread to a named dynamic datasource. Callers must invoke {@link #clear()}
 * (ideally in a finally block) after their queries to restore the default.
 */
public class DSSwitcher {
    // Datasource keys as declared in the dynamic-datasource configuration
    private final static String ORACLE = "ora";
    private final static String PGSQL = "master";

    // Utility class: static methods only, no instances
    private DSSwitcher() {
    }

    /** Route queries on this thread to the Oracle datasource. */
    public static void switchToOracle(){
        DynamicDataSourceContextHolder.push(ORACLE);
    }

    /** Route queries on this thread to the PostgreSQL (master) datasource. */
    public static void switchToPGSql(){
        DynamicDataSourceContextHolder.push(PGSQL);
    }

    /** Drop the thread-local datasource selection, restoring the default. */
    public static void clear(){
        DynamicDataSourceContextHolder.clear();
    }
}

View File

@ -0,0 +1,17 @@
package org.jeecg.modules.base.dto;
import lombok.Data;
@Data
// Per-table size/usage summary returned by the database-detail queries
// (SysDatabaseMapper.dbInfoOR/dbIndexOR/dbInfoMY).
public class DBInfo {
    // Table name within the schema/owner
    private String tableName;
    // Row count as reported by the catalog (may be an estimate — TODO confirm per DB)
    private Integer numRow;
    // Data segment size; queries compute it in MiB
    private Double dataSize;
    // Index segment size; queries compute it in MiB
    private Double indexSize;
    // Usage percentage (only populated by the Oracle query)
    private Double used;
}

View File

@ -0,0 +1,11 @@
package org.jeecg.modules.base.dto;
import lombok.Data;
@Data
// Pairs a schema/owner name with a table name.
public class OwnerDto {
    // Schema or owner the table belongs to
    private String owner;
    // Table name within that owner
    private String tableName;
}

View File

@ -18,7 +18,7 @@ public class FittingBody implements Serializable {
private Integer count;
private String qcFileName;
private String sampleFileName;
private String tabName;

View File

@ -31,8 +31,16 @@ public class QCResult implements Serializable {
private String xe133MDCStatus;
private String gasBgEvaluationMetrics;
private String gasBgValue;
private boolean gasBgValueAndStatus;
private String detBgEvaluationMetrics;
private String detBgValue;
private boolean detBgValueAndStatus;
}

View File

@ -63,4 +63,17 @@ public class SysDatabaseController {
List<SourceDto> sourceDtos = sysDatabaseService.listAll();
return Result.OK(sourceDtos);
}
@GetMapping("dbNames")
@ApiOperation(value = "数据库名列表",notes = "数据库名列表")
// Lists database/schema names for the given database type (delegates to the service).
public Result<?> dbNames(@RequestParam String dbType){
    return Result.OK(sysDatabaseService.dbNames(dbType));
}
@GetMapping("dbInfo")
@ApiOperation(value = "数据库表详情信息",notes = "数据库表详情信息")
// Per-table detail (rows, data/index size) for one database of the given type.
public Result<?> dbInfo(@RequestParam String dbType,
                        @RequestParam String dataBase) {
    return Result.OK(sysDatabaseService.dbInfo(dbType, dataBase));
}
}

View File

@ -1,6 +1,8 @@
package org.jeecg.modules.mapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import org.apache.ibatis.annotations.Param;
import org.jeecg.modules.base.dto.DBInfo;
import org.jeecg.modules.base.dto.DatabaseDto;
import org.jeecg.modules.base.entity.postgre.SysDatabase;
import org.jeecg.modules.entity.AlarmHistory;
@ -13,4 +15,16 @@ public interface SysDatabaseMapper extends BaseMapper<SysDatabase> {
List<AlarmHistory> findAlarmHistory(Map<String,Object> params);
List<DatabaseDto> findPage(Map<String,Object> params);
// PostgreSQL: non-template database names (pg_database)
List<String> dbNamesPG();
// MySQL: database names (SHOW DATABASES)
List<String> dbNamesMY();
// Oracle: schema/user names (all_users)
List<String> dbNamesOR();
// Oracle: per-table rows + data size for one owner
List<DBInfo> dbInfoOR(@Param("owner") String dataBase);
// Oracle: per-table INDEX segment size for one owner (merged into dbInfoOR results)
List<DBInfo> dbIndexOR(@Param("owner") String dataBase);
// MySQL: per-table rows + data/index size for one schema
List<DBInfo> dbInfoMY(@Param("owner") String dataBase);
}

View File

@ -18,10 +18,4 @@ public interface SysServerMapper extends BaseMapper<SysServer> {
List<SourceDto> pageAll(String itemName);
List<AlarmInfo> alarmInfo(String sourceId);
List<String> dbNamesPG();
List<String> dbNamesMY();
List<String> dbNamesOR();
}

View File

@ -72,14 +72,4 @@
WHERE r.source_id = #{sourceId}
ORDER BY l.alarm_start_date DESC
</select>
<select id="dbNamesPG" resultType="java.lang.String">
SELECT datname FROM pg_database WHERE datistemplate = false;
</select>
<select id="dbNamesMY" resultType="java.lang.String">
SHOW DATABASES;
</select>
<select id="dbNamesOR" resultType="java.lang.String">
SELECT username FROM all_users;
</select>
</mapper>

View File

@ -50,5 +50,51 @@
LIMIT #{pageSize} OFFSET #{pageStart}
</if>
</select>
<select id="dbNamesPG" resultType="java.lang.String">
SELECT datname FROM pg_database WHERE datistemplate = false;
</select>
<select id="dbNamesMY" resultType="java.lang.String">
SHOW DATABASES;
</select>
<select id="dbNamesOR" resultType="java.lang.String">
SELECT username FROM all_users;
</select>
<!-- Oracle: per-table row count and TABLE-segment size (MiB) for one owner.
     NOTE(review): indexSize below reuses the TABLE segment bytes (same
     expression as dataSize) and is overwritten afterwards by dbIndexOR in
     the service layer - confirm whether it can simply be dropped here.
     NOTE(review): d.max_size is not a standard DBA_SEGMENTS column - verify
     this 'used' calculation against the target Oracle version. -->
<select id="dbInfoOR" resultType="org.jeecg.modules.base.dto.DBInfo">
    SELECT
    a.table_name AS tableName,
    a.num_rows AS numRow,
    COALESCE(ROUND((d.bytes / (1024 * 1024)), 2), 0) AS dataSize,
    COALESCE(ROUND((d.bytes / (1024 * 1024)), 2), 0) AS indexSize,
    COALESCE(ROUND((d.bytes / d.max_size) * 100, 3), 0) AS used
    FROM
    all_tables a
    LEFT JOIN dba_segments d ON a.owner = d.owner AND a.table_name = d.segment_name AND d.segment_type = 'TABLE'
    WHERE
    a.owner = #{owner}
    ORDER BY a.table_name
</select>
<select id="dbIndexOR" resultType="org.jeecg.modules.base.dto.DBInfo">
SELECT
a.table_name AS tableName,
COALESCE(ROUND((d.bytes / (1024 * 1024)), 2), 0) AS indexSize
FROM
all_tables a
LEFT JOIN dba_segments d ON a.owner = d.owner AND a.table_name = d.segment_name AND d.segment_type = 'INDEX'
WHERE
a.owner = #{owner}
ORDER BY a.table_name
</select>
<!-- MySQL: per-table row count and data/index sizes (MiB) for one schema.
     Fix: removed the stray semicolon after the WHERE clause, which made the
     statement 'table_schema = ?; ORDER BY ...' a SQL syntax error. -->
<select id="dbInfoMY" resultType="org.jeecg.modules.base.dto.DBInfo">
    SELECT
    TABLE_NAME AS tableName,
    TABLE_ROWS AS numRow,
    round((data_length / 1024 / 1024), 2) AS dataSize,
    round((index_length / 1024 / 1024), 2) AS indexSize
    FROM
    information_schema.tables
    WHERE
    table_schema = #{owner}
    ORDER BY TABLE_NAME
</select>
</mapper>

View File

@ -3,6 +3,7 @@ package org.jeecg.modules.service;
import com.baomidou.mybatisplus.extension.service.IService;
import org.jeecg.common.api.QueryRequest;
import org.jeecg.common.api.vo.Result;
import org.jeecg.modules.base.dto.DBInfo;
import org.jeecg.modules.base.dto.SourceDto;
import org.jeecg.modules.base.entity.postgre.SysDatabase;
import org.jeecg.modules.base.bizVo.SourceVo;
@ -24,4 +25,8 @@ public interface ISysDatabaseService extends IService<SysDatabase> {
Result findAlarmHistory(SourceVo sourceVo);
List<SourceDto> listAll();
List<String> dbNames(String dbType);
List<DBInfo> dbInfo(String dbType, String dataBase);
}

View File

@ -33,6 +33,4 @@ public interface ISysServerService extends IService<SysServer> {
Result<?> details_BasicInfo(String hostId);
Result<?> details_AlarmInfo(String sourceId, Integer pageNo, Integer pageSize);
List<String> dbNames();
}

View File

@ -9,10 +9,13 @@ import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import org.jeecg.common.api.QueryRequest;
import org.jeecg.common.api.vo.Result;
import org.jeecg.common.constant.DataBaseConstant;
import org.jeecg.common.constant.DateConstant;
import org.jeecg.common.constant.DictConstant;
import org.jeecg.common.constant.Prompt;
import org.jeecg.common.system.vo.DictModel;
import org.jeecg.config.mybatis.DSSwitcher;
import org.jeecg.modules.base.dto.DBInfo;
import org.jeecg.modules.base.dto.DatabaseDto;
import org.jeecg.modules.base.dto.SourceDto;
import org.jeecg.modules.base.entity.postgre.SysDatabase;
@ -178,4 +181,87 @@ public class SysDatabaseServiceImpl extends ServiceImpl<SysDatabaseMapper, SysDa
return sourceDtos;
}
@Override
// Dispatch database-name listing by database type; unsupported or unknown
// types return an empty list.
public List<String> dbNames(String dbType) {
    List<String> dbNames = new ArrayList<>();
    switch (dbType){
        case DataBaseConstant.DB_TYPE_ORACLE:
            dbNames = dbNamesOR();
            break;
        case DataBaseConstant.DB_TYPE_POSTGRESQL:
            dbNames = dbNamesPG();
            break;
        case DataBaseConstant.DB_TYPE_MYSQL:
            // NOTE(review): MySQL branch not implemented — dbNamesMY() exists
            // below but is never called; confirm whether it should be wired in.
            break;
        default:
            break;
    }
    return dbNames;
}
@Override
// Dispatch per-table detail queries by database type; unsupported or unknown
// types return an empty list.
public List<DBInfo> dbInfo(String dbType, String dataBase) {
    List<DBInfo> dbInfos = new ArrayList<>();
    switch (dbType){
        case DataBaseConstant.DB_TYPE_ORACLE:
            dbInfos = dbInfoOR(dataBase);
            break;
        case DataBaseConstant.DB_TYPE_POSTGRESQL:
            // NOTE(review): PostgreSQL branch not implemented — dbInfoPG() below
            // is a stub; confirm intended behavior before wiring it in.
            break;
        case DataBaseConstant.DB_TYPE_MYSQL:
            // NOTE(review): MySQL branch not implemented — dbInfoMY() exists
            // below but is never called; confirm whether it should be wired in.
            break;
        default:
            break;
    }
    return dbInfos;
}
/*
 * List Oracle schema names via the Oracle datasource.
 * clear() now runs in finally: the original leaked the thread-local
 * datasource selection if the mapper call threw.
 * */
private List<String> dbNamesOR(){
    DSSwitcher.switchToOracle();
    try {
        return baseMapper.dbNamesOR();
    } finally {
        // Always restore the default datasource for this thread
        DSSwitcher.clear();
    }
}
// List PostgreSQL database names; PG is the default datasource, so no switch needed.
private List<String> dbNamesPG(){
    return baseMapper.dbNamesPG();
}
// List MySQL database names.
// NOTE(review): the original comments say "switch datasource" / "clear
// datasource" but no DSSwitcher call is made — running SHOW DATABASES against
// the default (PG) datasource would fail. Confirm a MySQL datasource key
// exists and add the switch before using this method.
private List<String> dbNamesMY(){
    // 切换数据源 (datasource switch intended here — not implemented)
    return baseMapper.dbNamesMY();
    // 清除数据源 (datasource clear intended here — unreachable as written)
}
/*
 * Per-table detail for one Oracle owner: base rows/data size from dbInfoOR,
 * with the real INDEX-segment size merged in from dbIndexOR.
 * Fixes: DSSwitcher.clear() now runs in finally (the original leaked the
 * thread-local datasource if a query threw), and a missing index entry no
 * longer overwrites the queried value with null.
 * */
private List<DBInfo> dbInfoOR(String dataBase){
    DSSwitcher.switchToOracle();
    try {
        List<DBInfo> dbInfos = baseMapper.dbInfoOR(dataBase);
        // tableName -> index size (MiB); dbIndexOR COALESCEs to 0, so values are non-null
        Map<String, Double> indexSize = baseMapper.dbIndexOR(dataBase).stream()
                .collect(Collectors.toMap(DBInfo::getTableName, DBInfo::getIndexSize));
        for (DBInfo dbInfo : dbInfos) {
            Double size = indexSize.get(dbInfo.getTableName());
            // Only overwrite when an index row exists for the table
            if (size != null) dbInfo.setIndexSize(size);
        }
        return dbInfos;
    } finally {
        // Always restore the default datasource for this thread
        DSSwitcher.clear();
    }
}
/*
 * Per-table detail for PostgreSQL — not implemented yet.
 * Returns an empty list instead of null so any future caller can iterate
 * the result safely without a null check.
 * */
private List<DBInfo> dbInfoPG(String dataBase){
    return new ArrayList<>();
}
// Per-table detail for one MySQL schema.
// NOTE(review): as with dbNamesMY, the comments promise a datasource switch
// that is never performed — querying information_schema on the default (PG)
// datasource would fail. Confirm the MySQL datasource wiring before use.
private List<DBInfo> dbInfoMY(String dataBase){
    // 切换数据源 (datasource switch intended here — not implemented)
    List<DBInfo> dbInfos = baseMapper.dbInfoMY(dataBase);
    // 清除数据源 (datasource clear intended here — not implemented)
    return dbInfos;
}
}

View File

@ -304,10 +304,4 @@ public class SysServerServiceImpl extends ServiceImpl<SysServerMapper, SysServer
page.setRecords(records);
return Result.OK(page);
}
@Override
public List<String> dbNames() {
return null;
}
}

View File

@ -62,7 +62,7 @@ public class GardsSampleDataServiceImpl extends ServiceImpl<GardsSampleDataMappe
*/
@Override
public GardsSampleData getSampleIdAndInputFileName(String measurementId, String dataType, String systemType) {
String detectorId = measurementId.substring(0, 8);
String detectorId = measurementId.substring(0, 9);
final List<GardsSampleData> sampleDatas = this.baseMapper.getSampleIdAndInputFileName(measurementId, dataType, systemType, detectorId);
if(!CollectionUtils.isEmpty(sampleDatas)){
//如果查询出多条则需要根据inputFileName字段降序排序后返回第一个

View File

@ -18,11 +18,8 @@ import org.jeecg.common.constant.*;
import org.jeecg.common.constant.enums.SpectrumSystemType;
import org.jeecg.common.properties.ParameterProperties;
import org.jeecg.common.properties.SpectrumPathProperties;
import org.jeecg.common.util.DateUtils;
import org.jeecg.common.util.GammaFileUtil;
import org.jeecg.common.util.RedisUtil;
import org.jeecg.common.util.*;
import org.jeecg.modules.base.bizVo.AttributeItemVo;
import org.jeecg.common.util.MyLogFormatUtil;
import org.jeecg.modules.base.dto.*;
import org.jeecg.modules.base.entity.original.GardsSampleData;
import org.jeecg.modules.base.entity.rnauto.*;
@ -61,6 +58,8 @@ public class Sample_G_Analysis {
private RedisUtil redisUtil;
private FTPUtil ftpUtil;
/**
* 系统类型
*/
@ -124,9 +123,10 @@ public class Sample_G_Analysis {
PHDFile phdFile = new PHDFile();
phdFile.setXmlFilePath(parameterProperties.getFilePath());
// 解析PHD文件
spectrumPathProperties = ApplicationContextUtil.getContext().getBean(SpectrumPathProperties.class);
spectrumPathProperties = SpringContextUtils.getBean(SpectrumPathProperties.class);
ftpUtil = SpringContextUtils.getBean(FTPUtil.class);
String sampleFilePath = sampleData.getInputFileName();
String pathName = File.separator + spectrumPathProperties.getSaveFilePath() + File.separator +
String pathName = ftpUtil.getFtpRootPath() + spectrumPathProperties.getSaveFilePath() + File.separator +
sampleFilePath.substring(0, sampleFilePath.lastIndexOf(StringPool.SLASH));
String fileName = sampleFilePath.substring(sampleFilePath.lastIndexOf(StringPool.SLASH)+1);

View File

@ -1,5 +1,6 @@
package org.jeecg.common.util;
import cn.hutool.core.util.ObjectUtil;
import com.baomidou.mybatisplus.core.toolkit.CollectionUtils;
import com.baomidou.mybatisplus.core.toolkit.StringPool;
import com.baomidou.mybatisplus.core.toolkit.StringUtils;
@ -262,52 +263,19 @@ public class PHDFileUtil extends AbstractLogOrReport {
public List<String> readLine(String filePath) {
String parameterFilePath = filePath.substring(0, filePath.lastIndexOf(StringPool.SLASH));
String fileName = filePath.substring(filePath.lastIndexOf(StringPool.SLASH) + 1);
//连接ftp
FTPClient ftpClient = ftpUtil.LoginFTP();
//判断ftp是否连接成功
if (Objects.isNull(ftpClient)){
throw new RuntimeException("ftp connection failed!");
}
InputStream iStream= null;
File file = null;
List<String> allLines = new ArrayList<>();
try {
//被动模式
ftpClient.enterLocalPassiveMode();
//设置文件类型--二进制文件
ftpClient.setFileType(FTP.BINARY_FILE_TYPE);
//
ftpClient.setControlEncoding("UTF-8");
ftpClient.setFileTransferMode(FTPClient.STREAM_TRANSFER_MODE);
//在当前工作路径下读取文件
ftpClient.changeWorkingDirectory(parameterFilePath);
//读取ftp文件的输入流
iStream=ftpClient.retrieveFileStream(fileName);
if (Objects.nonNull(iStream)) {
//声明一个临时文件
file = File.createTempFile("betaGamma", null);
//将ftp文件的输入流复制给临时文件
FileUtils.copyInputStreamToFile(iStream, file);
List<String> allLines = FileUtils.readLines(file, ftpUtil.getEncoding());
return allLines;
}
} catch (IOException e) {
throw new RuntimeException(e);
file = ftpUtil.downloadFile(filePath, "betaGamma");
return FileUtils.readLines(file, ftpUtil.getEncoding());
}catch (IOException e){
e.printStackTrace();
return allLines;
} finally {
try {
if (Objects.nonNull(ftpClient)){
ftpClient.disconnect();
}
if (Objects.nonNull(iStream)){
iStream.close();
}
if (Objects.nonNull(file)) {
file.delete();
}
} catch (IOException e) {
throw new RuntimeException(e);
}
if (ObjectUtil.isNotNull(file))
file.delete();
}
return Collections.emptyList();
}
public void getLightColor(Map<String, Object> sampleMap, Map<String, Object> gasBgMap, Map<String, Object> detBgMap, Map<String, Object> qcMap) {
@ -451,138 +419,83 @@ public class PHDFileUtil extends AbstractLogOrReport {
public Map<String, String> getFileData(String filePath, String sampleFileName) {
Map<String, String> map = new HashMap<>();
//连接ftp 获取ftp文件数据
FTPClient ftpClient = ftpUtil.LoginFTP();
if (Objects.isNull(ftpClient)){
return map;
}
InputStream inputStream = null;
File file = null;
try {
//切换被动模式
ftpClient.enterLocalPassiveMode();
ftpClient.setFileType(FTPClient.BINARY_FILE_TYPE);
// 设置编码当文件中存在中文且上传后文件乱码时可使用此配置项
ftpClient.setControlEncoding("UTF-8");
ftpClient.setFileTransferMode(FTPClient.STREAM_TRANSFER_MODE);
//切换工作路径
ftpClient.changeWorkingDirectory(filePath);
//解析sampleFile
inputStream = ftpClient.retrieveFileStream(sampleFileName);
if (Objects.nonNull(inputStream)) {
//声明一个临时文件
file = File.createTempFile("betaGamma", null);
//将ftp文件的输入流复制给临时文件
FileUtils.copyInputStreamToFile(inputStream, file);
//加载sampleFile内容
EnergySpectrumStruct struct = EnergySpectrumHandler.getSourceData(file.getAbsolutePath());
//获取所需要的数据
String dataType = struct.data_type;
String systemType = struct.system_type;
String spectrumQuantity = struct.spectrum_quantity;
double acquisitionLiveTime = struct.acquisition_live_time;
String measurementId = struct.measurement_id;
String gasBkMeasurementId = struct.gas_bk_measurement_id;
String detectorBkMeasurementId = struct.detector_bk_measurement_id;
//格式化文件名称
String fileSuffix = nameStandUtil.GetSuffix(dataType, spectrumQuantity, String.valueOf(acquisitionLiveTime));
String measurementName = nameStandUtil.GetFileNameFromDateTime(measurementId, fileSuffix);
String gasFileName = nameStandUtil.GetFileNameFromDateTime(gasBkMeasurementId, "_G.PHD");
String detaFileName = nameStandUtil.GetFileNameFromDateTime(detectorBkMeasurementId, "_D.PHD");
map.put("sampleFileName", measurementName);
map.put("gasFileName", gasFileName);
map.put("detaFileName", detaFileName);
map.put("sampleSystemType", systemType);
}
} catch (IOException e) {
throw new RuntimeException(e);
} finally {
try {
if (ftpClient!=null){
ftpClient.disconnect();
}
if (inputStream!=null){
inputStream.close();
}
if (Objects.nonNull(file)) {
file.delete();
}
} catch (IOException e) {
throw new RuntimeException(e);
}
String fromPath = filePath + StringPool.SLASH + sampleFileName;
file = ftpUtil.downloadFile(fromPath, "betaGamma");
//加载sampleFile内容
EnergySpectrumStruct struct = EnergySpectrumHandler.getSourceData(file.getAbsolutePath());
//获取所需要的数据
String dataType = struct.data_type;
String systemType = struct.system_type;
String spectrumQuantity = struct.spectrum_quantity;
double acquisitionLiveTime = struct.acquisition_live_time;
String measurementId = struct.measurement_id;
String gasBkMeasurementId = struct.gas_bk_measurement_id;
String detectorBkMeasurementId = struct.detector_bk_measurement_id;
//格式化文件名称
String fileSuffix = nameStandUtil.GetSuffix(dataType, spectrumQuantity, String.valueOf(acquisitionLiveTime));
String measurementName = nameStandUtil.GetFileNameFromDateTime(measurementId, fileSuffix);
String gasFileName = nameStandUtil.GetFileNameFromDateTime(gasBkMeasurementId, "_G.PHD");
String detaFileName = nameStandUtil.GetFileNameFromDateTime(detectorBkMeasurementId, "_D.PHD");
map.put("sampleFileName", measurementName);
map.put("gasFileName", gasFileName);
map.put("detaFileName", detaFileName);
map.put("sampleSystemType", systemType);
return map;
}catch (Exception e){
e.printStackTrace();
return map;
}finally {
if (ObjectUtil.isNotNull(file))
file.delete();
}
return map;
}
public String NameStandardBy(String filePath, String fileName) {
//连接ftp
FTPClient ftpClient = ftpUtil.LoginFTP();
InputStream inputStream = null;
File file = null;
StringBuffer path = new StringBuffer();
File file = null;
try {
//被动模式
ftpClient.enterLocalPassiveMode();
//设置文件类型--二进制文件
ftpClient.setFileType(FTP.BINARY_FILE_TYPE);
//
ftpClient.setControlEncoding("UTF-8");
ftpClient.setFileTransferMode(FTPClient.STREAM_TRANSFER_MODE);
//切换文件路径
ftpClient.changeWorkingDirectory(filePath);
inputStream = ftpClient.retrieveFileStream(fileName);
if (Objects.nonNull(inputStream)){
file = File.createTempFile("betaGamma", null);
//将ftp文件的输入流复制给临时文件
FileUtils.copyInputStreamToFile(inputStream, file);
EnergySpectrumStruct sourceData = EnergySpectrumHandler.getSourceData(file.getAbsolutePath());
String systemType = sourceData.system_type;
String dataType = sourceData.data_type;
if(systemType.contains("B")) {
path.append("Spectrum");
path.append(StringPool.SLASH+"Xenon");
path.append(StringPool.SLASH+"Sauna");
} else if(systemType.contains("G")) {
path.append("Spectrum");
path.append(StringPool.SLASH+"Xenon");
path.append(StringPool.SLASH+"Spalax");
}
if(dataType.contains("SAMPLEPHD")) {
path.append(StringPool.SLASH+"Samplephd");
} else if(dataType.contains("DETBKPHD")) {
path.append(StringPool.SLASH+"Detbkphd");
} else if(dataType.contains("GASBKPHD")) {
path.append(StringPool.SLASH+"Gasbkphd");
} else if(dataType.contains("QCPHD")) {
path.append(StringPool.SLASH+"Qcphd");
}
int pos = fileName.indexOf('-');
if(-1 == pos) {
String fromPath = ftpUtil.getFtpRootPath() + filePath +
StringPool.SLASH + fileName;
file = ftpUtil.downloadFile(fromPath, "betaGamma");
EnergySpectrumStruct sourceData = EnergySpectrumHandler.getSourceData(file.getAbsolutePath());
String systemType = sourceData.system_type;
String dataType = sourceData.data_type;
if(systemType.contains("B")) {
path.append("Spectrum");
path.append(StringPool.SLASH+"Xenon");
path.append(StringPool.SLASH+"Sauna");
} else if(systemType.contains("G")) {
path.append("Spectrum");
path.append(StringPool.SLASH+"Xenon");
path.append(StringPool.SLASH+"Spalax");
}
if(dataType.contains("SAMPLEPHD")) {
path.append(StringPool.SLASH+"Samplephd");
} else if(dataType.contains("DETBKPHD")) {
path.append(StringPool.SLASH+"Detbkphd");
} else if(dataType.contains("GASBKPHD")) {
path.append(StringPool.SLASH+"Gasbkphd");
} else if(dataType.contains("QCPHD")) {
path.append(StringPool.SLASH+"Qcphd");
}
int pos = fileName.indexOf('-');
if(-1 == pos) {
} else if(fileName.length() >= pos+7) {
path.append(StringPool.SLASH+fileName.substring(pos+1,pos+5));
path.append(StringPool.SLASH+fileName.substring(pos+5,pos+7));
}
path.append(StringPool.SLASH+fileName);
}
} catch (IOException e) {
throw new RuntimeException(e);
} finally {
try {
if (Objects.nonNull(ftpClient)){
ftpClient.disconnect();
}
if (inputStream != null){
inputStream.close();
}
if (Objects.nonNull(file)) {
file.delete();
}
} catch (IOException e) {
throw new RuntimeException(e);
} else if(fileName.length() >= pos+7) {
path.append(StringPool.SLASH+fileName.substring(pos+1,pos+5));
path.append(StringPool.SLASH+fileName.substring(pos+5,pos+7));
}
path.append(StringPool.SLASH+fileName);
return path.toString();
}catch (Exception e){
e.printStackTrace();
return path.toString();
}finally {
if (ObjectUtil.isNotNull(file))
file.delete();
}
return path.toString();
}
public List<String> FileNameByStandardForm(String filePath, String sampleFileName) {
@ -692,7 +605,8 @@ public class PHDFileUtil extends AbstractLogOrReport {
return file;
}
public void analyzeSpectrum(File sampleTmp, File gasTmp, File detTmp, BgCalibratePara BgCalPara, Map<String, Object> map) {
public boolean analyzeSpectrum(File sampleTmp, File gasTmp, File detTmp, BgCalibratePara BgCalPara, Map<String, Object> map) {
boolean bRet = true;
//调用动态库解析文件
BgAnalyseResult analyseResult = null;
if (Objects.isNull(BgCalPara)) {
@ -700,84 +614,90 @@ public class PHDFileUtil extends AbstractLogOrReport {
} else {
analyseResult = EnergySpectrumHandler.bgReAnalyse(sampleTmp.getAbsolutePath(), gasTmp.getAbsolutePath(), detTmp.getAbsolutePath(), BgCalPara);
}
EnergySpectrumStruct sample = analyzeFileSourceData(sampleTmp);
EnergySpectrumStruct gas = analyzeFileSourceData(gasTmp);
EnergySpectrumStruct det = analyzeFileSourceData(detTmp);
String sampleFileName = (String) map.get("sampleFileName");
String logName = sampleFileName.replace("PHD", "log");
OutPutRnLog(analyseResult, sample, gas, det, logName);
//需要返回到前端的XeData数据
List<GardsXeResultsSpectrum> xeResultsSpectrumList = new LinkedList<>();
//存入计算后得到的xeData数据
GardsXeResultsSpectrum xe131m = new GardsXeResultsSpectrum();
xe131m.setNuclideName(XeNuclideName.XE_131m.getType());
xe131m.setConc(analyseResult.Xe131m_con);
xe131m.setConcErr(analyseResult.Xe131m_uncer);
xe131m.setLc(analyseResult.LC_Xe131m);
xe131m.setMdc(analyseResult.MDC_Xe131m);
xe131m.setNidFlag(analyseResult.XE_131m_NID_FLAG);
xeResultsSpectrumList.add(xe131m);
GardsXeResultsSpectrum xe133 = new GardsXeResultsSpectrum();
xe133.setNuclideName(XeNuclideName.XE_133.getType());
xe133.setConc(analyseResult.Xe133_con);
xe133.setConcErr(analyseResult.Xe133_uncer);
xe133.setLc(analyseResult.LC_Xe133);
xe133.setMdc(analyseResult.MDC_Xe133);
xe133.setNidFlag(analyseResult.XE_133_NID_FLAG);
xeResultsSpectrumList.add(xe133);
GardsXeResultsSpectrum xe133m = new GardsXeResultsSpectrum();
xe133m.setNuclideName(XeNuclideName.XE_133m.getType());
xe133m.setConc(analyseResult.Xe133m_con);
xe133m.setConcErr(analyseResult.Xe133m_uncer);
xe133m.setLc(analyseResult.LC_Xe133m);
xe133m.setMdc(analyseResult.MDC_Xe133m);
xe133m.setNidFlag(analyseResult.XE_133m_NID_FLAG);
xeResultsSpectrumList.add(xe133m);
GardsXeResultsSpectrum xe135 = new GardsXeResultsSpectrum();
xe135.setNuclideName(XeNuclideName.XE_135.getType());
xe135.setConc(analyseResult.Xe135_con);
xe135.setConcErr(analyseResult.Xe135_uncer);
xe135.setLc(analyseResult.LC_Xe135);
xe135.setMdc(analyseResult.MDC_Xe135);
xe135.setNidFlag(analyseResult.XE_135_NID_FLAG);
xeResultsSpectrumList.add(xe135);
map.put("XeData", xeResultsSpectrumList);
//新计算得到的边界值
if (CollectionUtils.isNotEmpty(analyseResult.S_ROI_B_Boundary_start)) {
List<Boundary> boundaryList = new LinkedList<>();
for (int i=0; i<analyseResult.S_ROI_B_Boundary_start.size(); i++) {
Boundary boundary = new Boundary();
boundary.setMinX(analyseResult.S_ROI_B_Boundary_start.get(i));
boundary.setMaxX(analyseResult.S_ROI_B_Boundary_stop.get(i));
boundary.setMinY(analyseResult.S_ROI_G_Boundary_start.get(i));
boundary.setMaxY(analyseResult.S_ROI_G_Boundary_stop.get(i));
boundaryList.add(boundary);
if (StringUtils.isNotBlank(analyseResult.error_log) && !analyseResult.error_log.equalsIgnoreCase("no error.")) {
bRet = false;
return bRet;
} else {
EnergySpectrumStruct sample = analyzeFileSourceData(sampleTmp);
EnergySpectrumStruct gas = analyzeFileSourceData(gasTmp);
EnergySpectrumStruct det = analyzeFileSourceData(detTmp);
String sampleFileName = (String) map.get("sampleFileName");
String logName = sampleFileName.replace("PHD", "log");
OutPutRnLog(analyseResult, sample, gas, det, logName);
//需要返回到前端的XeData数据
List<GardsXeResultsSpectrum> xeResultsSpectrumList = new LinkedList<>();
//存入计算后得到的xeData数据
GardsXeResultsSpectrum xe131m = new GardsXeResultsSpectrum();
xe131m.setNuclideName(XeNuclideName.XE_131m.getType());
xe131m.setConc(analyseResult.Xe131m_con);
xe131m.setConcErr(analyseResult.Xe131m_uncer);
xe131m.setLc(analyseResult.LC_Xe131m);
xe131m.setMdc(analyseResult.MDC_Xe131m);
xe131m.setNidFlag(analyseResult.XE_131m_NID_FLAG);
xeResultsSpectrumList.add(xe131m);
GardsXeResultsSpectrum xe133 = new GardsXeResultsSpectrum();
xe133.setNuclideName(XeNuclideName.XE_133.getType());
xe133.setConc(analyseResult.Xe133_con);
xe133.setConcErr(analyseResult.Xe133_uncer);
xe133.setLc(analyseResult.LC_Xe133);
xe133.setMdc(analyseResult.MDC_Xe133);
xe133.setNidFlag(analyseResult.XE_133_NID_FLAG);
xeResultsSpectrumList.add(xe133);
GardsXeResultsSpectrum xe133m = new GardsXeResultsSpectrum();
xe133m.setNuclideName(XeNuclideName.XE_133m.getType());
xe133m.setConc(analyseResult.Xe133m_con);
xe133m.setConcErr(analyseResult.Xe133m_uncer);
xe133m.setLc(analyseResult.LC_Xe133m);
xe133m.setMdc(analyseResult.MDC_Xe133m);
xe133m.setNidFlag(analyseResult.XE_133m_NID_FLAG);
xeResultsSpectrumList.add(xe133m);
GardsXeResultsSpectrum xe135 = new GardsXeResultsSpectrum();
xe135.setNuclideName(XeNuclideName.XE_135.getType());
xe135.setConc(analyseResult.Xe135_con);
xe135.setConcErr(analyseResult.Xe135_uncer);
xe135.setLc(analyseResult.LC_Xe135);
xe135.setMdc(analyseResult.MDC_Xe135);
xe135.setNidFlag(analyseResult.XE_135_NID_FLAG);
xeResultsSpectrumList.add(xe135);
map.put("XeData", xeResultsSpectrumList);
//新计算得到的边界值
if (CollectionUtils.isNotEmpty(analyseResult.S_ROI_B_Boundary_start)) {
List<Boundary> boundaryList = new LinkedList<>();
for (int i=0; i<analyseResult.S_ROI_B_Boundary_start.size(); i++) {
Boundary boundary = new Boundary();
boundary.setMinX(analyseResult.S_ROI_B_Boundary_start.get(i));
boundary.setMaxX(analyseResult.S_ROI_B_Boundary_stop.get(i));
boundary.setMinY(analyseResult.S_ROI_G_Boundary_start.get(i));
boundary.setMaxY(analyseResult.S_ROI_G_Boundary_stop.get(i));
boundaryList.add(boundary);
}
map.put("SampleBoundary", boundaryList);
}
map.put("SampleBoundary", boundaryList);
}
if (CollectionUtils.isNotEmpty(analyseResult.G_ROI_B_Boundary_start)) {
List<Boundary> boundaryList = new LinkedList<>();
for (int i=0; i<analyseResult.G_ROI_B_Boundary_start.size(); i++) {
Boundary boundary = new Boundary();
boundary.setMinX(analyseResult.G_ROI_B_Boundary_start.get(i));
boundary.setMaxX(analyseResult.G_ROI_B_Boundary_stop.get(i));
boundary.setMinY(analyseResult.G_ROI_G_Boundary_start.get(i));
boundary.setMaxY(analyseResult.G_ROI_G_Boundary_stop.get(i));
boundaryList.add(boundary);
if (CollectionUtils.isNotEmpty(analyseResult.G_ROI_B_Boundary_start)) {
List<Boundary> boundaryList = new LinkedList<>();
for (int i=0; i<analyseResult.G_ROI_B_Boundary_start.size(); i++) {
Boundary boundary = new Boundary();
boundary.setMinX(analyseResult.G_ROI_B_Boundary_start.get(i));
boundary.setMaxX(analyseResult.G_ROI_B_Boundary_stop.get(i));
boundary.setMinY(analyseResult.G_ROI_G_Boundary_start.get(i));
boundary.setMaxY(analyseResult.G_ROI_G_Boundary_stop.get(i));
boundaryList.add(boundary);
}
map.put("GasBoundary", boundaryList);
}
map.put("GasBoundary", boundaryList);
}
if (CollectionUtils.isNotEmpty(analyseResult.D_ROI_B_Boundary_start)) {
List<Boundary> boundaryList = new LinkedList<>();
for (int i=0; i<analyseResult.D_ROI_B_Boundary_start.size(); i++) {
Boundary boundary = new Boundary();
boundary.setMinX(analyseResult.D_ROI_B_Boundary_start.get(i));
boundary.setMaxX(analyseResult.D_ROI_B_Boundary_stop.get(i));
boundary.setMinY(analyseResult.D_ROI_G_Boundary_start.get(i));
boundary.setMaxY(analyseResult.D_ROI_G_Boundary_stop.get(i));
boundaryList.add(boundary);
if (CollectionUtils.isNotEmpty(analyseResult.D_ROI_B_Boundary_start)) {
List<Boundary> boundaryList = new LinkedList<>();
for (int i=0; i<analyseResult.D_ROI_B_Boundary_start.size(); i++) {
Boundary boundary = new Boundary();
boundary.setMinX(analyseResult.D_ROI_B_Boundary_start.get(i));
boundary.setMaxX(analyseResult.D_ROI_B_Boundary_stop.get(i));
boundary.setMinY(analyseResult.D_ROI_G_Boundary_start.get(i));
boundary.setMaxY(analyseResult.D_ROI_G_Boundary_stop.get(i));
boundaryList.add(boundary);
}
map.put("DetBoundary", boundaryList);
}
map.put("DetBoundary", boundaryList);
return bRet;
}
}
@ -1326,11 +1246,27 @@ public class PHDFileUtil extends AbstractLogOrReport {
//换行
out.append(System.lineSeparator());
//第三十六行数据
out.append(rowFormat("energy to channel equation: CH(x) = (%s)+(%s)*x+(%s)x*x", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.s_b_fitting_c_e.get(0))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.s_b_fitting_c_e.get(1))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.s_b_fitting_c_e.get(2)))));
if (bgAnalyseResult.s_b_fitting_c_e != null && CollectionUtils.isNotEmpty(bgAnalyseResult.s_b_fitting_c_e)) {
if (bgAnalyseResult.s_b_fitting_c_e.size() > 0) {
out.append(rowFormat("energy to channel equation: CH(x) = (%s)+(%s)*x+(%s)x*x", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.s_b_fitting_c_e.get(0))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.s_b_fitting_c_e.get(1))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.s_b_fitting_c_e.get(2)))));
} else {
out.append(rowFormat("energy to channel equation: CH(x) = (%s)+(%s)*x+(%s)x*x", "?", "?", "?"));
}
} else {
out.append(rowFormat("energy to channel equation: CH(x) = (%s)+(%s)*x+(%s)x*x", "?", "?", "?"));
}
//换行
out.append(System.lineSeparator());
//第三十七行数据
out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.s_b_fitting_e_c.get(0))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.s_b_fitting_e_c.get(1))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.s_b_fitting_e_c.get(2)))));
if (bgAnalyseResult.s_b_fitting_e_c != null && CollectionUtils.isNotEmpty(bgAnalyseResult.s_b_fitting_e_c)) {
if (bgAnalyseResult.s_b_fitting_e_c.size() > 0) {
out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.s_b_fitting_e_c.get(0))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.s_b_fitting_e_c.get(1))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.s_b_fitting_e_c.get(2)))));
} else {
out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", "?", "?", "?"));
}
} else {
out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", "?", "?", "?"));
}
//换行
out.append(System.lineSeparator());
//换行
@ -1340,11 +1276,27 @@ public class PHDFileUtil extends AbstractLogOrReport {
//换行
out.append(System.lineSeparator());
//第三十六行数据
out.append(rowFormat("energy to channel equation: CH(x) = (%s)+(%s)*x+(%s)x*x", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.s_g_fitting_c_e.get(0))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.s_g_fitting_c_e.get(1))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.s_g_fitting_c_e.get(2)))));
if (bgAnalyseResult.s_g_fitting_c_e !=null && CollectionUtils.isNotEmpty(bgAnalyseResult.s_g_fitting_c_e)) {
if (bgAnalyseResult.s_g_fitting_c_e.size() > 0) {
out.append(rowFormat("energy to channel equation: CH(x) = (%s)+(%s)*x+(%s)x*x", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.s_g_fitting_c_e.get(0))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.s_g_fitting_c_e.get(1))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.s_g_fitting_c_e.get(2)))));
} else {
out.append(rowFormat("energy to channel equation: CH(x) = (%s)+(%s)*x+(%s)x*x", "?", "?", "?"));
}
} else {
out.append(rowFormat("energy to channel equation: CH(x) = (%s)+(%s)*x+(%s)x*x", "?", "?", "?"));
}
//换行
out.append(System.lineSeparator());
//第三十七行数据
out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.s_g_fitting_e_c.get(0))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.s_g_fitting_e_c.get(1))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.s_g_fitting_e_c.get(2)))));
if (bgAnalyseResult.s_g_fitting_e_c != null && CollectionUtils.isNotEmpty(bgAnalyseResult.s_g_fitting_e_c)) {
if (bgAnalyseResult.s_g_fitting_e_c.size() > 0) {
out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.s_g_fitting_e_c.get(0))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.s_g_fitting_e_c.get(1))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.s_g_fitting_e_c.get(2)))));
} else {
out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", "?", "?", "?"));
}
} else {
out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", "?", "?", "?"));
}
//换行
out.append(System.lineSeparator());
//换行
@ -1394,11 +1346,28 @@ public class PHDFileUtil extends AbstractLogOrReport {
//换行
out.append(System.lineSeparator());
//第四十四行数据
out.append(rowFormat("energy to channel equation: CH(x) = (%s)+(%s)*x+(%s)x*x", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.d_b_fitting_c_e.get(0))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.d_b_fitting_c_e.get(1))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.d_b_fitting_c_e.get(2)))));
if (bgAnalyseResult.d_b_fitting_c_e != null && CollectionUtils.isNotEmpty(bgAnalyseResult.d_b_fitting_c_e)) {
if (bgAnalyseResult.d_b_fitting_c_e.size() > 0) {
out.append(rowFormat("energy to channel equation: CH(x) = (%s)+(%s)*x+(%s)x*x", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.d_b_fitting_c_e.get(0))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.d_b_fitting_c_e.get(1))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.d_b_fitting_c_e.get(2)))));
} else {
out.append(rowFormat("energy to channel equation: CH(x) = (%s)+(%s)*x+(%s)x*x", "?", "?", "?"));
}
} else {
out.append(rowFormat("energy to channel equation: CH(x) = (%s)+(%s)*x+(%s)x*x", "?", "?", "?"));
}
//换行
out.append(System.lineSeparator());
//第四十五行数据
out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.d_b_fitting_e_c.get(0))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.d_b_fitting_e_c.get(1))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.d_b_fitting_e_c.get(2)))));
if (bgAnalyseResult.d_b_fitting_e_c != null && CollectionUtils.isNotEmpty(bgAnalyseResult.d_b_fitting_e_c)) {
if (bgAnalyseResult.d_b_fitting_e_c.size() > 0) {
out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.d_b_fitting_e_c.get(0))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.d_b_fitting_e_c.get(1))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.d_b_fitting_e_c.get(2)))));
} else {
out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", "?", "?", "?"));
}
} else {
out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", "?", "?", "?"));
}
//换行
out.append(System.lineSeparator());
//换行
@ -1408,11 +1377,27 @@ public class PHDFileUtil extends AbstractLogOrReport {
//换行
out.append(System.lineSeparator());
//第四十四行数据
out.append(rowFormat("energy to channel equation: CH(x) = (%s)+(%s)*x+(%s)x*x", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.d_g_fitting_c_e.get(0))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.d_g_fitting_c_e.get(1))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.d_g_fitting_c_e.get(2)))));
if (bgAnalyseResult.d_g_fitting_c_e != null && CollectionUtils.isNotEmpty(bgAnalyseResult.d_g_fitting_c_e)) {
if (bgAnalyseResult.d_g_fitting_c_e.size() > 0) {
out.append(rowFormat("energy to channel equation: CH(x) = (%s)+(%s)*x+(%s)x*x", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.d_g_fitting_c_e.get(0))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.d_g_fitting_c_e.get(1))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.d_g_fitting_c_e.get(2)))));
} else {
out.append(rowFormat("energy to channel equation: CH(x) = (%s)+(%s)*x+(%s)x*x", "?", "?", "?"));
}
} else {
out.append(rowFormat("energy to channel equation: CH(x) = (%s)+(%s)*x+(%s)x*x", "?", "?", "?"));
}
//换行
out.append(System.lineSeparator());
//第四十五行数据
out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.d_g_fitting_e_c.get(0))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.d_g_fitting_e_c.get(1))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.d_g_fitting_e_c.get(2)))));
if (bgAnalyseResult.d_g_fitting_e_c != null && CollectionUtils.isNotEmpty(bgAnalyseResult.d_g_fitting_e_c)) {
if (bgAnalyseResult.d_g_fitting_e_c.size() > 0) {
out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.d_g_fitting_e_c.get(0))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.d_g_fitting_e_c.get(1))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.d_g_fitting_e_c.get(2)))));
} else {
out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", "?", "?", "?"));
}
} else {
out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", "?", "?", "?"));
}
//换行
out.append(System.lineSeparator());
//换行
@ -1457,11 +1442,27 @@ public class PHDFileUtil extends AbstractLogOrReport {
//换行
out.append(System.lineSeparator());
//第五十一行数据
out.append(rowFormat("energy to channel equation: CH(x) = (%s)+(%s)*x+(%s)x*x", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.g_b_fitting_c_e.get(0))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.g_b_fitting_c_e.get(1))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.g_b_fitting_c_e.get(2)))));
if (bgAnalyseResult.g_b_fitting_c_e != null && CollectionUtils.isNotEmpty(bgAnalyseResult.g_b_fitting_c_e)) {
if (bgAnalyseResult.g_b_fitting_c_e.size() > 0) {
out.append(rowFormat("energy to channel equation: CH(x) = (%s)+(%s)*x+(%s)x*x", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.g_b_fitting_c_e.get(0))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.g_b_fitting_c_e.get(1))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.g_b_fitting_c_e.get(2)))));
} else {
out.append(rowFormat("energy to channel equation: CH(x) = (%s)+(%s)*x+(%s)x*x", "?", "?", "?"));
}
} else {
out.append(rowFormat("energy to channel equation: CH(x) = (%s)+(%s)*x+(%s)x*x", "?", "?", "?"));
}
//换行
out.append(System.lineSeparator());
//第五十二行数据
out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.g_b_fitting_e_c.get(0))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.g_b_fitting_e_c.get(1))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.g_b_fitting_e_c.get(2)))));
if (bgAnalyseResult.g_b_fitting_e_c != null && CollectionUtils.isNotEmpty(bgAnalyseResult.g_b_fitting_e_c)) {
if (bgAnalyseResult.g_b_fitting_e_c.size() > 0) {
out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.g_b_fitting_e_c.get(0))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.g_b_fitting_e_c.get(1))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.g_b_fitting_e_c.get(2)))));
} else {
out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", "?", "?", "?"));
}
} else {
out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", "?", "?", "?"));
}
//换行
out.append(System.lineSeparator());
//换行
@ -1471,11 +1472,27 @@ public class PHDFileUtil extends AbstractLogOrReport {
//换行
out.append(System.lineSeparator());
//第五十四行数据
out.append(rowFormat("energy to channel equation: CH(x) = (%s)+(%s)*x+(%s)x*x", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.g_g_fitting_c_e.get(0))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.g_g_fitting_c_e.get(1))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.g_g_fitting_c_e.get(2)))));
if (bgAnalyseResult.g_g_fitting_c_e != null && CollectionUtils.isNotEmpty(bgAnalyseResult.g_g_fitting_c_e)) {
if (bgAnalyseResult.g_g_fitting_c_e.size() > 0) {
out.append(rowFormat("energy to channel equation: CH(x) = (%s)+(%s)*x+(%s)x*x", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.g_g_fitting_c_e.get(0))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.g_g_fitting_c_e.get(1))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.g_g_fitting_c_e.get(2)))));
} else {
out.append(rowFormat("energy to channel equation: CH(x) = (%s)+(%s)*x+(%s)x*x", "?", "?", "?"));
}
} else {
out.append(rowFormat("energy to channel equation: CH(x) = (%s)+(%s)*x+(%s)x*x", "?", "?", "?"));
}
//换行
out.append(System.lineSeparator());
//第五十五行数据
out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.g_g_fitting_e_c.get(0))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.g_g_fitting_e_c.get(1))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.g_g_fitting_e_c.get(2)))));
if (bgAnalyseResult.g_g_fitting_e_c != null && CollectionUtils.isNotEmpty(bgAnalyseResult.g_g_fitting_e_c)) {
if (bgAnalyseResult.g_g_fitting_e_c.size() > 0) {
out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.g_g_fitting_e_c.get(0))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.g_g_fitting_e_c.get(1))), NumberFormatUtil.numberFormat(String.valueOf(bgAnalyseResult.g_g_fitting_e_c.get(2)))));
} else {
out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", "?", "?", "?"));
}
} else {
out.append(rowFormat("channel to energy equation: E(x) = (%s)+(%s)*x+(%s)x*x", "?", "?", "?"));
}
//换行
out.append(System.lineSeparator());
//换行

View File

@ -123,14 +123,14 @@ public class SpectrumAnalysesController {
@GetMapping("viewGammaDetectorCalibration")
@ApiOperation(value = "查询GammaDetectorCalibration数据", notes = "查询GammaDetectorCalibration数据")
public Result viewGammaDetectorCalibration(Integer sampleId, String qcFileName, HttpServletRequest request) {
return spectrumAnalysisService.viewGammaDetectorCalibration(sampleId, qcFileName, request);
public Result viewGammaDetectorCalibration(Integer sampleId, String sampleFileName, String qcFileName, HttpServletRequest request) {
return spectrumAnalysisService.viewGammaDetectorCalibration(sampleId, sampleFileName, qcFileName, request);
}
@GetMapping("viewBetaDetectorCalibration")
@ApiOperation(value = "查询BetaDetectorCalibration数据", notes = "查询BetaDetectorCalibration数据")
public Result viewBetaDetectorCalibration(Integer sampleId, String qcFileName, HttpServletRequest request) {
return spectrumAnalysisService.viewBetaDetectorCalibration(sampleId, qcFileName, request);
public Result viewBetaDetectorCalibration(Integer sampleId, String sampleFileName, String qcFileName, HttpServletRequest request) {
return spectrumAnalysisService.viewBetaDetectorCalibration(sampleId, sampleFileName, qcFileName, request);
}
@GetMapping("viewExtrapolation")
@ -176,7 +176,7 @@ public class SpectrumAnalysesController {
@PostMapping("fitting")
@ApiOperation(value = "公式计算新的曲线", notes = "公式计算新的曲线")
public Result fitting(@RequestBody FittingBody fittingBody, HttpServletRequest request) {
return spectrumAnalysisService.fitting(fittingBody.getParamA(), fittingBody.getParamB(), fittingBody.getParamC(), fittingBody.getTempPoints(), fittingBody.getCount(), fittingBody.getQcFileName(), fittingBody.getTabName(), request);
return spectrumAnalysisService.fitting(fittingBody.getParamA(), fittingBody.getParamB(), fittingBody.getParamC(), fittingBody.getTempPoints(), fittingBody.getCount(), fittingBody.getSampleFileName(), fittingBody.getTabName(), request);
}
@GetMapping("getGammaGated")

View File

@ -22,7 +22,7 @@ public class BgDataAnlyseResultIn implements Serializable {
/**
* 标识本次操作是否进行过解析
*/
private boolean bProcessed;
private boolean savedAnalysisResult;
/**
* 分析员名称
@ -266,7 +266,7 @@ public class BgDataAnlyseResultIn implements Serializable {
public BgDataAnlyseResultIn() {
comment = "";
bProcessed = false;
savedAnalysisResult = false;
userName = "";
stationName = "";
dbName = "";

View File

@ -46,9 +46,9 @@ public interface ISpectrumAnalysisService {
void exportRLR(BetaRLR betaRLR, HttpServletResponse response);
Result viewGammaDetectorCalibration(Integer sampleId, String qcFileName, HttpServletRequest request);
Result viewGammaDetectorCalibration(Integer sampleId, String sampleFileName, String qcFileName, HttpServletRequest request);
Result viewBetaDetectorCalibration(Integer sampleId, String qcFileName, HttpServletRequest request);
Result viewBetaDetectorCalibration(Integer sampleId, String sampleFileName, String qcFileName, HttpServletRequest request);
Result viewExtrapolation(Integer sampleId, String sampleFileName, HttpServletRequest request);
@ -64,7 +64,7 @@ public interface ISpectrumAnalysisService {
Result statisticsQueryBtn(Integer detectorId, String detectorName, Integer stationId, String statisticsType, Date startTime, Date endTime);
Result fitting(Double paramA, Double paramB, Double paramC, List<SeriseData> tempPointsArray, Integer count, String qcFileName, String tabName, HttpServletRequest request);
Result fitting(Double paramA, Double paramB, Double paramC, List<SeriseData> tempPointsArray, Integer count, String sampleFileName, String tabName, HttpServletRequest request);
Result getGammaGated(Integer chartHeight, Integer channelWidth, Integer gammaChannel, Integer sampleId, String qcFileName, HttpServletRequest request);

View File

@ -85,10 +85,14 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
@Autowired
private FileFtransitUtil fileFtransitUtil;
@Autowired
private NameStandUtil nameStandUtil;
@Autowired
private DBUtil dbUtil;
@Autowired
private UserTaskUtil userTaskUtil;
@Autowired
private RedisUtil redisUtil;
@Autowired
private ISysDefaultNuclideSpectrumService defaultNuclideSpectrumService;
@Autowired
private ISysUserColorService sysUserColorService;
@ -138,12 +142,12 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
@Override
public Result initValue(Integer sampleId, String dbName, String samfileName, HttpServletRequest request) {
Result result = new Result();
//
//获取用户名
String userName = JwtUtil.getUserNameByToken(request);
Cache<String, PHDFile> phdCache = localCache.getPHDCache();
PHDFile phd = new PHDFile();
// 读取文件内容
String lastName = "";
String key = "";
if (Objects.nonNull(sampleId) && StringUtils.isNotBlank(dbName)) {
// 根据sampleId获取sample文件路径
String sampleFilePath = spectrumAnalysisMapper.getSampleFilePath(sampleId);
@ -151,30 +155,56 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
result.error500("Sample file does not exist!");
return result;
}
String pathName = StringPool.SLASH + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + sampleFilePath.substring(0, sampleFilePath.lastIndexOf(StringPool.SLASH));
String pathName = ftpUtil.getFtpRootPath() + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + sampleFilePath.substring(0, sampleFilePath.lastIndexOf(StringPool.SLASH));
String fileName = sampleFilePath.substring(sampleFilePath.lastIndexOf(StringPool.SLASH) + 1);
boolean flag = gammaFileUtil.loadFile(pathName, fileName, phd, result);
if (!flag) {
return result;
}
// 声明基础数组信息
gammaFileUtil.SetBaseInfo(phd);
if (dbName.equals("auto")) {
gammaFileUtil.SetBaseInfo(phd, "RNAUTO");
} else if (dbName.equals("man")) {
gammaFileUtil.SetBaseInfo(phd, userName);
}
// 从数据库中读取相关信息
boolean bRet = getResultFromDB(dbName, userName, sampleId, phd, result);
if (!redisUtil.hasKey(fileName+"-"+userName)) {
// 查询当前用户关联的核素信息
List<String> userLib = new LinkedList<>();
// 从postgreSql中获取当前用户关注的核素信息 如果当前用户没有 则返回管理员的
userLib = defaultNuclideSpectrumService.findNuclidesByUserName(userName, phd.getHeader().getSystem_type().toUpperCase());
if (CollectionUtils.isEmpty(userLib)) {
userLib = defaultNuclideSpectrumService.findNuclidesByUserName("admin", phd.getHeader().getSystem_type().toUpperCase());
}
Map<String, NuclideLines> nuclideMap = GetNuclideLines(userLib);
redisUtil.set(fileName+"-"+userName, nuclideMap);
}
if (!bRet) {
return result;
}
lastName = fileName;
key = fileName + "-" + userName;
} else {
String pathName = StringPool.SLASH + spectrumPathProperties.getUploadPath() + StringPool.SLASH + userName;
String pathName = ftpUtil.getFtpRootPath() + spectrumPathProperties.getUploadPath() + StringPool.SLASH + userName;
String fileName = samfileName;
boolean flag = gammaFileUtil.loadFile(pathName, fileName, phd, result);
if (!redisUtil.hasKey(fileName+"-"+userName)) {
// 查询当前用户关联的核素信息
List<String> userLib = new LinkedList<>();
// 从postgreSql中获取当前用户关注的核素信息 如果当前用户没有 则返回管理员的
userLib = defaultNuclideSpectrumService.findNuclidesByUserName(userName, phd.getHeader().getSystem_type().toUpperCase());
if (CollectionUtils.isEmpty(userLib)) {
userLib = defaultNuclideSpectrumService.findNuclidesByUserName("admin", phd.getHeader().getSystem_type().toUpperCase());
}
Map<String, NuclideLines> nuclideMap = GetNuclideLines(userLib);
redisUtil.set(fileName+"-"+userName, nuclideMap);
}
if (!flag) {
return result;
}
lastName = fileName;
key = fileName + "-" + userName;
}
phdCache.put(lastName, phd);
phdCache.put(key, phd);
localCache.setPHDCache(phdCache);
result.setSuccess(true);
result.setResult(phd);
@ -194,16 +224,9 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
result.error500("Please select the parse file first");
return result;
}
// 查询当前用户关联的核素信息
List<String> nuclides = new LinkedList<>();
// 从postgreSql中获取当前用户关注的核素信息 如果当前用户没有 则返回管理员的
nuclides = defaultNuclideSpectrumService.findNuclidesByUserName(userName, systemType);
if (CollectionUtils.isEmpty(nuclides)) {
nuclides = defaultNuclideSpectrumService.findNuclidesByUserName("admin", systemType);
}
Map<String, NuclideLines> nuclideLinesMap = GetNuclideLines(nuclides);
Map<String, NuclideLines> nuclideLinesMap = (Map<String, NuclideLines>) redisUtil.get(fileName+"-"+userName);//GetNuclideLines(nuclides);
// 解析获取临时文件信息
File tmpFile = gammaFileUtil.analyzeFile(StringPool.SLASH + spectrumPathProperties.getUploadPath() + StringPool.SLASH + userName, fileName);
File tmpFile = gammaFileUtil.analyzeFile(ftpUtil.getFtpRootPath() + spectrumPathProperties.getUploadPath() + StringPool.SLASH + userName, fileName);
ObjectMapper mapper = new ObjectMapper();
try {
String phdStr = mapper.writeValueAsString(phd);
@ -375,16 +398,16 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
ParameterInfo value = JSON.parseObject(JSON.toJSONString(entry.getValue()), ParameterInfo.class);
phd.setUsedTotEPara(value);
}
if (entry.getKey().equalsIgnoreCase("mapNucActMda")) {
HashMap<String, Object> jsonMap = JSON.parseObject(JSON.toJSONString(entry.getValue()), HashMap.class);
Map<String, NuclideActMda> value = new HashMap<>();
for (Map.Entry<String, Object> objectEntry : jsonMap.entrySet()) {
String key = objectEntry.getKey();
NuclideActMda entryValue = JSON.parseObject(JSON.toJSONString(objectEntry.getValue()), NuclideActMda.class);
value.put(key, entryValue);
}
phd.setMapNucActMda(value);
}
// if (entry.getKey().equalsIgnoreCase("mapNucActMda")) {
// HashMap<String, Object> jsonMap = JSON.parseObject(JSON.toJSONString(entry.getValue()), HashMap.class);
// Map<String, NuclideActMda> value = new HashMap<>();
// for (Map.Entry<String, Object> objectEntry : jsonMap.entrySet()) {
// String key = objectEntry.getKey();
// NuclideActMda entryValue = JSON.parseObject(JSON.toJSONString(objectEntry.getValue()), NuclideActMda.class);
// value.put(key, entryValue);
// }
// phd.setMapNucActMda(value);
// }
}
BeanUtils.copyProperties(phd.getSetting(), phd.getUsedSetting());
@ -396,6 +419,8 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
peak.recoilDeltaChan = "1";
}
}
// 重新分析各峰值对应的核素信息
gammaFileUtil.NuclidesIdent(phd, nuclideLinesMap);
gammaFileUtil.RunQC(phd);
result.setResult(phd);
} catch (JsonProcessingException e) {
@ -425,7 +450,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
return result;
}
// 切割数据库存储的文件路径获取路径信息
String pathName = StringPool.SLASH + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + sampleFilePath.substring(0, sampleFilePath.lastIndexOf(StringPool.SLASH));
String pathName = ftpUtil.getFtpRootPath() + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + sampleFilePath.substring(0, sampleFilePath.lastIndexOf(StringPool.SLASH));
// 切割数据库存储的文件路径获取文件名称
String fileName = sampleFilePath.substring(sampleFilePath.lastIndexOf(StringPool.SLASH) + 1);
// 声明phd实体类
@ -440,9 +465,24 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
return result;
}
// 加载phd数据所需的lcscacbaseline数据
gammaFileUtil.SetBaseInfo(phd);
if (dbName.equals("auto")) {
gammaFileUtil.SetBaseInfo(phd, "RNAUTO");
} else if (dbName.equals("man")) {
gammaFileUtil.SetBaseInfo(phd, userName);
}
// 从数据库中读取phd其他相关信息
boolean bRet = getResultFromDB(dbName, userName, sampleId, phd, result);
if (!redisUtil.hasKey(fileName+"-"+userName)) {
// 查询当前用户关联的核素信息
List<String> userLib = new LinkedList<>();
// 从postgreSql中获取当前用户关注的核素信息 如果当前用户没有 则返回管理员的
userLib = defaultNuclideSpectrumService.findNuclidesByUserName(userName, phd.getHeader().getSystem_type().toUpperCase());
if (CollectionUtils.isEmpty(userLib)) {
userLib = defaultNuclideSpectrumService.findNuclidesByUserName("admin", phd.getHeader().getSystem_type().toUpperCase());
}
Map<String, NuclideLines> nuclideMap = GetNuclideLines(userLib);
redisUtil.set(fileName+"-"+userName, nuclideMap);
}
// 判断数据库信息是否读取正常
if (!bRet) {
return result;
@ -454,7 +494,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
List<String> qcstate = gammaFileUtil.Qcstate(phd);
map.put("QCFlag", qcstate);
// 更新 Detailed Information 信息
List<String> detailedInfo = gammaFileUtil.DetailedInfo(sampleId, phd);
List<String> detailedInfo = gammaFileUtil.DetailedInfo(String.valueOf(sampleId), phd);
map.put("DetailedInformation", detailedInfo);
// 更新 ACQ 模块中各时间信息
map.put("start_time", phd.getAcq().getAcquisition_start_date() + "\n" + phd.getAcq().getAcquisition_start_time());
@ -465,6 +505,9 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
map.put("checkBox_updateCal", phd.getSetting().isBUpdateCal());
map.put("bAnalyed", phd.isBAnalyed());
map.put("peak", phd.getVPeak());
map.put("BaseCtrls", phd.getBaseCtrls());
List<SeriseData> differance = gammaFileUtil.Differance(phd, phd.getVPeak());
map.put("barChart", differance);
// 更新页面折线图信息
gammaFileUtil.UpdateChart(phd, map, colorMap);
// 将当前加载的phd信息加入到缓存中 文件名称作为缓存信息的key
@ -477,7 +520,14 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
public boolean getResultFromDB(String dbName, String userName, Integer sampleId, PHDFile phd, Result result) {
// 判断连接的数据库
String T_analy, T_calib, T_peaks, T_param, T_nuc_line, T_nuc_act, T_qc, T_setting;
String T_analy = "";
String T_calib = "";
String T_peaks = "";
String T_param = "";
String T_nuc_line = "";
String T_nuc_act = "";
String T_qc = "";
String T_setting = "";
if (dbName.equals("auto")) {
T_analy = "RNAUTO.GARDS_ANALYSES";
T_calib = "RNAUTO.GARDS_CALIBRATION_PAIRS";
@ -487,7 +537,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
T_nuc_act = "RNAUTO.GARDS_NUCL_IDED";
T_qc = "RNAUTO.GARDS_QC_CHECK";
userName = "RNAUTO";
} else {
} else if (dbName.equals("man")) {
T_analy = "RNMAN.GARDS_ANALYSES";
T_calib = "RNMAN.GARDS_CALIBRATION_PAIRS";
T_peaks = "RNMAN.GARDS_PEAKS";
@ -776,7 +826,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
Map<String, Object> map = new HashMap<>();
Cache<String, PHDFile> phdCache = localCache.getPHDCache();
// 上传文件路径
String path = StringPool.SLASH + spectrumPathProperties.getUploadPath() + StringPool.SLASH + userName;
String path = ftpUtil.getFtpRootPath() + spectrumPathProperties.getUploadPath() + StringPool.SLASH + userName;
// 获取当前角色的颜色配置
Map<String, String> colorMap = sysUserColorService.initColor(userName);
PHDFile phd = phdCache.getIfPresent(fileName + "-" + userName);
@ -787,12 +837,23 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
if (!bRet) {
return result;
}
if (!redisUtil.hasKey(fileName+"-"+userName)) {
// 查询当前用户关联的核素信息
List<String> userLib = new LinkedList<>();
// 从postgreSql中获取当前用户关注的核素信息 如果当前用户没有 则返回管理员的
userLib = defaultNuclideSpectrumService.findNuclidesByUserName(userName, phd.getHeader().getSystem_type().toUpperCase());
if (CollectionUtils.isEmpty(userLib)) {
userLib = defaultNuclideSpectrumService.findNuclidesByUserName("admin", phd.getHeader().getSystem_type().toUpperCase());
}
Map<String, NuclideLines> nuclideMap = GetNuclideLines(userLib);
redisUtil.set(fileName+"-"+userName, nuclideMap);
}
}
// 更新 QC Flags 状态
List<String> qcstate = gammaFileUtil.Qcstate(phd);
map.put("QCFlag", qcstate);
// 更新 Detailed Information 信息
List<String> detailedInfo = gammaFileUtil.DetailedInfo(null, phd);
List<String> detailedInfo = gammaFileUtil.DetailedInfo(phd.getId_sample(), phd);
map.put("DetailedInformation", detailedInfo);
// 更新 ACQ 模块中各时间信息
map.put("start_time", phd.getAcq().getAcquisition_start_date() + "\n" + phd.getAcq().getAcquisition_start_time());
@ -803,6 +864,9 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
map.put("checkBox_updateCal", phd.getSetting().isBUpdateCal());
map.put("bAnalyed", phd.isBAnalyed());
map.put("peak", phd.getVPeak());
map.put("BaseCtrls", phd.getBaseCtrls());
List<SeriseData> differance = gammaFileUtil.Differance(phd, phd.getVPeak());
map.put("barChart", differance);
gammaFileUtil.UpdateChart(phd, map, colorMap);
phdCache.put(fileName + "-" + userName, phd);
localCache.setPHDCache(phdCache);
@ -818,6 +882,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
String key = fileName + StrUtil.DASHED + username;
// 删除指定key的Cache
localCache.deletePHDCache(key);
redisUtil.del(key);
}
@Override
@ -972,13 +1037,13 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
phd.setXmlFilePath(parameterProperties.getFilePath());
// 获取当前角色的颜色配置
Map<String, String> colorMap = sysUserColorService.initColor(userName);
// 查询当前用户关联的核素信息
List<String> nuclides = new LinkedList<>();
// 从postgreSql中获取当前用户关注的核素信息 如果当前用户没有 则返回管理员的
nuclides = defaultNuclideSpectrumService.findNuclidesByUserName(userName, phd.getHeader().getSystem_type().toUpperCase());
if (CollectionUtils.isEmpty(nuclides)) {
nuclides = defaultNuclideSpectrumService.findNuclidesByUserName("admin", phd.getHeader().getSystem_type().toUpperCase());
}
// // 查询当前用户关联的核素信息
// List<String> nuclides = new LinkedList<>();
// // 从postgreSql中获取当前用户关注的核素信息 如果当前用户没有 则返回管理员的
// nuclides = defaultNuclideSpectrumService.findNuclidesByUserName(userName, phd.getHeader().getSystem_type().toUpperCase());
// if (CollectionUtils.isEmpty(nuclides)) {
// nuclides = defaultNuclideSpectrumService.findNuclidesByUserName("admin", phd.getHeader().getSystem_type().toUpperCase());
// }
// 分析文件数据
int flag = gammaFileUtil.AnalyseData(phd);
if (flag == 0) {
@ -987,7 +1052,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
"2. You didn't change any setting or calibration.";
result.error500(warning);
} else if (flag == -1) {
Map<String, NuclideLines> nuclideLinesMap = GetNuclideLines(nuclides);
Map<String, NuclideLines> nuclideLinesMap = (Map<String, NuclideLines>) redisUtil.get(fileName+"-"+userName);//GetNuclideLines(nuclides);
gammaFileUtil.NuclidesIdent(phd, nuclideLinesMap);
gammaFileUtil.RunQC(phd);
String warning = "Finish three tasks:\n" +
@ -996,7 +1061,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
"\t3.Test QC again.";
result.error500(warning);
} else {
Map<String, NuclideLines> nuclideLinesMap = GetNuclideLines(nuclides);
Map<String, NuclideLines> nuclideLinesMap = (Map<String, NuclideLines>) redisUtil.get(fileName+"-"+userName);//GetNuclideLines(nuclides);
gammaFileUtil.AnalyseSpectrum(phd, nuclideLinesMap);
// 重新分析各峰值对应的核素信息
// gammaFileUtil.NuclidesIdent(phd, nuclideLinesMap);
@ -1007,6 +1072,10 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
map.put("QCFlag", qcstate);
map.put("bAnalyed", phd.isBAnalyed());
map.put("peak", phd.getVPeak());
map.put("BaseCtrls", phd.getBaseCtrls());
// Bar Chart 柱状图
List<SeriseData> differance = gammaFileUtil.Differance(phd, phd.getVPeak());
map.put("barChart", differance);
result.setSuccess(true);
result.setResult(map);
}
@ -1073,16 +1142,16 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
if (m_nSChan == 0) {
m_vCount.add(0L);
}
ChartData channelCountChart = gammaFileUtil.Channel_Count(m_vCount, m_nCount, colorMap.get("Color_Spec"));
ChartData channelBaseLineChart = gammaFileUtil.Channel_BaseLine(phd, m_nCount, colorMap.get("Color_Base"));
List<ChartData> channelPeakChart = gammaFileUtil.Channel_Peak(phd, m_nCount, colorMap.get("Color_Peak"));
ChartData channelCountChart = gammaFileUtil.Channel_Count(phd, colorMap.get("Color_Spec"));
ChartData channelBaseLineChart = gammaFileUtil.Channel_BaseLine(phd, colorMap.get("Color_Base"));
List<ChartData> channelPeakChart = gammaFileUtil.Channel_Peak(phd, colorMap.get("Color_Peak"));
List<ShapeData> channelBaseCPChart = gammaFileUtil.Channel_BaseCP(phd);
map.put("channelCountChart", channelCountChart);
map.put("channelBaseLineChart", channelBaseLineChart);
map.put("channelPeakChart", channelPeakChart);
map.put("channelBaseCPChart", channelBaseCPChart);
// Bar Chart 柱状图
List<SeriseData> differance = gammaFileUtil.Differance(phd, phd.getVPeak(), m_vCount, m_nCount);
List<SeriseData> differance = gammaFileUtil.Differance(phd, phd.getVPeak());
map.put("barChart", differance);
// 赋值energy
map.put("energy", phd.getVEnergy());
@ -1370,11 +1439,11 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
List<PeakInfo> vPeak = gammaFileUtil.InitPeakTable(phd.getVPeak());
map.put("table", vPeak);
List<ChartData> channelPeak = gammaFileUtil.Channel_Peak(phd, m_nCount, colorMap.get("Color_Peak"));
List<ChartData> channelPeak = gammaFileUtil.Channel_Peak(phd, colorMap.get("Color_Peak"));
map.put("channelPeakChart", channelPeak);
ChartData channelBaseLine = gammaFileUtil.Channel_BaseLine(phd, m_nCount, colorMap.get("Color_Base"));
ChartData channelBaseLine = gammaFileUtil.Channel_BaseLine(phd, colorMap.get("Color_Base"));
map.put("channelBaseLineChart", channelBaseLine);
List<SeriseData> differance = gammaFileUtil.Differance(phd, phd.getVPeak(), m_vCount, m_nCount);
List<SeriseData> differance = gammaFileUtil.Differance(phd, phd.getVPeak());
map.put("barChart", differance);
gammaFileUtil.UpdateChart(phd, map, colorMap);
} else {// 如果不保留 根据下标移除对应的vPeak数据
@ -1382,7 +1451,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
phd.getVPeak().clear();
phd.setVPeak(oldPeak);
map.put("table", phd.getVPeak());
List<ChartData> channelPeak = gammaFileUtil.Channel_Peak(phd, m_nCount, colorMap.get("Color_Peak"));
List<ChartData> channelPeak = gammaFileUtil.Channel_Peak(phd, colorMap.get("Color_Peak"));
map.put("channelPeakChart", channelPeak);
}
}
@ -1397,24 +1466,64 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
@Override
public Result deletePeak(String fileName, int curRow, HttpServletRequest request) {
Result result = new Result();
//获取用户名称
String userName = JwtUtil.getUserNameByToken(request);
//获取缓存的核素信息
Map<String, NuclideLines> nuclideMap = (Map<String, NuclideLines>) redisUtil.get(fileName + "-" + userName);
//获取PHDFile的缓存信息
Cache<String, PHDFile> phdCache = localCache.getPHDCache();
PHDFile phd = phdCache.getIfPresent(fileName + "-" + userName);
if (Objects.isNull(phd)) {
result.error500("Please select the parse file first");
return result;
}
//获取颜色信息
Map<String, String> colorMap = sysUserColorService.initColor(userName);
long m_nCount = phd.getSpec().getNum_g_channel();
//声明一个结果的map
HashMap<String, Object> map = new HashMap<>();
//获取峰的大小
int peakNum = phd.getVPeak().size();
if (peakNum < 1) {
result.error500("No peak to delete.");
return result;
}
//判断当前要操作的下标是否在范围内
if (curRow >= 0 && curRow < peakNum) {
//获取当前下标位置的峰值信息
PeakInfo info = phd.getVPeak().get(curRow);
//获取当前选中的峰值信息的能量值
double energy = info.energy;
if (CollectionUtils.isNotEmpty(info.nuclides)) {
//遍历核素信息
for (int i=0; i<info.nuclides.size(); i++) {
String nuclideName = info.nuclides.get(i);
//从缓存信息中获取核素名称
NuclideLines nuclideLines = nuclideMap.get(nuclideName);
//获取最大活度的核素位置
int maxYeildIdx = nuclideLines.maxYeildIdx;
//获取最大活度对应的核素能量值
Double maxEnergy = nuclideLines.getVenergy().get(maxYeildIdx);
//判断当前选中的峰值信息的能量值 是否在 最大活度对应的核素能量值公差范围内
if (energy >= maxEnergy-0.5 && energy <= maxEnergy+0.5) {
//则需要删除所有关联的核素信息并 从MapNucAct中移除相关核素内容
for (PeakInfo peakInfo: phd.getVPeak()) {
//如果峰的核素名称中包含当前删除的核素
if (peakInfo.nuclides.contains(nuclideName)) {
peakInfo.nuclides.remove(nuclideName);
}
}
//从核素相关map中移除核素信息
phd.getMapNucActMda().remove(nuclideName);
//移除核素信息
nuclideMap.remove(nuclideName);
}
}
}
//将当前行从峰数组中移除
phd.getVPeak().remove(curRow);
//重新计算核素活度浓度
gammaFileUtil.NuclidesIdent(phd, nuclideMap);
//重新分析数据
gammaFileUtil.PeaksChanged(phd);
for (int i = 0; i < phd.getVPeak().size(); i++) {
PeakInfo peakInfo = phd.getVPeak().get(i);
@ -1422,7 +1531,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
}
List<PeakInfo> vPeak = gammaFileUtil.InitPeakTable(phd.getVPeak());
map.put("table", vPeak);
List<ChartData> channelPeak = gammaFileUtil.Channel_Peak(phd, m_nCount, colorMap.get("Color_Peak"));
List<ChartData> channelPeak = gammaFileUtil.Channel_Peak(phd, colorMap.get("Color_Peak"));
map.put("channelPeakChart", channelPeak);
gammaFileUtil.UpdateChart(phd, map, colorMap);
}
@ -1542,14 +1651,14 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
// 根据要进行修改的列的数据下标 操作Vpeak数据
phd.getVPeak().get(curRow).nuclides.add(nuclideName);
// 查询当前用户所关心的核素名称
// 查询当前用户关联的核素信息
List<String> userLib = new LinkedList<>();
// 从postgreSql中获取当前用户关注的核素信息 如果当前用户没有 则返回管理员的
userLib = defaultNuclideSpectrumService.findNuclidesByUserName(userName, phd.getHeader().getSystem_type().toUpperCase());
if (CollectionUtils.isEmpty(userLib)) {
userLib = defaultNuclideSpectrumService.findNuclidesByUserName("admin", phd.getHeader().getSystem_type().toUpperCase());
}
Map<String, NuclideLines> mapNucLines = GetNuclideLines(userLib);
// // 查询当前用户关联的核素信息
// List<String> userLib = new LinkedList<>();
// // 从postgreSql中获取当前用户关注的核素信息 如果当前用户没有 则返回管理员的
// userLib = defaultNuclideSpectrumService.findNuclidesByUserName(userName, phd.getHeader().getSystem_type().toUpperCase());
// if (CollectionUtils.isEmpty(userLib)) {
// userLib = defaultNuclideSpectrumService.findNuclidesByUserName("admin", phd.getHeader().getSystem_type().toUpperCase());
// }
Map<String, NuclideLines> mapNucLines = (Map<String, NuclideLines>) redisUtil.get(fileName+"-"+userName);//GetNuclideLines(userLib);
// 查询出核素信息
NuclideLines it_line = mapNucLines.get(nuclideName);
// 如果核素信息不存在返回
@ -1576,30 +1685,65 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
@Override
public Result deleteNuclide(Integer curRow, String nuclideName, String fileName, List<String> list_identify, HttpServletRequest request) {
Result result = new Result();
//获取用户名
String userName = JwtUtil.getUserNameByToken(request);
Map<String, Object> map = new HashMap<>();
Map<String, NuclideLines> nuclideMap = (Map<String, NuclideLines>) redisUtil.get(fileName + "-" + userName);
//缓存中获取缓存信息
Cache<String, PHDFile> phdCache = localCache.getPHDCache();
PHDFile phd = phdCache.getIfPresent(fileName + "-" + userName);
if (Objects.isNull(phd)) {
result.error500("Please select the parse file first");
return result;
}
//判断当前选择的核素名称是否包含在当前Peak的核素列表中
int index = list_identify.indexOf(nuclideName);
if (index >= 0) {
// 如果所选的行下标小于0 或者 超出界限 则不进行处理
if (curRow < 0 || curRow >= phd.getVPeak().size()) {
return result;
}
// 更新峰信息列表和表格
// 根据核素名称获取对应的下标并从list_identifyphd.getVPeak()移除
list_identify.remove(index);
int peakNuclIndex = phd.getVPeak().get(curRow).nuclides.indexOf(nuclideName);
phd.getVPeak().get(curRow).nuclides.remove(peakNuclIndex);
List<PeakInfo> vPeak = gammaFileUtil.InitPeakTable(phd.getVPeak());
// 处理核素MDAMDC
gammaFileUtil.ReCalcMdaMdc(phd, nuclideName, curRow + 1);
map.put("identify", list_identify);
map.put("table", vPeak);
//从缓存信息中获取核素名称
NuclideLines nuclideLines = nuclideMap.get(nuclideName);
//获取最大活度的核素位置
int maxYeildIdx = nuclideLines.maxYeildIdx;
//获取最大活度对应的核素能量值
Double maxEnergy = nuclideLines.getVenergy().get(maxYeildIdx);
//获取当前选中的峰值信息的能量值
double energy = phd.getVPeak().get(curRow).energy;
//判断当前选中的峰值信息的能量值 是否在 最大活度对应的核素能量值公差范围内
if (energy >= maxEnergy-0.5 && energy <= maxEnergy+0.5) {
//则需要删除所有关联的核素信息并 从MapNucAct中移除相关核素内容
for (PeakInfo peakInfo: phd.getVPeak()) {
//如果峰的核素名称中包含当前删除的核素
if (peakInfo.nuclides.contains(nuclideName)) {
peakInfo.nuclides.remove(nuclideName);
}
}
//从核素相关map中移除核素信息
phd.getMapNucActMda().remove(nuclideName);
//移除核素信息
nuclideMap.remove(nuclideName);
//重新计算核素活度浓度
gammaFileUtil.NuclidesIdent(phd, nuclideMap);
//从核素的选中列表中移除对应下标的核素信息
list_identify.remove(index);
//重新初始化峰列表信息
List<PeakInfo> vPeak = gammaFileUtil.InitPeakTable(phd.getVPeak());
map.put("identify", list_identify);
map.put("table", vPeak);
} else {
// 更新峰信息列表和表格
// 根据核素名称获取对应的下标并从list_identifyphd.getVPeak()移除
list_identify.remove(index);
int peakNuclIndex = phd.getVPeak().get(curRow).nuclides.indexOf(nuclideName);
phd.getVPeak().get(curRow).nuclides.remove(peakNuclIndex);
List<PeakInfo> vPeak = gammaFileUtil.InitPeakTable(phd.getVPeak());
// 处理核素MDAMDC
gammaFileUtil.ReCalcMdaMdc(phd, nuclideName, curRow + 1);
map.put("identify", list_identify);
map.put("table", vPeak);
}
result.setSuccess(true);
result.setResult(map);
}
@ -1739,6 +1883,28 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
long span = phd.getSpec().getG_energy_span();
List<GardsNuclLinesLib> nuclideTableList = spectrumAnalysisMapper.getNuclideTable(name, span);
if (CollectionUtils.isNotEmpty(nuclideTableList)) {
nuclideTableList.stream().forEach(item-> {
if (Objects.nonNull(item.getEnergy())) {
item.setEnergy(Double.valueOf(String.format("%.3f", item.getEnergy())));
} else {
item.setEnergy(Double.valueOf(String.format("%.3f", 0.0)));
}
if (Objects.nonNull(item.getEnergyUncert())) {
item.setEnergyUncert(Double.valueOf(String.format("%.3f", item.getEnergyUncert())));
} else {
item.setEnergyUncert(Double.valueOf(String.format("%.3f", 0.0)));
}
if (Objects.nonNull(item.getYield())) {
item.setYield(Double.valueOf(String.format("%.3f", item.getYield())));
} else {
item.setYield(Double.valueOf(String.format("%.3f", 0.0)));
}
if (Objects.nonNull(item.getYieldUncert())) {
item.setYieldUncert(Double.valueOf(String.format("%.3f", item.getYieldUncert())));
} else {
item.setYieldUncert(Double.valueOf(String.format("%.3f", 0.0)));
}
});
map.put("table", nuclideTableList);
gammaFileUtil.InitChart(nuclideTableList, phd, map, colorMap);
}
@ -1909,30 +2075,17 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
phd.setBaseCtrls(m_baseCtrl);
phd.setVBase(m_baseCtrl.getBaseline());
// }
List<Long> m_vCount = new LinkedList<>();
long m_nCount = phd.getSpec().getNum_g_channel();
long m_nSChan = phd.getSpec().getBegin_channel();
// 获取当前角色的颜色配置
Map<String, String> colorMap = sysUserColorService.initColor(userName);
// 确保绘制曲线时所有谱都是从1道开始
int i = 0;
if (m_nSChan == 0) {
i = 1;
}
for (; i < m_nCount; ++i) {
m_vCount.add(phd.getSpec().getCounts().get(i));
}
if (m_nSChan == 0) {
m_vCount.add(0L);
}
List<SeriseData> differance = gammaFileUtil.Differance(phd, phd.getVPeak(), m_vCount, m_nCount);
List<SeriseData> differance = gammaFileUtil.Differance(phd, phd.getVPeak());
map.put("barChart", differance);
ChartData channelBaseLine = gammaFileUtil.Channel_BaseLine(phd, m_nCount, colorMap.get("Color_Base"));
ChartData channelBaseLine = gammaFileUtil.Channel_BaseLine(phd, colorMap.get("Color_Base"));
map.put("channelBaseLineChart", channelBaseLine);
List<ChartData> peakSet = gammaFileUtil.PeakSet(phd.getVPeak(), phd.getVBase(), colorMap.get("Color_Peak"), m_nCount, null, false);
List<ChartData> peakSet = gammaFileUtil.PeakSet(phd.getVPeak(), phd.getVBase(), colorMap.get("Color_Peak"), phd.getSpec().getNum_g_channel(), null, false);
map.put("peakSet", peakSet);
List<ShapeData> shapeData = gammaFileUtil.CreateShapeCP(phd.getBaseCtrls());
map.put("shapeData", shapeData);
map.put("peak", phd.getVPeak());
// 更新主界面的 Chart
gammaFileUtil.UpdateChart(phd, map, colorMap);
result.setSuccess(true);
@ -3432,16 +3585,18 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
result.error500("Please select the parse file first");
return result;
}
String temp = phd.getOriTotalCmt().trim();
if (StringUtils.isNotBlank(temp)) {
comments += "Comments From Original Spectrum:\n" + temp;
}
if (Objects.nonNull(sampleId)) {
CommentData commentData = spectrumAnalysisMapper.viewComment(sampleId);
if (Objects.nonNull(commentData)) {
temp = commentData.getComment() == null ? "" : commentData.getComment().trim();
if (StringUtils.isNotBlank(temp)) {
comments += "\n\nComments From " + commentData.getAnalyst() + "\n:" + temp;
if (StringUtils.isNotBlank(phd.getOriTotalCmt())) {
String temp = phd.getOriTotalCmt().trim();
if (StringUtils.isNotBlank(temp)) {
comments += "Comments From Original Spectrum:\n" + temp;
}
if (Objects.nonNull(sampleId)) {
CommentData commentData = spectrumAnalysisMapper.viewComment(sampleId);
if (Objects.nonNull(commentData)) {
temp = commentData.getComment() == null ? "" : commentData.getComment().trim();
if (StringUtils.isNotBlank(temp)) {
comments += "\n\nComments From " + commentData.getAnalyst() + "\n:" + temp;
}
}
}
}
@ -3517,7 +3672,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
if (StringUtils.isBlank(reportPath)) {
throw new RuntimeException("The automatic handler generated report does not exist");
}
String pathName = StringPool.SLASH + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + reportPath.substring(0, reportPath.lastIndexOf(StringPool.SLASH));
String pathName = ftpUtil.getFtpRootPath() + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + reportPath.substring(0, reportPath.lastIndexOf(StringPool.SLASH));
String fileName = reportPath.substring(reportPath.lastIndexOf(StringPool.SLASH) + 1) + ".txt";
// 连接ftp
FTPClient ftpClient = ftpUtil.LoginFTP();
@ -3567,7 +3722,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
public void exportARR(Integer sampleId, HttpServletResponse response) {
// 获取自动处理生成的报告地址
String reportPath = spectrumAnalysisMapper.viewARR(sampleId);
String pathName = StringPool.SLASH + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + reportPath.substring(0, reportPath.lastIndexOf(StringPool.SLASH));
String pathName = ftpUtil.getFtpRootPath() + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + reportPath.substring(0, reportPath.lastIndexOf(StringPool.SLASH));
String fileName = reportPath.substring(reportPath.lastIndexOf(StringPool.SLASH) + 1) + ".txt";
// 连接ftp
FTPClient ftpClient = ftpUtil.LoginFTP();
@ -4105,7 +4260,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
if (StringUtils.isBlank(logPath)) {
throw new RuntimeException("The log generated by the automatic processor does not exist");
}
String pathName = StringPool.SLASH + spectrumPathProperties.getLogPath() + StringPool.SLASH + logPath.substring(0, logPath.lastIndexOf(StringPool.SLASH));
String pathName = ftpUtil.getFtpRootPath() + spectrumPathProperties.getLogPath() + StringPool.SLASH + logPath.substring(0, logPath.lastIndexOf(StringPool.SLASH));
String fileName = logPath.substring(logPath.lastIndexOf(StringPool.SLASH) + 1);
// 连接ftp
FTPClient ftpClient = ftpUtil.LoginFTP();
@ -4217,26 +4372,36 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
public Result saveToDB(String fileName, HttpServletRequest request) {
Result result = new Result();
String userName = JwtUtil.getUserNameByToken(request);
boolean bRet = true;
boolean bRet = false;
Cache<String, PHDFile> phdCache = localCache.getPHDCache();
PHDFile phd = phdCache.getIfPresent(fileName + "-" + userName);
if (Objects.isNull(phd)) {
result.error500("Please select the parse file first");
return result;
}
// 查看分析员是否有权限操作当前台站信息
// 判断当前分析员是否有过排班任务
Integer stationId = spectrumAnalysisMapper.getStationId(phd.getHeader().getSite_code());
boolean bAnalysisResultWriteAuthority = userTaskUtil.CheckUserWriteAuthorityForStation(userName, stationId);
// 如果用户没有权限操作 则查看当前用户是否是高级分析员/管理员
if (!bAnalysisResultWriteAuthority) {
List<String> roleCodes = userTaskUtil.findRoleCodeByUserName(userName);
// 如果只是分析员 则无法保存数据库 返回信息
if (roleCodes.contains(RoleType.Role_Analyst) && !roleCodes.contains(RoleType.Role_SuperAnalyst) && !roleCodes.contains(RoleType.Role_Manager)) {
bRet = false;
}
if (!phd.isBAnalyed()) {
result.error500("Please first Reprocessing!");
return result;
}
if (!bRet) {
//获取当前操作的台站的id
Integer stationId = spectrumAnalysisMapper.getStationId(phd.getHeader().getSite_code());
//判断当前分析员当天是否有对应台站的排班任务
List<String> userStation = userTaskUtil.findUserStation(userName);
boolean bAnalysisResultWriteAuthority = false;
if (userStation.contains(String.valueOf(stationId))) {
bAnalysisResultWriteAuthority = true;
}
//根据系统类型获取系统存储前缀
String sysTemSubdir = nameStandUtil.GetSysTemSubdir(phd.getHeader().getSystem_type());
//根据数据类型获取数据存储前缀
String dateTypeSubdir = nameStandUtil.GetDateTypeSubdir(phd.getMsgInfo().getData_type());
//格式化名称
Map<String, String> nameMap = nameStandUtil.NameStandard(phd);
String mSaveFileName = nameMap.get("saveFile");
//判断当前分析员是否有过历史分析当前文件
Integer isExist = spectrumAnalysisMapper.SampleIsExist(sysTemSubdir+ dateTypeSubdir + StringPool.SLASH + mSaveFileName, userName);
// 如果用户没有权限操作 则查看当前用户是否是高级分析员/管理员
if (!bAnalysisResultWriteAuthority && Objects.isNull(isExist)) {
result.error500("You have no permission to save results to DB!");
return result;
}
@ -4255,7 +4420,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
phd.setStatus(sampleData.getStatus());
} else {
// 如果sampleData为空 存储数据
SaveSampleToDB(phd, middleData.analyses_save_filePath);
bRet = SaveSampleToDB(phd, middleData.analyses_save_filePath);
}
if (!bRet) {
result.error500("The Database hasn't this Spectrum(" + phd.getFilename() + ") and Insert it to Database failed.");
@ -4275,8 +4440,6 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
if (StringUtils.isBlank(idAnalysis)) {
// RNMAN.GARDS_ANALYSES 表插入数据
analysesSpectrumService.insertEntity(middleData, phd, userName, comments);
// 查询idAnalysis
idAnalysis = spectrumAnalysisMapper.getIdAnalysisByIdAnalyst(phd.getId_sample(), userName);
// 修改sample_data状态
spectrumAnalysisMapper.updateAnalysesStatus(middleData.analyses_save_filePath);
} else {
@ -4316,27 +4479,48 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
paths.add(middleData.analyses_lc_filePath);
paths.add(middleData.analyses_scac_filePath);
paths.add(middleData.analyses_baseline_filePath);
if (false) paths.add(middleData.analyses_save_filePath);
if (bRet) {
paths.add(middleData.analyses_save_filePath);
}
} else {
result.error500("Save analysis results to DB failed.");
return result;
}
Map<String, Object> map = new HashMap<>();
//更新detial Information
List<String> detailedInfo = gammaFileUtil.DetailedInfo(phd.getId_sample(), phd);
map.put("DetailedInformation", detailedInfo);
result.setSuccess(true);
result.setResult(map);
return result;
}
@Transactional
public void SaveSampleToDB(PHDFile phd, String input_file_name) {
public boolean SaveSampleToDB(PHDFile phd, String input_file_name) {
boolean bRet = false;
String sit_det_code = phd.getHeader().getSite_code();
String detect_code = phd.getHeader().getDetector_code();
// 查询 station_id detect_id
Integer station_id = spectrumAnalysisMapper.getStationId(sit_det_code);
Integer detect_id = spectrumAnalysisMapper.getDetectorId(detect_code);
// ORIGINAL.GARDS_SAMPLE_DATA 表数据
sampleDataSpectrumService.saveSampleDataGamma(phd, input_file_name, station_id, detect_id);
Integer saveSampleDataGamma = sampleDataSpectrumService.saveSampleDataGamma(phd, input_file_name, station_id, detect_id);
if (saveSampleDataGamma > 0) {
bRet = true;
} else {
bRet = false;
return bRet;
}
// 查询新增后的 sample_id 的值赋给变量 DECLARESAMPLEID
Integer sampleId = spectrumAnalysisMapper.getSampleId(input_file_name);
// ORIGINAL.GARDS_SAMPLE_AUX 表数据
sampleAuxSpectrumService.saveSampleAuxGamma(phd, sampleId);
Integer saveSampleAuxGamma = sampleAuxSpectrumService.saveSampleAuxGamma(phd, sampleId);
if (saveSampleAuxGamma > 0) {
bRet = true;
} else {
bRet = false;
return bRet;
}
// ORIGINAL.gards_sample_description 数据表
sampleDescriptionSpectrumService.saveSampleDescriptionGamma(phd, sampleId);
// ORIGINAL.GARDS_SAMPLE_CERT ORIGINAL.GARDS_SAMPLE_CERT_LINE 数据表
@ -4345,17 +4529,18 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
sampleCertLineSpectrumService.saveSampleCertLineGamma(phd, sampleId);
}
// gards_ calibration_pairs_orig数据表
calibrationPairsOrigSpectrumService.saveCalibrationPairsOrigGamma(phd, sampleId);
Integer saveCalibrationPairsOrigGamma = calibrationPairsOrigSpectrumService.saveCalibrationPairsOrigGamma(phd, sampleId);
// gards_total_efficiency _pairs数据表(IDC没有)
totalEfficiencyPairsSpectrumService.saveTotalEfficiencyPairsGamma(phd, sampleId);
Integer saveTotalEfficiencyPairsGamma = totalEfficiencyPairsSpectrumService.saveTotalEfficiencyPairsGamma(phd, sampleId);
// gards_spectrum数据表
spectrumService.saveSpectrumGamma(phd, sampleId, input_file_name);
Integer saveSpectrumGamma = spectrumService.saveSpectrumGamma(phd, sampleId, input_file_name);
// 根据文件名称获取sample基础数据信息
GardsSampleDataSpectrum samplData = spectrumAnalysisMapper.findSampleByFilePath(input_file_name);
if (Objects.nonNull(samplData)) {
phd.setId_sample(samplData.getSampleId().toString());
phd.setStatus(samplData.getStatus());
}
return bRet;
}
@Override
@ -4412,20 +4597,22 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
// 换行
strBuild.append(System.lineSeparator());
// 文本内容第三块数据
List<List<String>> peakNuclides = phd.getVPeak().stream().map(item -> item.nuclides).collect(Collectors.toList());
List<String> nuclides = new LinkedList<>();
for (int i = 0; i < peakNuclides.size(); i++) {
List<String> peakNuclide = peakNuclides.get(i);
nuclides.addAll(peakNuclide);
if (CollectionUtils.isNotEmpty(phd.getVPeak())) {
List<List<String>> peakNuclides = phd.getVPeak().stream().map(item -> item.nuclides).collect(Collectors.toList());
List<String> nuclides = new LinkedList<>();
for (int i = 0; i < peakNuclides.size(); i++) {
List<String> peakNuclide = peakNuclides.get(i);
nuclides.addAll(peakNuclide);
}
nuclides = nuclides.stream().distinct().collect(Collectors.toList());
String nuclideStr = "";
for (int i = 0; i < nuclides.size(); i++) {
nuclideStr += " " + nuclides.get(i);
}
strBuild.append(nuclideStr);
// 换行
strBuild.append(System.lineSeparator());
}
nuclides = nuclides.stream().distinct().collect(Collectors.toList());
String nuclideStr = "";
for (int i = 0; i < nuclides.size(); i++) {
nuclideStr += " " + nuclides.get(i);
}
strBuild.append(nuclideStr);
// 换行
strBuild.append(System.lineSeparator());
// 换行
strBuild.append(System.lineSeparator());
// 文本内容第四块
@ -4458,42 +4645,44 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
strBuild.append(System.lineSeparator());
// 文本内容第五块数据
Map<String, NuclideActMda> mapNucActMda = phd.getMapNucActMda();
for (Map.Entry<String, NuclideActMda> entry : mapNucActMda.entrySet()) {
String key = entry.getKey();
NuclideActMda nuc = entry.getValue();
String halflifeValue = "";
if (nuc.isBCalculateMDA()) {
String units = "S";
double halflife = nuc.getHalflife();
if (halflife >= 31556736) {// 1年 = 365.24 * 24 * 60 * 60 = 31556736s
halflife /= 31556736;
units = "A";
} else if (halflife >= 86400) {// 1天 = 24 * 60 * 60 = 86400s
halflife /= 86400;
units = "D";
} else if (halflife >= 3600) {
halflife /= 3600;
units = "H";
if (CollectionUtils.isNotEmpty(mapNucActMda)) {
for (Map.Entry<String, NuclideActMda> entry : mapNucActMda.entrySet()) {
String key = entry.getKey();
NuclideActMda nuc = entry.getValue();
String halflifeValue = "";
if (nuc.isBCalculateMDA()) {
String units = "S";
double halflife = nuc.getHalflife();
if (halflife >= 31556736) {// 1年 = 365.24 * 24 * 60 * 60 = 31556736s
halflife /= 31556736;
units = "A";
} else if (halflife >= 86400) {// 1天 = 24 * 60 * 60 = 86400s
halflife /= 86400;
units = "D";
} else if (halflife >= 3600) {
halflife /= 3600;
units = "H";
}
halflifeValue = NumberFormatUtil.numberFormat(String.valueOf(halflife)) + units;
}
String efficiency = NumberFormatUtil.numberFormat(String.valueOf(nuc.getEfficiency()));
String activity = NumberFormatUtil.numberFormat(String.valueOf(nuc.getActivity()));
String actErr = "";
if (nuc.getActivity() > 0) {
actErr = NumberFormatUtil.numberFormat(String.valueOf(nuc.getAct_err() / nuc.getActivity() * 100));
}
String mda = NumberFormatUtil.numberFormat(String.valueOf(nuc.getMda()));
String conc = NumberFormatUtil.numberFormat(String.valueOf(nuc.getConcentration()));
String mdc = NumberFormatUtil.numberFormat(String.valueOf(nuc.getMdc()));
if (nuc.getCalculateIdx() >= 0 && nuc.getCalculateIdx() < nuc.getVEnergy().size()) {
String yield = NumberFormatUtil.numberFormat(String.valueOf(nuc.getVYield().get(nuc.getCalculateIdx()) * 100));
String energy = NumberFormatUtil.numberFormat(String.valueOf(nuc.getVEnergy().get(nuc.getCalculateIdx())));
strBuild.append(rowFormat(title5, key, halflifeValue, yield, energy, efficiency, activity, actErr, mda, conc, mdc));
strBuild.append(System.lineSeparator());
} else {
strBuild.append(rowFormat(title5, key, halflifeValue, "NULL", "NULL", efficiency, activity, actErr, mda, conc, mdc));
strBuild.append(System.lineSeparator());
}
halflifeValue = NumberFormatUtil.numberFormat(String.valueOf(halflife)) + units;
}
String efficiency = NumberFormatUtil.numberFormat(String.valueOf(nuc.getEfficiency()));
String activity = NumberFormatUtil.numberFormat(String.valueOf(nuc.getActivity()));
String actErr = "";
if (nuc.getActivity() > 0) {
actErr = NumberFormatUtil.numberFormat(String.valueOf(nuc.getAct_err() / nuc.getActivity() * 100));
}
String mda = NumberFormatUtil.numberFormat(String.valueOf(nuc.getMda()));
String conc = NumberFormatUtil.numberFormat(String.valueOf(nuc.getConcentration()));
String mdc = NumberFormatUtil.numberFormat(String.valueOf(nuc.getMdc()));
if (nuc.getCalculateIdx() >= 0 && nuc.getCalculateIdx() < nuc.getVEnergy().size()) {
String yield = NumberFormatUtil.numberFormat(String.valueOf(nuc.getVYield().get(nuc.getCalculateIdx()) * 100));
String energy = NumberFormatUtil.numberFormat(String.valueOf(nuc.getVEnergy().get(nuc.getCalculateIdx())));
strBuild.append(rowFormat(title5, key, halflifeValue, yield, energy, efficiency, activity, actErr, mda, conc, mdc));
strBuild.append(System.lineSeparator());
} else {
strBuild.append(rowFormat(title5, key, halflifeValue, "NULL", "NULL", efficiency, activity, actErr, mda, conc, mdc));
strBuild.append(System.lineSeparator());
}
}
strBuild.append(System.lineSeparator());
@ -4503,6 +4692,10 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
String dataType = phd.getMsgInfo().getData_type().substring(0, 1);
String format = ".txt";
String txtFileName = String.format("%s-%s_%s_%s_RESULT%s", detectorCode, date, time, dataType, format);
if (StrUtil.isNotBlank(fileName)){
if (StrUtil.contains(fileName, ".PHD"))
txtFileName = StrUtil.replace(fileName, ".PHD", ".txt");
}
// 导出数据内容到txt文本
OutputStream fos = null;
try {
@ -4641,6 +4834,10 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
String dataType = phd.getMsgInfo().getData_type().substring(0, 1);
String format = ".xls";
String xlsFileName = String.format("%s-%s_%s_%s_RESULT%s", detectorCode, date, time, dataType, format);
if (StrUtil.isNotBlank(fileName)){
if (StrUtil.contains(fileName, ".PHD"))
xlsFileName = StrUtil.replace(fileName, ".PHD", ".xls");
}
String template = SAVETOEXCEL_G.getName();
// 导出时使用默认文件名 file.xls
ExportUtil.exportXls(response, template, data, xlsFileName);

View File

@ -98,8 +98,8 @@ public class SpectrumFileServiceImpl implements ISpectrumFileService {
boolean created = FTPUtil.createDirs(ftpClient, filePath);
if (!created) return Result.error(Prompt.DIR_CREATE_FAIL + filePath);
// 上传所有文件
System.out.println("filelist>>>>"+fileList.size());
String rootPath = spectrumPathProperties.getRootPath();
List<String> failList = new ArrayList<>();
String rootPath = ftpUtil.getFtpRootPath();
for (File oneFile : fileList) {
String fileName = oneFile.getName();
// 判断能谱文件名称是否符合规则不符合则进行重命名
@ -109,10 +109,12 @@ public class SpectrumFileServiceImpl implements ISpectrumFileService {
fileName = oneFile.getName().substring(0, 23)+suffix;
}
String fullFilePath = rootPath + filePath + slash + fileName;
System.out.println("fullFilePath>>>>"+fullFilePath);
FileInputStream local = new FileInputStream(oneFile);
ftpClient.storeFile(fullFilePath, local);
boolean success = ftpClient.storeFile(fullFilePath, local);
if (!success) failList.add(fullFilePath);
}
if (CollUtil.isNotEmpty(failList))
return Result.error(Prompt.UPLOAD_ERR, failList);
return Result.OK(Prompt.UPLOAD_SUCC);
} catch (IOException e) {
e.printStackTrace();
@ -142,7 +144,7 @@ public class SpectrumFileServiceImpl implements ISpectrumFileService {
String username = user.getUsername();
String slash = SymbolConstant.SINGLE_SLASH;
String comma = SymbolConstant.COMMA;
String filePath = slash + spectrumPathProperties.getUploadPath() + slash + username;
String filePath = ftpUtil.getFtpRootPath() + spectrumPathProperties.getUploadPath() + slash + username;
FTPClient ftpClient = null;
List<FileDto> fileDtos = new ArrayList<>();
Page<FileDto> page = new Page<>(pageNo, pageSize);

View File

@ -9,10 +9,7 @@ import org.jeecg.common.util.RedisUtil;
import org.jeecg.modules.system.entity.GardsSampleDataSystem;
import org.jeecg.modules.system.service.IGardsSampleDataService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.bind.annotation.*;
import java.util.Map;
@ -49,8 +46,9 @@ public class GardsSampleDataController {
@DeleteMapping("deleteById")
@ApiOperation(value = "删除DATA_BASE数据", notes = "删除DATA_BASE数据")
public Result deleteById(Integer sampleId){
return gardsSampleDataService.deleteById(sampleId);
    /**
     * Deletes data belonging to the given sample id, delegating to
     * {@code gardsSampleDataService.deleteById}.  The three boolean flags select
     * which groups of tables are purged (NOTE(review): presumably the ORIGINAL
     * sample-data tables, the RNAUTO results and the RNMAN results respectively —
     * confirm against the service implementation).
     *
     * @param sampleId   id of the sample to delete (required request parameter)
     * @param sampleData whether to delete the original sample data
     * @param rnAuto     whether to delete the automatic-analysis results
     * @param rnMan      whether to delete the manual-analysis results
     * @return outcome of the delete operation
     */
    public Result<?> deleteById(@RequestParam Integer sampleId, boolean sampleData,
                                boolean rnAuto, boolean rnMan){
        return gardsSampleDataService.deleteById(sampleId, sampleData, rnAuto, rnMan);
    }
}

View File

@ -1,7 +1,20 @@
package org.jeecg.modules.system.mapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import org.apache.ibatis.annotations.Param;
import org.jeecg.modules.base.dto.OwnerDto;
import org.jeecg.modules.base.entity.rnauto.GardsAnalyses;
import org.jeecg.modules.system.entity.GardsSampleDataSystem;
import java.util.List;
/**
 * MyBatis mapper for cross-schema sample-data maintenance (Oracle datasource).
 */
public interface GardsSampleDataMapper extends BaseMapper<GardsSampleDataSystem> {
// Lists every owner/table pair whose table contains the given column name,
// read from DBA_TAB_COLUMNS ("filed" is presumably a typo for "field" — confirm before renaming).
List<OwnerDto> containSampleId(String filed);
// Deletes the rows with the given sample id from every listed table
// (table names are interpolated literally in the XML — pass trusted names only).
void delTables(@Param("tableNames") List<String> tableNames,
@Param("sampleId") Integer sampleId);
// Reads the analysis artifact paths (baseline/LC/SCAC/log/report) for the
// sample from ${owner}.GARDS_ANALYSES.
GardsAnalyses getAnalysis(@Param("sampleId") Integer sampleId,
@Param("owner") String owner);
}

View File

@ -0,0 +1,31 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="org.jeecg.modules.system.mapper.GardsSampleDataMapper">

    <!-- Delete the sample's rows from every given table.
         Wrapped in an anonymous PL/SQL block because Oracle JDBC rejects multiple
         ';'-separated statements in a single command (ORA-00911).
         ${tableName} is a literal substitution (identifiers cannot be bound with #{});
         callers must only pass trusted, whitelisted table names. -->
    <delete id="delTables">
        BEGIN
        <foreach collection = "tableNames" item = "tableName" index = "index">
            DELETE FROM ${tableName} WHERE SAMPLE_ID = #{sampleId};
        </foreach>
        END;
    </delete>

    <!-- List every owner schema / table pair whose table contains the given column
         (queried from the Oracle data dictionary). -->
    <select id="containSampleId" resultType="org.jeecg.modules.base.dto.OwnerDto">
        SELECT
            OWNER,
            TABLE_NAME AS tableName
        FROM
            DBA_TAB_COLUMNS
        WHERE
            COLUMN_NAME = #{filed}
    </select>

    <!-- Fetch the analysis artifact paths for one sample from the given owner schema.
         NOTE(review): REPORT_PAHT appears to be the actual (misspelled) column name
         in the GARDS schema — confirm against the database before "fixing" it. -->
    <select id="getAnalysis" resultType="org.jeecg.modules.base.entity.rnauto.GardsAnalyses">
        SELECT
            BASELINE_PATH AS baselinePath,
            LC_PATH AS lcPath,
            SCAC_PATH AS scacPath,
            LOG_PATH AS logPath,
            REPORT_PAHT AS reportPath
        FROM
            ${owner}.GARDS_ANALYSES
        WHERE
            SAMPLE_ID = #{sampleId}
    </select>
</mapper>

View File

@ -4,6 +4,7 @@ import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.service.IService;
import org.jeecg.common.api.QueryRequest;
import org.jeecg.common.api.vo.Result;
import org.jeecg.modules.base.entity.rnauto.GardsAnalyses;
import org.jeecg.modules.system.entity.GardsSampleDataSystem;
public interface IGardsSampleDataService extends IService<GardsSampleDataSystem> {
@ -21,6 +22,7 @@ public interface IGardsSampleDataService extends IService<GardsSampleDataSystem>
* @param sampleId
* @return
*/
Result deleteById(Integer sampleId);
Result<?> deleteById(Integer sampleId, boolean sampleData, boolean rnAuto, boolean rnMan);
GardsSampleDataSystem getOne(Integer sampleId);
}

View File

@ -1,5 +1,10 @@
package org.jeecg.modules.system.service.impl;
import cn.hutool.core.collection.CollUtil;
import cn.hutool.core.collection.ListUtil;
import cn.hutool.core.date.DateUtil;
import cn.hutool.core.util.ObjectUtil;
import cn.hutool.core.util.StrUtil;
import com.baomidou.dynamic.datasource.annotation.DS;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
@ -9,23 +14,44 @@ import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import org.jeecg.common.api.QueryRequest;
import org.jeecg.common.api.vo.Result;
import org.jeecg.common.constant.enums.FileTypeEnum;
import org.jeecg.common.properties.SpectrumPathProperties;
import org.jeecg.common.util.FTPUtil;
import org.jeecg.common.util.RedisUtil;
import org.jeecg.modules.base.dto.OwnerDto;
import org.jeecg.modules.base.entity.rnauto.GardsAnalyses;
import org.jeecg.modules.system.entity.GardsSampleDataSystem;
import org.jeecg.modules.system.mapper.GardsSampleDataMapper;
import org.jeecg.modules.system.service.IGardsSampleDataService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.jdbc.datasource.DataSourceTransactionManager;
import org.springframework.stereotype.Service;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.TransactionDefinition;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.transaction.interceptor.TransactionAspectSupport;
import org.springframework.transaction.support.DefaultTransactionDefinition;
import java.util.HashMap;
import java.util.Objects;
import java.util.*;
import java.util.stream.Collectors;
@Service("gardsSampleDataService")
@DS("ora")
public class GardsSampleDataServiceImpl extends ServiceImpl<GardsSampleDataMapper, GardsSampleDataSystem> implements IGardsSampleDataService {
@Autowired
private RedisUtil redisUtil;
@Autowired
private FTPUtil ftpUtil;
@Autowired
private SpectrumPathProperties pathProperties;
@Autowired
private PlatformTransactionManager transactionManager;
@Override
public Result<IPage<GardsSampleDataSystem>> findPage(QueryRequest queryRequest, GardsSampleDataSystem gardsSampleData) {
//查询全部台站信息
@ -62,14 +88,115 @@ public class GardsSampleDataServiceImpl extends ServiceImpl<GardsSampleDataMappe
}
@Override
@Transactional
public Result deleteById(Integer sampleId) {
Result result = new Result();
LambdaQueryWrapper<GardsSampleDataSystem> queryWrapper = new LambdaQueryWrapper<>();
queryWrapper.eq(GardsSampleDataSystem::getSampleId, sampleId);
this.baseMapper.delete(queryWrapper);
result.success("Successfully deleted");
return result;
public Result<?> deleteById(Integer sampleId, boolean sampleData,
boolean rnAuto, boolean rnMan) {
String ftpRootPath = ftpUtil.getFtpRootPath();
String savePath = ftpRootPath + pathProperties.getSaveFilePath() + StrUtil.SLASH;
String logPath = ftpRootPath + pathProperties.getLogPath() + StrUtil.SLASH;
/* 删除数据库数据 */
// 过滤掉多余的表
String ORIGINAL = "ORIGINAL";String RNAUTO = "RNAUTO";String RNMAN = "RNMAN";
List<String> suitable = ListUtil.toList(ORIGINAL, RNAUTO, RNMAN);
List<OwnerDto> ownerDtos = baseMapper.containSampleId("SAMPLE_ID").stream()
.filter(owner -> CollUtil.contains(suitable, owner.getOwner()))
.collect(Collectors.toList());
String DOT = StrUtil.DOT;
// 手动控制事务
TransactionDefinition txDef = new DefaultTransactionDefinition();
TransactionStatus txStatus = transactionManager.getTransaction(txDef);
try {
List<String> needDel = new ArrayList<>();
if (sampleData){
// 收集所有表名
List<String> allTables = ownerDtos.stream()
.map(owner -> owner.getOwner() + DOT + owner.getTableName())
.collect(Collectors.toList());
// 删除表数据
if (CollUtil.isNotEmpty(allTables))
baseMapper.delTables(allTables, sampleId);
// 收集待删除文件路径
needDel.add(samplePath(savePath, sampleId)); // 原始谱文件
needDel.addAll(manOrAutoPath(savePath, logPath, sampleId, RNMAN)); // 人工交互文件
needDel.addAll(manOrAutoPath(savePath, logPath, sampleId, RNAUTO)); // 自动处理文件
}
else {
if (rnAuto){
// 收集自动处理库所有表名
List<String> autoTables = ownerDtos.stream()
.filter(owner -> StrUtil.equals(owner.getOwner(), RNAUTO))
.map(owner -> owner.getOwner() + DOT + owner.getTableName())
.collect(Collectors.toList());
// 删除表数据
if (CollUtil.isNotEmpty(autoTables))
baseMapper.delTables(autoTables, sampleId);
// 收集待删除文件路径
needDel.addAll(manOrAutoPath(savePath, logPath, sampleId, RNAUTO)); // 自动处理文件
}
if (rnMan){
// 收集人工交互库所有表名
List<String> manTables = ownerDtos.stream()
.filter(owner -> StrUtil.equals(owner.getOwner(), RNMAN))
.map(owner -> owner.getOwner() + DOT + owner.getTableName())
.collect(Collectors.toList());
// 删除表数据
if (CollUtil.isNotEmpty(manTables))
baseMapper.delTables(manTables, sampleId);
// 收集待删除文件路径
needDel.addAll(manOrAutoPath(savePath, logPath, sampleId, RNMAN)); // 人工交互文件
}
}
transactionManager.commit(txStatus);
needDel = needDel.stream().filter(StrUtil::isNotBlank).collect(Collectors.toList());
if (CollUtil.isEmpty(needDel))
return Result.OK("Data cleaning is complete. No files need to be cleaned!");
// 删除FTP文件
List<String> failList = ftpUtil.removeFiles(needDel);
if (CollUtil.isNotEmpty(failList))
return Result.error("Data clearing is complete, but file clearing fails!", failList);
return Result.OK("Data and file cleanup complete!");
}catch (Exception e){
transactionManager.rollback(txStatus);
e.printStackTrace();
return Result.error("Data deletion is abnormal, The file deletion operation has not been performed!");
}
}
/**
 * Look up one sample row by its id.
 *
 * @param sampleId id of the sample
 * @return the matching entity, or an empty entity (never null) when no row matches
 */
@Override
public GardsSampleDataSystem getOne(Integer sampleId) {
    LambdaQueryWrapper<GardsSampleDataSystem> byId = new LambdaQueryWrapper<>();
    byId.eq(GardsSampleDataSystem::getSampleId, sampleId);
    GardsSampleDataSystem found = getOne(byId);
    return found != null ? found : new GardsSampleDataSystem();
}
/**
 * Build the FTP path of the sample's original spectrum file.
 *
 * @param savePath base directory the spectrum files live under (trailing slash included)
 * @param sampleId id of the sample
 * @return full path, or null when the sample row has no input file name
 */
private String samplePath(String savePath, Integer sampleId){
    String inputFileName = getOne(sampleId).getInputFileName();
    return StrUtil.isBlank(inputFileName) ? null : savePath + inputFileName;
}
/**
 * Collect the FTP paths of the analysis artifacts (baseline, LC, SCAC, log, report)
 * recorded for the sample in the given owner schema (RNAUTO or RNMAN).
 *
 * @param savePath base directory of the spectrum/artifact files (trailing slash included)
 * @param logPath  base directory of the log files (trailing slash included)
 * @param sampleId id of the sample
 * @param owner    owner schema to read the analysis record from
 * @return paths of every non-blank artifact; empty list when no analysis record exists
 */
private List<String> manOrAutoPath(String savePath, String logPath,
                                   Integer sampleId, String owner){
    List<String> paths = new ArrayList<>();
    GardsAnalyses analysis = baseMapper.getAnalysis(sampleId, owner);
    if (ObjectUtil.isNull(analysis))
        return paths;
    if (StrUtil.isNotBlank(analysis.getBaselinePath()))
        paths.add(savePath + analysis.getBaselinePath());
    if (StrUtil.isNotBlank(analysis.getLcPath()))
        paths.add(savePath + analysis.getLcPath());
    if (StrUtil.isNotBlank(analysis.getScacPath()))
        paths.add(savePath + analysis.getScacPath());
    // Log files live under the log directory, not the save directory
    if (StrUtil.isNotBlank(analysis.getLogPath()))
        paths.add(logPath + analysis.getLogPath());
    // Report path is stored without an extension; append the .txt suffix
    if (StrUtil.isNotBlank(analysis.getReportPath()))
        paths.add(savePath + analysis.getReportPath() + FileTypeEnum.txt.getType());
    return paths;
}
}

View File

@ -48,7 +48,7 @@ public class ReadLineUtil {
try {
ftpClient.enterLocalPassiveMode();
String fileName = filePath.substring(filePath.lastIndexOf(StringPool.SLASH) + 1);
String parameterFilePath = StringPool.SLASH + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + filePath.substring(0, filePath.lastIndexOf(StringPool.SLASH));
String parameterFilePath = ftpUtil.getFtpRootPath() + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + filePath.substring(0, filePath.lastIndexOf(StringPool.SLASH));
//判断文件路径是否为空
if (StringUtils.isNotBlank(parameterFilePath)){
//在当前工作路径下读取文件
@ -191,7 +191,7 @@ public class ReadLineUtil {
OutputStream outputStream = null;
InputStream inputStream = null;
try {
filePath = StringPool.SLASH + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + filePath;
filePath = ftpUtil.getFtpRootPath() + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + filePath;
// 切换工作目录为 /
ftpClient.changeWorkingDirectory(SymbolConstant.SINGLE_SLASH);