From 6c2f0a54152c4837195e6844350f27eaade206df Mon Sep 17 00:00:00 2001
From: nieziyan
Date: Fri, 27 Oct 2023 19:19:39 +0800
Subject: [PATCH] =?UTF-8?q?feat=EF=BC=9Abugs?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .../java/org/jeecg/common/util/ClassUtil.java |   16 +-
 .../org/jeecg/common/util/ExportUtil.java     |   10 +-
 .../org/jeecg/modules/base/dto/ConcDto.java   |   10 +
 .../org/jeecg/modules/base/dto/ConcDtoXe.java |   16 +
 .../mapper/GardsNuclIdedAutoMapper.java       |    1 +
 .../mapper/GardsXeResultsAutoMapper.java      |    3 +-
 .../mapper/GardsXeResultsManMapper.java       |    3 +-
 .../mapper/xml/GardsXeResultsAutoMapper.xml   |    2 +-
 .../mapper/xml/GardsXeResultsManMapper.xml    |    2 +-
 .../service/IGardsNuclIdedAutoService.java    |    1 +
 .../service/IGardsXeResultsAutoService.java   |    3 +-
 .../service/IGardsXeResultsManService.java    |    3 +-
 .../impl/CalculateConcServiceImpl.java        |   28 +-
 .../impl/GardsNuclIdedAutoServiceImpl.java    |    1 +
 .../impl/GardsXeResultsAutoServiceImpl.java   |    3 +-
 .../impl/GardsXeResultsManServiceImpl.java    |    3 +-
 .../SpectrumAnalysesController.java           |    6 +
 .../service/ISpectrumAnalysisService.java     |    2 +
 .../service/impl/GammaServiceImpl.java        | 1825 +++++++++--------
 .../impl/SpectrumAnalysisServiceImpl.java     | 1818 ++++++++--------
 20 files changed, 1929 insertions(+), 1827 deletions(-)
 create mode 100644 jeecg-boot-base-core/src/main/java/org/jeecg/modules/base/dto/ConcDtoXe.java

diff --git a/jeecg-boot-base-core/src/main/java/org/jeecg/common/util/ClassUtil.java b/jeecg-boot-base-core/src/main/java/org/jeecg/common/util/ClassUtil.java
index b940fe78..d2f5f7a6 100644
--- a/jeecg-boot-base-core/src/main/java/org/jeecg/common/util/ClassUtil.java
+++ b/jeecg-boot-base-core/src/main/java/org/jeecg/common/util/ClassUtil.java
@@ -1,14 +1,15 @@
 package org.jeecg.common.util;
 
 import cn.hutool.core.collection.CollUtil;
-import cn.hutool.core.collection.ListUtil;
 import cn.hutool.core.util.StrUtil;
-import org.jeecgframework.poi.excel.entity.TemplateExportParams;
+import org.springframework.core.io.ClassPathResource;
 import org.springframework.core.io.DefaultResourceLoader;
 import org.springframework.core.io.Resource;
 import org.springframework.core.io.ResourceLoader;
 
+import java.io.File;
 import java.io.IOException;
+import java.io.InputStream;
 import java.lang.reflect.Field;
 import java.util.*;
 import java.util.stream.Collectors;
@@ -71,6 +72,17 @@ public class ClassUtil {
         }
     }
 
+    public static InputStream classPathStream(String classPath){
+        try {
+            ClassPathResource resource = new ClassPathResource(classPath);
+            return resource.getInputStream();
+        } catch (IOException e) {
+            e.printStackTrace();
+            return null;
+        }
+    }
+
+
     public static void copyProperties(T1 source, T2 target){
         Class sourceClass = source.getClass();
         Class targetClass = target.getClass();
diff --git a/jeecg-boot-base-core/src/main/java/org/jeecg/common/util/ExportUtil.java b/jeecg-boot-base-core/src/main/java/org/jeecg/common/util/ExportUtil.java
index f1f29914..f887987f 100644
--- a/jeecg-boot-base-core/src/main/java/org/jeecg/common/util/ExportUtil.java
+++ b/jeecg-boot-base-core/src/main/java/org/jeecg/common/util/ExportUtil.java
@@ -12,15 +12,15 @@ import org.springframework.core.io.Resource;
 import org.springframework.core.io.ResourceLoader;
 
 import javax.servlet.http.HttpServletResponse;
-import java.io.IOException;
-import java.io.OutputStream;
-import java.io.PrintWriter;
+import java.io.*;
 import java.net.URLEncoder;
 import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 
+import static org.jeecg.common.util.ClassUtil.classPathStream;
+
 public class ExportUtil {
 
     private static final String UTF_8 = StandardCharsets.UTF_8.name();
@@ -101,7 +101,9 @@ public class ExportUtil {
     public static TemplateExportParams excelTemplate(String template){
         String pathPrefix = "excelTemplate/";
         String path = pathPrefix + template;
-        String templatePath = ClassUtil.classPath(path);
+        InputStream inputStream = classPathStream(path);
+
+        String templatePath = "";
         return new TemplateExportParams(templatePath);
     }
 
diff --git a/jeecg-boot-base-core/src/main/java/org/jeecg/modules/base/dto/ConcDto.java b/jeecg-boot-base-core/src/main/java/org/jeecg/modules/base/dto/ConcDto.java
index 3d9fa775..2cf9ad94 100644
--- a/jeecg-boot-base-core/src/main/java/org/jeecg/modules/base/dto/ConcDto.java
+++ b/jeecg-boot-base-core/src/main/java/org/jeecg/modules/base/dto/ConcDto.java
@@ -12,4 +12,14 @@ public class ConcDto implements Serializable {
     private String conc;
 
     private Date analysisBegin;
+
+    public ConcDto() {
+
+    }
+
+    public ConcDto(String nuclideName, String conc, Date analysisBegin) {
+        this.nuclideName = nuclideName;
+        this.conc = conc;
+        this.analysisBegin = analysisBegin;
+    }
 }
diff --git a/jeecg-boot-base-core/src/main/java/org/jeecg/modules/base/dto/ConcDtoXe.java b/jeecg-boot-base-core/src/main/java/org/jeecg/modules/base/dto/ConcDtoXe.java
new file mode 100644
index 00000000..22439045
--- /dev/null
+++ b/jeecg-boot-base-core/src/main/java/org/jeecg/modules/base/dto/ConcDtoXe.java
@@ -0,0 +1,16 @@
+package org.jeecg.modules.base.dto;
+
+import lombok.Data;
+
+import java.io.Serializable;
+import java.util.Date;
+
+@Data
+public class ConcDtoXe implements Serializable {
+
+    private String nuclideName;
+
+    private Double conc;
+
+    private Date analysisBegin;
+}
diff --git a/jeecg-module-abnormal-alarm/src/main/java/org/jeecg/modules/mapper/GardsNuclIdedAutoMapper.java b/jeecg-module-abnormal-alarm/src/main/java/org/jeecg/modules/mapper/GardsNuclIdedAutoMapper.java
index 385d0567..38ea7d56 100644
--- a/jeecg-module-abnormal-alarm/src/main/java/org/jeecg/modules/mapper/GardsNuclIdedAutoMapper.java
+++ b/jeecg-module-abnormal-alarm/src/main/java/org/jeecg/modules/mapper/GardsNuclIdedAutoMapper.java
@@ -3,6 +3,7 @@ package org.jeecg.modules.mapper;
 import com.baomidou.mybatisplus.core.mapper.BaseMapper;
 import org.apache.ibatis.annotations.Mapper;
 import org.jeecg.modules.base.dto.ConcDto;
+import org.jeecg.modules.base.dto.ConcDtoXe;
 import org.jeecg.modules.base.entity.rnauto.GardsNuclIded;
 
 import java.util.List;
diff --git a/jeecg-module-abnormal-alarm/src/main/java/org/jeecg/modules/mapper/GardsXeResultsAutoMapper.java b/jeecg-module-abnormal-alarm/src/main/java/org/jeecg/modules/mapper/GardsXeResultsAutoMapper.java
index 8895c756..4f2107cc 100644
--- a/jeecg-module-abnormal-alarm/src/main/java/org/jeecg/modules/mapper/GardsXeResultsAutoMapper.java
+++ b/jeecg-module-abnormal-alarm/src/main/java/org/jeecg/modules/mapper/GardsXeResultsAutoMapper.java
@@ -3,6 +3,7 @@ package org.jeecg.modules.mapper;
 import com.baomidou.mybatisplus.core.mapper.BaseMapper;
 import org.apache.ibatis.annotations.Mapper;
 import org.jeecg.modules.base.dto.ConcDto;
+import org.jeecg.modules.base.dto.ConcDtoXe;
 import org.jeecg.modules.base.entity.rnauto.GardsXeResults;
 
 import java.util.List;
@@ -12,7 +13,7 @@
 @Mapper
 public interface GardsXeResultsAutoMapper extends BaseMapper {
 
-    List<ConcDto> getConc(Map params);
+    List<ConcDtoXe> getConc(Map params);
 
     List
nuclideNames(Set nuclideNames); } diff --git a/jeecg-module-abnormal-alarm/src/main/java/org/jeecg/modules/mapper/GardsXeResultsManMapper.java b/jeecg-module-abnormal-alarm/src/main/java/org/jeecg/modules/mapper/GardsXeResultsManMapper.java index b96fb8f8..0fc80607 100644 --- a/jeecg-module-abnormal-alarm/src/main/java/org/jeecg/modules/mapper/GardsXeResultsManMapper.java +++ b/jeecg-module-abnormal-alarm/src/main/java/org/jeecg/modules/mapper/GardsXeResultsManMapper.java @@ -3,6 +3,7 @@ package org.jeecg.modules.mapper; import com.baomidou.mybatisplus.core.mapper.BaseMapper; import org.apache.ibatis.annotations.Mapper; import org.jeecg.modules.base.dto.ConcDto; +import org.jeecg.modules.base.dto.ConcDtoXe; import org.jeecg.modules.base.entity.rnman.GardsXeResults; import java.util.List; @@ -12,7 +13,7 @@ import java.util.Set; @Mapper public interface GardsXeResultsManMapper extends BaseMapper { - List getConc(Map params); + List getConc(Map params); List nuclideNames(Set nuclideNames); } diff --git a/jeecg-module-abnormal-alarm/src/main/java/org/jeecg/modules/mapper/xml/GardsXeResultsAutoMapper.xml b/jeecg-module-abnormal-alarm/src/main/java/org/jeecg/modules/mapper/xml/GardsXeResultsAutoMapper.xml index 1e99f4f9..baf156e6 100644 --- a/jeecg-module-abnormal-alarm/src/main/java/org/jeecg/modules/mapper/xml/GardsXeResultsAutoMapper.xml +++ b/jeecg-module-abnormal-alarm/src/main/java/org/jeecg/modules/mapper/xml/GardsXeResultsAutoMapper.xml @@ -2,7 +2,7 @@ - SELECT xe.NUCLIDE_NAME, xe.CONC, diff --git a/jeecg-module-abnormal-alarm/src/main/java/org/jeecg/modules/mapper/xml/GardsXeResultsManMapper.xml b/jeecg-module-abnormal-alarm/src/main/java/org/jeecg/modules/mapper/xml/GardsXeResultsManMapper.xml index 35ddb68f..f0df833f 100644 --- a/jeecg-module-abnormal-alarm/src/main/java/org/jeecg/modules/mapper/xml/GardsXeResultsManMapper.xml +++ b/jeecg-module-abnormal-alarm/src/main/java/org/jeecg/modules/mapper/xml/GardsXeResultsManMapper.xml @@ -1,7 +1,7 @@ - SELECT xe.NUCLIDE_NAME, xe.CONC, diff --git a/jeecg-module-abnormal-alarm/src/main/java/org/jeecg/modules/service/IGardsNuclIdedAutoService.java b/jeecg-module-abnormal-alarm/src/main/java/org/jeecg/modules/service/IGardsNuclIdedAutoService.java index 20b48129..7d950222 100644 --- a/jeecg-module-abnormal-alarm/src/main/java/org/jeecg/modules/service/IGardsNuclIdedAutoService.java +++ b/jeecg-module-abnormal-alarm/src/main/java/org/jeecg/modules/service/IGardsNuclIdedAutoService.java @@ -2,6 +2,7 @@ package org.jeecg.modules.service; import com.baomidou.mybatisplus.extension.service.IService; import org.jeecg.modules.base.dto.ConcDto; +import org.jeecg.modules.base.dto.ConcDtoXe; import org.jeecg.modules.base.entity.rnauto.GardsNuclIded; import java.util.List; diff --git a/jeecg-module-abnormal-alarm/src/main/java/org/jeecg/modules/service/IGardsXeResultsAutoService.java b/jeecg-module-abnormal-alarm/src/main/java/org/jeecg/modules/service/IGardsXeResultsAutoService.java index aa568eb3..66c86bee 100644 --- a/jeecg-module-abnormal-alarm/src/main/java/org/jeecg/modules/service/IGardsXeResultsAutoService.java +++ b/jeecg-module-abnormal-alarm/src/main/java/org/jeecg/modules/service/IGardsXeResultsAutoService.java @@ -2,6 +2,7 @@ package org.jeecg.modules.service; import com.baomidou.mybatisplus.extension.service.IService; import org.jeecg.modules.base.dto.ConcDto; +import org.jeecg.modules.base.dto.ConcDtoXe; import org.jeecg.modules.base.entity.rnauto.GardsXeResults; import java.util.List; @@ -10,7 +11,7 @@ import java.util.Set; public 
interface IGardsXeResultsAutoService extends IService { - List getConc(Map params); + List getConc(Map params); List nuclideNames(Set nuclideNames); } diff --git a/jeecg-module-abnormal-alarm/src/main/java/org/jeecg/modules/service/IGardsXeResultsManService.java b/jeecg-module-abnormal-alarm/src/main/java/org/jeecg/modules/service/IGardsXeResultsManService.java index 5d3f0cac..269b42ed 100644 --- a/jeecg-module-abnormal-alarm/src/main/java/org/jeecg/modules/service/IGardsXeResultsManService.java +++ b/jeecg-module-abnormal-alarm/src/main/java/org/jeecg/modules/service/IGardsXeResultsManService.java @@ -2,6 +2,7 @@ package org.jeecg.modules.service; import com.baomidou.mybatisplus.extension.service.IService; import org.jeecg.modules.base.dto.ConcDto; +import org.jeecg.modules.base.dto.ConcDtoXe; import org.jeecg.modules.base.entity.rnman.GardsXeResults; import java.util.List; @@ -10,7 +11,7 @@ import java.util.Set; public interface IGardsXeResultsManService extends IService { - List getConc(Map params); + List getConc(Map params); List nuclideNames(Set nuclideNames); } diff --git a/jeecg-module-abnormal-alarm/src/main/java/org/jeecg/modules/service/impl/CalculateConcServiceImpl.java b/jeecg-module-abnormal-alarm/src/main/java/org/jeecg/modules/service/impl/CalculateConcServiceImpl.java index ea161754..7df9f770 100644 --- a/jeecg-module-abnormal-alarm/src/main/java/org/jeecg/modules/service/impl/CalculateConcServiceImpl.java +++ b/jeecg-module-abnormal-alarm/src/main/java/org/jeecg/modules/service/impl/CalculateConcServiceImpl.java @@ -7,6 +7,7 @@ import org.jeecg.common.constant.CommonConstant; import org.jeecg.common.constant.DateConstant; import org.jeecg.common.constant.SymbolConstant; import org.jeecg.modules.base.dto.ConcDto; +import org.jeecg.modules.base.dto.ConcDtoXe; import org.jeecg.modules.base.entity.postgre.AlarmAnalysisNuclideAvg; import org.jeecg.modules.base.entity.postgre.AlarmAnalysisNuclideParam; import org.jeecg.modules.base.entity.postgre.AlarmAnalysisRule; @@ -69,24 +70,25 @@ public class CalculateConcServiceImpl implements CalculateConcService { params.put("endDate",endDate + DateConstant.TIME_END); /* Auto自动处理 */ // beta-gamma - List xeConcAuto = xeResultsAutoService.getConc(params); + List xeConcAuto = xeResultsAutoService.getConc(params); // gamma List nuclConcAuto = nuclIdedAutoService.getConc(params); + Map autoResult = new HashMap<>(); - autoResult.putAll(calculate(xeConcAuto,index)); - autoResult.putAll(calculate(nuclConcAuto,index)); + autoResult.putAll(calculate(concDto(xeConcAuto), index)); + autoResult.putAll(calculate(nuclConcAuto, index)); List autoAvgs = autoResult.entrySet().stream() .map(entry -> new AlarmAnalysisNuclideAvg(entry.getKey(), entry.getValue())) .collect(Collectors.toList()); autoAvgs.forEach(item -> item.setDataSourceType(CommonConstant.ARMDARR)); /* Man人工交互 */ // beta-gamma - List xeConcMan = xeResultsManService.getConc(params); + List xeConcMan = xeResultsManService.getConc(params); // gamma List nuclConcMan = nuclIdedManService.getConc(params); Map manResult = new HashMap<>(); - manResult.putAll(calculate(xeConcMan,index)); - manResult.putAll(calculate(nuclConcMan,index)); + manResult.putAll(calculate(concDto(xeConcMan), index)); + manResult.putAll(calculate(nuclConcMan, index)); List manAvgs = manResult.entrySet().stream() .map(entry -> new AlarmAnalysisNuclideAvg(entry.getKey(), entry.getValue())) .collect(Collectors.toList()); @@ -103,7 +105,7 @@ public class CalculateConcServiceImpl implements CalculateConcService { @Override 
public Map calculate(List concDtos, BigDecimal index) { - Map result = new HashMap<>(); + Map result = new HashMap<>(); // 按照核素名进行分组 Map> concDtoMap = concDtos.stream() .collect(Collectors.groupingBy(ConcDto::getNuclideName)); @@ -119,8 +121,18 @@ public class CalculateConcServiceImpl implements CalculateConcService { BigDecimal line = new BigDecimal(baseLine); int i = line.multiply(index).setScale(0, RoundingMode.HALF_UP).intValue(); int j = Math.max(i - 1, 0); - result.put(nuclide,values.get(j).getConc()); + result.put(nuclide, values.get(j).getConc()); } return result; } + + private List concDto(List concDtoXes){ + List concDtos = new ArrayList<>(); + if (CollUtil.isEmpty(concDtoXes)) return concDtos; + + concDtos = concDtoXes.stream() + .map(xe -> new ConcDto(xe.getNuclideName(), String.valueOf(xe.getConc()), xe.getAnalysisBegin())) + .collect(Collectors.toList()); + return concDtos; + } } diff --git a/jeecg-module-abnormal-alarm/src/main/java/org/jeecg/modules/service/impl/GardsNuclIdedAutoServiceImpl.java b/jeecg-module-abnormal-alarm/src/main/java/org/jeecg/modules/service/impl/GardsNuclIdedAutoServiceImpl.java index 6770ef2f..b2b0b706 100644 --- a/jeecg-module-abnormal-alarm/src/main/java/org/jeecg/modules/service/impl/GardsNuclIdedAutoServiceImpl.java +++ b/jeecg-module-abnormal-alarm/src/main/java/org/jeecg/modules/service/impl/GardsNuclIdedAutoServiceImpl.java @@ -4,6 +4,7 @@ import com.baomidou.dynamic.datasource.annotation.DS; import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper; import org.jeecg.common.constant.DateConstant; import org.jeecg.modules.base.dto.ConcDto; +import org.jeecg.modules.base.dto.ConcDtoXe; import org.jeecg.modules.base.entity.rnauto.GardsNuclIded; import org.jeecg.modules.base.entity.rnauto.GardsXeResults; import org.jeecg.modules.mapper.GardsNuclIdedAutoMapper; diff --git a/jeecg-module-abnormal-alarm/src/main/java/org/jeecg/modules/service/impl/GardsXeResultsAutoServiceImpl.java b/jeecg-module-abnormal-alarm/src/main/java/org/jeecg/modules/service/impl/GardsXeResultsAutoServiceImpl.java index 1e8dc318..59d5c5dd 100644 --- a/jeecg-module-abnormal-alarm/src/main/java/org/jeecg/modules/service/impl/GardsXeResultsAutoServiceImpl.java +++ b/jeecg-module-abnormal-alarm/src/main/java/org/jeecg/modules/service/impl/GardsXeResultsAutoServiceImpl.java @@ -4,6 +4,7 @@ import com.baomidou.dynamic.datasource.annotation.DS; import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper; import org.jeecg.common.constant.DateConstant; import org.jeecg.modules.base.dto.ConcDto; +import org.jeecg.modules.base.dto.ConcDtoXe; import org.jeecg.modules.base.entity.rnauto.GardsXeResults; import org.jeecg.modules.mapper.GardsXeResultsAutoMapper; import org.jeecg.modules.service.IGardsXeResultsAutoService; @@ -23,7 +24,7 @@ public class GardsXeResultsAutoServiceImpl extends ServiceImpl getConc(Map params) { + public List getConc(Map params) { return baseMapper.getConc(params); } diff --git a/jeecg-module-abnormal-alarm/src/main/java/org/jeecg/modules/service/impl/GardsXeResultsManServiceImpl.java b/jeecg-module-abnormal-alarm/src/main/java/org/jeecg/modules/service/impl/GardsXeResultsManServiceImpl.java index 2c99b591..4ab3ca7d 100644 --- a/jeecg-module-abnormal-alarm/src/main/java/org/jeecg/modules/service/impl/GardsXeResultsManServiceImpl.java +++ b/jeecg-module-abnormal-alarm/src/main/java/org/jeecg/modules/service/impl/GardsXeResultsManServiceImpl.java @@ -5,6 +5,7 @@ import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper; 
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl; import org.jeecg.common.constant.DateConstant; import org.jeecg.modules.base.dto.ConcDto; +import org.jeecg.modules.base.dto.ConcDtoXe; import org.jeecg.modules.base.entity.rnman.GardsXeResults; import org.jeecg.modules.mapper.GardsXeResultsManMapper; import org.jeecg.modules.service.IGardsXeResultsManService; @@ -23,7 +24,7 @@ public class GardsXeResultsManServiceImpl extends ServiceImpl getConc(Map params) { + public List getConc(Map params) { return baseMapper.getConc(params); } diff --git a/jeecg-module-spectrum-analysis/src/main/java/org/jeecg/modules/controller/SpectrumAnalysesController.java b/jeecg-module-spectrum-analysis/src/main/java/org/jeecg/modules/controller/SpectrumAnalysesController.java index f8b8ae86..29f3759c 100644 --- a/jeecg-module-spectrum-analysis/src/main/java/org/jeecg/modules/controller/SpectrumAnalysesController.java +++ b/jeecg-module-spectrum-analysis/src/main/java/org/jeecg/modules/controller/SpectrumAnalysesController.java @@ -230,4 +230,10 @@ public class SpectrumAnalysesController { @RequestBody BgDataAnlyseResultIn anlyseResultIn){ spectrumAnalysisService.saveToTxt(anlyseResultIn, response); } + + @GetMapping("viewAutomaticAnalysisLog") + @ApiOperation(value = "查看Automatic Analysis Log页面数据", notes = "查看Automatic Analysis Log页面数据") + public void viewAutomaticAnalysisLog(Integer sampleId, HttpServletResponse response) { + spectrumAnalysisService.viewAutomaticAnalysisLog(sampleId, response); + } } diff --git a/jeecg-module-spectrum-analysis/src/main/java/org/jeecg/modules/service/ISpectrumAnalysisService.java b/jeecg-module-spectrum-analysis/src/main/java/org/jeecg/modules/service/ISpectrumAnalysisService.java index 43e9832a..1e08145d 100644 --- a/jeecg-module-spectrum-analysis/src/main/java/org/jeecg/modules/service/ISpectrumAnalysisService.java +++ b/jeecg-module-spectrum-analysis/src/main/java/org/jeecg/modules/service/ISpectrumAnalysisService.java @@ -81,4 +81,6 @@ public interface ISpectrumAnalysisService { void saveToExcel(BgDataAnlyseResultIn anlyseResultIn, HttpServletResponse response); void saveToTxt(BgDataAnlyseResultIn anlyseResultIn, HttpServletResponse response); + + void viewAutomaticAnalysisLog(Integer sampleId, HttpServletResponse response); } diff --git a/jeecg-module-spectrum-analysis/src/main/java/org/jeecg/modules/service/impl/GammaServiceImpl.java b/jeecg-module-spectrum-analysis/src/main/java/org/jeecg/modules/service/impl/GammaServiceImpl.java index 23ef5c22..963a99a9 100644 --- a/jeecg-module-spectrum-analysis/src/main/java/org/jeecg/modules/service/impl/GammaServiceImpl.java +++ b/jeecg-module-spectrum-analysis/src/main/java/org/jeecg/modules/service/impl/GammaServiceImpl.java @@ -15,6 +15,7 @@ import com.baomidou.mybatisplus.core.toolkit.StringPool; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.commons.io.FileUtils; +import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.StringUtils; import com.google.common.cache.Cache; import org.apache.commons.net.ftp.FTPClient; @@ -141,26 +142,26 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi String userName = JwtUtil.getUserNameByToken(request); Cache phdCache = localCache.getPHDCache(); PHDFile phd = new PHDFile(); - //读取文件内容 + // 读取文件内容 String lastName = ""; if (Objects.nonNull(sampleId) && StringUtils.isNotBlank(dbName)) { - //根据sampleId获取sample文件路径 + // 根据sampleId获取sample文件路径 String 
sampleFilePath = spectrumAnalysisMapper.getSampleFilePath(sampleId); - if (StringUtils.isBlank(sampleFilePath)){ + if (StringUtils.isBlank(sampleFilePath)) { result.error500("Sample file does not exist!"); return result; } String pathName = StringPool.SLASH + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + sampleFilePath.substring(0, sampleFilePath.lastIndexOf(StringPool.SLASH)); - String fileName = sampleFilePath.substring(sampleFilePath.lastIndexOf(StringPool.SLASH)+1); + String fileName = sampleFilePath.substring(sampleFilePath.lastIndexOf(StringPool.SLASH) + 1); boolean flag = gammaFileUtil.loadFile(pathName, fileName, phd, result); - if (!flag){ + if (!flag) { return result; } - //声明基础数组信息 + // 声明基础数组信息 gammaFileUtil.SetBaseInfo(phd); - //从数据库中读取相关信息 + // 从数据库中读取相关信息 boolean bRet = getResultFromDB(dbName, userName, sampleId, phd, result); - if (!bRet){ + if (!bRet) { return result; } lastName = fileName; @@ -168,7 +169,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi String pathName = StringPool.SLASH + spectrumPathProperties.getUploadPath() + StringPool.SLASH + userName; String fileName = samfileName; boolean flag = gammaFileUtil.loadFile(pathName, fileName, phd, result); - if (!flag){ + if (!flag) { return result; } lastName = fileName; @@ -185,23 +186,23 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi Result result = new Result(); String userName = JwtUtil.getUserNameByToken(request); Cache phdCache = localCache.getPHDCache(); - PHDFile phd = phdCache.getIfPresent(fileName+"-"+userName); + PHDFile phd = phdCache.getIfPresent(fileName + "-" + userName); phd.setUserId("1"); phd.setXmlFilePath(parameterProperties.getFilePath()); String systemType = fileName.substring(2, 3); - if (Objects.isNull(phd)){ + if (Objects.isNull(phd)) { result.error500("Please select the parse file first!"); return result; } - //查询当前用户关联的核素信息 + // 查询当前用户关联的核素信息 List nuclides = new LinkedList<>(); - //从postgreSql中获取当前用户关注的核素信息 如果当前用户没有 则返回管理员的 + // 从postgreSql中获取当前用户关注的核素信息 如果当前用户没有 则返回管理员的 nuclides = defaultNuclideSpectrumService.findNuclidesByUserName(userName, systemType); - if (CollectionUtils.isEmpty(nuclides)){ + if (CollectionUtils.isEmpty(nuclides)) { nuclides = defaultNuclideSpectrumService.findNuclidesByUserName("admin", systemType); } Map nuclideLinesMap = GetNuclideLines(nuclides); - //解析获取临时文件信息 + // 解析获取临时文件信息 File tmpFile = gammaFileUtil.analyzeFile(StringPool.SLASH + spectrumPathProperties.getUploadPath() + StringPool.SLASH + userName, fileName); ObjectMapper mapper = new ObjectMapper(); try { @@ -209,7 +210,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi String mapLines = mapper.writeValueAsString(nuclideLinesMap); String strValue = CalValuesHandler.analyseSpectrum(phdStr, mapLines, tmpFile.getAbsolutePath(), new AnalysisProcess()); Map parseMap = JSON.parseObject(strValue, Map.class); - for (Map.Entry entry:parseMap.entrySet()) { + for (Map.Entry entry : parseMap.entrySet()) { if (entry.getKey().equalsIgnoreCase("bAnalyed")) { boolean value = JSON.parseObject(JSON.toJSONString(entry.getValue()), Boolean.class); phd.setBAnalyed(value); @@ -217,7 +218,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi if (entry.getKey().equalsIgnoreCase("mapEnerPara")) { HashMap jsonMap = JSON.parseObject(JSON.toJSONString(entry.getValue()), HashMap.class); Map value = new HashMap<>(); - for (Map.Entry objectEntry:jsonMap.entrySet()) { + for (Map.Entry 
objectEntry : jsonMap.entrySet()) { String key = objectEntry.getKey(); ParameterInfo entryValue = JSON.parseObject(JSON.toJSONString(objectEntry.getValue()), ParameterInfo.class); value.put(key, entryValue); @@ -227,7 +228,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi if (entry.getKey().equalsIgnoreCase("mapResoPara")) { HashMap jsonMap = JSON.parseObject(JSON.toJSONString(entry.getValue()), HashMap.class); Map value = new HashMap<>(); - for (Map.Entry objectEntry:jsonMap.entrySet()) { + for (Map.Entry objectEntry : jsonMap.entrySet()) { String key = objectEntry.getKey(); ParameterInfo entryValue = JSON.parseObject(JSON.toJSONString(objectEntry.getValue()), ParameterInfo.class); value.put(key, entryValue); @@ -237,7 +238,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi if (entry.getKey().equalsIgnoreCase("mapEffiPara")) { HashMap jsonMap = JSON.parseObject(JSON.toJSONString(entry.getValue()), HashMap.class); Map value = new HashMap<>(); - for (Map.Entry objectEntry:jsonMap.entrySet()) { + for (Map.Entry objectEntry : jsonMap.entrySet()) { String key = objectEntry.getKey(); ParameterInfo entryValue = JSON.parseObject(JSON.toJSONString(objectEntry.getValue()), ParameterInfo.class); value.put(key, entryValue); @@ -247,7 +248,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi if (entry.getKey().equalsIgnoreCase("mapTotEPara")) { HashMap jsonMap = JSON.parseObject(JSON.toJSONString(entry.getValue()), HashMap.class); Map value = new HashMap<>(); - for (Map.Entry objectEntry:jsonMap.entrySet()) { + for (Map.Entry objectEntry : jsonMap.entrySet()) { String key = objectEntry.getKey(); ParameterInfo entryValue = JSON.parseObject(JSON.toJSONString(objectEntry.getValue()), ParameterInfo.class); value.put(key, entryValue); @@ -285,7 +286,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi if (entry.getKey().equalsIgnoreCase("mapEnerKD")) { HashMap jsonMap = JSON.parseObject(JSON.toJSONString(entry.getValue()), HashMap.class); Map value = new HashMap<>(); - for (Map.Entry objectEntry:jsonMap.entrySet()) { + for (Map.Entry objectEntry : jsonMap.entrySet()) { String key = objectEntry.getKey(); GEnergyBlock entryValue = JSON.parseObject(JSON.toJSONString(objectEntry.getValue()), GEnergyBlock.class); value.put(key, entryValue); @@ -295,7 +296,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi if (entry.getKey().equalsIgnoreCase("mapResoKD")) { HashMap jsonMap = JSON.parseObject(JSON.toJSONString(entry.getValue()), HashMap.class); Map value = new HashMap<>(); - for (Map.Entry objectEntry:jsonMap.entrySet()) { + for (Map.Entry objectEntry : jsonMap.entrySet()) { String key = objectEntry.getKey(); GResolutionBlock entryValue = JSON.parseObject(JSON.toJSONString(objectEntry.getValue()), GResolutionBlock.class); value.put(key, entryValue); @@ -377,7 +378,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi if (entry.getKey().equalsIgnoreCase("mapNucActMda")) { HashMap jsonMap = JSON.parseObject(JSON.toJSONString(entry.getValue()), HashMap.class); Map value = new HashMap<>(); - for (Map.Entry objectEntry:jsonMap.entrySet()) { + for (Map.Entry objectEntry : jsonMap.entrySet()) { String key = objectEntry.getKey(); NuclideActMda entryValue = JSON.parseObject(JSON.toJSONString(objectEntry.getValue()), NuclideActMda.class); value.put(key, entryValue); @@ -387,7 +388,7 @@ public class GammaServiceImpl extends 
AbstractLogOrReport implements IGammaServi } BeanUtils.copyProperties(phd.getSetting(), phd.getUsedSetting()); - for (PeakInfo peak:phd.getVPeak()) { + for (PeakInfo peak : phd.getVPeak()) { if (StringUtils.isBlank(peak.recoilBetaChan)) { peak.recoilBetaChan = "1"; } @@ -411,43 +412,43 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi @Override public Result gammaByDB(String dbName, Integer sampleId, HttpServletRequest request) { Result result = new Result(); - //通过token获取用户名 + // 通过token获取用户名 String userName = JwtUtil.getUserNameByToken(request); - //声明一个接收最后返回结果的map + // 声明一个接收最后返回结果的map Map map = new HashMap<>(); - //加载本地缓存信息 + // 加载本地缓存信息 Cache phdCache = localCache.getPHDCache(); - //根据sampleId获取sample文件路径 + // 根据sampleId获取sample文件路径 String sampleFilePath = spectrumAnalysisMapper.getSampleFilePath(sampleId); - if (StringUtils.isBlank(sampleFilePath)){ + if (StringUtils.isBlank(sampleFilePath)) { result.error500("Sample file does not exist!"); return result; } - //切割数据库存储的文件路径获取路径信息 + // 切割数据库存储的文件路径获取路径信息 String pathName = StringPool.SLASH + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + sampleFilePath.substring(0, sampleFilePath.lastIndexOf(StringPool.SLASH)); - //切割数据库存储的文件路径获取文件名称 - String fileName = sampleFilePath.substring(sampleFilePath.lastIndexOf(StringPool.SLASH)+1); - //声明phd实体类 - PHDFile phd = phdCache.getIfPresent(fileName+"-"+userName); + // 切割数据库存储的文件路径获取文件名称 + String fileName = sampleFilePath.substring(sampleFilePath.lastIndexOf(StringPool.SLASH) + 1); + // 声明phd实体类 + PHDFile phd = phdCache.getIfPresent(fileName + "-" + userName); if (Objects.isNull(phd)) { phd = new PHDFile(); - //读取文件内容 - //调用加载文件的方法 传入文件路径,文件名称,全局变量phd,响应结果result + // 读取文件内容 + // 调用加载文件的方法 传入文件路径,文件名称,全局变量phd,响应结果result boolean flag = gammaFileUtil.loadFile(pathName, fileName, phd, result); - //如果文件加载失败 返回失败原因 - if (!flag){ + // 如果文件加载失败 返回失败原因 + if (!flag) { return result; } - //加载phd数据所需的lc,scac,baseline数据 + // 加载phd数据所需的lc,scac,baseline数据 gammaFileUtil.SetBaseInfo(phd); - //从数据库中读取phd其他相关信息 + // 从数据库中读取phd其他相关信息 boolean bRet = getResultFromDB(dbName, userName, sampleId, phd, result); - //判断数据库信息是否读取正常 - if (!bRet){ + // 判断数据库信息是否读取正常 + if (!bRet) { return result; } } - //获取当前角色配置的颜色信息 + // 获取当前角色配置的颜色信息 Map colorMap = sysUserColorService.initColor(userName); // 更新 ‘QC Flags’ 状态 List qcstate = gammaFileUtil.Qcstate(phd); @@ -460,14 +461,14 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi map.put("real_time", String.format("%.2f", phd.getAcq().getAcquisition_real_time())); map.put("live_time", String.format("%.2f", phd.getAcq().getAcquisition_live_time())); double deadTime = (phd.getAcq().getAcquisition_real_time() - phd.getAcq().getAcquisition_live_time()) / phd.getAcq().getAcquisition_real_time(); - map.put("dead_time", String.format("%.2f", deadTime*100)); + map.put("dead_time", String.format("%.2f", deadTime * 100)); map.put("checkBox_updateCal", phd.getSetting().isBUpdateCal()); map.put("bAnalyed", phd.isBAnalyed()); map.put("peak", phd.getVPeak()); // 更新页面折线图信息 gammaFileUtil.UpdateChart(phd, map, colorMap); - //将当前加载的phd信息加入到缓存中 文件名称作为缓存信息的key - phdCache.put(fileName+"-"+userName, phd); + // 将当前加载的phd信息加入到缓存中 文件名称作为缓存信息的key + phdCache.put(fileName + "-" + userName, phd); localCache.setPHDCache(phdCache); result.setSuccess(true); result.setResult(map); @@ -475,9 +476,9 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi } public boolean getResultFromDB(String dbName, String 
userName, Integer sampleId, PHDFile phd, Result result) { - //判断连接的数据库 + // 判断连接的数据库 String T_analy, T_calib, T_peaks, T_param, T_nuc_line, T_nuc_act, T_qc, T_setting; - if (dbName.equals("auto")){ + if (dbName.equals("auto")) { T_analy = "RNAUTO.GARDS_ANALYSES"; T_calib = "RNAUTO.GARDS_CALIBRATION_PAIRS"; T_peaks = "RNAUTO.GARDS_PEAKS"; @@ -486,7 +487,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi T_nuc_act = "RNAUTO.GARDS_NUCL_IDED"; T_qc = "RNAUTO.GARDS_QC_CHECK"; userName = "RNAUTO"; - }else { + } else { T_analy = "RNMAN.GARDS_ANALYSES"; T_calib = "RNMAN.GARDS_CALIBRATION_PAIRS"; T_peaks = "RNMAN.GARDS_PEAKS"; @@ -498,19 +499,19 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi } String status = spectrumAnalysisMapper.getStatus(sampleId); - if (StringUtils.isNotBlank(status)){ + if (StringUtils.isNotBlank(status)) { phd.setStatus(status); - if (!phd.getStatus().equals("P") && !phd.getStatus().equals("R")){ + if (!phd.getStatus().equals("P") && !phd.getStatus().equals("R")) { return false; } - }else { + } else { return false; } // 获取 ananlysis_id、BaseLine、Lc、Scac、峰数量、Category、谱注释等信息 GardsAnalysesSpectrum analysis = spectrumAnalysisMapper.getAnalysis(T_analy, sampleId, userName); int peakNum = 0; - if (Objects.nonNull(analysis)){ + if (Objects.nonNull(analysis)) { phd.setId_analysis(analysis.getIdAnalysis().toString()); phd.setBaseline_path(StringPool.SLASH + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + analysis.getBaselinePath()); phd.setLc_path(StringPool.SLASH + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + analysis.getLcPath()); @@ -521,138 +522,138 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi phd.getBaseCtrls().setRg_high(analysis.getSearchEndChannel()); phd.getUsedSetting().setEnergyTolerance(analysis.getSearchThreshold()); } else { - result.error500("There are 0 records when sample_id is "+sampleId+"!"); + result.error500("There are 0 records when sample_id is " + sampleId + "!"); return false; } - if (peakNum>0){ + if (peakNum > 0) { // 获取峰信息列表 List peaks = spectrumAnalysisMapper.getPeaks(T_peaks, analysis.getIdAnalysis()); - if (CollectionUtils.isNotEmpty(peaks)){ - for (GardsPeaksSpectrum peaksSpectrum:peaks) { + if (CollectionUtils.isNotEmpty(peaks)) { + for (GardsPeaksSpectrum peaksSpectrum : peaks) { PeakInfo peakInfo = new PeakInfo(); - peakInfo.index = peaksSpectrum.getIdPeak()==null?0:peaksSpectrum.getIdPeak(); - peakInfo.multiIndex = peaksSpectrum.getMulitiIndex()==null?0:peaksSpectrum.getMulitiIndex().intValue(); - peakInfo.left = peaksSpectrum.getRoiStart()==null?0:peaksSpectrum.getRoiStart().intValue(); - peakInfo.right = peaksSpectrum.getRoiEnd()==null?0:peaksSpectrum.getRoiEnd().intValue(); - peakInfo.peakCentroid = peaksSpectrum.getCentroidChannel()==null?0:peaksSpectrum.getCentroidChannel(); - peakInfo.energy = peaksSpectrum.getEnergy()==null?0:peaksSpectrum.getEnergy(); - peakInfo.fwhmc = peaksSpectrum.getFwtm()==null?0:peaksSpectrum.getFwtm(); - peakInfo.fwhm = peaksSpectrum.getFwhm()==null?0:peaksSpectrum.getFwhm(); - peakInfo.area = peaksSpectrum.getArea()==null?0:peaksSpectrum.getArea(); - peakInfo.areaErr = peaksSpectrum.getUncArea()==null?0:peaksSpectrum.getUncArea(); - peakInfo.efficiency = peaksSpectrum.getEfficiency()==null?0:peaksSpectrum.getEfficiency(); - peakInfo.lc = peaksSpectrum.getLc()==null?0:peaksSpectrum.getLc(); - peakInfo.ld = peaksSpectrum.getLd()==null?0:peaksSpectrum.getLd(); - 
peakInfo.meanBackCount = peaksSpectrum.getMeanbackcount()==null?0:peaksSpectrum.getMeanbackcount(); - peakInfo.backgroundArea = peaksSpectrum.getBackgroundarea()==null?0:peaksSpectrum.getBackgroundarea(); - peakInfo.significance = peaksSpectrum.getSignificance()==null?0:peaksSpectrum.getSignificance(); - peakInfo.sensitivity = peaksSpectrum.getSensitivity()==null?0:peaksSpectrum.getSensitivity(); - peakInfo.stepRatio = peaksSpectrum.getStepraio()==null?0:peaksSpectrum.getStepraio(); - peakInfo.tail = peaksSpectrum.getTail()==null?0:peaksSpectrum.getTail(); - peakInfo.tailAlpha = peaksSpectrum.getTailAlpha()==null?0:peaksSpectrum.getTailAlpha(); - peakInfo.upperTail = peaksSpectrum.getUpperTail()==null?0:peaksSpectrum.getUpperTail(); - peakInfo.upperTailAlpha = peaksSpectrum.getUpperTailAlpha()==null?0:peaksSpectrum.getUpperTailAlpha(); - peakInfo.BWWidthChan = peaksSpectrum.getBwwidthchan()==null?0:peaksSpectrum.getBwwidthchan(); + peakInfo.index = peaksSpectrum.getIdPeak() == null ? 0 : peaksSpectrum.getIdPeak(); + peakInfo.multiIndex = peaksSpectrum.getMulitiIndex() == null ? 0 : peaksSpectrum.getMulitiIndex().intValue(); + peakInfo.left = peaksSpectrum.getRoiStart() == null ? 0 : peaksSpectrum.getRoiStart().intValue(); + peakInfo.right = peaksSpectrum.getRoiEnd() == null ? 0 : peaksSpectrum.getRoiEnd().intValue(); + peakInfo.peakCentroid = peaksSpectrum.getCentroidChannel() == null ? 0 : peaksSpectrum.getCentroidChannel(); + peakInfo.energy = peaksSpectrum.getEnergy() == null ? 0 : peaksSpectrum.getEnergy(); + peakInfo.fwhmc = peaksSpectrum.getFwtm() == null ? 0 : peaksSpectrum.getFwtm(); + peakInfo.fwhm = peaksSpectrum.getFwhm() == null ? 0 : peaksSpectrum.getFwhm(); + peakInfo.area = peaksSpectrum.getArea() == null ? 0 : peaksSpectrum.getArea(); + peakInfo.areaErr = peaksSpectrum.getUncArea() == null ? 0 : peaksSpectrum.getUncArea(); + peakInfo.efficiency = peaksSpectrum.getEfficiency() == null ? 0 : peaksSpectrum.getEfficiency(); + peakInfo.lc = peaksSpectrum.getLc() == null ? 0 : peaksSpectrum.getLc(); + peakInfo.ld = peaksSpectrum.getLd() == null ? 0 : peaksSpectrum.getLd(); + peakInfo.meanBackCount = peaksSpectrum.getMeanbackcount() == null ? 0 : peaksSpectrum.getMeanbackcount(); + peakInfo.backgroundArea = peaksSpectrum.getBackgroundarea() == null ? 0 : peaksSpectrum.getBackgroundarea(); + peakInfo.significance = peaksSpectrum.getSignificance() == null ? 0 : peaksSpectrum.getSignificance(); + peakInfo.sensitivity = peaksSpectrum.getSensitivity() == null ? 0 : peaksSpectrum.getSensitivity(); + peakInfo.stepRatio = peaksSpectrum.getStepraio() == null ? 0 : peaksSpectrum.getStepraio(); + peakInfo.tail = peaksSpectrum.getTail() == null ? 0 : peaksSpectrum.getTail(); + peakInfo.tailAlpha = peaksSpectrum.getTailAlpha() == null ? 0 : peaksSpectrum.getTailAlpha(); + peakInfo.upperTail = peaksSpectrum.getUpperTail() == null ? 0 : peaksSpectrum.getUpperTail(); + peakInfo.upperTailAlpha = peaksSpectrum.getUpperTailAlpha() == null ? 0 : peaksSpectrum.getUpperTailAlpha(); + peakInfo.BWWidthChan = peaksSpectrum.getBwwidthchan() == null ? 0 : peaksSpectrum.getBwwidthchan(); peakInfo.recoilBetaChan = "1"; - peakInfo.recoilDeltaChan = peaksSpectrum.getRecoildeltachan()==null?"1":peaksSpectrum.getRecoildeltachan().toString(); - peakInfo.comments = StringUtils.isNotBlank(peaksSpectrum.getPeakcomments())?peaksSpectrum.getPeakcomments():""; + peakInfo.recoilDeltaChan = peaksSpectrum.getRecoildeltachan() == null ? 
"1" : peaksSpectrum.getRecoildeltachan().toString(); + peakInfo.comments = StringUtils.isNotBlank(peaksSpectrum.getPeakcomments()) ? peaksSpectrum.getPeakcomments() : ""; phd.getVPeak().add(peakInfo); } } } // 获取刻度点数据 List calibrationPairs = spectrumAnalysisMapper.getCalibrationPairs(T_calib, analysis.getIdAnalysis()); - if (CollectionUtils.isNotEmpty(calibrationPairs)){ + if (CollectionUtils.isNotEmpty(calibrationPairs)) { GEnergyBlock gEnergyBlock = new GEnergyBlock(); GResolutionBlock gResolutionBlock = new GResolutionBlock(); GEfficiencyBlock gEfficiencyBlock = new GEfficiencyBlock(); TotaleffBlock totaleffBlock = new TotaleffBlock(); - for (GardsCalibrationPairsSpectrum pairsSpectrum:calibrationPairs) { + for (GardsCalibrationPairsSpectrum pairsSpectrum : calibrationPairs) { String calType = pairsSpectrum.getCaltype().trim(); - if(calType.equals(CalType.ENERGY_CAL.getType())) { + if (calType.equals(CalType.ENERGY_CAL.getType())) { phd.setUsedEner(pairsSpectrum.getInput()); gEnergyBlock.getCentroid_channel().add(pairsSpectrum.getXValue()); gEnergyBlock.getG_energy().add(pairsSpectrum.getYValue()); - gEnergyBlock.getUncertainty().add(StringUtils.isBlank(pairsSpectrum.getUncYValue())? 0:Double.valueOf(pairsSpectrum.getUncYValue())); - } else if(calType.equals(CalType.RESOLUTION_CAL.getType())) { + gEnergyBlock.getUncertainty().add(StringUtils.isBlank(pairsSpectrum.getUncYValue()) ? 0 : Double.valueOf(pairsSpectrum.getUncYValue())); + } else if (calType.equals(CalType.RESOLUTION_CAL.getType())) { phd.setUsedReso(pairsSpectrum.getInput()); gResolutionBlock.getG_energy().add(pairsSpectrum.getXValue()); gResolutionBlock.getFWHM().add(pairsSpectrum.getYValue()); - gResolutionBlock.getUncertainty().add(StringUtils.isBlank(pairsSpectrum.getUncYValue())? 0: Double.valueOf(pairsSpectrum.getUncYValue())); - } else if(calType.equals(CalType.EFFICIENCY_CAL.getType())) { + gResolutionBlock.getUncertainty().add(StringUtils.isBlank(pairsSpectrum.getUncYValue()) ? 0 : Double.valueOf(pairsSpectrum.getUncYValue())); + } else if (calType.equals(CalType.EFFICIENCY_CAL.getType())) { phd.setUsedEffi(pairsSpectrum.getInput()); gEfficiencyBlock.getG_energy().add(pairsSpectrum.getXValue()); gEfficiencyBlock.getEfficiency().add(pairsSpectrum.getYValue()); - gEfficiencyBlock.getUncertainty().add(StringUtils.isBlank(pairsSpectrum.getUncYValue())? 0: Double.valueOf(pairsSpectrum.getUncYValue())); - } else if(calType.equals(CalType.TOTALEFFICIENCY_CAL.getType())) { + gEfficiencyBlock.getUncertainty().add(StringUtils.isBlank(pairsSpectrum.getUncYValue()) ? 0 : Double.valueOf(pairsSpectrum.getUncYValue())); + } else if (calType.equals(CalType.TOTALEFFICIENCY_CAL.getType())) { phd.setUsedTotE(pairsSpectrum.getInput()); totaleffBlock.getG_energy().add(pairsSpectrum.getXValue()); totaleffBlock.getTotal_efficiency().add(pairsSpectrum.getYValue()); - totaleffBlock.getUncertainty().add(StringUtils.isBlank(pairsSpectrum.getUncYValue())? 0: Double.valueOf(pairsSpectrum.getUncYValue())); + totaleffBlock.getUncertainty().add(StringUtils.isBlank(pairsSpectrum.getUncYValue()) ? 
0 : Double.valueOf(pairsSpectrum.getUncYValue())); } } - if (phd.getUsedEner().isEmpty()){ + if (phd.getUsedEner().isEmpty()) { phd.setUsedEner(CalName.CalPHD.getType()); } - if (phd.getUsedReso().isEmpty()){ + if (phd.getUsedReso().isEmpty()) { phd.setUsedReso(CalName.CalPHD.getType()); } - if (phd.getUsedEffi().isEmpty()){ + if (phd.getUsedEffi().isEmpty()) { phd.setUsedEffi(CalName.CalPHD.getType()); } - if (phd.getUsedTotE().isEmpty()){ + if (phd.getUsedTotE().isEmpty()) { phd.setUsedTotE(CalName.CalPHD.getType()); } gEnergyBlock.setRecord_count(gEnergyBlock.getG_energy().size()); gResolutionBlock.setRecord_count(gResolutionBlock.getG_energy().size()); gEfficiencyBlock.setRecord_count(gEfficiencyBlock.getG_energy().size()); totaleffBlock.setRecord_count(totaleffBlock.getG_energy().size()); - if(gEnergyBlock.getRecord_count() > 0) { + if (gEnergyBlock.getRecord_count() > 0) { phd.setUsedEnerKD(gEnergyBlock); phd.getMapEnerKD().put(phd.getUsedEner(), gEnergyBlock); } - if(gResolutionBlock.getRecord_count() > 0) { + if (gResolutionBlock.getRecord_count() > 0) { phd.setUsedResoKD(gResolutionBlock); phd.getMapResoKD().put(phd.getUsedReso(), gResolutionBlock); } - if(gEfficiencyBlock.getRecord_count() > 0) { + if (gEfficiencyBlock.getRecord_count() > 0) { phd.setUsedEffiKD(gEfficiencyBlock); phd.getMapEffiKD().put(phd.getUsedEffi(), gEfficiencyBlock); } - if(totaleffBlock.getRecord_count() > 0) { + if (totaleffBlock.getRecord_count() > 0) { phd.setUsedTotEKD(totaleffBlock); phd.getMapTotEKD().put(phd.getUsedTotE(), totaleffBlock); } } // 获取刻度拟合系数 List paras = spectrumAnalysisMapper.getPara(T_param, analysis.getIdAnalysis()); - if (CollectionUtils.isNotEmpty(paras)){ - for (GardsCalibrationSpectrum calibrationSpectrum:paras) { + if (CollectionUtils.isNotEmpty(paras)) { + for (GardsCalibrationSpectrum calibrationSpectrum : paras) { ParameterInfo para = new ParameterInfo(); String calType = calibrationSpectrum.getCalType().trim(); para.getP().add(calibrationSpectrum.getFunction().doubleValue()); String[] list_coeff = calibrationSpectrum.getCoeffString().split(StringPool.COMMA); - for (String str:list_coeff) { + for (String str : list_coeff) { para.getP().add(Double.valueOf(str)); para.getPerr().add(0.0); } - if(calType.equals(CalType.ENERGY_CAL.getType())) { + if (calType.equals(CalType.ENERGY_CAL.getType())) { phd.setUsedEnerPara(para); phd.getMapEnerPara().put(phd.getUsedEner(), para); phd.setNewEner(phd.getUsedEner()); - } else if(calType.equals(CalType.RESOLUTION_CAL.getType())) { + } else if (calType.equals(CalType.RESOLUTION_CAL.getType())) { phd.setUsedResoPara(para); phd.getMapResoPara().put(phd.getUsedReso(), para); phd.setNewReso(phd.getUsedReso()); - } else if(calType.equals(CalType.EFFICIENCY_CAL.getType())) { + } else if (calType.equals(CalType.EFFICIENCY_CAL.getType())) { phd.setUsedEffiPara(para); phd.getMapEffiPara().put(phd.getUsedEffi(), para); phd.setNewEffi(phd.getUsedEffi()); - } else if(calType.equals(CalType.TOTALEFFICIENCY_CAL.getType())) { + } else if (calType.equals(CalType.TOTALEFFICIENCY_CAL.getType())) { phd.setUsedTotEPara(para); phd.getMapTotEPara().put(phd.getUsedTotE(), para); phd.setNewTotE(phd.getUsedTotE()); @@ -661,23 +662,23 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi } // 获取被识别核素射线信息 List nuclLinesIdeds = spectrumAnalysisMapper.getNuclLinesIded(T_nuc_line, analysis.getIdAnalysis()); - if (CollectionUtils.isNotEmpty(nuclLinesIdeds)){ - for (GardsNuclLinesIdedSpectrum nuclLinesIdedSpectrum:nuclLinesIdeds) { + if 
(CollectionUtils.isNotEmpty(nuclLinesIdeds)) { + for (GardsNuclLinesIdedSpectrum nuclLinesIdedSpectrum : nuclLinesIdeds) { String str_key = nuclLinesIdedSpectrum.getNuclideName(); phd.getMapNucActMda().put(str_key, new NuclideActMda()); - if(Objects.nonNull(phd.getMapNucActMda().get(str_key))) { + if (Objects.nonNull(phd.getMapNucActMda().get(str_key))) { phd.getMapNucActMda().get(str_key).setActivity(Double.valueOf(nuclLinesIdedSpectrum.getActivity())); phd.getMapNucActMda().get(str_key).setAct_err(nuclLinesIdedSpectrum.getUncActivity()); phd.getMapNucActMda().get(str_key).setEfficiency(nuclLinesIdedSpectrum.getEffic()); phd.getMapNucActMda().get(str_key).setEffi_err(nuclLinesIdedSpectrum.getUnEffic()); phd.getMapNucActMda().get(str_key).setMda(nuclLinesIdedSpectrum.getMda()); - phd.getMapNucActMda().get(str_key).setMdc(nuclLinesIdedSpectrum.getMdc().equalsIgnoreCase("inf")?0.0:Double.valueOf(nuclLinesIdedSpectrum.getMdc())); - phd.getMapNucActMda().get(str_key).setConcentration(nuclLinesIdedSpectrum.getConcentration().equalsIgnoreCase("inf")?0.0:Double.valueOf(nuclLinesIdedSpectrum.getConcentration())); - if(phd.getMapNucActMda().get(str_key).getActivity() > 0){ + phd.getMapNucActMda().get(str_key).setMdc(nuclLinesIdedSpectrum.getMdc().equalsIgnoreCase("inf") ? 0.0 : Double.valueOf(nuclLinesIdedSpectrum.getMdc())); + phd.getMapNucActMda().get(str_key).setConcentration(nuclLinesIdedSpectrum.getConcentration().equalsIgnoreCase("inf") ? 0.0 : Double.valueOf(nuclLinesIdedSpectrum.getConcentration())); + if (phd.getMapNucActMda().get(str_key).getActivity() > 0) { phd.getMapNucActMda().get(str_key).setBCalculateMDA(true); } } - if(nuclLinesIdedSpectrum.getKeyFlag().toString().equals("1")){ + if (nuclLinesIdedSpectrum.getKeyFlag().toString().equals("1")) { phd.getMapNucActMda().get(str_key).setKey_flag(phd.getMapNucActMda().get(str_key).getVYield().size()); } phd.getMapNucActMda().get(str_key).getFullNames().add(nuclLinesIdedSpectrum.getNuclidefullname()); @@ -687,9 +688,9 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi phd.getMapNucActMda().get(str_key).getVUncertY().add(nuclLinesIdedSpectrum.getUncAbundance()); int idx = nuclLinesIdedSpectrum.getIdPeak() - 1; - if(idx >= 0 && idx < peakNum) { - phd.getMapNucActMda().get(str_key).getVPeakIdx().add(idx+1); - if(!phd.getVPeak().get(idx).nuclides.contains(str_key)){ + if (idx >= 0 && idx < peakNum) { + phd.getMapNucActMda().get(str_key).getVPeakIdx().add(idx + 1); + if (!phd.getVPeak().get(idx).nuclides.contains(str_key)) { phd.getVPeak().get(idx).nuclides.add(str_key); } } @@ -697,15 +698,15 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi } // 获取被识别核素的活度浓度 List nuclIdeds = spectrumAnalysisMapper.getNuclIded(T_nuc_act, analysis.getIdAnalysis()); - if (CollectionUtils.isNotEmpty(nuclIdeds)){ - for (GardsNuclIdedSpectrum nuclIdedSpectrum:nuclIdeds) { + if (CollectionUtils.isNotEmpty(nuclIdeds)) { + for (GardsNuclIdedSpectrum nuclIdedSpectrum : nuclIdeds) { String str_key = nuclIdedSpectrum.getNuclideName(); - if(Objects.nonNull(phd.getMapNucActMda().get(str_key))) { + if (Objects.nonNull(phd.getMapNucActMda().get(str_key))) { phd.getMapNucActMda().get(str_key).setHalflife(Double.valueOf(nuclIdedSpectrum.getHalflife())); List vEner = phd.getMapNucActMda().get(str_key).getVEnergy(); Double key_ener = Double.valueOf(nuclIdedSpectrum.getKeyEnergy()); - for(int i=0; i qcChecks = spectrumAnalysisMapper.getQcCheck(T_qc, analysis.getIdAnalysis()); - if 
(CollectionUtils.isNotEmpty(qcChecks)){ - for (GardsQcCheckSpectrum qcCheckSpectrum:qcChecks) { + if (CollectionUtils.isNotEmpty(qcChecks)) { + for (GardsQcCheckSpectrum qcCheckSpectrum : qcChecks) { String str_key = qcCheckSpectrum.getQcName(); QcCheckItem qcCheckItem = new QcCheckItem(); qcCheckItem.setValue(qcCheckSpectrum.getQcValue()); @@ -726,9 +727,9 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi } } // 从 RNMAN.GARDS_ANALY_SETTING 表读分析设置 - if (dbName.equals("auto")){ + if (dbName.equals("auto")) { GardsAnalySetting analySetting = spectrumAnalysisMapper.getAnalySetting(analysis.getIdAnalysis()); - if (Objects.nonNull(analySetting)){ + if (Objects.nonNull(analySetting)) { phd.getUsedSetting().setECutAnalysis_Low(analySetting.getEcutanalysisLow()); double t_d = analySetting.getEcutanalysisHigh(); phd.getUsedSetting().setECutAnalysis_High((t_d <= phd.getUsedSetting().getECutAnalysis_Low() ? 0 : t_d)); @@ -744,20 +745,20 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi phd.getUsedSetting().setBUpdateCal(analySetting.getBupdatecal() == 1); phd.getUsedSetting().setKeepCalPeakSearchPeaks(analySetting.getKeepcalpeakserchpeaks() == 1); Date reftimeAct = analySetting.getReftimeAct(); - if(Objects.nonNull(reftimeAct)){ + if (Objects.nonNull(reftimeAct)) { phd.getUsedSetting().setRefTime_act(reftimeAct); } Date reftimeConc = analySetting.getReftimeConc(); - if(Objects.nonNull(reftimeConc)){ + if (Objects.nonNull(reftimeConc)) { phd.getUsedSetting().setRefTime_conc(reftimeConc); } } } - if(!phd.getUsedEnerPara().getP().isEmpty()) { + if (!phd.getUsedEnerPara().getP().isEmpty()) { List vChan = new LinkedList<>(); double c = 1; - while(c <= phd.getSpec().getNum_g_channel()) { + while (c <= phd.getSpec().getNum_g_channel()) { vChan.add(c); c += 1; } @@ -774,14 +775,14 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi String userName = JwtUtil.getUserNameByToken(request); Map map = new HashMap<>(); Cache phdCache = localCache.getPHDCache(); - //上传文件路径 + // 上传文件路径 String path = StringPool.SLASH + spectrumPathProperties.getUploadPath() + StringPool.SLASH + userName; - //获取当前角色的颜色配置 + // 获取当前角色的颜色配置 Map colorMap = sysUserColorService.initColor(userName); - PHDFile phd = phdCache.getIfPresent(fileName+"-"+userName); + PHDFile phd = phdCache.getIfPresent(fileName + "-" + userName); if (Objects.isNull(phd)) { phd = new PHDFile(); - //加载文件内容 + // 加载文件内容 boolean bRet = gammaFileUtil.loadFile(path, fileName, phd, result); if (!bRet) { return result; @@ -798,12 +799,12 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi map.put("real_time", String.format("%.2f", phd.getAcq().getAcquisition_real_time())); map.put("live_time", String.format("%.2f", phd.getAcq().getAcquisition_live_time())); double deadTime = (phd.getAcq().getAcquisition_real_time() - phd.getAcq().getAcquisition_live_time()) / phd.getAcq().getAcquisition_real_time(); - map.put("dead_time", String.format("%.2f", deadTime*100)); + map.put("dead_time", String.format("%.2f", deadTime * 100)); map.put("checkBox_updateCal", phd.getSetting().isBUpdateCal()); map.put("bAnalyed", phd.isBAnalyed()); map.put("peak", phd.getVPeak()); gammaFileUtil.UpdateChart(phd, map, colorMap); - phdCache.put(fileName+"-"+userName, phd); + phdCache.put(fileName + "-" + userName, phd); localCache.setPHDCache(phdCache); result.setSuccess(true); result.setResult(map); @@ -812,7 +813,7 @@ public class GammaServiceImpl extends 
AbstractLogOrReport implements IGammaServi @Override public void delPHDCache(String fileName) { - LoginUser loginUser = (LoginUser)SecurityUtils.getSubject().getPrincipal(); + LoginUser loginUser = (LoginUser) SecurityUtils.getSubject().getPrincipal(); String username = loginUser.getUsername(); String key = fileName + StrUtil.DASHED + username; // 删除指定key的Cache @@ -823,18 +824,18 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi public Result Compare(String fileName, String compareFileName, HttpServletRequest request) { Result result = new Result(); String userName = JwtUtil.getUserNameByToken(request); - //获取当前角色配置的颜色信息 + // 获取当前角色配置的颜色信息 Map colorMap = sysUserColorService.initColor(userName); - //获取缓存的phd数据 + // 获取缓存的phd数据 Cache phdCache = localCache.getPHDCache(); - PHDFile phd = phdCache.getIfPresent(fileName+"-"+userName); + PHDFile phd = phdCache.getIfPresent(fileName + "-" + userName); if (Objects.isNull(phd)) { result.error500("Please select the parse file first!"); return result; } long m_nCount = phd.getSpec().getNum_g_channel(); List vEnergy = phd.getVEnergy(); - //获取Compare数据 + // 获取Compare数据 List m_vecCompare = gammaFileUtil.loadCompareData(compareFileName, userName, m_nCount, result); if (CollectionUtils.isNotEmpty(m_vecCompare)) { List chartDataList = gammaFileUtil.CompareData(m_vecCompare, m_nCount, colorMap, vEnergy); @@ -848,11 +849,11 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi public Result Strip(String fileName, String stripFileName, Double ratioRate, HttpServletRequest request) { Result result = new Result(); String userName = JwtUtil.getUserNameByToken(request); - //获取当前角色配置的颜色信息 + // 获取当前角色配置的颜色信息 Map colorMap = sysUserColorService.initColor(userName); - //获取缓存的phd数据 + // 获取缓存的phd数据 Cache phdCache = localCache.getPHDCache(); - PHDFile phd = phdCache.getIfPresent(fileName+"-"+userName); + PHDFile phd = phdCache.getIfPresent(fileName + "-" + userName); if (Objects.isNull(phd)) { result.error500("Please select the parse file first!"); return result; @@ -863,16 +864,16 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi List m_vCount = new LinkedList<>(); // 确保绘制曲线时所有谱都是从1道开始 int i = 0; - if(m_nSChan == 0) { + if (m_nSChan == 0) { i = 1; } - for(; i m_vecCompare = gammaFileUtil.loadCompareData(stripFileName, userName, m_nCount, result); if (CollectionUtils.isNotEmpty(m_vecCompare)) { List chartDataList = gammaFileUtil.StripData(m_vecCompare, m_vCount, m_nCount, colorMap, vEnergy, ratioRate); @@ -887,8 +888,8 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi Result result = new Result(); String userName = JwtUtil.getUserNameByToken(request); Cache phdCache = localCache.getPHDCache(); - PHDFile phd = phdCache.getIfPresent(fileName+"-"+userName); - if (Objects.isNull(phd)){ + PHDFile phd = phdCache.getIfPresent(fileName + "-" + userName); + if (Objects.isNull(phd)) { result.error500("Please select the parse file first!"); return result; } @@ -912,7 +913,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi map.put("dateTime_Conc", setup.getRefTime_conc()); // 当前谱文件为非 “P” 类型时禁用“刻度更新”模块 - if(!phd.getHeader().getSystem_type().toUpperCase().contains("P")) { + if (!phd.getHeader().getSystem_type().toUpperCase().contains("P")) { map.put("group_calPS", false); } result.setSuccess(true); @@ -927,8 +928,8 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi String userName = 
JwtUtil.getUserNameByToken(request); String fileName = configureData.getFileName(); Cache phdCache = localCache.getPHDCache(); - PHDFile phd = phdCache.getIfPresent(fileName+"-"+userName); - if (Objects.isNull(phd)){ + PHDFile phd = phdCache.getIfPresent(fileName + "-" + userName); + if (Objects.isNull(phd)) { result.error500("Please select the parse file first!"); return result; } @@ -957,47 +958,47 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi Result result = new Result(); String userName = JwtUtil.getUserNameByToken(request); Cache phdCache = localCache.getPHDCache(); - PHDFile phd = phdCache.getIfPresent(fileName+"-"+userName); - if (Objects.isNull(phd)){ + PHDFile phd = phdCache.getIfPresent(fileName + "-" + userName); + if (Objects.isNull(phd)) { result.error500("Please select the parse file first!"); return result; } - //获取当前用户信息 + // 获取当前用户信息 SysUser user = userTaskUtil.findUserByName(userName); if (Objects.nonNull(user)) { phd.setUserId(user.getId()); } - //赋值xml文件存放路径 + // 赋值xml文件存放路径 phd.setXmlFilePath(parameterProperties.getFilePath()); - //获取当前角色的颜色配置 + // 获取当前角色的颜色配置 Map colorMap = sysUserColorService.initColor(userName); - //查询当前用户关联的核素信息 + // 查询当前用户关联的核素信息 List nuclides = new LinkedList<>(); - //从postgreSql中获取当前用户关注的核素信息 如果当前用户没有 则返回管理员的 + // 从postgreSql中获取当前用户关注的核素信息 如果当前用户没有 则返回管理员的 nuclides = defaultNuclideSpectrumService.findNuclidesByUserName(userName, phd.getHeader().getSystem_type().toUpperCase()); - if (CollectionUtils.isEmpty(nuclides)){ + if (CollectionUtils.isEmpty(nuclides)) { nuclides = defaultNuclideSpectrumService.findNuclidesByUserName("admin", phd.getHeader().getSystem_type().toUpperCase()); } - //分析文件数据 + // 分析文件数据 int flag = gammaFileUtil.AnalyseData(phd); - if (flag == 0){ - String warning = "The spectrum needn't Analyed. Maybe:\n"+ - "1. It has already Analyed.\n"+ + if (flag == 0) { + String warning = "The spectrum needn't Analyed. Maybe:\n" + + "1. It has already Analyed.\n" + "2. 
You didn't change any setting or calibration."; result.error500(warning); - } else if (flag == -1){ + } else if (flag == -1) { Map nuclideLinesMap = GetNuclideLines(nuclides); gammaFileUtil.NuclidesIdent(phd, nuclideLinesMap); gammaFileUtil.RunQC(phd); - String warning = "Finish three tasks:\n"+ - "\t1.Update efficiencies of all peaks;\n"+ - "\t2.Identify nuclides again;\n"+ + String warning = "Finish three tasks:\n" + + "\t1.Update efficiencies of all peaks;\n" + + "\t2.Identify nuclides again;\n" + "\t3.Test QC again."; result.error500(warning); } else { Map nuclideLinesMap = GetNuclideLines(nuclides); gammaFileUtil.AnalyseSpectrum(phd, nuclideLinesMap); - //重新分析各峰值对应的核素信息 + // 重新分析各峰值对应的核素信息 // gammaFileUtil.NuclidesIdent(phd, nuclideLinesMap); Map map = new HashMap<>(); gammaFileUtil.UpdateChart(phd, map, colorMap); @@ -1014,19 +1015,19 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi public Map GetNuclideLines(List nuclideList) { Map mapLines = new HashMap<>(); - if(nuclideList.size() < 1){ + if (nuclideList.size() < 1) { return mapLines; } - for(String name : nuclideList) { + for (String name : nuclideList) { NuclideLines nlines = new NuclideLines(); List nuclideLineList = spectrumAnalysisMapper.getNuclideLines(name); - for(int j=0;j 0) { + if (Objects.nonNull(nuclideLineList.get(j).getKeyFlag()) && nuclideLineList.get(j).getKeyFlag().intValue() > 0) { nlines.key_flag = j; nlines.maxYeildIdx = j; } @@ -1034,9 +1035,9 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi mapLines.put(name, nlines); } List halfs = spectrumAnalysisMapper.getHalf(nuclideList); - for(int m=0;m map = new HashMap<>(); Cache phdCache = localCache.getPHDCache(); - PHDFile phd = phdCache.getIfPresent(fileName+"-"+userName); - if (Objects.isNull(phd)){ + PHDFile phd = phdCache.getIfPresent(fileName + "-" + userName); + if (Objects.isNull(phd)) { result.error500("Please select the parse file first!"); return result; } Map colorMap = sysUserColorService.initColor(userName); - //表单 + // 表单 List vPeak = gammaFileUtil.InitPeakTable(phd.getVPeak()); map.put("table", vPeak); - //Chart 折线图 + // Chart 折线图 List m_vCount = new LinkedList<>(); long m_nCount = phd.getSpec().getNum_g_channel(); long m_nSChan = phd.getSpec().getBegin_channel(); // 确保绘制曲线时所有谱都是从1道开始 int i = 0; - if(m_nSChan == 0){ + if (m_nSChan == 0) { i = 1; } - for(; i differance = gammaFileUtil.Differance(phd, phd.getVPeak(), m_vCount, m_nCount); map.put("barChart", differance); - //赋值energy + // 赋值energy map.put("energy", phd.getVEnergy()); - //赋值BaseCtrls + // 赋值BaseCtrls map.put("BaseCtrls", phd.getBaseCtrls()); - //FitBaseLine颜色 + // FitBaseLine颜色 map.put("FitBaseLine", colorMap.get("Color_Fitbase")); result.setSuccess(true); result.setResult(map); @@ -1100,13 +1101,13 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi String userName = JwtUtil.getUserNameByToken(request); Map map = new HashMap<>(); Cache phdCache = localCache.getPHDCache(); - PHDFile phd = phdCache.getIfPresent(fileName+"-"+userName); - if (Objects.isNull(phd)){ + PHDFile phd = phdCache.getIfPresent(fileName + "-" + userName); + if (Objects.isNull(phd)) { result.error500("Please select the parse file first!"); return result; } - //判断当前选中的channel是否在正常允许范围内 - if(curChan <= phd.getBaseCtrls().getRg_low()+1 || curChan >= phd.getBaseCtrls().getRg_high()-1) { + // 判断当前选中的channel是否在正常允许范围内 + if (curChan <= phd.getBaseCtrls().getRg_low() + 1 || curChan >= phd.getBaseCtrls().getRg_high() - 1) { 
result.error500("Couldn't insert peak, maybe out of range"); return result; } @@ -1121,7 +1122,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi List upperTailAlpha = new LinkedList<>(); List area = new LinkedList<>(); List stepRatio = new LinkedList<>(); - for (PeakInfo peakInfo: vOriPeaks){ + for (PeakInfo peakInfo : vOriPeaks) { peakCentroid.add(peakInfo.peakCentroid); fwhmc.add(peakInfo.fwhmc); tail.add(peakInfo.tail); @@ -1129,25 +1130,25 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi upperTail.add(peakInfo.upperTail); upperTailAlpha.add(peakInfo.upperTailAlpha); area.add(peakInfo.area); - stepRatio.add(peakInfo.area*peakInfo.stepRatio); + stepRatio.add(peakInfo.area * peakInfo.stepRatio); } - //声明基础数据 + // 声明基础数据 List m_vCount = new LinkedList<>(); long m_nCount = phd.getSpec().getNum_g_channel(); long m_nSChan = phd.getSpec().getBegin_channel(); // 确保绘制曲线时所有谱都是从1道开始 int i = 0; - if(m_nSChan == 0) i = 1; - for(; i newPeak = new LinkedList<>(); List newPeaks = new LinkedList<>(); newPeaks.addAll(phd.getVPeak()); - if (structInsertOutput.vIdx.size()>0){ - for (int j=0; j 0) { + for (int j = 0; j < structInsertOutput.vIdx.size(); j++) { int a = 0; - while (a < phd.getVPeak().size() && structInsertOutput.peakCentroid.get(j) > phd.getVPeak().get(a).peakCentroid){ + while (a < phd.getVPeak().size() && structInsertOutput.peakCentroid.get(j) > phd.getVPeak().get(a).peakCentroid) { a++; } PeakInfo peak = new PeakInfo(); - peak.index = a+1; - peak.left = structInsertOutput.vLeft.get(j).intValue();; - peak.right = structInsertOutput.vRight.get(j).intValue();; + peak.index = a + 1; + peak.left = structInsertOutput.vLeft.get(j).intValue(); + ; + peak.right = structInsertOutput.vRight.get(j).intValue(); + ; peak.peakCentroid = structInsertOutput.peakCentroid.get(j); peak.energy = structInsertOutput.energy.get(j); peak.area = structInsertOutput.area.get(j); @@ -1212,10 +1215,10 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi List vIdx = new LinkedList<>(); int ii = 0; - for (PeakInfo peak: newPeaks){ - if(peak.peakCentroid >= right){ + for (PeakInfo peak : newPeaks) { + if (peak.peakCentroid >= right) { break; - } else if(peak.peakCentroid > left) { + } else if (peak.peakCentroid > left) { vIdx.add(ii); } ii++; @@ -1232,17 +1235,17 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi List tablePeaksList = new LinkedList<>(); List newPeaks = new LinkedList<>(); int peakNum = vIdx.size(); - for (int i=0; i phdCache = localCache.getPHDCache(); - PHDFile phd = phdCache.getIfPresent(fileName+"-"+userName); - if (Objects.isNull(phd)){ + PHDFile phd = phdCache.getIfPresent(fileName + "-" + userName); + if (Objects.isNull(phd)) { result.error500("Please select the parse file first!"); return result; } @@ -1273,40 +1276,40 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi List m_vCount = new LinkedList<>(); long m_nCount = phd.getSpec().getNum_g_channel(); long m_nSChan = phd.getSpec().getBegin_channel(); - //获取当前角色的颜色配置 + // 获取当前角色的颜色配置 Map colorMap = sysUserColorService.initColor(userName); // 确保绘制曲线时所有谱都是从1道开始 int i = 0; - if(m_nSChan == 0){ + if (m_nSChan == 0) { i = 1; } - for(; i map = new HashMap<>(); - //根据boolean 决定是否保留本次数据 如果保留则不需要操作vPeak 并重新拟合线 - if (accept){ - if (flag.equalsIgnoreCase("fit")) {//如果传递的flag标识 是 fit则进行修改峰值等数据 - for (int j=0; j vE_Rg = CalValuesHandler.calFcnEval(vC_Rg, phd.getUsedEnerPara().getP()).counts; - 
if(vE_Rg.size() != 2 || vE_Rg.get(0).isNaN() || vE_Rg.get(1).isNaN()) { + if (vE_Rg.size() != 2 || vE_Rg.get(0).isNaN() || vE_Rg.get(1).isNaN()) { return result; } List Af = new LinkedList<>(); List Cf = new LinkedList<>(); List Ff = new LinkedList<>(); - //遍历列表中的数据 - for(int j=0; j< tablePeaksList.size(); j++){ + // 遍历列表中的数据 + for (int j = 0; j < tablePeaksList.size(); j++) { TablePeaks nPeak = tablePeaksList.get(j); - PeakInfo peak = phd.getVPeak().get(Integer.valueOf(nPeak.getLab())-1); + PeakInfo peak = phd.getVPeak().get(Integer.valueOf(nPeak.getLab()) - 1); double dE = CalValuesHandler.calDerivaOut(peak.peakCentroid, phd.getUsedEnerPara().getP()); - if(peak.energy < vE_Rg.get(0) || peak.energy > vE_Rg.get(1)) { + if (peak.energy < vE_Rg.get(0) || peak.energy > vE_Rg.get(1)) { result.error500("The energy isn't in the analysis range."); return result; } @@ -1338,17 +1341,17 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi peak.peakCentroid = CalValuesHandler.energyToChannel(energys, phd.getUsedEnerPara().getP()).counts.get(0); peak.stepRatio = Double.valueOf(nPeak.getStep()) / peak.area; peak.BWWidthChan = Double.valueOf(nPeak.getBwGamma()) / dE; - if(dE > 0) { + if (dE > 0) { peak.fwhmc = peak.fwhm / dE; } - if(!tablePeaksList.get(j).isNetAreaB()) { + if (!tablePeaksList.get(j).isNetAreaB()) { Af.add(Integer.valueOf(tablePeaksList.get(j).getLab())); } - if(!tablePeaksList.get(j).isCentroid()) { + if (!tablePeaksList.get(j).isCentroid()) { Cf.add(Integer.valueOf(tablePeaksList.get(j).getLab())); } - if(!tablePeaksList.get(j).isFwhmB()) { + if (!tablePeaksList.get(j).isFwhmB()) { Ff.add(Integer.valueOf(tablePeaksList.get(j).getLab())); } } @@ -1356,13 +1359,13 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi String phdStr = mapper.writeValueAsString(phd); String strValue = CalValuesHandler.fitPeakFull(phdStr, Af, Cf, Ff); Map parseMap = JSON.parseObject(strValue, Map.class); - for (Map.Entry entry:parseMap.entrySet()) { + for (Map.Entry entry : parseMap.entrySet()) { if (entry.getKey().equalsIgnoreCase("vPeak")) { List value = JSON.parseArray(JSON.toJSONString(entry.getValue()), PeakInfo.class); phd.setVPeak(value); } } - //重新计算peak的改变 + // 重新计算peak的改变 gammaFileUtil.PeaksChanged(phd); List vPeak = gammaFileUtil.InitPeakTable(phd.getVPeak()); @@ -1374,7 +1377,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi List differance = gammaFileUtil.Differance(phd, phd.getVPeak(), m_vCount, m_nCount); map.put("barChart", differance); gammaFileUtil.UpdateChart(phd, map, colorMap); - } else {//如果不保留 根据下标移除对应的vPeak数据 + } else {// 如果不保留 根据下标移除对应的vPeak数据 if (CollectionUtils.isNotEmpty(oldPeak)) { phd.getVPeak().clear(); phd.setVPeak(oldPeak); @@ -1396,8 +1399,8 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi Result result = new Result(); String userName = JwtUtil.getUserNameByToken(request); Cache phdCache = localCache.getPHDCache(); - PHDFile phd = phdCache.getIfPresent(fileName+"-"+userName); - if (Objects.isNull(phd)){ + PHDFile phd = phdCache.getIfPresent(fileName + "-" + userName); + if (Objects.isNull(phd)) { result.error500("Please select the parse file first!"); return result; } @@ -1405,17 +1408,17 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi long m_nCount = phd.getSpec().getNum_g_channel(); HashMap map = new HashMap<>(); int peakNum = phd.getVPeak().size(); - if(peakNum < 1) { + if (peakNum < 1) { 
result.error500("No peak to delete."); return result; } - if(curRow >= 0 && curRow < peakNum) { + if (curRow >= 0 && curRow < peakNum) { phd.getVPeak().remove(curRow); gammaFileUtil.PeaksChanged(phd); - for (int i=0;i vPeak = gammaFileUtil.InitPeakTable(phd.getVPeak()); map.put("table", vPeak); @@ -1432,21 +1435,21 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi public Result fitPeak(int left, int right, String fileName, HttpServletRequest request) { Result result = new Result(); HashMap map = new HashMap<>(); - //获取用户名 + // 获取用户名 String userName = JwtUtil.getUserNameByToken(request); Cache phdCache = localCache.getPHDCache(); - PHDFile phd = phdCache.getIfPresent(fileName+"-"+userName); - if (Objects.isNull(phd)){ + PHDFile phd = phdCache.getIfPresent(fileName + "-" + userName); + if (Objects.isNull(phd)) { result.error500("Please select the parse file first!"); return result; } - //判断当前phd文件对应的peak的数据是否小于1 - if(phd.getVPeak().size() < 1) { + // 判断当前phd文件对应的peak的数据是否小于1 + if (phd.getVPeak().size() < 1) { result.error500("No peak to fit."); return result; } - //判断当前选择的左侧道值是否大于右侧道值 - if(left > right) { + // 判断当前选择的左侧道值是否大于右侧道值 + if (left > right) { int temp = left; left = right; right = temp; @@ -1455,16 +1458,16 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi // 找出与插入峰相关联的峰的索引 List vIdx = new LinkedList<>(); int ii = 0; - for(PeakInfo peak: phd.getVPeak()) { - if(peak.peakCentroid >= right) { + for (PeakInfo peak : phd.getVPeak()) { + if (peak.peakCentroid >= right) { break; - } else if(peak.peakCentroid > left) { + } else if (peak.peakCentroid > left) { vIdx.add(ii); } ii++; } - if(CollectionUtils.isEmpty(vIdx)) { - result.error500("There are 0 peak between channel "+left+" and "+right); + if (CollectionUtils.isEmpty(vIdx)) { + result.error500("There are 0 peak between channel " + left + " and " + right); return result; } // 备份原来的峰列表 @@ -1483,27 +1486,27 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi Map map = new HashMap<>(); String userName = JwtUtil.getUserNameByToken(request); Cache phdCache = localCache.getPHDCache(); - PHDFile phd = phdCache.getIfPresent(fileName+"-"+userName); - if (Objects.isNull(phd)){ + PHDFile phd = phdCache.getIfPresent(fileName + "-" + userName); + if (Objects.isNull(phd)) { result.error500("Please select the parse file first!"); return result; } - if(phd.getVPeak().size() < 1){ + if (phd.getVPeak().size() < 1) { return result; } int index = gammaFileUtil.FindNearPeak(phd.getVPeak(), channel, false); - //查询当前用户关联的核素信息 + // 查询当前用户关联的核素信息 List userLib = new LinkedList<>(); - //从postgreSql中获取当前用户关注的核素信息 如果当前用户没有 则返回管理员的 + // 从postgreSql中获取当前用户关注的核素信息 如果当前用户没有 则返回管理员的 userLib = defaultNuclideSpectrumService.findNuclidesByUserName(userName, phd.getHeader().getSystem_type().toUpperCase()); - if (CollectionUtils.isEmpty(userLib)){ + if (CollectionUtils.isEmpty(userLib)) { userLib = defaultNuclideSpectrumService.findNuclidesByUserName("admin", phd.getHeader().getSystem_type().toUpperCase()); } double min = phd.getVPeak().get(index).energy - phd.getSetting().getEnergyTolerance(); double max = phd.getVPeak().get(index).energy + phd.getSetting().getEnergyTolerance(); List list_possible = spectrumAnalysisMapper.getPossibleNuclide(userLib, min, max); - //用户当前已选中的核素名称 + // 用户当前已选中的核素名称 List list_identify = phd.getVPeak().get(index).nuclides; map.put("index", index); @@ -1520,43 +1523,43 @@ public class GammaServiceImpl extends AbstractLogOrReport implements 
IGammaServi Map map = new HashMap<>(); String userName = JwtUtil.getUserNameByToken(request); Cache phdCache = localCache.getPHDCache(); - PHDFile phd = phdCache.getIfPresent(fileName+"-"+userName); - if (Objects.isNull(phd)){ + PHDFile phd = phdCache.getIfPresent(fileName + "-" + userName); + if (Objects.isNull(phd)) { result.error500("Please select the parse file first!"); return result; } - //获取需要新增的核素名称 - if(StringUtils.isBlank(nuclideName)) { + // 获取需要新增的核素名称 + if (StringUtils.isBlank(nuclideName)) { result.error500("The nuclide name cannot be empty!"); return result; } - //判断当前用户的核素列表是否有核素信息 如果不为空就返回 不进行改变 - if(list_identify.indexOf(nuclideName)>0) { + // 判断当前用户的核素列表是否有核素信息 如果不为空就返回 不进行改变 + if (list_identify.indexOf(nuclideName) > 0) { return result; } - //用户当前的核素信息新增核素名称 + // 用户当前的核素信息新增核素名称 list_identify.add(nuclideName); - //根据要进行修改的列的数据下标 操作Vpeak数据 + // 根据要进行修改的列的数据下标 操作Vpeak数据 phd.getVPeak().get(curRow).nuclides.add(nuclideName); - //查询当前用户所关心的核素名称 - //查询当前用户关联的核素信息 + // 查询当前用户所关心的核素名称 + // 查询当前用户关联的核素信息 List userLib = new LinkedList<>(); - //从postgreSql中获取当前用户关注的核素信息 如果当前用户没有 则返回管理员的 + // 从postgreSql中获取当前用户关注的核素信息 如果当前用户没有 则返回管理员的 userLib = defaultNuclideSpectrumService.findNuclidesByUserName(userName, phd.getHeader().getSystem_type().toUpperCase()); - if (CollectionUtils.isEmpty(userLib)){ + if (CollectionUtils.isEmpty(userLib)) { userLib = defaultNuclideSpectrumService.findNuclidesByUserName("admin", phd.getHeader().getSystem_type().toUpperCase()); } Map mapNucLines = GetNuclideLines(userLib); - //查询出核素信息 + // 查询出核素信息 NuclideLines it_line = mapNucLines.get(nuclideName); - //如果核素信息不存在返回 - if(Objects.isNull(it_line)){ + // 如果核素信息不存在返回 + if (Objects.isNull(it_line)) { return result; } List vPeakIdx = new LinkedList<>(); // 从 0 开始 int t_idx = 0; - for (PeakInfo peak: phd.getVPeak()) { - if(peak.nuclides.contains(nuclideName)) { + for (PeakInfo peak : phd.getVPeak()) { + if (peak.nuclides.contains(nuclideName)) { vPeakIdx.add(t_idx); } t_idx++; @@ -1576,25 +1579,25 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi String userName = JwtUtil.getUserNameByToken(request); Map map = new HashMap<>(); Cache phdCache = localCache.getPHDCache(); - PHDFile phd = phdCache.getIfPresent(fileName+"-"+userName); - if (Objects.isNull(phd)){ + PHDFile phd = phdCache.getIfPresent(fileName + "-" + userName); + if (Objects.isNull(phd)) { result.error500("Please select the parse file first!"); return result; } int index = list_identify.indexOf(nuclideName); - if(index>=0) { + if (index >= 0) { // 如果所选的行下标小于0 或者 超出界限 则不进行处理 - if(curRow < 0 || curRow >= phd.getVPeak().size()) { + if (curRow < 0 || curRow >= phd.getVPeak().size()) { return result; } // 更新峰信息列表和表格 - //根据核素名称获取对应的下标并从list_identify,phd.getVPeak()移除 + // 根据核素名称获取对应的下标并从list_identify,phd.getVPeak()移除 list_identify.remove(index); int peakNuclIndex = phd.getVPeak().get(curRow).nuclides.indexOf(nuclideName); phd.getVPeak().get(curRow).nuclides.remove(peakNuclIndex); List vPeak = gammaFileUtil.InitPeakTable(phd.getVPeak()); // 处理核素MDA、MDC - gammaFileUtil.ReCalcMdaMdc(phd, nuclideName, curRow+1); + gammaFileUtil.ReCalcMdaMdc(phd, nuclideName, curRow + 1); map.put("identify", list_identify); map.put("table", vPeak); result.setSuccess(true); @@ -1608,12 +1611,12 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi Result result = new Result(); String userName = JwtUtil.getUserNameByToken(request); Cache phdCache = localCache.getPHDCache(); - PHDFile phd = 
phdCache.getIfPresent(fileName+"-"+userName); + PHDFile phd = phdCache.getIfPresent(fileName + "-" + userName); if (Objects.isNull(phd)) { result.error500("Please select the parse file first!"); return result; } - if(curRow >= 0 && curRow < phd.getVPeak().size()) { + if (curRow >= 0 && curRow < phd.getVPeak().size()) { result.setSuccess(true); result.setResult(phd.getVPeak().get(curRow).comments); } else { @@ -1628,12 +1631,12 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi String userName = JwtUtil.getUserNameByToken(request); Map map = new HashMap<>(); Cache phdCache = localCache.getPHDCache(); - PHDFile phd = phdCache.getIfPresent(fileName+"-"+userName); + PHDFile phd = phdCache.getIfPresent(fileName + "-" + userName); if (Objects.isNull(phd)) { result.error500("Please select the parse file first!"); return result; } - if(curRow >= 0 && curRow < phd.getVPeak().size()) { + if (curRow >= 0 && curRow < phd.getVPeak().size()) { phd.getVPeak().get(curRow).comments = comments; List vPeak = gammaFileUtil.InitPeakTable(phd.getVPeak()); map.put("table", vPeak); @@ -1650,7 +1653,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi Result result = new Result(); String userName = JwtUtil.getUserNameByToken(request); Cache phdCache = localCache.getPHDCache(); - PHDFile phd = phdCache.getIfPresent(fileName+"-"+userName); + PHDFile phd = phdCache.getIfPresent(fileName + "-" + userName); if (Objects.isNull(phd)) { result.error500("Please select the parse file first!"); return result; @@ -1665,7 +1668,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi Result result = new Result(); String userName = JwtUtil.getUserNameByToken(request); Cache phdCache = localCache.getPHDCache(); - PHDFile phd = phdCache.getIfPresent(fileName+"-"+userName); + PHDFile phd = phdCache.getIfPresent(fileName + "-" + userName); if (Objects.isNull(phd)) { result.error500("Please select the parse file first!"); return result; @@ -1679,14 +1682,14 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi @Override public Result nuclideReview(Integer sampleId, String fileName, Double channel, HttpServletRequest request) { Result result = new Result(); - if (Objects.isNull(channel)){ + if (Objects.isNull(channel)) { return result; } Map map = new HashMap<>(); String userName = JwtUtil.getUserNameByToken(request); Cache phdCache = localCache.getPHDCache(); - PHDFile phd = phdCache.getIfPresent(fileName+"-"+userName); - if (Objects.isNull(phd)){ + PHDFile phd = phdCache.getIfPresent(fileName + "-" + userName); + if (Objects.isNull(phd)) { result.error500("Please select the parse file first!"); return result; } @@ -1695,15 +1698,15 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi BigDecimal bigDecimal = BigDecimal.valueOf(value); bigDecimal = bigDecimal.setScale(2, BigDecimal.ROUND_HALF_UP); map.put("energy", bigDecimal); - //查询当前用户关联的核素信息 + // 查询当前用户关联的核素信息 List nuclides = new LinkedList<>(); - //从postgreSql中获取当前用户关注的核素信息 如果当前用户没有 则返回管理员的 + // 从postgreSql中获取当前用户关注的核素信息 如果当前用户没有 则返回管理员的 nuclides = defaultNuclideSpectrumService.findNuclidesByUserName(userName, phd.getHeader().getSystem_type().toUpperCase()); - if (CollectionUtils.isEmpty(nuclides)){ + if (CollectionUtils.isEmpty(nuclides)) { nuclides = defaultNuclideSpectrumService.findNuclidesByUserName("admin", phd.getHeader().getSystem_type().toUpperCase()); } List nuclideList = InitList(bigDecimal.doubleValue(), 0.5, 
nuclides); - if(CollectionUtils.isNotEmpty(nuclideList)) { + if (CollectionUtils.isNotEmpty(nuclideList)) { map.put("list", nuclideList); String name = nuclideList.get(0); InitTable(name, map, phd, colorMap); @@ -1722,7 +1725,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi } public List InitList(double energy, double tolerance, List nuclides) { - if(nuclides.size() < 1){ + if (nuclides.size() < 1) { return new LinkedList<>(); } double min = energy - tolerance; @@ -1735,7 +1738,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi InitNuclideInfo(name, map); long span = phd.getSpec().getG_energy_span(); List nuclideTableList = spectrumAnalysisMapper.getNuclideTable(name, span); - if (CollectionUtils.isNotEmpty(nuclideTableList)){ + if (CollectionUtils.isNotEmpty(nuclideTableList)) { map.put("table", nuclideTableList); gammaFileUtil.InitChart(nuclideTableList, phd, map, colorMap); } @@ -1745,30 +1748,30 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi map.put("name", name); GardsNuclLib nuclideInfo = spectrumAnalysisMapper.getNuclideInfo(name); - if(Objects.nonNull(nuclideInfo)) { + if (Objects.nonNull(nuclideInfo)) { map.put("lines", nuclideInfo.getNumLines()); - if(Objects.isNull(nuclideInfo.getHalflife())) { + if (Objects.isNull(nuclideInfo.getHalflife())) { map.put("halfLife", ""); } else { String units = "D"; double halflife = nuclideInfo.getHalflife().doubleValue(); - if(halflife >= 1000) { + if (halflife >= 1000) { halflife = halflife / 365.25; units = "A"; - } else if(halflife < 0.1 && halflife >= 1.0 / 1440) { + } else if (halflife < 0.1 && halflife >= 1.0 / 1440) { halflife = halflife * 1440; units = "M"; - } else if(halflife <= 1.0 / 1440 && halflife > 0) { + } else if (halflife <= 1.0 / 1440 && halflife > 0) { halflife = halflife * 86400; units = "S"; } halflife = halflife + 0.0001; map.put("halfLife", String.format("%.3f", halflife) + units); } - if(Objects.isNull(nuclideInfo.getHalflifeErr())) { + if (Objects.isNull(nuclideInfo.getHalflifeErr())) { map.put("halfLifeErr", ""); } else { - map.put("halfLifeErr", String.format("%.3f", nuclideInfo.getHalflifeErr().doubleValue())+"%"); + map.put("halfLifeErr", String.format("%.3f", nuclideInfo.getHalflifeErr().doubleValue()) + "%"); } } } @@ -1779,8 +1782,8 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi String userName = JwtUtil.getUserNameByToken(request); Map map = new HashMap<>(); Cache phdCache = localCache.getPHDCache(); - PHDFile phd = phdCache.getIfPresent(fileName+"-"+userName); - if (Objects.isNull(phd)){ + PHDFile phd = phdCache.getIfPresent(fileName + "-" + userName); + if (Objects.isNull(phd)) { result.error500("Please select the parse file first!"); return result; } @@ -1797,8 +1800,8 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi Map map = new HashMap<>(); String userName = JwtUtil.getUserNameByToken(request); Cache phdCache = localCache.getPHDCache(); - PHDFile phd = phdCache.getIfPresent(fileName+"-"+userName); - if (Objects.isNull(phd)){ + PHDFile phd = phdCache.getIfPresent(fileName + "-" + userName); + if (Objects.isNull(phd)) { result.error500("Please select the parse file first!"); return result; } @@ -1806,15 +1809,15 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi BigDecimal bigDecimal = BigDecimal.valueOf(energy); bigDecimal = bigDecimal.setScale(2, BigDecimal.ROUND_HALF_UP); map.put("energy", 
bigDecimal); - //查询当前用户关联的核素信息 + // 查询当前用户关联的核素信息 List nuclides = new LinkedList<>(); - //从postgreSql中获取当前用户关注的核素信息 如果当前用户没有 则返回管理员的 + // 从postgreSql中获取当前用户关注的核素信息 如果当前用户没有 则返回管理员的 nuclides = defaultNuclideSpectrumService.findNuclidesByUserName(userName, phd.getHeader().getSystem_type().toUpperCase()); - if (CollectionUtils.isEmpty(nuclides)){ + if (CollectionUtils.isEmpty(nuclides)) { nuclides = defaultNuclideSpectrumService.findNuclidesByUserName("admin", phd.getHeader().getSystem_type().toUpperCase()); } List nuclideList = InitList(bigDecimal.doubleValue(), tolerance, nuclides); - if(CollectionUtils.isNotEmpty(nuclideList)) { + if (CollectionUtils.isNotEmpty(nuclideList)) { map.put("list", nuclideList); String name = nuclideList.get(0); InitTable(name, map, phd, colorMap); @@ -1836,33 +1839,33 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi public Result replotBaseLine(BaseLineCtrls baseLineCtrls, HttpServletRequest request) { Result result = new Result(); HashMap map = new HashMap<>(); - //获取用户名 + // 获取用户名 String userName = JwtUtil.getUserNameByToken(request); - //获取文件名称 + // 获取文件名称 String fileName = baseLineCtrls.getFileName(); Cache phdCache = localCache.getPHDCache(); - PHDFile phd = phdCache.getIfPresent(fileName+"-"+userName); - if (Objects.isNull(phd)){ + PHDFile phd = phdCache.getIfPresent(fileName + "-" + userName); + if (Objects.isNull(phd)) { result.error500("Please select the parse file first!"); return result; } List m_vCount = new LinkedList<>(); long m_nCount = phd.getSpec().getNum_g_channel(); long m_nSChan = phd.getSpec().getBegin_channel(); - //获取当前角色的颜色配置 + // 获取当前角色的颜色配置 Map colorMap = sysUserColorService.initColor(userName); // 确保绘制曲线时所有谱都是从1道开始 int i = 0; - if(m_nSChan == 0){ + if (m_nSChan == 0) { i = 1; } - for(; i shapeData = gammaFileUtil.CreateShapeCP(m_baseCtrl); map.put("shapeData", shapeData); - if(m_baseCtrl.getBaseStack().size() > 2) { - for (int j =1; j 2) { + for (int j = 1; j < m_baseCtrl.getBaseStack().size() - 1; j++) { m_baseCtrl.getBaseStack().remove(j); } } @@ -1887,13 +1890,13 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi public Result acceptBaseLine(BaseLineCtrls baseLineCtrls, HttpServletRequest request) { Result result = new Result(); HashMap map = new HashMap<>(); - //获取用户名 + // 获取用户名 String userName = JwtUtil.getUserNameByToken(request); - //获取文件名称 + // 获取文件名称 String fileName = baseLineCtrls.getFileName(); Cache phdCache = localCache.getPHDCache(); - PHDFile phd = phdCache.getIfPresent(fileName+"-"+userName); - if (Objects.isNull(phd)){ + PHDFile phd = phdCache.getIfPresent(fileName + "-" + userName); + if (Objects.isNull(phd)) { result.error500("Please select the parse file first!"); return result; } @@ -1901,25 +1904,25 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi // for (int i=0; i m_vCount = new LinkedList<>(); long m_nCount = phd.getSpec().getNum_g_channel(); long m_nSChan = phd.getSpec().getBegin_channel(); - //获取当前角色的颜色配置 + // 获取当前角色的颜色配置 Map colorMap = sysUserColorService.initColor(userName); // 确保绘制曲线时所有谱都是从1道开始 int i = 0; - if(m_nSChan == 0){ + if (m_nSChan == 0) { i = 1; } - for(; i differance = gammaFileUtil.Differance(phd, phd.getVPeak(), m_vCount, m_nCount); @@ -1930,7 +1933,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi map.put("peakSet", peakSet); List shapeData = gammaFileUtil.CreateShapeCP(phd.getBaseCtrls()); map.put("shapeData", shapeData); - //更新主界面的 Chart + // 更新主界面的 
Chart gammaFileUtil.UpdateChart(phd, map, colorMap); result.setSuccess(true); result.setResult(map); @@ -1942,14 +1945,14 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi Result result = new Result(); Map map = new HashMap<>(); List zeroTimeList = new LinkedList<>(); - if (StringUtils.isNotBlank(ZeroTimeStr)){ + if (StringUtils.isNotBlank(ZeroTimeStr)) { zeroTimeList = Arrays.asList(ZeroTimeStr.split(StringPool.COMMA)); } map.put("list_fission1", zeroTimeList); map.put("list_fission2", zeroTimeList); LocalDateTime now = LocalDateTime.now(); - map.put("Date", now.getYear()+"-"+now.getMonthValue()+"-"+now.getDayOfMonth()); - map.put("Time", now.getHour()+":"+now.getMinute()+":"+now.getSecond()); + map.put("Date", now.getYear() + "-" + now.getMonthValue() + "-" + now.getDayOfMonth()); + map.put("Time", now.getHour() + ":" + now.getMinute() + ":" + now.getSecond()); result.setSuccess(true); result.setResult(map); return result; @@ -1959,51 +1962,51 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi public Result ZeroTimeAnalyse(String nuclide1, String nuclide2, Double product1, Double product2, String target, String energyTFH, String date, String time) { Result result = new Result(); Map map = new HashMap<>(); - //建立数据库连接 + // 建立数据库连接 Connection conn = dbUtil.openDB(); Statement statement = null; try { statement = conn.createStatement(); - //判断product的数值是否为空 - if (Objects.isNull(product1) || Objects.isNull(product2)){ + // 判断product的数值是否为空 + if (Objects.isNull(product1) || Objects.isNull(product2)) { result.error500("The Fission Product is invalid!"); return result; } double active1 = product1.doubleValue(); double active2 = product2.doubleValue(); - if (nuclide1.equals(nuclide2)){ + if (nuclide1.equals(nuclide2)) { return result; } - //根据核素名称查询halflife值 + // 根据核素名称查询halflife值 long halflife1 = fission_changed(nuclide1); long halflife2 = fission_changed(nuclide2); - //读取数据库获取计算值 + // 读取数据库获取计算值 double FY_N1_C = 0, FY_N1_I = 0, FY_N2_C = 0, FY_N2_I = 0; String N1_A = "", N2_A = ""; - //查询第一个需要用到的计算值 - String sql1 = "select FY,A from FY_Table where NUCLIDE_NAME = '"+nuclide1+"' and Target = '"+target+"' and IC = 'C' and ENERGY = '"+energyTFH+"'"; + // 查询第一个需要用到的计算值 + String sql1 = "select FY,A from FY_Table where NUCLIDE_NAME = '" + nuclide1 + "' and Target = '" + target + "' and IC = 'C' and ENERGY = '" + energyTFH + "'"; ResultSet query1 = statement.executeQuery(sql1); - while(query1.next()){ + while (query1.next()) { FY_N1_C = query1.getDouble("FY"); N1_A = query1.getString("A"); } - //查询第一个需要用到的计算值 - String sql2 = "select FY from FY_Table where NUCLIDE_NAME = '"+nuclide1+"' and Target = '"+target+"' and IC = 'I' and ENERGY = '"+energyTFH+"'"; + // 查询第一个需要用到的计算值 + String sql2 = "select FY from FY_Table where NUCLIDE_NAME = '" + nuclide1 + "' and Target = '" + target + "' and IC = 'I' and ENERGY = '" + energyTFH + "'"; ResultSet query2 = statement.executeQuery(sql2); - while(query2.next()){ + while (query2.next()) { FY_N1_I = query2.getDouble("FY"); } - //查询第一个需要用到的计算值 - String sql3 = "select FY,A from FY_Table where NUCLIDE_NAME = '"+nuclide2+"' and Target = '"+target+"' and IC = 'C' and ENERGY = '"+energyTFH+"'"; + // 查询第一个需要用到的计算值 + String sql3 = "select FY,A from FY_Table where NUCLIDE_NAME = '" + nuclide2 + "' and Target = '" + target + "' and IC = 'C' and ENERGY = '" + energyTFH + "'"; ResultSet query3 = statement.executeQuery(sql3); - while(query3.next()){ + while (query3.next()) { FY_N2_C = query3.getDouble("FY"); N2_A 
= query3.getString("A"); } - //查询第一个需要用到的计算值 - String sql4 = "select FY,A from FY_Table where NUCLIDE_NAME = '"+nuclide2+"' and Target = '"+target+"' and IC = 'I' and ENERGY = '"+energyTFH+"'"; + // 查询第一个需要用到的计算值 + String sql4 = "select FY,A from FY_Table where NUCLIDE_NAME = '" + nuclide2 + "' and Target = '" + target + "' and IC = 'I' and ENERGY = '" + energyTFH + "'"; ResultSet query4 = statement.executeQuery(sql4); - while(query4.next()){ + while (query4.next()) { FY_N2_I = query4.getDouble("FY"); } map.put("FY_N1_C", FY_N1_C); @@ -2012,24 +2015,24 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi map.put("FY_N2_I", FY_N2_I); map.put("halflife1", halflife1); map.put("halflife2", halflife2); - if(FY_N1_C > 0 && FY_N2_C > 0 && halflife1 != halflife2) { + if (FY_N1_C > 0 && FY_N2_C > 0 && halflife1 != halflife2) { // Zero Time of the Nuclides double lamada_n1 = Math.log(2) / halflife1; double lamada_n2 = Math.log(2) / halflife2; double temp = 1.0 / (lamada_n2 - lamada_n1); double t; - if(N1_A == N2_A) { - t = -temp * Math.log(((1/lamada_n2)*(active2/active1)-temp) / ((FY_N2_I/(FY_N1_C*lamada_n1))-temp)); + if (N1_A == N2_A) { + t = -temp * Math.log(((1 / lamada_n2) * (active2 / active1) - temp) / ((FY_N2_I / (FY_N1_C * lamada_n1)) - temp)); } else { - t = temp * Math.log((active1/active2) * (FY_N2_C/FY_N1_C) * (lamada_n2/lamada_n1)); + t = temp * Math.log((active1 / active2) * (FY_N2_C / FY_N1_C) * (lamada_n2 / lamada_n1)); } // date of Zero Time Date datetime = DateUtils.parseDate(date + StringPool.SPACE + time, "yyyy-MM-dd HH:mm:ss"); Date oriDate = DateUtils.parseDate("1970-01-01 00:00:00", "yyyy-MM-dd HH:mm:ss"); - double second = (datetime.getTime()/1000 - oriDate.getTime()/1000) - t - 8*60*60; + double second = (datetime.getTime() / 1000 - oriDate.getTime() / 1000) - t - 8 * 60 * 60; - Date resultDateTime = DateUtils.getDate((long)second*1000); + Date resultDateTime = DateUtils.getDate((long) second * 1000); String zerotime = DateUtils.formatDate(resultDateTime, "yyyy-MM-dd HH:mm:ss"); @@ -2041,7 +2044,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi } catch (ParseException e) { throw new RuntimeException(e); } finally { - dbUtil.close(statement,conn); + dbUtil.close(statement, conn); } result.setSuccess(true); return result; @@ -2060,32 +2063,35 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi annlyse.put("date", date); annlyse.put("time", time); - String pathPrefix = "excelTemplate/"; - String path = pathPrefix + ZEROTIME_G.getName(); - String template = ClassUtil.classPath(path); - List lines = FileUtil.readUtf8Lines(template); - // 正则表达式,匹配${}中的内容 - String regex = "\\$\\{([^}]+)}"; - List newLines = new ArrayList<>(); - - for (String line : lines) { - List fieldNames = ReUtil.findAllGroup1(regex, line); - if (CollUtil.isEmpty(fieldNames)){ - newLines.add(line); - continue; - } - // 一行内可能有多个需要替换的变量 - for (String fieldName : fieldNames) { - Object value = annlyse.get(fieldName); - String search = "${" + fieldName + "}"; - String replacement = StrUtil.toString(value); - replacement = StrUtil.isBlank(replacement) ? 
"null" : replacement; - line = StrUtil.replace(line, search, replacement); - } - newLines.add(line); - } PrintWriter writer = null; try { + String pathPrefix = "excelTemplate/"; + String path = pathPrefix + ZEROTIME_G.getName(); + /*String template = ClassUtil.classPath(path); + List lines = FileUtil.readUtf8Lines(template);*/ + InputStream inputStream = ClassUtil.classPathStream(path); + List lines = IOUtils.readLines(inputStream, "UTF-8"); + // 正则表达式,匹配${}中的内容 + String regex = "\\$\\{([^}]+)}"; + List newLines = new ArrayList<>(); + + for (String line : lines) { + List fieldNames = ReUtil.findAllGroup1(regex, line); + if (CollUtil.isEmpty(fieldNames)) { + newLines.add(line); + continue; + } + // 一行内可能有多个需要替换的变量 + for (String fieldName : fieldNames) { + Object value = annlyse.get(fieldName); + String search = "${" + fieldName + "}"; + String replacement = StrUtil.toString(value); + replacement = StrUtil.isBlank(replacement) ? "null" : replacement; + line = StrUtil.replace(line, search, replacement); + } + newLines.add(line); + } + String export = "ZeroTime-G.txt"; writer = ExportUtil.streamWriter(response, export); for (String newLine : newLines) { @@ -2093,7 +2099,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi } } catch (IOException e) { e.printStackTrace(); - }finally { + } finally { if (ObjectUtil.isNotNull(writer)) writer.close(); } @@ -2101,15 +2107,15 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi public long fission_changed(String name) { long halflife = 0; - //建立数据库连接 + // 建立数据库连接 Connection conn = dbUtil.openDB(); Statement statement = null; try { statement = conn.createStatement(); - String sql = "SELECT HALFLIFE FROM libdata WHERE NAME = '"+name+"'"; + String sql = "SELECT HALFLIFE FROM libdata WHERE NAME = '" + name + "'"; ResultSet rs = statement.executeQuery(sql); - while (rs.next()){ + while (rs.next()) { halflife = (long) (24 * 60 * 60 * rs.getDouble("HALFLIFE")); } } catch (SQLException e) { @@ -2124,7 +2130,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi public Result Korsum() { Result result = new Result(); Map map = new HashMap<>(); - //读取文件获取Energy Nuclide数据 + // 读取文件获取Energy Nuclide数据 List m_vEnergy = new LinkedList<>(); List m_vNuclide = new LinkedList<>(); gammaFileUtil.ReadData(m_vEnergy, m_vNuclide); @@ -2163,22 +2169,22 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi vEffi.add(0.008); List energys = coeffData.energys; List inputDataList = new LinkedList<>(); - for(int i=0; i< energys.size(); ++i) { + for (int i = 0; i < energys.size(); ++i) { InputData data = new InputData(); // 将 keV 转换成 MeV - ener = energys.get(i)/1000; + ener = energys.get(i) / 1000; // 如果能量小于34keV,则放弃计算 - if(ener < 0.034) { - if(i < 3) { + if (ener < 0.034) { + if (i < 3) { data.setTotalEffi(vTotE.get(i)); data.setPeakEffi(vEffi.get(i)); } else { data.setTotalEffi(null); data.setPeakEffi(null); } - }else { - effi = Math.exp( coeffData.Effciency1*ener + coeffData.Effciency2 + coeffData.Effciency3/ener + coeffData.Effciency4/Math.pow(ener,2) + coeffData.Effciency5/Math.pow(ener, 3) + coeffData.Effciency6/Math.pow(ener, 4) ); - totE = Math.exp( coeffData.totalEf1*ener + coeffData.totalEf2 + coeffData.totalEf3/ener + coeffData.totalEf4/Math.pow(ener,2) + coeffData.totalEf5/Math.pow(ener, 3) + coeffData.totalEf6/Math.pow(ener, 4) ); + } else { + effi = Math.exp(coeffData.Effciency1 * ener + coeffData.Effciency2 + coeffData.Effciency3 / ener + 
coeffData.Effciency4 / Math.pow(ener, 2) + coeffData.Effciency5 / Math.pow(ener, 3) + coeffData.Effciency6 / Math.pow(ener, 4)); + totE = Math.exp(coeffData.totalEf1 * ener + coeffData.totalEf2 + coeffData.totalEf3 / ener + coeffData.totalEf4 / Math.pow(ener, 2) + coeffData.totalEf5 / Math.pow(ener, 3) + coeffData.totalEf6 / Math.pow(ener, 4)); data.setTotalEffi(Double.parseDouble(NumberFormatUtil.numberFormat(String.valueOf(totE)))); data.setPeakEffi(Double.parseDouble(NumberFormatUtil.numberFormat(String.valueOf(effi)))); } @@ -2196,15 +2202,15 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi public Result KorSumAnalyse(List inputDataList) { Result result = new Result(); DecimalFormat df = new DecimalFormat("#.##########"); - //读取Remaining文件 + // 读取Remaining文件 String readRemaining = ReadRemaining(); - //KORDAT文件的标准头部信息 + // KORDAT文件的标准头部信息 String m_kordatHeader = "0. 0. 0. 10 17.03.1988"; - //分析所需生成的文件 + // 分析所需生成的文件 String kordatPath = parameterProperties.getFilePath() + StringPool.SLASH + "KORDAT"; - //生成文件 + // 生成文件 File kordat = new File(kordatPath); - //向文件内写入内容 + // 向文件内写入内容 BufferedWriter writer = null; try { writer = new BufferedWriter(new FileWriter(kordat)); @@ -2212,8 +2218,8 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi // 写头部信息 out.append(m_kordatHeader); out.append(System.lineSeparator()); - //遍历数组写入表单数据 - for (int i=0; i m_mapNuclideInfo = processFile(resultOutFile); @@ -2266,12 +2272,12 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi return result; } - private Map processFile(File file){ + private Map processFile(File file) { Map m_mapNuclideInfo = new TreeMap<>(); if (ObjectUtil.isNull(file)) return m_mapNuclideInfo; FileReader fileReader = null; BufferedReader reader = null; - try{ + try { fileReader = new FileReader(file); reader = new BufferedReader(fileReader); String nucline_flag = "-----------------------------------------"; @@ -2315,7 +2321,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi } } catch (IOException e) { e.printStackTrace(); - }finally { + } finally { try { if (ObjectUtil.isNotNull(fileReader)) fileReader.close(); if (ObjectUtil.isNotNull(reader)) reader.close(); @@ -2365,8 +2371,8 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi String userName = JwtUtil.getUserNameByToken(request); Map map = new HashMap<>(); Cache phdCache = localCache.getPHDCache(); - PHDFile phd = phdCache.getIfPresent(fileName+"-"+userName); - if (Objects.isNull(phd)){ + PHDFile phd = phdCache.getIfPresent(fileName + "-" + userName); + if (Objects.isNull(phd)) { result.error500("Please select the parse file first!"); return result; } @@ -2387,12 +2393,12 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi } map.put("param", m_curParam); int num = m_vCurEnergy.size(); - if(num < 1){ + if (num < 1) { return result; } List vFit = CalValuesHandler.calFcnEval(m_vCurCentroid, m_curParam.getP()).counts; List energyDataList = new LinkedList<>(); - for(int i=0; i map = new HashMap<>(); Cache phdCache = localCache.getPHDCache(); - PHDFile phd = phdCache.getIfPresent(fileName+"-"+userName); - if (Objects.isNull(phd)){ + PHDFile phd = phdCache.getIfPresent(fileName + "-" + userName); + if (Objects.isNull(phd)) { result.error500("Please select the parse file first!"); return result; } @@ -2431,14 +2437,14 @@ public class GammaServiceImpl extends AbstractLogOrReport implements 
IGammaServi } private void DataChangeEnergy(List m_vCurCentroid, List m_vCurEnergy, List m_vCurUncert, ParameterInfo m_curParam, PHDFile phd, Double width, Map map) { - if(m_vCurEnergy.size() < 1) { + if (m_vCurEnergy.size() < 1) { return; - } else if(m_vCurEnergy.size() == 1) { - for (int j=m_curParam.getP().size(); j<2; j++){ + } else if (m_vCurEnergy.size() == 1) { + for (int j = m_curParam.getP().size(); j < 2; j++) { m_curParam.getP().add(2.0); } m_curParam.getP().set(1, (m_vCurEnergy.get(0) / m_vCurCentroid.get(0))); - for (int j=m_curParam.getPerr().size(); j<2; j++){ + for (int j = m_curParam.getPerr().size(); j < 2; j++) { m_curParam.getPerr().add(0.0); } } else { @@ -2447,9 +2453,9 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi map.put("uncert", m_vCurUncert); map.put("param", m_curParam); List energyDataList = new LinkedList<>(); - if(CollectionUtils.isNotEmpty(m_curParam.getP())) { + if (CollectionUtils.isNotEmpty(m_curParam.getP())) { List vFit = CalValuesHandler.calFcnEval(m_vCurCentroid, m_curParam.getP()).counts; - for(int j=0; j phdCache = localCache.getPHDCache(); - PHDFile phd = phdCache.getIfPresent(fileName+"-"+userName); - if (Objects.isNull(phd)){ + PHDFile phd = phdCache.getIfPresent(fileName + "-" + userName); + if (Objects.isNull(phd)) { result.error500("Please select the parse file first!"); return result; } - if(!curCalName.contains("Input")) { + if (!curCalName.contains("Input")) { List list_cal = phd.getMapEnerKD().keySet().stream().collect(Collectors.toList()); - curCalName = "Input "+(list_cal.stream().filter(item-> item.contains("Input")).collect(Collectors.toList()).size() + 1); + curCalName = "Input " + (list_cal.stream().filter(item -> item.contains("Input")).collect(Collectors.toList()).size() + 1); list_cal.add(curCalName); result.setSuccess(true); result.setResult(list_cal); @@ -2506,15 +2512,15 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi public void saveDataEnergy(List m_vCurCentroid, List m_vCurEnergy, List m_vCurUncert, HttpServletResponse response) { StringBuffer strBuffer = new StringBuffer(); strBuffer.append("#g_Energy").append("\n"); - for (int i=0; i phdCache = localCache.getPHDCache(); - PHDFile phd = phdCache.getIfPresent(sampleFileName+"-"+userName); + PHDFile phd = phdCache.getIfPresent(sampleFileName + "-" + userName); if (Objects.isNull(phd)) { result.error500("Please select the parse file first"); return result; } if (Objects.nonNull(file)) { String fileName = file.getOriginalFilename(); - //从最后一个。切割文件名称 获取文件名称后缀 + // 从最后一个。切割文件名称 获取文件名称后缀 String fileSuffix = fileName.substring(fileName.lastIndexOf(StringPool.DOT)); if (fileSuffix.equalsIgnoreCase(".ent")) { Map map = new HashMap<>(); List m_vCurCentroid = new LinkedList<>(); - List m_vCurEnergy = new LinkedList<>(); + List m_vCurEnergy = new LinkedList<>(); List m_vCurUncert = new LinkedList<>(); File tmpFile = null; InputStream inputStream = null; try { - //创建临时文件 + // 创建临时文件 tmpFile = File.createTempFile("betaGamma", null); inputStream = file.getInputStream(); - //复制上传文件的输入流到临时文件 - FileUtils.copyInputStreamToFile(inputStream ,tmpFile); - //读取文件所有行 + // 复制上传文件的输入流到临时文件 + FileUtils.copyInputStreamToFile(inputStream, tmpFile); + // 读取文件所有行 List readLines = FileUtils.readLines(tmpFile, "UTF-8"); - //遍历文件行数据 - for (int i=0; i values = Arrays.asList(content.split("\t\t")); - if (Double.valueOf(values.get(1))>0) { + if (Double.valueOf(values.get(1)) > 0) { m_vCurEnergy.add(Double.valueOf(values.get(0))); 
m_vCurCentroid.add(Double.valueOf(values.get(1))); m_vCurUncert.add(Double.valueOf(values.get(2))); @@ -2583,12 +2589,12 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi } map.put("param", m_curParam); int num = m_vCurEnergy.size(); - if(num < 1){ + if (num < 1) { return result; } List vFit = CalValuesHandler.calFcnEval(m_vCurCentroid, m_curParam.getP()).counts; List energyDataList = new LinkedList<>(); - for(int i=0; i phdCache = localCache.getPHDCache(); - PHDFile phd = phdCache.getIfPresent(fileName+"-"+userName); - if (Objects.isNull(phd)){ + PHDFile phd = phdCache.getIfPresent(fileName + "-" + userName); + if (Objects.isNull(phd)) { result.error500("Please select the parse file first!"); return result; } @@ -2644,13 +2650,13 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi } @Override - public Result resolutionCalibration(Integer sampleId, String fileName, String currentText, Double width, HttpServletRequest request) { + public Result resolutionCalibration(Integer sampleId, String fileName, String currentText, Double width, HttpServletRequest request) { Result result = new Result(); String userName = JwtUtil.getUserNameByToken(request); Map map = new HashMap<>(); Cache phdCache = localCache.getPHDCache(); - PHDFile phd = phdCache.getIfPresent(fileName+"-"+userName); - if (Objects.isNull(phd)){ + PHDFile phd = phdCache.getIfPresent(fileName + "-" + userName); + if (Objects.isNull(phd)) { result.error500("Please select the parse file first!"); return result; } @@ -2671,12 +2677,12 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi } map.put("param", m_curParam); int num = m_vCurEnergy.size(); - if(num < 1){ + if (num < 1) { return result; } List vFit = CalValuesHandler.calFcnEval(m_vCurEnergy, m_curParam.getP()).counts; List resolutionDataList = new LinkedList<>(); - for(int i=0; i map = new HashMap<>(); Cache phdCache = localCache.getPHDCache(); - PHDFile phd = phdCache.getIfPresent(fileName+"-"+userName); - if (Objects.isNull(phd)){ + PHDFile phd = phdCache.getIfPresent(fileName + "-" + userName); + if (Objects.isNull(phd)) { result.error500("Please select the parse file first!"); return result; } @@ -2720,9 +2726,9 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi map.put("param", m_curParam); List resolutionDataList = new LinkedList<>(); - if(CollectionUtils.isNotEmpty(m_curParam.getP())) { + if (CollectionUtils.isNotEmpty(m_curParam.getP())) { List vFit = CalValuesHandler.calFcnEval(m_vCurEnergy, m_curParam.getP()).counts; - for(int i=0; i phdCache = localCache.getPHDCache(); - PHDFile phd = phdCache.getIfPresent(fileName+"-"+userName); - if (Objects.isNull(phd)){ + PHDFile phd = phdCache.getIfPresent(fileName + "-" + userName); + if (Objects.isNull(phd)) { result.error500("Please select the parse file first!"); return result; } - if(!curCalName.contains("Input")) { + if (!curCalName.contains("Input")) { List list_cal = phd.getMapResoKD().keySet().stream().collect(Collectors.toList()); - curCalName = "Input "+(list_cal.stream().filter(item-> item.contains("Input")).collect(Collectors.toList()).size() + 1); + curCalName = "Input " + (list_cal.stream().filter(item -> item.contains("Input")).collect(Collectors.toList()).size() + 1); list_cal.add(curCalName); result.setSuccess(true); result.setResult(list_cal); @@ -2779,15 +2785,15 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi public void saveDataResolution(List 
m_vCurReso, List m_vCurEnergy, List m_vCurUncert, HttpServletResponse response) { StringBuffer strBuffer = new StringBuffer(); strBuffer.append("#g_Resolution").append("\n"); - for (int i=0; i phdCache = localCache.getPHDCache(); - PHDFile phd = phdCache.getIfPresent(sampleFileName+"-"+userName); + PHDFile phd = phdCache.getIfPresent(sampleFileName + "-" + userName); if (Objects.isNull(phd)) { result.error500("Please select the parse file first"); return result; } if (Objects.nonNull(file)) { String fileName = file.getOriginalFilename(); - //从最后一个。切割文件名称 获取文件名称后缀 + // 从最后一个。切割文件名称 获取文件名称后缀 String fileSuffix = fileName.substring(fileName.lastIndexOf(StringPool.DOT)); if (fileSuffix.equalsIgnoreCase(".ent")) { Map map = new HashMap<>(); List m_vCurReso = new LinkedList<>(); - List m_vCurEnergy = new LinkedList<>(); + List m_vCurEnergy = new LinkedList<>(); List m_vCurUncert = new LinkedList<>(); InputStream inputStream = null; File tmpFile = null; try { - //创建临时文件 + // 创建临时文件 tmpFile = File.createTempFile("betaGamma", null); inputStream = file.getInputStream(); - //复制上传文件的输入流到临时文件 - FileUtils.copyInputStreamToFile(inputStream ,tmpFile); - //读取文件所有行 + // 复制上传文件的输入流到临时文件 + FileUtils.copyInputStreamToFile(inputStream, tmpFile); + // 读取文件所有行 List readLines = FileUtils.readLines(tmpFile, "UTF-8"); - //遍历文件行数据 - for (int i=0; i values = Arrays.asList(content.split("\t\t")); - if (Double.valueOf(values.get(0))>0) { + if (Double.valueOf(values.get(0)) > 0) { m_vCurEnergy.add(Double.valueOf(values.get(0))); m_vCurReso.add(Double.valueOf(values.get(1))); m_vCurUncert.add(Double.valueOf(values.get(2))); @@ -2856,12 +2862,12 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi } map.put("param", m_curParam); int num = m_vCurEnergy.size(); - if(num < 1){ + if (num < 1) { return result; } List vFit = CalValuesHandler.calFcnEval(m_vCurEnergy, m_curParam.getP()).counts; List resolutionDataList = new LinkedList<>(); - for(int i=0; i phdCache = localCache.getPHDCache(); - PHDFile phd = phdCache.getIfPresent(fileName+"-"+userName); - if (Objects.isNull(phd)){ + PHDFile phd = phdCache.getIfPresent(fileName + "-" + userName); + if (Objects.isNull(phd)) { result.error500("Please select the parse file first!"); return result; } @@ -2931,8 +2937,8 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi m_vFuncName.add("HAE Efficiency(1-2-3)"); // 95 map.put("function", m_vFuncName); Cache phdCache = localCache.getPHDCache(); - PHDFile phd = phdCache.getIfPresent(fileName+"-"+userName); - if (Objects.isNull(phd)){ + PHDFile phd = phdCache.getIfPresent(fileName + "-" + userName); + if (Objects.isNull(phd)) { result.error500("Please select the parse file first!"); return result; } @@ -2953,12 +2959,12 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi } map.put("param", m_curParam); int num = m_vCurEnergy.size(); - if(num < 1){ + if (num < 1) { return result; } List vFit = CalValuesHandler.calFcnEval(m_vCurEnergy, m_curParam.getP()).counts; List efficiencyDataList = new LinkedList<>(); - for(int i=0; i map = new HashMap<>(); Cache phdCache = localCache.getPHDCache(); - PHDFile phd = phdCache.getIfPresent(fileName+"-"+userName); - if (Objects.isNull(phd)){ + PHDFile phd = phdCache.getIfPresent(fileName + "-" + userName); + if (Objects.isNull(phd)) { result.error500("Please select the parse file first!"); return result; } @@ -3002,9 +3008,9 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi 
map.put("param", m_curParam); List efficiencyDataList = new LinkedList<>(); - if(CollectionUtils.isNotEmpty(m_curParam.getP())) { + if (CollectionUtils.isNotEmpty(m_curParam.getP())) { List vFit = CalValuesHandler.calFcnEval(m_vCurEnergy, m_curParam.getP()).counts; - for(int i=0; i phdCache = localCache.getPHDCache(); - PHDFile phd = phdCache.getIfPresent(fileName+"-"+userName); - if (Objects.isNull(phd)){ + PHDFile phd = phdCache.getIfPresent(fileName + "-" + userName); + if (Objects.isNull(phd)) { result.error500("Please select the parse file first!"); return result; } - if(!curCalName.contains("Input")) { + if (!curCalName.contains("Input")) { List list_cal = phd.getMapEffiKD().keySet().stream().collect(Collectors.toList()); - curCalName = "Input "+(list_cal.stream().filter(item-> item.contains("Input")).collect(Collectors.toList()).size() + 1); + curCalName = "Input " + (list_cal.stream().filter(item -> item.contains("Input")).collect(Collectors.toList()).size() + 1); list_cal.add(curCalName); result.setSuccess(true); result.setResult(list_cal); @@ -3061,16 +3067,16 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi public void saveDataEfficiency(List m_vCurEffi, List m_vCurEnergy, List m_vCurUncert, Integer funId, HttpServletResponse response) { StringBuffer strBuffer = new StringBuffer(); strBuffer.append("#g_Efficiency").append("\n"); - for (int i=0; i phdCache = localCache.getPHDCache(); - PHDFile phd = phdCache.getIfPresent(sampleFileName+"-"+userName); + PHDFile phd = phdCache.getIfPresent(sampleFileName + "-" + userName); if (Objects.isNull(phd)) { result.error500("Please select the parse file first"); return result; } if (Objects.nonNull(file)) { String fileName = file.getOriginalFilename(); - //从最后一个。切割文件名称 获取文件名称后缀 + // 从最后一个。切割文件名称 获取文件名称后缀 String fileSuffix = fileName.substring(fileName.lastIndexOf(StringPool.DOT)); if (fileSuffix.equalsIgnoreCase(".eft") || fileSuffix.equalsIgnoreCase(".ent")) { Map map = new HashMap<>(); List m_vCurEffi = new LinkedList<>(); - List m_vCurEnergy = new LinkedList<>(); + List m_vCurEnergy = new LinkedList<>(); List m_vCurUncert = new LinkedList<>(); Double m_nFunId = 1.0; File tmpFile = null; InputStream inputStream = null; try { - //创建临时文件 + // 创建临时文件 tmpFile = File.createTempFile("betaGamma", null); inputStream = file.getInputStream(); - //复制上传文件的输入流到临时文件 - FileUtils.copyInputStreamToFile(inputStream ,tmpFile); - //读取文件所有行 + // 复制上传文件的输入流到临时文件 + FileUtils.copyInputStreamToFile(inputStream, tmpFile); + // 读取文件所有行 List readLines = FileUtils.readLines(tmpFile, "UTF-8"); - //遍历文件行数据 - for (int i=0; i values = Arrays.asList(content.split("\t\t")); - if (Double.valueOf(values.get(0))>0) { + if (Double.valueOf(values.get(0)) > 0) { m_vCurEnergy.add(Double.valueOf(values.get(0))); m_vCurEffi.add(Double.valueOf(values.get(1))); m_vCurUncert.add(Double.valueOf(values.get(2))); @@ -3144,19 +3150,19 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi if (StringUtils.isNotBlank(currentText)) { m_curParam = phd.getMapEffiPara().get(currentText); } - if (m_curParam.getP().size()>0){ + if (m_curParam.getP().size() > 0) { m_curParam.getP().set(0, m_nFunId); } else { m_curParam.getP().add(m_nFunId); } map.put("param", m_curParam); int num = m_vCurEnergy.size(); - if(num < 1){ + if (num < 1) { return result; } List vFit = CalValuesHandler.calFcnEval(m_vCurEnergy, m_curParam.getP()).counts; List efficiencyDataList = new LinkedList<>(); - for(int i=0; i phdCache = localCache.getPHDCache(); - 
PHDFile phd = phdCache.getIfPresent(fileName+"-"+userName); - if (Objects.isNull(phd)){ + PHDFile phd = phdCache.getIfPresent(fileName + "-" + userName); + if (Objects.isNull(phd)) { result.error500("Please select the parse file first!"); return result; } @@ -3217,44 +3223,44 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi Map map = new HashMap<>(); String userName = JwtUtil.getUserNameByToken(request); Cache phdCache = localCache.getPHDCache(); - PHDFile phd = phdCache.getIfPresent(fileName+"-"+userName); - if (Objects.isNull(phd)){ + PHDFile phd = phdCache.getIfPresent(fileName + "-" + userName); + if (Objects.isNull(phd)) { result.error500("Please select the parse file first!"); return result; } List nuclides = new LinkedList<>(); - //判断传入的数据是否都不为空 - if (StringUtils.isNotBlank(editEnergy) && Objects.nonNull(err)){ + // 判断传入的数据是否都不为空 + if (StringUtils.isNotBlank(editEnergy) && Objects.nonNull(err)) { double editEnergyDou = Double.valueOf(editEnergy); double min = editEnergyDou - err; double max = editEnergyDou + err; - if (libraryName.equals("UserLibrary")){ - //从postgreSql中获取当前用户关注的核素信息 如果当前用户没有 则返回管理员的 + if (libraryName.equals("UserLibrary")) { + // 从postgreSql中获取当前用户关注的核素信息 如果当前用户没有 则返回管理员的 nuclides = defaultNuclideSpectrumService.findNuclidesByUserName(userName, phd.getHeader().getSystem_type().toUpperCase()); - if (CollectionUtils.isEmpty(nuclides)){ + if (CollectionUtils.isEmpty(nuclides)) { nuclides = defaultNuclideSpectrumService.findNuclidesByUserName("admin", phd.getHeader().getSystem_type().toUpperCase()); } nuclides = spectrumAnalysisMapper.getUserNuclideNames(nuclides, min, max); - }else if (libraryName.equals("FULLLibrary")){ + } else if (libraryName.equals("FULLLibrary")) { nuclides = spectrumAnalysisMapper.getFULLNuclideNames(min, max); - }else if (libraryName.equals("RelevantLibrary")){ + } else if (libraryName.equals("RelevantLibrary")) { nuclides = spectrumAnalysisMapper.getRelevantNuclideNames(min, max); } } else { - if(libraryName.equals("UserLibrary")) { - //从postgreSql中获取当前用户关注的核素信息 如果当前用户没有 则返回管理员的 + if (libraryName.equals("UserLibrary")) { + // 从postgreSql中获取当前用户关注的核素信息 如果当前用户没有 则返回管理员的 nuclides = defaultNuclideSpectrumService.findNuclidesByUserName(userName, phd.getHeader().getSystem_type().toUpperCase()); - if (CollectionUtils.isEmpty(nuclides)){ + if (CollectionUtils.isEmpty(nuclides)) { nuclides = defaultNuclideSpectrumService.findNuclidesByUserName("admin", phd.getHeader().getSystem_type().toUpperCase()); } - } else if (libraryName.equals("FULLLibrary")){ + } else if (libraryName.equals("FULLLibrary")) { nuclides = spectrumAnalysisMapper.getNuclideNames("CONFIGURATION.GARDS_NUCL_LIB"); - } else if (libraryName.equals("RelevantLibrary")){ + } else if (libraryName.equals("RelevantLibrary")) { nuclides = spectrumAnalysisMapper.getNuclideNames("CONFIGURATION.GARDS_RELEVANT_NUCLIDE"); } } map.put("nuclides", nuclides); - if (StringUtils.isBlank(nuclideName)){ + if (StringUtils.isBlank(nuclideName)) { nuclideName = nuclides.get(0); } List nuclLinesLibs = InitNuclideLine(editEnergy, err, nuclideName); @@ -3271,7 +3277,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi public List InitNuclideLine(String editEnergy, double err, String name) { Double min = null; Double max = null; - if (StringUtils.isNotBlank(editEnergy) && Objects.nonNull(err)){ + if (StringUtils.isNotBlank(editEnergy) && Objects.nonNull(err)) { double editEnergyDou = Double.valueOf(editEnergy); min = editEnergyDou - err; max = 
editEnergyDou + err; @@ -3283,40 +3289,40 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi public Map InitNuclideInfo(String name) { Map map = new HashMap<>(); GardsNuclLib nuclideInfo = spectrumAnalysisMapper.getNuclideInfo(name); - if(Objects.nonNull(nuclideInfo)) { + if (Objects.nonNull(nuclideInfo)) { Long numLines = nuclideInfo.getNumLines(); map.put("lab_lines", numLines.toString()); - if(Objects.isNull(nuclideInfo.getHalflife())) { + if (Objects.isNull(nuclideInfo.getHalflife())) { map.put("lab_halfLife", ""); } else { String units = "D"; double halflife = nuclideInfo.getHalflife().doubleValue(); - if(halflife >= 1000) { + if (halflife >= 1000) { halflife = halflife / 365.25; units = "A"; - } else if(halflife < 0.1 && halflife >= 1.0 / 1440.0) { + } else if (halflife < 0.1 && halflife >= 1.0 / 1440.0) { halflife = halflife * 1440.0; units = "M"; - } else if(halflife <= 1.0 / 1440.0 && halflife > 0.0) { + } else if (halflife <= 1.0 / 1440.0 && halflife > 0.0) { halflife = halflife * 86400.0; units = "S"; } char flag = 'f'; - if(halflife >= 1000){ + if (halflife >= 1000) { flag = 'e'; } - if (flag == 'f'){ - map.put("lab_halfLife", String.format("%.3f", halflife)+units); + if (flag == 'f') { + map.put("lab_halfLife", String.format("%.3f", halflife) + units); } else if (flag == 'e') { NumberFormat numberFormat = new DecimalFormat("0.###E0"); String formatNum = numberFormat.format(halflife); - map.put("lab_halfLife", formatNum+units); + map.put("lab_halfLife", formatNum + units); } } - if(Objects.isNull(nuclideInfo.getHalflifeErr())) { + if (Objects.isNull(nuclideInfo.getHalflifeErr())) { map.put("lab_halfLifeErr", ""); } else { - map.put("lab_halfLifeErr", String.format("%.3f", nuclideInfo.getHalflifeErr().doubleValue())+"%"); + map.put("lab_halfLifeErr", String.format("%.3f", nuclideInfo.getHalflifeErr().doubleValue()) + "%"); } } return map; @@ -3326,7 +3332,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi Map map = new HashMap<>(); GardsNuclLib parentAndDaughter = spectrumAnalysisMapper.getParentAndDaughter(name); List parentList = new LinkedList<>(); - if(Objects.nonNull(parentAndDaughter)) { + if (Objects.nonNull(parentAndDaughter)) { parentList.add(parentAndDaughter.getParents1()); parentList.add(parentAndDaughter.getParents2()); parentList.add(parentAndDaughter.getParents3()); @@ -3337,28 +3343,28 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi List daughterList = new LinkedList<>(); TableDaughter tableDaughter1 = new TableDaughter(); tableDaughter1.setDaughters(parentAndDaughter.getDaughters1()); - tableDaughter1.setBranchingratios(String.format("%.2f", parentAndDaughter.getBranchingratios1().doubleValue())+"%"); - tableDaughter1.setDaughtersstable(parentAndDaughter.getDaughtersstable1().intValue() == 1? "Stable" : "Unstable"); + tableDaughter1.setBranchingratios(String.format("%.2f", parentAndDaughter.getBranchingratios1().doubleValue()) + "%"); + tableDaughter1.setDaughtersstable(parentAndDaughter.getDaughtersstable1().intValue() == 1 ? "Stable" : "Unstable"); daughterList.add(tableDaughter1); TableDaughter tableDaughter2 = new TableDaughter(); tableDaughter2.setDaughters(parentAndDaughter.getDaughters2()); - tableDaughter2.setBranchingratios(String.format("%.2f", parentAndDaughter.getBranchingratios2().doubleValue())+"%"); - tableDaughter2.setDaughtersstable(parentAndDaughter.getDaughtersstable2().intValue() == 1? 
"Stable" : "Unstable"); + tableDaughter2.setBranchingratios(String.format("%.2f", parentAndDaughter.getBranchingratios2().doubleValue()) + "%"); + tableDaughter2.setDaughtersstable(parentAndDaughter.getDaughtersstable2().intValue() == 1 ? "Stable" : "Unstable"); daughterList.add(tableDaughter2); TableDaughter tableDaughter3 = new TableDaughter(); tableDaughter3.setDaughters(parentAndDaughter.getDaughters3()); - tableDaughter3.setBranchingratios(String.format("%.2f", parentAndDaughter.getBranchingratios3().doubleValue())+"%"); - tableDaughter3.setDaughtersstable(parentAndDaughter.getDaughtersstable3().intValue() == 1? "Stable" : "Unstable"); + tableDaughter3.setBranchingratios(String.format("%.2f", parentAndDaughter.getBranchingratios3().doubleValue()) + "%"); + tableDaughter3.setDaughtersstable(parentAndDaughter.getDaughtersstable3().intValue() == 1 ? "Stable" : "Unstable"); daughterList.add(tableDaughter3); TableDaughter tableDaughter4 = new TableDaughter(); tableDaughter4.setDaughters(parentAndDaughter.getDaughters4()); - tableDaughter4.setBranchingratios(String.format("%.2f", parentAndDaughter.getBranchingratios4().doubleValue())+"%"); - tableDaughter4.setDaughtersstable(parentAndDaughter.getDaughtersstable4().intValue() == 1? "Stable" : "Unstable"); + tableDaughter4.setBranchingratios(String.format("%.2f", parentAndDaughter.getBranchingratios4().doubleValue()) + "%"); + tableDaughter4.setDaughtersstable(parentAndDaughter.getDaughtersstable4().intValue() == 1 ? "Stable" : "Unstable"); daughterList.add(tableDaughter4); TableDaughter tableDaughter5 = new TableDaughter(); tableDaughter5.setDaughters(parentAndDaughter.getDaughters5()); - tableDaughter5.setBranchingratios(String.format("%.2f", parentAndDaughter.getBranchingratios5().doubleValue())+"%"); - tableDaughter5.setDaughtersstable(parentAndDaughter.getDaughtersstable5().intValue() == 1? "Stable" : "Unstable"); + tableDaughter5.setBranchingratios(String.format("%.2f", parentAndDaughter.getBranchingratios5().doubleValue()) + "%"); + tableDaughter5.setDaughtersstable(parentAndDaughter.getDaughtersstable5().intValue() == 1 ? 
"Stable" : "Unstable"); daughterList.add(tableDaughter5); map.put("table_daughter", daughterList); } @@ -3371,14 +3377,14 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi Map> map = new HashMap<>(); String userName = JwtUtil.getUserNameByToken(request); Cache phdCache = localCache.getPHDCache(); - PHDFile phd = phdCache.getIfPresent(fileName+"-"+userName); - if (Objects.isNull(phd)){ + PHDFile phd = phdCache.getIfPresent(fileName + "-" + userName); + if (Objects.isNull(phd)) { result.error500("Please select the parse file first!"); return result; } List nuclides = spectrumAnalysisMapper.getNuclideNames("CONFIGURATION.GARDS_NUCL_LIB"); List userNuclides = defaultNuclideSpectrumService.findNuclidesByUserName(userName, phd.getHeader().getSystem_type().toUpperCase()); - if (CollectionUtils.isEmpty(userNuclides)){ + if (CollectionUtils.isEmpty(userNuclides)) { userNuclides = defaultNuclideSpectrumService.findNuclidesByUserName("admin", phd.getHeader().getSystem_type().toUpperCase()); } map.put("AllNuclides", nuclides); @@ -3393,8 +3399,8 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi Result result = new Result(); String userName = JwtUtil.getUserNameByToken(request); Cache phdCache = localCache.getPHDCache(); - PHDFile phd = phdCache.getIfPresent(fileName+"-"+userName); - if (Objects.isNull(phd)){ + PHDFile phd = phdCache.getIfPresent(fileName + "-" + userName); + if (Objects.isNull(phd)) { result.error500("Please select the parse file first!"); return result; } @@ -3418,21 +3424,21 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi String comments = ""; String userName = JwtUtil.getUserNameByToken(request); Cache phdCache = localCache.getPHDCache(); - PHDFile phd = phdCache.getIfPresent(fileName+"-"+userName); - if (Objects.isNull(phd)){ + PHDFile phd = phdCache.getIfPresent(fileName + "-" + userName); + if (Objects.isNull(phd)) { result.error500("Please select the parse file first!"); return result; } String temp = phd.getOriTotalCmt().trim(); - if(StringUtils.isNotBlank(temp)) { + if (StringUtils.isNotBlank(temp)) { comments += "Comments From Original Spectrum:\n" + temp; } - if(Objects.nonNull(sampleId)) { + if (Objects.nonNull(sampleId)) { CommentData commentData = spectrumAnalysisMapper.viewComment(sampleId); - if(Objects.nonNull(commentData)) { - temp = commentData.getComment()==null?"":commentData.getComment().trim(); - if(StringUtils.isNotBlank(temp)) { - comments += "\n\nComments From "+commentData.getAnalyst()+"\n:"+temp; + if (Objects.nonNull(commentData)) { + temp = commentData.getComment() == null ? 
"" : commentData.getComment().trim(); + if (StringUtils.isNotBlank(temp)) { + comments += "\n\nComments From " + commentData.getAnalyst() + "\n:" + temp; } } } @@ -3446,38 +3452,38 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi Result result = new Result(); String userName = JwtUtil.getUserNameByToken(request); Cache phdCache = localCache.getPHDCache(); - PHDFile phd = phdCache.getIfPresent(fileName+"-"+userName); - if (Objects.isNull(phd)){ + PHDFile phd = phdCache.getIfPresent(fileName + "-" + userName); + if (Objects.isNull(phd)) { result.error500("Please select the parse file first!"); return result; } - if(StringUtils.isNotBlank(comment)) { + if (StringUtils.isNotBlank(comment)) { phd.setTotalCmt(comment); } result.success("Modified successfully"); return result; } - public Result> peakInformation(Integer sampleId, String fileName, HttpServletRequest request){ + public Result> peakInformation(Integer sampleId, String fileName, HttpServletRequest request) { Result> result = new Result(); String userName = JwtUtil.getUserNameByToken(request); Cache phdCache = localCache.getPHDCache(); - PHDFile phd = phdCache.getIfPresent(fileName+"-"+userName); - if (Objects.isNull(phd)){ + PHDFile phd = phdCache.getIfPresent(fileName + "-" + userName); + if (Objects.isNull(phd)) { result.error500("Please select the parse file first!"); return result; } List vPeak = phd.getVPeak(); List tablePeaks = new LinkedList<>(); - if (CollectionUtils.isNotEmpty(vPeak)){ - for (PeakInfo peak: vPeak){ + if (CollectionUtils.isNotEmpty(vPeak)) { + for (PeakInfo peak : vPeak) { TablePeak tablePeak = new TablePeak(); tablePeak.setEnergy(String.format("%.3f", peak.energy)); tablePeak.setCentroid(String.format("%.3f", peak.peakCentroid)); tablePeak.setMultiplet(String.valueOf(peak.multiIndex)); tablePeak.setFwhm(String.format("%.3f", peak.fwhm)); tablePeak.setNetArea(String.format("%.3f", peak.area)); - tablePeak.setAreaErr(peak.area>0?String.format("%.3f",(peak.areaErr/peak.area)*100):"0"); + tablePeak.setAreaErr(peak.area > 0 ? 
String.format("%.3f", (peak.areaErr / peak.area) * 100) : "0"); tablePeak.setSignificant(String.format("%.3f", peak.significance)); tablePeak.setSensitivity(String.format("%.3f", peak.sensitivity)); tablePeak.setIndentify(StringUtils.join(peak.nuclides, ";")); @@ -3498,27 +3504,27 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi for (int i = 0; i < peaks.size(); i++) { peaks.get(i).setNo(i + 1); } - ExportUtil.exportXls(response,TablePeak.class,peaks,export); + ExportUtil.exportXls(response, TablePeak.class, peaks, export); } @Override public void viewARR(Integer sampleId, HttpServletResponse response) { - //获取自动处理生成的报告地址 + // 获取自动处理生成的报告地址 String reportPath = spectrumAnalysisMapper.viewARR(sampleId); - if (StringUtils.isBlank(reportPath)){ + if (StringUtils.isBlank(reportPath)) { throw new RuntimeException("自动处理程序生成报告不存在!"); } String pathName = StringPool.SLASH + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + reportPath.substring(0, reportPath.lastIndexOf(StringPool.SLASH)); - String fileName = reportPath.substring(reportPath.lastIndexOf(StringPool.SLASH)+1)+".txt"; - //连接ftp + String fileName = reportPath.substring(reportPath.lastIndexOf(StringPool.SLASH) + 1) + ".txt"; + // 连接ftp FTPClient ftpClient = ftpUtil.LoginFTP(); - if (Objects.isNull(ftpClient)){ + if (Objects.isNull(ftpClient)) { throw new RuntimeException("ftp连接失败"); } InputStream inputStream = null; ServletOutputStream outputStream = null; try { - //切换被动模式 + // 切换被动模式 ftpClient.enterLocalPassiveMode(); ftpClient.setFileType(FTPClient.BINARY_FILE_TYPE); // 设置编码,当文件中存在中文且上传后文件乱码时可使用此配置项 @@ -3526,7 +3532,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi ftpClient.setFileTransferMode(FTPClient.STREAM_TRANSFER_MODE); ftpClient.changeWorkingDirectory(pathName); inputStream = ftpClient.retrieveFileStream(fileName); - if (Objects.nonNull(inputStream)){ + if (Objects.nonNull(inputStream)) { outputStream = response.getOutputStream(); byte[] buffer = new byte[1024]; int bytesRead; @@ -3539,13 +3545,13 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi throw new RuntimeException(e); } finally { try { - if (Objects.nonNull(ftpClient)){ + if (Objects.nonNull(ftpClient)) { ftpClient.disconnect(); } - if (ObjectUtil.isNotNull(inputStream)){ + if (ObjectUtil.isNotNull(inputStream)) { inputStream.close(); } - if (ObjectUtil.isNotNull(outputStream)){ + if (ObjectUtil.isNotNull(outputStream)) { outputStream.close(); } } catch (IOException e) { @@ -3556,19 +3562,19 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi @Override public void exportARR(Integer sampleId, HttpServletResponse response) { - //获取自动处理生成的报告地址 + // 获取自动处理生成的报告地址 String reportPath = spectrumAnalysisMapper.viewARR(sampleId); String pathName = StringPool.SLASH + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + reportPath.substring(0, reportPath.lastIndexOf(StringPool.SLASH)); - String fileName = reportPath.substring(reportPath.lastIndexOf(StringPool.SLASH)+1)+".txt"; - //连接ftp + String fileName = reportPath.substring(reportPath.lastIndexOf(StringPool.SLASH) + 1) + ".txt"; + // 连接ftp FTPClient ftpClient = ftpUtil.LoginFTP(); - if (Objects.isNull(ftpClient)){ + if (Objects.isNull(ftpClient)) { throw new RuntimeException("ftp连接失败"); } InputStream inputStream = null; ServletOutputStream outputStream = null; try { - //切换被动模式 + // 切换被动模式 ftpClient.enterLocalPassiveMode(); ftpClient.setFileType(FTPClient.BINARY_FILE_TYPE); 
// 设置编码,当文件中存在中文且上传后文件乱码时可使用此配置项 @@ -3576,11 +3582,11 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi ftpClient.setFileTransferMode(FTPClient.STREAM_TRANSFER_MODE); ftpClient.changeWorkingDirectory(pathName); inputStream = ftpClient.retrieveFileStream(fileName); - if (Objects.nonNull(inputStream)){ - //设置响应类型 + if (Objects.nonNull(inputStream)) { + // 设置响应类型 response.setContentType("application/octet-stream"); - //解决中文不能生成文件 - response.setHeader("Content-Disposition", "attachment; fileName=" + URLEncoder.encode("ARR.txt","UTF-8")); + // 解决中文不能生成文件 + response.setHeader("Content-Disposition", "attachment; fileName=" + URLEncoder.encode("ARR.txt", "UTF-8")); outputStream = response.getOutputStream(); byte[] buffer = new byte[1024]; int bytesRead; @@ -3593,13 +3599,13 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi throw new RuntimeException(e); } finally { try { - if (Objects.nonNull(ftpClient)){ + if (Objects.nonNull(ftpClient)) { ftpClient.disconnect(); } - if (ObjectUtil.isNotNull(inputStream)){ + if (ObjectUtil.isNotNull(inputStream)) { inputStream.close(); } - if (ObjectUtil.isNotNull(outputStream)){ + if (ObjectUtil.isNotNull(outputStream)) { outputStream.close(); } } catch (IOException e) { @@ -3613,8 +3619,8 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi Result result = new Result(); String userName = JwtUtil.getUserNameByToken(request); Cache phdCache = localCache.getPHDCache(); - PHDFile phd = phdCache.getIfPresent(fileName+"-"+userName); - if (Objects.isNull(phd)){ + PHDFile phd = phdCache.getIfPresent(fileName + "-" + userName); + if (Objects.isNull(phd)) { result.error500("Please select the parse file first!"); return result; } @@ -3628,17 +3634,17 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi public void exportRRR(Integer sampleId, String fileName, HttpServletRequest request, HttpServletResponse response) { String userName = JwtUtil.getUserNameByToken(request); Cache phdCache = localCache.getPHDCache(); - PHDFile phd = phdCache.getIfPresent(fileName+"-"+userName); + PHDFile phd = phdCache.getIfPresent(fileName + "-" + userName); if (Objects.isNull(phd)) { return; } String reportContent = gammaFileUtil.GetReportContent(phd, userName, false); OutputStream fos = null; try { - //设置响应类型 + // 设置响应类型 response.setContentType("application/octet-stream"); - //解决中文不能生成文件 - response.setHeader("Content-Disposition", "attachment; fileName=" + URLEncoder.encode("RRR.txt","UTF-8")); + // 解决中文不能生成文件 + response.setHeader("Content-Disposition", "attachment; fileName=" + URLEncoder.encode("RRR.txt", "UTF-8")); fos = response.getOutputStream(); fos.write(reportContent.getBytes()); } catch (FileNotFoundException e) { @@ -3659,8 +3665,8 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi Result> result = new Result(); String userName = JwtUtil.getUserNameByToken(request); Cache phdCache = localCache.getPHDCache(); - PHDFile phd = phdCache.getIfPresent(fileName+"-"+userName); - if (Objects.isNull(phd)){ + PHDFile phd = phdCache.getIfPresent(fileName + "-" + userName); + if (Objects.isNull(phd)) { result.error500("Please select the parse file first!"); return result; } @@ -3677,17 +3683,17 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi if (MapUtil.isEmpty(dataMap)) return; Date actRef = (Date) dataMap.get("dateTime_act_ref"); Date conRef = (Date) dataMap.get("dateTime_con_ref"); - if 
(ObjectUtil.isNotNull(actRef)){ + if (ObjectUtil.isNotNull(actRef)) { String actTime = DateUtil.format(actRef, DateConstant.DATE_TIME); - dataMap.put("dateTime_act_ref",actTime); + dataMap.put("dateTime_act_ref", actTime); } - if (ObjectUtil.isNotNull(conRef)){ + if (ObjectUtil.isNotNull(conRef)) { String conTime = DateUtil.format(conRef, DateConstant.DATE_TIME); - dataMap.put("dateTime_con_ref",conTime); + dataMap.put("dateTime_con_ref", conTime); } String export = "RadionuclideActivity-Gamma.xls"; String template = ExportTemplate.RadionuclideActivity_G.getName(); - ExportUtil.exportXls(response,template,dataMap,export); + ExportUtil.exportXls(response, template, dataMap, export); } @Override @@ -3695,8 +3701,8 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi Result result = new Result(); String userName = JwtUtil.getUserNameByToken(request); Cache phdCache = localCache.getPHDCache(); - PHDFile phd = phdCache.getIfPresent(fileName+"-"+userName); - if (Objects.isNull(phd)){ + PHDFile phd = phdCache.getIfPresent(fileName + "-" + userName); + if (Objects.isNull(phd)) { result.error500("Please select the parse file first!"); return result; } @@ -3711,8 +3717,8 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi Result> result = new Result(); String userName = JwtUtil.getUserNameByToken(request); Cache phdCache = localCache.getPHDCache(); - PHDFile phd = phdCache.getIfPresent(fileName+"-"+userName); - if (Objects.isNull(phd)){ + PHDFile phd = phdCache.getIfPresent(fileName + "-" + userName); + if (Objects.isNull(phd)) { result.error500("Please select the parse file first!"); return result; } @@ -3723,16 +3729,16 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi String acq_start = phd.getAcq().getAcquisition_start_date() + StringPool.SPACE + phd.getAcq().getAcquisition_start_time(); Date collect_stop_dt = DateUtils.parseDate(collect_stop); Date collect_start_dt = DateUtils.parseDate(collect_start); - double Sampling_Time = (collect_stop_dt.getTime()/1000 - collect_start_dt.getTime()/1000) / 3600.0; + double Sampling_Time = (collect_stop_dt.getTime() / 1000 - collect_start_dt.getTime() / 1000) / 3600.0; Date acq_start_dt = DateUtils.parseDate(acq_start); - double Decay_Time = (acq_start_dt.getTime()/1000 - collect_stop_dt.getTime()/1000) / 3600.0; + double Decay_Time = (acq_start_dt.getTime() / 1000 - collect_stop_dt.getTime() / 1000) / 3600.0; long secs = (long) (acq_start_dt.getTime() + (phd.getAcq().getAcquisition_live_time() * 1000)); String acq_stop = DateUtils.formatDate(new Date(secs), "yyyy/MM/dd HH:mm:ss.S"); map.put("stationId", phd.getHeader().getSite_code()); map.put("sampleId", phd.getId_sample()); - map.put("sampleQuantity", String.format("%.2f", phd.getCollect().getAir_volume())+" m3"); + map.put("sampleQuantity", String.format("%.2f", phd.getCollect().getAir_volume()) + " m3"); map.put("collectStart", collect_start); map.put("collectStop", collect_stop); map.put("acquisitionStart", acq_start); @@ -3741,10 +3747,10 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi map.put("detectorId", phd.getHeader().getDetector_code()); map.put("sampleGeometry", phd.getHeader().getSample_geometry()); map.put("sampleType", phd.getHeader().getSystem_type().toUpperCase()); - map.put("samplingTime", String.format("%.2f", Sampling_Time)+" hours"); - map.put("decayTime", String.format("%.2f", Decay_Time)+" hours"); - map.put("acquisitionTime", String.format("%.2f", 
phd.getAcq().getAcquisition_real_time() / 3600.0)+" hours"); - map.put("avgFlowRate", String.format("%.2f", phd.getCollect().getAir_volume() / Sampling_Time)+" m3/hour"); + map.put("samplingTime", String.format("%.2f", Sampling_Time) + " hours"); + map.put("decayTime", String.format("%.2f", Decay_Time) + " hours"); + map.put("acquisitionTime", String.format("%.2f", phd.getAcq().getAcquisition_real_time() / 3600.0) + " hours"); + map.put("avgFlowRate", String.format("%.2f", phd.getCollect().getAir_volume() / Sampling_Time) + " m3/hour"); } catch (ParseException e) { throw new RuntimeException(e); } @@ -3761,7 +3767,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi Map dataMap = sampleInfo.entrySet().stream() .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); String export = "SampleInfo-Gamma.xls"; - ExportUtil.exportXls(response, SampleInfo_G.getName(),dataMap,export); + ExportUtil.exportXls(response, SampleInfo_G.getName(), dataMap, export); } @Override @@ -3769,65 +3775,59 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi Result> result = new Result(); String userName = JwtUtil.getUserNameByToken(request); Cache phdCache = localCache.getPHDCache(); - PHDFile phd = phdCache.getIfPresent(fileName+"-"+userName); - if (Objects.isNull(phd)){ + PHDFile phd = phdCache.getIfPresent(fileName + "-" + userName); + if (Objects.isNull(phd)) { result.error500("Please select the parse file first!"); return result; } Map m_mapQC = phd.getQcItems(); int size_map = m_mapQC.size(); - if(size_map < 1){ + if (size_map < 1) { return result; } List qcResultList = new LinkedList<>(); - for(Map.Entry iter: m_mapQC.entrySet()){ + for (Map.Entry iter : m_mapQC.entrySet()) { TableQCResult item = new TableQCResult(); String name = iter.getKey(); - if(name.equals("Ba140-MDC")){ + if (name.equals("Ba140-MDC")) { name += " (uBq/m3)"; - } else if(name.equals("Be7-FWHM")){ + } else if (name.equals("Be7-FWHM")) { name += " (keV)"; - } else if(name.equals("Xe133-MDC")){ + } else if (name.equals("Xe133-MDC")) { name += " (uBq/m3)"; - } else if(name.equals("acq_time")){ + } else if (name.equals("acq_time")) { name += " (h)"; - } else if(name.equals("airFlow")){ + } else if (name.equals("airFlow")) { name += " (m3/h)"; - } else if(name.equals("col_time")){ + } else if (name.equals("col_time")) { name += " (h)"; - } else if(name.equals("decay_time")){ + } else if (name.equals("decay_time")) { name += " (h)"; - } else if(name.equals("samp_vol")){ + } else if (name.equals("samp_vol")) { name += " (m3)"; } item.setName(name); item.setFlag(iter.getValue().isBPass() ? 
"PASS" : "FAIL"); - item.setValue( Double.parseDouble(NumberFormatUtil.numberFormat(String.valueOf(iter.getValue().getValue()))) ); - String standard=""; + item.setValue(Double.parseDouble(NumberFormatUtil.numberFormat(String.valueOf(iter.getValue().getValue())))); + String standard = ""; List strList = Arrays.asList(iter.getValue().getStandard().split(StringPool.COMMA)); for (String str : strList) { - if(str.contains("-")) { - if(str.contains("(") || str.contains("[")){ + if (str.contains("-")) { + if (str.contains("(") || str.contains("[")) { standard += iter.getKey(); } - } else if(str.contains("(")) { + } else if (str.contains("(")) { standard += str.replace("(", "") + " < " + iter.getKey(); - } - else if(str.contains(")")) - { + } else if (str.contains(")")) { standard += " < " + str.replace(")", ""); - } - else if(str.contains("[")) - { + } else if (str.contains("[")) { standard += str.replace("[", "") + " <= " + iter.getKey(); - } - else if(str.contains("]")) - { + } else if (str.contains("]")) { standard += " <= " + str.replace("]", ""); } } item.setStandard(standard); - if(StrUtil.equals(name, "Xe133-MDC (uBq/m3)")){ + if (StrUtil.equals(name, "Xe133-MDC (uBq/m3)")) { qcResultList.add(0, item); continue; } @@ -3842,9 +3842,9 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi public void exportQCResult(Integer sampleId, String fileName, HttpServletRequest request, HttpServletResponse response) { Result> result = viewQCResult(sampleId, fileName, request); List qcResults = result.getResult(); - if (CollUtil.isEmpty(qcResults))return; + if (CollUtil.isEmpty(qcResults)) return; String export = "QCResult-Gamma.xls"; - ExportUtil.exportXls(response,TableQCResult.class,qcResults,export); + ExportUtil.exportXls(response, TableQCResult.class, qcResults, export); } @Override @@ -3852,8 +3852,8 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi Result result = new Result(); String userName = JwtUtil.getUserNameByToken(request); Cache phdCache = localCache.getPHDCache(); - PHDFile phd = phdCache.getIfPresent(fileName+"-"+userName); - if (Objects.isNull(phd)){ + PHDFile phd = phdCache.getIfPresent(fileName + "-" + userName); + if (Objects.isNull(phd)) { result.error500("Please select the parse file first!"); return result; } @@ -3865,17 +3865,17 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi int num = phd.getVPeak().size(); // Init #Header - if(StringUtils.isNotBlank(phd.getMsgInfo().getMsg_id())){ + if (StringUtils.isNotBlank(phd.getMsgInfo().getMsg_id())) { // 1: MSG_ID map.put("header_msg_id", phd.getMsgInfo().getMsg_id()); } map.put("header_data_type", "MINIRLR"); // 2: DataType map.put("header_priority_level", "Routine"); // 3: Priority Level - if(StringUtils.isNotBlank(phd.getHeader().getSite_code())){ + if (StringUtils.isNotBlank(phd.getHeader().getSite_code())) { // 4: Station code map.put("header_station_code", phd.getHeader().getSite_code()); } - if(StringUtils.isNotBlank(phd.getHeader().getSample_ref_id())){ + if (StringUtils.isNotBlank(phd.getHeader().getSample_ref_id())) { // 5: SRID map.put("header_srid", phd.getHeader().getSample_ref_id()); } @@ -3896,7 +3896,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi map.put("collect_stop", col_stop); map.put("collect_airVolume", NumUtil.keep(phd.getCollect().getAir_volume(), 4)); // Init #SampleReceipt - if(StringUtils.isNotBlank(phd.getHeader().getSample_ref_id())){ + if 
(StringUtils.isNotBlank(phd.getHeader().getSample_ref_id())) { map.put("Receipt_srid", phd.getHeader().getSample_ref_id()); } map.put("Receipt_sealNum", "0"); @@ -3916,20 +3916,20 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi List peakFitList = new LinkedList<>(); String energy_uncert = "-9999"; double live_time = phd.getAcq().getAcquisition_live_time(); - for(int i=0; i 0 ? NumberFormatUtil.numberFormat(String.valueOf(peak.areaErr/peak.area*100)) : "0"; + tablePeak.setNetArea(NumberFormatUtil.numberFormat(String.valueOf(peak.area))); + String area_err = peak.area > 0 ? NumberFormatUtil.numberFormat(String.valueOf(peak.areaErr / peak.area * 100)) : "0"; tablePeak.setAreaErr(area_err); - String rate = live_time > 0 ? NumberFormatUtil.numberFormat(String.valueOf(peak.area/live_time)) : "0"; + String rate = live_time > 0 ? NumberFormatUtil.numberFormat(String.valueOf(peak.area / live_time)) : "0"; tablePeak.setNetCountRate(rate); tablePeak.setNcRateErr(area_err); - tablePeak.setLc( NumberFormatUtil.numberFormat(String.valueOf(peak.lc)) ); - tablePeak.setSignificance( NumberFormatUtil.numberFormat(String.valueOf(peak.significance)) ); + tablePeak.setLc(NumberFormatUtil.numberFormat(String.valueOf(peak.lc))); + tablePeak.setSignificance(NumberFormatUtil.numberFormat(String.valueOf(peak.significance))); peakFitList.add(tablePeak); } map.put("peakFit", peakFitList); @@ -3942,22 +3942,22 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi // Init #PeakAssociation String Explanation_Level = "100.000"; List associationList = new LinkedList<>(); - for(int i=0; i nuclides = phd.getVPeak().get(i).nuclides; String iden = ""; - for (String str:nuclides) { - iden+=str+","; + for (String str : nuclides) { + iden += str + ","; } - iden = iden.length()>0?iden.substring(0, iden.length()-1):""; + iden = iden.length() > 0 ? iden.substring(0, iden.length() - 1) : ""; tableAssociation.setIdentified(iden); associationList.add(tableAssociation); } map.put("Association", associationList); // Init #References - if(StringUtils.isNotBlank(phd.getHeader().getMeasurement_id())) { + if (StringUtils.isNotBlank(phd.getHeader().getMeasurement_id())) { map.put("Reference_samplePHD", phd.getHeader().getMeasurement_id()); map.put("Reference_CalibPHD", phd.getHeader().getMeasurement_id()); } @@ -3977,17 +3977,17 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi map.put("Result_act_ref", timeAct); map.put("Result_conc_ref", timeConc); List tableResultList = new LinkedList<>(); - for(Map.Entry it : phd.getMapNucActMda().entrySet()){ - if(it.getValue().isBCalculateMDA()) { + for (Map.Entry it : phd.getMapNucActMda().entrySet()) { + if (it.getValue().isBCalculateMDA()) { TableResult tableResult = new TableResult(); NuclideActMda nuc = it.getValue(); tableResult.setNuclide(it.getKey()); tableResult.setActivity(NumUtil.keep4ScienceStr(nuc.getActivity())); - String act_err = nuc.getActivity() > 0 ? NumUtil.keepStr(nuc.getAct_err()/nuc.getActivity()*100, 2) : "0"; + String act_err = nuc.getActivity() > 0 ? 
NumUtil.keepStr(nuc.getAct_err() / nuc.getActivity() * 100, 2) : "0"; tableResult.setActErr(act_err); tableResult.setFactor1(coverage_factor); tableResult.setConfidence1(level_confidence); - tableResult.setConc(NumUtil.keep4ScienceStr(nuc.getConcentration()/1000)); + tableResult.setConc(NumUtil.keep4ScienceStr(nuc.getConcentration() / 1000)); tableResult.setConcErr(act_err); tableResult.setFactor2(coverage_factor); tableResult.setConfidence2(level_confidence); @@ -4003,11 +4003,11 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi map.put("Conclusion_IDC", "0"); List listNuc = phd.getMapNucActMda().keySet().stream().collect(Collectors.toList()); String nuc = ""; - for (String str:listNuc) { - nuc+=str+","; + for (String str : listNuc) { + nuc += str + ","; } - nuc = StringUtils.isNotBlank(nuc)?nuc.substring(0, nuc.length()-1):nuc; - map.put("Conclusion_Lab", "The nuclides "+ nuc +" are identified in the reference sample definitely."); + nuc = StringUtils.isNotBlank(nuc) ? nuc.substring(0, nuc.length() - 1) : nuc; + map.put("Conclusion_Lab", "The nuclides " + nuc + " are identified in the reference sample definitely."); map.put("Conclusion_Res", "0"); // Init #Comment map.put("Comment", "0"); @@ -4020,66 +4020,69 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi @Override public void exportRLR(GammaRLR gammaRLR, HttpServletResponse response) { if (ObjectUtil.isNull(gammaRLR)) return; - String pathPrefix = "excelTemplate/"; - String path = pathPrefix + RLR_G.getName(); - String template = ClassUtil.classPath(path); - List lines = FileUtil.readUtf8Lines(template); - // 正则表达式,匹配${}中的内容 - String regex = "\\$\\{([^}]+)}"; - List newLines = new ArrayList<>(); - List list = ListUtil.toList("peakFit", "Association", "Result", - "NuclideRatios", "g_CoincidenceCorrection", "mda"); - List skip = ListUtil.toList("${peakFit}", "${Association}", "${Result}", - "${NuclideRatios}", "${g_CoincidenceCorrection}", "${mda}"); - for (String line : lines) { - if (StrUtil.isBlank(line)) continue; - List fieldNames = ReUtil.findAllGroup1(regex, line); - if (CollUtil.isEmpty(fieldNames)){ - newLines.add(line); - continue; - } - Map fieldValue = ClassUtil.fieldValue(gammaRLR, fieldNames); - String newLine = line; - for (Map.Entry entry : fieldValue.entrySet()) { - String fieldName = entry.getKey(); - Object value = entry.getValue(); - if (list.contains(fieldName)){ - List lineList = new ArrayList<>(); - switch (fieldName){ - case "peakFit": - lineList = ClassUtil.objsStr((List)value); - break; - case "Association": - lineList = ClassUtil.objsStr((List)value); - break; - case "Result": - lineList = ClassUtil.objsStr((List)value); - break; - case "NuclideRatios": - lineList = ClassUtil.objsStr((List)value); - break; - case "g_CoincidenceCorrection": - lineList = ClassUtil.objsStr((List)value); - break; - case "mda": - lineList = ClassUtil.objsStr((List)value); - break; - default: - break; - } - newLines.addAll(lineList); - }else { - String search = "${" + fieldName + "}"; - String replacement = StrUtil.toString(value); - replacement = StrUtil.isBlank(replacement) ? 
"null" : replacement; - newLine = StrUtil.replace(newLine, search, replacement); - } - } - if (!CollUtil.contains(skip, newLine)) - newLines.add(newLine); - } PrintWriter writer = null; try { + String pathPrefix = "excelTemplate/"; + String path = pathPrefix + RLR_G.getName(); + /*String template = ClassUtil.classPath(path); + List lines = FileUtil.readUtf8Lines(template);*/ + InputStream inputStream = ClassUtil.classPathStream(path); + List lines = IOUtils.readLines(inputStream, "UTF-8"); + // 正则表达式,匹配${}中的内容 + String regex = "\\$\\{([^}]+)}"; + List newLines = new ArrayList<>(); + List list = ListUtil.toList("peakFit", "Association", "Result", + "NuclideRatios", "g_CoincidenceCorrection", "mda"); + List skip = ListUtil.toList("${peakFit}", "${Association}", "${Result}", + "${NuclideRatios}", "${g_CoincidenceCorrection}", "${mda}"); + for (String line : lines) { + if (StrUtil.isBlank(line)) continue; + List fieldNames = ReUtil.findAllGroup1(regex, line); + if (CollUtil.isEmpty(fieldNames)) { + newLines.add(line); + continue; + } + Map fieldValue = ClassUtil.fieldValue(gammaRLR, fieldNames); + String newLine = line; + for (Map.Entry entry : fieldValue.entrySet()) { + String fieldName = entry.getKey(); + Object value = entry.getValue(); + if (list.contains(fieldName)) { + List lineList = new ArrayList<>(); + switch (fieldName) { + case "peakFit": + lineList = ClassUtil.objsStr((List) value); + break; + case "Association": + lineList = ClassUtil.objsStr((List) value); + break; + case "Result": + lineList = ClassUtil.objsStr((List) value); + break; + case "NuclideRatios": + lineList = ClassUtil.objsStr((List) value); + break; + case "g_CoincidenceCorrection": + lineList = ClassUtil.objsStr((List) value); + break; + case "mda": + lineList = ClassUtil.objsStr((List) value); + break; + default: + break; + } + newLines.addAll(lineList); + } else { + String search = "${" + fieldName + "}"; + String replacement = StrUtil.toString(value); + replacement = StrUtil.isBlank(replacement) ? 
"null" : replacement; + newLine = StrUtil.replace(newLine, search, replacement); + } + } + if (!CollUtil.contains(skip, newLine)) + newLines.add(newLine); + } + String export = "Gamma.RLR"; writer = ExportUtil.streamWriter(response, export); for (String newLine : newLines) { @@ -4087,7 +4090,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi } } catch (IOException e) { e.printStackTrace(); - }finally { + } finally { if (ObjectUtil.isNotNull(writer)) writer.close(); } @@ -4096,20 +4099,20 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi @Override public void viewAutomaticAnalysisLog(Integer sampleId, HttpServletResponse response) { String logPath = spectrumAnalysisMapper.findAutomaticLogPath(sampleId); - if (StringUtils.isBlank(logPath)){ + if (StringUtils.isBlank(logPath)) { throw new RuntimeException("自动处理程序生成日志不存在!"); } String pathName = StringPool.SLASH + spectrumPathProperties.getLogPath() + StringPool.SLASH + logPath.substring(0, logPath.lastIndexOf(StringPool.SLASH)); String fileName = logPath.substring(logPath.lastIndexOf(StringPool.SLASH) + 1); - //连接ftp + // 连接ftp FTPClient ftpClient = ftpUtil.LoginFTP(); - if (Objects.isNull(ftpClient)){ + if (Objects.isNull(ftpClient)) { throw new RuntimeException("ftp连接失败"); } InputStream inputStream = null; ServletOutputStream outputStream = null; try { - //切换被动模式 + // 切换被动模式 ftpClient.enterLocalPassiveMode(); ftpClient.setFileType(FTPClient.BINARY_FILE_TYPE); // 设置编码,当文件中存在中文且上传后文件乱码时可使用此配置项 @@ -4117,7 +4120,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi ftpClient.setFileTransferMode(FTPClient.STREAM_TRANSFER_MODE); ftpClient.changeWorkingDirectory(pathName); inputStream = ftpClient.retrieveFileStream(fileName); - if (Objects.nonNull(inputStream)){ + if (Objects.nonNull(inputStream)) { outputStream = response.getOutputStream(); byte[] buffer = new byte[1024]; int bytesRead; @@ -4130,13 +4133,13 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi throw new RuntimeException(e); } finally { try { - if (Objects.nonNull(ftpClient)){ + if (Objects.nonNull(ftpClient)) { ftpClient.disconnect(); } - if (ObjectUtil.isNotNull(inputStream)){ + if (ObjectUtil.isNotNull(inputStream)) { inputStream.close(); } - if (ObjectUtil.isNotNull(outputStream)){ + if (ObjectUtil.isNotNull(outputStream)) { outputStream.close(); } } catch (IOException e) { @@ -4150,8 +4153,8 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi Result result = new Result(); String userName = JwtUtil.getUserNameByToken(request); Cache phdCache = localCache.getPHDCache(); - PHDFile phd = phdCache.getIfPresent(fileName+"-"+userName); - if (Objects.isNull(phd)){ + PHDFile phd = phdCache.getIfPresent(fileName + "-" + userName); + if (Objects.isNull(phd)) { result.error500("Please select the parse file first!"); return result; } @@ -4166,20 +4169,20 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi Result result = new Result(); String userName = JwtUtil.getUserNameByToken(request); Cache phdCache = localCache.getPHDCache(); - PHDFile phd = phdCache.getIfPresent(fileName+"-"+userName); - if (Objects.isNull(phd)){ + PHDFile phd = phdCache.getIfPresent(fileName + "-" + userName); + if (Objects.isNull(phd)) { result.error500("Please select the parse file first!"); return result; } int size = phd.getVPeak().size(); - //vPeak的数组大小大于0 当前下标在范围内 - if (size>0 && index=0) { + // vPeak的数组大小大于0 当前下标在范围内 
+ if (size > 0 && index < size && index >= 0) { PeakInfo peak = phd.getVPeak().get(index); - String message = "Peak:"+String.format("%.2f", peak.peakCentroid)+" = "+String.format("%.2f", peak.energy)+" keV\n" + - "FWHM:"+String.format("%.2f", peak.fwhmc)+" keV\n" + - "Net Area:"+String.format("%.2f", peak.area)+"\n" + - "BaseLine:"+String.format("%.2f", phd.getVBase().get((int) Math.round(peak.peakCentroid)))+"\n" + - "NID:"+ StringUtils.join(peak.nuclides, ";"); + String message = "Peak:" + String.format("%.2f", peak.peakCentroid) + " = " + String.format("%.2f", peak.energy) + " keV\n" + + "FWHM:" + String.format("%.2f", peak.fwhmc) + " keV\n" + + "Net Area:" + String.format("%.2f", peak.area) + "\n" + + "BaseLine:" + String.format("%.2f", phd.getVBase().get((int) Math.round(peak.peakCentroid))) + "\n" + + "NID:" + StringUtils.join(peak.nuclides, ";"); result.setSuccess(true); result.setResult(message); } @@ -4213,82 +4216,82 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi String userName = JwtUtil.getUserNameByToken(request); boolean bRet = true; Cache phdCache = localCache.getPHDCache(); - PHDFile phd = phdCache.getIfPresent(fileName+"-"+userName); + PHDFile phd = phdCache.getIfPresent(fileName + "-" + userName); if (Objects.isNull(phd)) { result.error500("Please select the parse file first!"); return result; } - //查看分析员是否有权限操作当前台站信息 - //判断当前分析员是否有过排班任务 + // 查看分析员是否有权限操作当前台站信息 + // 判断当前分析员是否有过排班任务 Integer stationId = spectrumAnalysisMapper.getStationId(phd.getHeader().getSite_code()); boolean bAnalysisResultWriteAuthority = userTaskUtil.CheckUserWriteAuthorityForStation(userName, stationId); - //如果用户没有权限操作 则查看当前用户是否是高级分析员/管理员 - if (!bAnalysisResultWriteAuthority){ + // 如果用户没有权限操作 则查看当前用户是否是高级分析员/管理员 + if (!bAnalysisResultWriteAuthority) { List roleCodes = userTaskUtil.findRoleCodeByUserName(userName); - //如果只是分析员 则无法保存数据库 返回信息 - if (roleCodes.contains(RoleType.Role_Analyst) && !roleCodes.contains(RoleType.Role_SuperAnalyst) && !roleCodes.contains(RoleType.Role_Manager) ){ + // 如果只是分析员 则无法保存数据库 返回信息 + if (roleCodes.contains(RoleType.Role_Analyst) && !roleCodes.contains(RoleType.Role_SuperAnalyst) && !roleCodes.contains(RoleType.Role_Manager)) { bRet = false; } } - if(!bRet) { + if (!bRet) { result.error500("You have no permission to save results to DB!"); return result; } - //如果有权限则开始保存数据库操作 - //将PHDFile数据 生成用于数据库存储的数据结构 + // 如果有权限则开始保存数据库操作 + // 将PHDFile数据 生成用于数据库存储的数据结构 GStoreMiddleProcessData middleData = new GStoreMiddleProcessData(); bRet = gammaFileUtil.GetInterMiddlData(phd, userName, middleData); - if(!bRet) { + if (!bRet) { result.error500("Transform PHDFile into structGStoreMiddleProcessData failed."); return result; } - //根据文件名称查询对应的sampleId 如果存在则赋值sampleId, Status 如果不存在则先存储数据信息到sampleData + // 根据文件名称查询对应的sampleId 如果存在则赋值sampleId, Status 如果不存在则先存储数据信息到sampleData GardsSampleDataSpectrum sampleData = spectrumAnalysisMapper.findSampleByFilePath(middleData.analyses_save_filePath); - if (Objects.nonNull(sampleData)){ + if (Objects.nonNull(sampleData)) { phd.setId_sample(sampleData.getSampleId().toString()); phd.setStatus(sampleData.getStatus()); } else { - //如果sampleData为空 存储数据 + // 如果sampleData为空 存储数据 SaveSampleToDB(phd, middleData.analyses_save_filePath); } - if(!bRet) { - result.error500("The Database hasn't this Spectrum("+phd.getFilename()+") and Insert it to Database failed."); + if (!bRet) { + result.error500("The Database hasn't this Spectrum(" + phd.getFilename() + ") and Insert it to Database failed."); return result; } - 
//根据sampleId,分析员名称查询idAnalysis + // 根据sampleId,分析员名称查询idAnalysis String idAnalysis = ""; idAnalysis = spectrumAnalysisMapper.getIdAnalysisByIdAnalyst(phd.getId_sample(), userName); - //写入 RNMAN数据库 - //获取phd文件中的totalcmt信息 存入数据库 + // 写入 RNMAN数据库 + // 获取phd文件中的totalcmt信息 存入数据库 String comments = phd.getTotalCmt().trim(); - //如果comment的数据长度大于1024 则截取前1024部分 - if (comments.length()>1024){ + // 如果comment的数据长度大于1024 则截取前1024部分 + if (comments.length() > 1024) { comments = comments.substring(0, 1024); } - //判断idAnalysis是否为空 - if (StringUtils.isBlank(idAnalysis)){ + // 判断idAnalysis是否为空 + if (StringUtils.isBlank(idAnalysis)) { // 向 RNMAN.GARDS_ANALYSES 表插入数据 analysesSpectrumService.insertEntity(middleData, phd, userName, comments); - //查询idAnalysis + // 查询idAnalysis idAnalysis = spectrumAnalysisMapper.getIdAnalysisByIdAnalyst(phd.getId_sample(), userName); - //修改sample_data状态 + // 修改sample_data状态 spectrumAnalysisMapper.updateAnalysesStatus(middleData.analyses_save_filePath); - }else { + } else { // 更新 RNMAN.GARDS_ANALYSES 表数据 analysesSpectrumService.updateEntity(middleData, phd, userName, comments, idAnalysis); - //删除GARDS_CALIBRATION_PAIRS表数据 + // 删除GARDS_CALIBRATION_PAIRS表数据 spectrumAnalysisMapper.deleteCalibrationPairs(Integer.valueOf(idAnalysis)); - //删除GARDS_CALIBRATION表数据 + // 删除GARDS_CALIBRATION表数据 spectrumAnalysisMapper.deleteCalibration(Integer.valueOf(idAnalysis)); - //删除GARDS_PEAKS表数据 + // 删除GARDS_PEAKS表数据 spectrumAnalysisMapper.deletePeaks(Integer.valueOf(idAnalysis)); - //删除GARDS_NUCL_LINES_IDED表数据 + // 删除GARDS_NUCL_LINES_IDED表数据 spectrumAnalysisMapper.deleteNuclLines(Integer.valueOf(idAnalysis)); - //删除GARDS_NUCL_IDED表数据 + // 删除GARDS_NUCL_IDED表数据 spectrumAnalysisMapper.deleteNucl(Integer.valueOf(idAnalysis)); - //删除GARDS_QC_CHECK表数据 + // 删除GARDS_QC_CHECK表数据 spectrumAnalysisMapper.deleteQCCheck(Integer.valueOf(idAnalysis)); - //删除GARDS_ANALY_SETTING表数据 + // 删除GARDS_ANALY_SETTING表数据 spectrumAnalysisMapper.deleteAnalySetting(Integer.valueOf(idAnalysis)); } // 向 RNMAN.GARDS_CALIBRATION_PAIRS 表写入 Energy 刻度数据对 @@ -4304,13 +4307,13 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi qcCheckSpectrumService.saveQcCheckGamma(middleData, phd, idAnalysis); // 向 RNMAN.GARDS_ANALY_SETTING 表写入 SpecSetup 分析参数设置 analySettingSpectrumService.saveAnalySettingGamma(phd, idAnalysis); - if(bRet) { + if (bRet) { phd.setStatus("R"); - List paths=new LinkedList<>(); + List paths = new LinkedList<>(); paths.add(middleData.analyses_lc_filePath); paths.add(middleData.analyses_scac_filePath); paths.add(middleData.analyses_baseline_filePath); - if(false) paths.add(middleData.analyses_save_filePath); + if (false) paths.add(middleData.analyses_save_filePath); } else { result.error500("Save analysis results to DB failed."); return result; @@ -4334,19 +4337,19 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi // ORIGINAL.gards_sample_description 数据表 sampleDescriptionSpectrumService.saveSampleDescriptionGamma(phd, sampleId); // ORIGINAL.GARDS_SAMPLE_CERT 和 ORIGINAL.GARDS_SAMPLE_CERT_LINE 数据表 - if(phd.getCertificate().getRecord_count() > 0) { + if (phd.getCertificate().getRecord_count() > 0) { sampleCertSpectrumService.saveSampleCertGamma(phd, sampleId); sampleCertLineSpectrumService.saveSampleCertLineGamma(phd, sampleId); } - //gards_ calibration_pairs_orig数据表 + // gards_ calibration_pairs_orig数据表 calibrationPairsOrigSpectrumService.saveCalibrationPairsOrigGamma(phd, sampleId); // gards_total_efficiency _pairs数据表(IDC没有) 
totalEfficiencyPairsSpectrumService.saveTotalEfficiencyPairsGamma(phd, sampleId); - //gards_spectrum数据表 + // gards_spectrum数据表 spectrumService.saveSpectrumGamma(phd, sampleId, input_file_name); - //根据文件名称获取sample基础数据信息 + // 根据文件名称获取sample基础数据信息 GardsSampleDataSpectrum samplData = spectrumAnalysisMapper.findSampleByFilePath(input_file_name); - if(Objects.nonNull(samplData)) { + if (Objects.nonNull(samplData)) { phd.setId_sample(samplData.getSampleId().toString()); phd.setStatus(samplData.getStatus()); } @@ -4356,34 +4359,34 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi public void saveToTxt(String fileName, HttpServletRequest request, HttpServletResponse response) { String userName = JwtUtil.getUserNameByToken(request); Cache phdCache = localCache.getPHDCache(); - PHDFile phd = phdCache.getIfPresent(fileName+"-"+userName); + PHDFile phd = phdCache.getIfPresent(fileName + "-" + userName); if (Objects.isNull(phd)) { return; } if (Objects.nonNull(phd)) { StringBuilder strBuild = new StringBuilder(); - //txt文本内容 - //文本内容第一块头部信息 + // txt文本内容 + // 文本内容第一块头部信息 String title1 = " %s The Results of Peak Searching %s"; - //文本内容第一块头部信息匹配 + // 文本内容第一块头部信息匹配 strBuild.append(titleFormat(title1, 51, StringPool.ASTERISK, StringPool.ASTERISK)); - //换行 + // 换行 strBuild.append(System.lineSeparator()); - //换行 + // 换行 strBuild.append(System.lineSeparator()); - //文本内容第二块 - //文本内容第二块匹配格式 + // 文本内容第二块 + // 文本内容第二块匹配格式 String title2 = "%-12s %-12s %-12s %-12s %-12s %-12s %-12s %-12s %-12s %-12s"; - //文本内容第二块头部信息 + // 文本内容第二块头部信息 String[] titleArgs2 = new String[]{"PeakID", "Energy(keV)", "Centroid", "Multiplet", "FWHM(keV)", "NetArea", "NAErr%", "Signif", "Sensit", "Nuclide"}; - //文本内容第二块头部信息匹配 + // 文本内容第二块头部信息匹配 strBuild.append(rowFormat(title2, titleArgs2)); - //换行 + // 换行 strBuild.append(System.lineSeparator()); - //遍历数组进行文本内容第二块数据匹配 - for (int i=0; i> peakNuclides = phd.getVPeak().stream().map(item -> item.nuclides).collect(Collectors.toList()); List nuclides = new LinkedList<>(); - for (int i=0; i peakNuclide = peakNuclides.get(i); nuclides.addAll(peakNuclide); } nuclides = nuclides.stream().distinct().collect(Collectors.toList()); String nuclideStr = ""; - for (int i=0; i mapNucActMda = phd.getMapNucActMda(); - for (Map.Entry entry:mapNucActMda.entrySet()) { + for (Map.Entry entry : mapNucActMda.entrySet()) { String key = entry.getKey(); NuclideActMda nuc = entry.getValue(); String halflifeValue = ""; - if(nuc.isBCalculateMDA()) { + if (nuc.isBCalculateMDA()) { String units = "S"; double halflife = nuc.getHalflife(); - if(halflife >= 31556736) {// 1年 = 365.24 * 24 * 60 * 60 = 31556736s + if (halflife >= 31556736) {// 1年 = 365.24 * 24 * 60 * 60 = 31556736s halflife /= 31556736; units = "A"; - } else if(halflife >= 86400) {// 1天 = 24 * 60 * 60 = 86400s + } else if (halflife >= 86400) {// 1天 = 24 * 60 * 60 = 86400s halflife /= 86400; units = "D"; - } else if(halflife >= 3600) { + } else if (halflife >= 3600) { halflife /= 3600; units = "H"; } @@ -4473,12 +4476,12 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi } String efficiency = NumberFormatUtil.numberFormat(String.valueOf(nuc.getEfficiency())); String activity = NumberFormatUtil.numberFormat(String.valueOf(nuc.getActivity())); - String actErr = NumberFormatUtil.numberFormat(String.valueOf(nuc.getAct_err()/nuc.getActivity()*100)); + String actErr = NumberFormatUtil.numberFormat(String.valueOf(nuc.getAct_err() / nuc.getActivity() * 100)); String mda = 
NumberFormatUtil.numberFormat(String.valueOf(nuc.getMda())); String conc = NumberFormatUtil.numberFormat(String.valueOf(nuc.getConcentration())); String mdc = NumberFormatUtil.numberFormat(String.valueOf(nuc.getMdc())); - if(nuc.getCalculateIdx() >= 0 && nuc.getCalculateIdx() < nuc.getVEnergy().size()) { - String yield = NumberFormatUtil.numberFormat(String.valueOf(nuc.getVYield().get(nuc.getCalculateIdx())*100)); + if (nuc.getCalculateIdx() >= 0 && nuc.getCalculateIdx() < nuc.getVEnergy().size()) { + String yield = NumberFormatUtil.numberFormat(String.valueOf(nuc.getVYield().get(nuc.getCalculateIdx()) * 100)); String energy = NumberFormatUtil.numberFormat(String.valueOf(nuc.getVEnergy().get(nuc.getCalculateIdx()))); strBuild.append(rowFormat(title5, key, halflifeValue, yield, energy, efficiency, activity, actErr, mda, conc, mdc)); strBuild.append(System.lineSeparator()); @@ -4494,14 +4497,14 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi String dataType = phd.getMsgInfo().getData_type().substring(0, 1); String format = ".txt"; String txtFileName = String.format("%s-%s_%s_%s_RESULT%s", detectorCode, date, time, dataType, format); - //导出数据内容到txt文本 + // 导出数据内容到txt文本 OutputStream fos = null; try { - //设置响应类型 + // 设置响应类型 response.setContentType("application/octet-stream"); - //解决中文不能生成文件 + // 解决中文不能生成文件 response.setHeader("Access-Control-Expose-Headers", "Content-Disposition"); - response.setHeader("Content-Disposition", "attachment; fileName=" + URLEncoder.encode(txtFileName,"UTF-8")); + response.setHeader("Content-Disposition", "attachment; fileName=" + URLEncoder.encode(txtFileName, "UTF-8")); fos = response.getOutputStream(); fos.write(strBuild.toString().getBytes()); } catch (FileNotFoundException e) { @@ -4559,14 +4562,16 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi data.put("nuclides", nuclideStr); /* Nuclide's Activity and Concentration */ // 时间部分 - String actTime = ""; String concTime = ""; + String actTime = ""; + String concTime = ""; Date refTimeAct = phd.getUsedSetting().getRefTime_act(); Date refTimeConc = phd.getUsedSetting().getRefTime_conc(); if (ObjectUtil.isNotNull(refTimeAct)) actTime = DateUtil.format(refTimeAct, DateConstant.DATE_BIAS_TIME); if (ObjectUtil.isNotNull(refTimeConc)) concTime = DateUtil.format(refTimeConc, DateConstant.DATE_BIAS_TIME); - data.put("actTime", actTime); data.put("concTime", concTime); + data.put("actTime", actTime); + data.put("concTime", concTime); // 数据部分 Map mapNucActMda = phd.getMapNucActMda(); List nuclideActs = new ArrayList<>(); @@ -4576,16 +4581,16 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi nuclideActMdaDto.setNuclide(nuclide); NuclideActMda nuclideActMda = entry.getValue(); String halflifeValue = ""; - if(nuclideActMda.isBCalculateMDA()) { + if (nuclideActMda.isBCalculateMDA()) { String units = "S"; double halflife = nuclideActMda.getHalflife(); - if(halflife >= 31556736) { // 1年 = 31556736s + if (halflife >= 31556736) { // 1年 = 31556736s halflife /= 31556736; units = "A"; - } else if(halflife >= 86400) { // 1天 = 86400s + } else if (halflife >= 86400) { // 1天 = 86400s halflife /= 86400; units = "D"; - } else if(halflife >= 3600) { // 1小时 = 3600s + } else if (halflife >= 3600) { // 1小时 = 3600s halflife /= 3600; units = "H"; } @@ -4609,12 +4614,14 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi int calculateIdx = nuclideActMda.getCalculateIdx(); List vEnergy = nuclideActMda.getVEnergy(); 
List vYield = nuclideActMda.getVYield(); - if(calculateIdx >= 0 && calculateIdx < vEnergy.size()) { + if (calculateIdx >= 0 && calculateIdx < vEnergy.size()) { String yield = NumberFormatUtil.numberFormat(String.valueOf(vYield.get(calculateIdx) * 100)); String energy = NumberFormatUtil.numberFormat(String.valueOf(vEnergy.get(calculateIdx))); - nuclideActMdaDto.setYield(yield); nuclideActMdaDto.setEnergy(energy); + nuclideActMdaDto.setYield(yield); + nuclideActMdaDto.setEnergy(energy); } else { - nuclideActMdaDto.setYield("NULL"); nuclideActMdaDto.setEnergy("NULL"); + nuclideActMdaDto.setYield("NULL"); + nuclideActMdaDto.setEnergy("NULL"); } nuclideActs.add(nuclideActMdaDto); } @@ -4632,11 +4639,11 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi @Override public void saveToPHD(String fileName, HttpServletRequest request, HttpServletResponse response) { - //获取当前登陆用户名 + // 获取当前登陆用户名 String userName = JwtUtil.getUserNameByToken(request); - //读取本地缓存的phd文件信息 + // 读取本地缓存的phd文件信息 Cache phdCache = localCache.getPHDCache(); - PHDFile phd = phdCache.getIfPresent(fileName+"-"+userName); + PHDFile phd = phdCache.getIfPresent(fileName + "-" + userName); if (Objects.isNull(phd)) { return; } @@ -4647,14 +4654,14 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi String dataType = phd.getMsgInfo().getData_type().substring(0, 1); String phdFileName = String.format("%s-%s_%s_%s.PHD", detectorCode, date, time, dataType); String spectrum = gammaFileUtil.makeUpSpectrum(phd); - //导出数据内容到txt文本 + // 导出数据内容到txt文本 OutputStream fos = null; try { - //设置响应类型 + // 设置响应类型 response.setContentType("application/octet-stream"); - //解决中文不能生成文件 + // 解决中文不能生成文件 response.setHeader("Access-Control-Expose-Headers", "Content-Disposition"); - response.setHeader("Content-Disposition", "attachment; fileName=" + URLEncoder.encode(phdFileName,"UTF-8")); + response.setHeader("Content-Disposition", "attachment; fileName=" + URLEncoder.encode(phdFileName, "UTF-8")); fos = response.getOutputStream(); fos.write(spectrum.getBytes()); } catch (FileNotFoundException e) { @@ -4676,29 +4683,29 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi @Override public void ImsToIec(MultipartFile file, HttpServletResponse response) { String ImsName = file.getOriginalFilename(); - String IecName = ImsName.substring(0, ImsName.length()-4)+".IEC"; + String IecName = ImsName.substring(0, ImsName.length() - 4) + ".IEC"; FileData datas = new FileData(); File imsFile = null; InputStream inputStream = null; - //导出数据内容到txt文本 + // 导出数据内容到txt文本 OutputStream fos = null; try { imsFile = File.createTempFile("betaGamma", null); inputStream = file.getInputStream(); FileUtils.copyInputStreamToFile(inputStream, imsFile); - if(!fileFtransitUtil.ReadIMS(imsFile, datas)) { - String Warning = "Read "+ImsName+" failed!\n"+ - "Possible Reason:\n"+ - "1、The file is unreadable;\n"+ - "2、The format of file is error."; + if (!fileFtransitUtil.ReadIMS(imsFile, datas)) { + String Warning = "Read " + ImsName + " failed!\n" + + "Possible Reason:\n" + + "1、The file is unreadable;\n" + + "2、The format of file is error."; return; } String iecValue = fileFtransitUtil.WriteIEC(datas); - //设置响应类型 + // 设置响应类型 response.setContentType("application/octet-stream"); - //解决中文不能生成文件 + // 解决中文不能生成文件 response.setHeader("Access-Control-Expose-Headers", "Content-Disposition"); - response.setHeader("Content-Disposition", "attachment; fileName=" + URLEncoder.encode(IecName,"UTF-8")); + 
response.setHeader("Content-Disposition", "attachment; fileName=" + URLEncoder.encode(IecName, "UTF-8")); fos = response.getOutputStream(); fos.write(iecValue.getBytes()); } catch (IOException e) { @@ -4723,28 +4730,28 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi @Override public void IecToIms(FileData datas, MultipartFile file, HttpServletResponse response) { String IecName = file.getOriginalFilename(); - String ImsName = IecName.substring(0, IecName.length()-4)+".Ims"; + String ImsName = IecName.substring(0, IecName.length() - 4) + ".Ims"; File iecFile = null; InputStream inputStream = null; - //导出数据内容到txt文本 + // 导出数据内容到txt文本 OutputStream fos = null; try { iecFile = File.createTempFile("betaGamma", null); inputStream = file.getInputStream(); FileUtils.copyInputStreamToFile(inputStream, iecFile); - if(!fileFtransitUtil.ReadIEC(iecFile, datas)) { - String Warning = "Read "+IecName+" failed!\n"+ - "Possible Reason:\n"+ - "1、The file is unreadable;\n"+ - "2、The format of file is error."; + if (!fileFtransitUtil.ReadIEC(iecFile, datas)) { + String Warning = "Read " + IecName + " failed!\n" + + "Possible Reason:\n" + + "1、The file is unreadable;\n" + + "2、The format of file is error."; return; } String imsValue = fileFtransitUtil.WriteIMS(datas); - //设置响应类型 + // 设置响应类型 response.setContentType("application/octet-stream"); - //解决中文不能生成文件 + // 解决中文不能生成文件 response.setHeader("Access-Control-Expose-Headers", "Content-Disposition"); - response.setHeader("Content-Disposition", "attachment; fileName=" + URLEncoder.encode(ImsName,"UTF-8")); + response.setHeader("Content-Disposition", "attachment; fileName=" + URLEncoder.encode(ImsName, "UTF-8")); fos = response.getOutputStream(); fos.write(imsValue.getBytes()); } catch (IOException e) { @@ -4769,23 +4776,23 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi @Override public void SpcToIms(FileData datas, MultipartFile file, HttpServletResponse response) { String SpcName = file.getOriginalFilename(); - String ImsName = SpcName.substring(0, SpcName.length()-4)+".IMS"; + String ImsName = SpcName.substring(0, SpcName.length() - 4) + ".IMS"; File spcFile = null; InputStream inputStream = null; - //导出数据内容到txt文本 + // 导出数据内容到txt文本 OutputStream fos = null; try { spcFile = File.createTempFile("betaGamma", null); inputStream = file.getInputStream(); FileUtils.copyInputStreamToFile(inputStream, spcFile); - if(!fileFtransitUtil.ReadSPC(spcFile, datas)) return; + if (!fileFtransitUtil.ReadSPC(spcFile, datas)) return; String imsValue = fileFtransitUtil.WriteIMS(datas); - //设置响应类型 + // 设置响应类型 response.setContentType("application/octet-stream"); - //解决中文不能生成文件 + // 解决中文不能生成文件 response.setHeader("Access-Control-Expose-Headers", "Content-Disposition"); - response.setHeader("Content-Disposition", "attachment; fileName=" + URLEncoder.encode(ImsName,"UTF-8")); + response.setHeader("Content-Disposition", "attachment; fileName=" + URLEncoder.encode(ImsName, "UTF-8")); fos = response.getOutputStream(); fos.write(imsValue.getBytes()); } catch (IOException e) { @@ -4810,7 +4817,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi @Override public void ImsToSpc(MultipartFile file, HttpServletResponse response) { String ImsName = file.getOriginalFilename(); - String SpcName = ImsName.substring(0, ImsName.length()-4)+".SPC"; + String SpcName = ImsName.substring(0, ImsName.length() - 4) + ".SPC"; FileData datas = new FileData(); File imsFile = null; File spcFile = null; @@ -4822,8 
+4829,8 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi spcFile = File.createTempFile("spcTemp", ".SCP"); inputStream = file.getInputStream(); FileUtils.copyInputStreamToFile(inputStream, imsFile); - if(!fileFtransitUtil.ReadIMS(imsFile, datas)) return; - fileFtransitUtil.WriteSPC(spcFile ,datas); + if (!fileFtransitUtil.ReadIMS(imsFile, datas)) return; + fileFtransitUtil.WriteSPC(spcFile, datas); // 获取文件输入流 spcInputStream = new FileInputStream(spcFile); // 获取响应输出流 @@ -4855,24 +4862,24 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi @Override public void SpcToIec(MultipartFile file, HttpServletResponse response) { String SpcName = file.getOriginalFilename(); - String IecName = SpcName.substring(0, SpcName.length()-4)+".IEC"; + String IecName = SpcName.substring(0, SpcName.length() - 4) + ".IEC"; FileData datas = new FileData(); File spcFile = null; InputStream inputStream = null; - //导出数据内容到txt文本 + // 导出数据内容到txt文本 OutputStream fos = null; try { spcFile = File.createTempFile("betaGamma", null); inputStream = file.getInputStream(); FileUtils.copyInputStreamToFile(inputStream, spcFile); - if(!fileFtransitUtil.ReadSPC(spcFile, datas)) return; + if (!fileFtransitUtil.ReadSPC(spcFile, datas)) return; String iecValue = fileFtransitUtil.WriteIEC(datas); - //设置响应类型 + // 设置响应类型 response.setContentType("application/octet-stream"); - //解决中文不能生成文件 + // 解决中文不能生成文件 response.setHeader("Access-Control-Expose-Headers", "Content-Disposition"); - response.setHeader("Content-Disposition", "attachment; fileName=" + URLEncoder.encode(IecName,"UTF-8")); + response.setHeader("Content-Disposition", "attachment; fileName=" + URLEncoder.encode(IecName, "UTF-8")); fos = response.getOutputStream(); fos.write(iecValue.getBytes()); } catch (IOException e) { @@ -4897,7 +4904,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi @Override public void IecToSpc(MultipartFile file, HttpServletResponse response) { String IecName = file.getOriginalFilename(); - String SpcName = IecName.substring(0, IecName.length()-4)+".SPC"; + String SpcName = IecName.substring(0, IecName.length() - 4) + ".SPC"; FileData datas = new FileData(); File iecFile = null; File spcFile = null; @@ -4909,8 +4916,8 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi spcFile = File.createTempFile("spcTemp", ".SCP"); inputStream = file.getInputStream(); FileUtils.copyInputStreamToFile(inputStream, iecFile); - if(!fileFtransitUtil.ReadIEC(iecFile, datas)) return; - fileFtransitUtil.WriteSPC(spcFile ,datas); + if (!fileFtransitUtil.ReadIEC(iecFile, datas)) return; + fileFtransitUtil.WriteSPC(spcFile, datas); // 获取文件输入流 spcInputStream = new FileInputStream(spcFile); // 获取响应输出流 diff --git a/jeecg-module-spectrum-analysis/src/main/java/org/jeecg/modules/service/impl/SpectrumAnalysisServiceImpl.java b/jeecg-module-spectrum-analysis/src/main/java/org/jeecg/modules/service/impl/SpectrumAnalysisServiceImpl.java index 6bb0cdc0..262c36b4 100644 --- a/jeecg-module-spectrum-analysis/src/main/java/org/jeecg/modules/service/impl/SpectrumAnalysisServiceImpl.java +++ b/jeecg-module-spectrum-analysis/src/main/java/org/jeecg/modules/service/impl/SpectrumAnalysisServiceImpl.java @@ -13,7 +13,9 @@ import com.baomidou.mybatisplus.core.toolkit.StringPool; import com.baomidou.mybatisplus.core.toolkit.StringUtils; import com.baomidou.mybatisplus.extension.plugins.pagination.Page; import com.google.common.cache.Cache; +import 
org.apache.commons.io.Charsets; import org.apache.commons.io.FileUtils; +import org.apache.commons.io.IOUtils; import org.apache.commons.net.ftp.FTP; import org.apache.commons.net.ftp.FTPClient; import org.apache.shiro.SecurityUtils; @@ -114,8 +116,8 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { Result result = new Result(); Map> map = new HashMap<>(); List menuTypeList = Arrays.asList(menuTypes); - //查询谱对应的台站类型 - if (CollectionUtils.isEmpty(menuTypeList)){ + // 查询谱对应的台站类型 + if (CollectionUtils.isEmpty(menuTypeList)) { result.error500("The spectrum type cannot be empty"); return result; } @@ -125,28 +127,28 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { return result; } List userStations = new LinkedList<>(); - //如果没有勾选AllUsers - if (Objects.nonNull(AllUsers) && !AllUsers){ + // 如果没有勾选AllUsers + if (Objects.nonNull(AllUsers) && !AllUsers) { String userName = JwtUtil.getUserNameByToken(request); - if (StringUtils.isBlank(userName)){ + if (StringUtils.isBlank(userName)) { result.error500("Description Failed to obtain the current login user information!"); return result; } userStations = userTaskUtil.findUserStation(userName); } - if (dbName.equalsIgnoreCase("auto")){ + if (dbName.equalsIgnoreCase("auto")) { dbName = "RNAUTO.GARDS_ANALYSES"; - }else if (dbName.equalsIgnoreCase("man")){ + } else if (dbName.equalsIgnoreCase("man")) { dbName = "RNMAN.GARDS_ANALYSES"; - }else { + } else { result.error500("The database type does not exist"); return result; } List sampleData = spectrumAnalysisMapper.getDBSearchList(dbName, stationTypes, userStations, AllUsers); - //获取台站编码 + // 获取台站编码 List stationCodes = new LinkedList<>(); List detectorCodes = new LinkedList<>(); - if (CollectionUtils.isNotEmpty(sampleData)){ + if (CollectionUtils.isNotEmpty(sampleData)) { stationCodes = sampleData.stream().map(GardsSampleDataSpectrum::getStationName).distinct().collect(Collectors.toList()); detectorCodes = sampleData.stream().map(GardsSampleDataSpectrum::getDetectorsName).distinct().collect(Collectors.toList()); } @@ -160,7 +162,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { @Override public Result getDBSpectrumList(QueryRequest queryRequest, GardsSampleDataSpectrum gardsSampleData, String dbName, String[] menuTypes, boolean AllUsers, boolean CollectStopB, boolean AcqStartB, Date startDate, Date endDate, HttpServletRequest request) { Result result = new Result(); - if (Objects.isNull(startDate)){ + if (Objects.isNull(startDate)) { result.error500("The start time cannot be empty"); return result; } @@ -171,7 +173,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { } String endTime = DateUtils.formatDate(endDate, "yyyy-MM-dd") + " 23:59:59"; List menuTypeList = Arrays.asList(menuTypes); - if (CollectionUtils.isEmpty(menuTypeList)){ + if (CollectionUtils.isEmpty(menuTypeList)) { result.error500("The spectrum type cannot be empty"); return result; } @@ -180,32 +182,34 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { result.error500("Please add the station type corresponding to the current system type in the data dictionary"); return result; } - if (StringUtils.isBlank(dbName)){ + if (StringUtils.isBlank(dbName)) { result.error500("Please select the database type"); return result; } String tempDBName = dbName; - if (dbName.equalsIgnoreCase("auto")){ + if (dbName.equalsIgnoreCase("auto")) { dbName = "RNAUTO.GARDS_ANALYSES"; - }else if 
(dbName.equalsIgnoreCase("man")){ + } else if (dbName.equalsIgnoreCase("man")) { dbName = "RNMAN.GARDS_ANALYSES"; - }else { + } else { result.error500("The database type does not exist"); return result; } List userStations = new LinkedList<>(); - if (Objects.nonNull(AllUsers) && !AllUsers){ + if (Objects.nonNull(AllUsers) && !AllUsers) { String userName = JwtUtil.getUserNameByToken(request); - if (StringUtils.isBlank(userName)){ + if (StringUtils.isBlank(userName)) { result.error500("Description Failed to obtain the current login user information!"); return result; } userStations = userTaskUtil.findUserStation(userName); } - //声明分页page + // 声明分页page Page page = new Page<>(queryRequest.getPageNo(), queryRequest.getPageSize()); Page sampleDataPage = spectrumAnalysisMapper.getDBSpectrumList(page, gardsSampleData, dbName, stationTypes, CollectStopB, AcqStartB, startTime, endTime, userStations, AllUsers); - sampleDataPage.getRecords().stream().forEach(item->{item.setDbName(tempDBName);}); + sampleDataPage.getRecords().stream().forEach(item -> { + item.setDbName(tempDBName); + }); result.setSuccess(true); result.setResult(sampleDataPage); return result; @@ -216,14 +220,14 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { Result result = new Result(); List> resultList = new LinkedList<>(); String userName = JwtUtil.getUserNameByToken(request); - String filePath = StringPool.SLASH + spectrumPathProperties.getUploadPath() + StringPool.SLASH +userName; + String filePath = StringPool.SLASH + spectrumPathProperties.getUploadPath() + StringPool.SLASH + userName; String sampleRx = "[a-zA-Z]{3}[0-9]{2}_[0-9]{3}-[0-9]{8}_[0-9]{4}_S_(FULL_|PREL_)\\d+\\.PHD"; Pattern regexPattern = Pattern.compile(sampleRx); String sampleRx1 = "[a-zA-Z]{3}[0-9]{2}_[0-9]{3}-[0-9]{8}_[0-9]{4}_S_(FULL_|PREL_)\\d+\\.\\d+\\.PHD"; Pattern regexPattern1 = Pattern.compile(sampleRx1); String sampleRx2 = "[a-zA-Z]{3}[0-9]{2}_[0-9]{3}-[0-9]{8}_[0-9]{4}_S.PHD"; Pattern regexPattern2 = Pattern.compile(sampleRx2); - //判断传递的文件名称是否包含, + // 判断传递的文件名称是否包含, List matchFileNames = new LinkedList<>(); if (fileName.contains(StringPool.COMMA)) { matchFileNames = Arrays.asList(fileName.split(StringPool.COMMA)); @@ -231,11 +235,11 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { matchFileNames.add(fileName); } if (CollectionUtils.isNotEmpty(matchFileNames)) { - for (String matchFileName :matchFileNames) { - Map map =new HashMap<>(); - //判断sample文件名称是否匹配正则表达式 如果满足 则查询出对应的文件信息 - if ( regexPattern.matcher(matchFileName).find() || regexPattern1.matcher(matchFileName).find() || regexPattern2.matcher(matchFileName).find() ){ - //查询sampleFile文件内容信息 获取文件内容 获取大致的gas det文件名称 + for (String matchFileName : matchFileNames) { + Map map = new HashMap<>(); + // 判断sample文件名称是否匹配正则表达式 如果满足 则查询出对应的文件信息 + if (regexPattern.matcher(matchFileName).find() || regexPattern1.matcher(matchFileName).find() || regexPattern2.matcher(matchFileName).find()) { + // 查询sampleFile文件内容信息 获取文件内容 获取大致的gas det文件名称 Map fileData = phdFileUtil.getFileData(filePath, matchFileName); if (CollectionUtils.isEmpty(fileData)) { result.error500("Failed to query the ftp file. 
Procedure"); @@ -244,25 +248,25 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { String sampleFileName = fileData.get("sampleFileName"); String sampleSystemType = fileData.get("sampleSystemType"); if (sampleSystemType.equals(SystemType.BETA.getType())) { - //加载并获取当前路径下所有的文件名称并进行名称格式化 仅需要格式化和sample文件同一个台站 名称格式化为最终名称 + // 加载并获取当前路径下所有的文件名称并进行名称格式化 仅需要格式化和sample文件同一个台站 名称格式化为最终名称 List fileNames = phdFileUtil.FileNameByStandardForm(filePath, sampleFileName); - //匹配获取QCFile + // 匹配获取QCFile boolean qcStatus = true; String qcphd = phdFileUtil.GetQCPHD(sampleFileName, fileNames); if (StringUtils.isBlank(qcphd)) { - qcphd = sampleFileName.substring(0, 23)+"_Q.PHD"; + qcphd = sampleFileName.substring(0, 23) + "_Q.PHD"; qcStatus = false; } - //匹配gasFile + // 匹配gasFile boolean gasStatus = false; String gasFileName = fileData.get("gasFileName"); String gasphd = phdFileUtil.GetMatchFile(gasFileName, fileNames, DataTypeAbbr.GASBKPHD.getType()); - //如果匹配到的文件名称不为空 + // 如果匹配到的文件名称不为空 if (StringUtils.isNotBlank(gasphd)) { gasFileName = gasphd; gasStatus = true; } - //匹配detFile + // 匹配detFile boolean detStatus = false; String detaFileName = fileData.get("detaFileName"); String detphd = phdFileUtil.GetMatchFile(detaFileName, fileNames, DataTypeAbbr.DETBKPHD.getType()); @@ -299,7 +303,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { @Override public Result getDBSpectrumChart(String dbName, Integer sampleId, HttpServletRequest request) { Result result = new Result(); - //获取当前的用户名称 + // 获取当前的用户名称 String userName = JwtUtil.getUserNameByToken(request); Map resultMap = new HashMap<>(); boolean bProcessed = false; @@ -308,24 +312,24 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { List gasBoundary = new LinkedList<>(); List detBoundary = new LinkedList<>(); List qcBoundary = new LinkedList<>(); - if (Objects.isNull(sampleId)){ + if (Objects.isNull(sampleId)) { result.error500("Please select a piece of data"); return result; } Integer analysisID = null; - if (dbName.equalsIgnoreCase("auto")){ + if (dbName.equalsIgnoreCase("auto")) { dbName = "RNAUTO"; analysisID = spectrumAnalysisMapper.getAnalysisID(dbName, sampleId, "RNAUTO"); xeResultsSpectrumList = spectrumAnalysisMapper.ReadXeResults(dbName, analysisID, sampleId); - } else if (dbName.equalsIgnoreCase("man")){ + } else if (dbName.equalsIgnoreCase("man")) { dbName = "RNMAN"; analysisID = spectrumAnalysisMapper.getAnalysisID(dbName, sampleId, userName); xeResultsSpectrumList = spectrumAnalysisMapper.ReadXeResults(dbName, analysisID, sampleId); - }else { + } else { result.error500("The database type does not exist"); return result; } - //查询数据库文件信息 + // 查询数据库文件信息 SpectrumFileRecord dbSpectrumFilePath = spectrumAnalysisMapper.getDBSpectrumFilePath(dbName, sampleId, analysisID); List filePath = new LinkedList<>(); if (Objects.nonNull(dbSpectrumFilePath)) { @@ -333,51 +337,51 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { Map gasBgMap = new HashMap<>(); Map detBgMap = new HashMap<>(); Map qcMap = new HashMap<>(); - if(StringUtils.isNotBlank(dbSpectrumFilePath.getSampleFilePath())){ + if (StringUtils.isNotBlank(dbSpectrumFilePath.getSampleFilePath())) { String sampleFilePath = dbSpectrumFilePath.getSampleFilePath(); filePath.add(sampleFilePath); GardsSampleData sample = spectrumAnalysisMapper.findSampleByFilePath(sampleFilePath); String pathName = StringPool.SLASH + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + 
sampleFilePath.substring(0, sampleFilePath.lastIndexOf(StringPool.SLASH)); - String fileName = sampleFilePath.substring(sampleFilePath.lastIndexOf(StringPool.SLASH)+1); + String fileName = sampleFilePath.substring(sampleFilePath.lastIndexOf(StringPool.SLASH) + 1); sampleMap = this.fenxi(pathName, fileName, sample.getSampleId(), sample.getStatus()); - resultMap.put("sample",sampleMap); + resultMap.put("sample", sampleMap); } - if(StringUtils.isNotBlank(dbSpectrumFilePath.getGasBgFilePath())){ + if (StringUtils.isNotBlank(dbSpectrumFilePath.getGasBgFilePath())) { String gasBgFilePath = dbSpectrumFilePath.getGasBgFilePath(); filePath.add(gasBgFilePath); GardsSampleData gasBg = spectrumAnalysisMapper.findSampleByFilePath(gasBgFilePath); String pathName = StringPool.SLASH + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + gasBgFilePath.substring(0, gasBgFilePath.lastIndexOf(StringPool.SLASH)); - String fileName = gasBgFilePath.substring(gasBgFilePath.lastIndexOf(StringPool.SLASH)+1); + String fileName = gasBgFilePath.substring(gasBgFilePath.lastIndexOf(StringPool.SLASH) + 1); if (Objects.nonNull(gasBg)) { gasBgMap = this.fenxi(pathName, fileName, gasBg.getSampleId(), gasBg.getStatus()); - resultMap.put("gasBg",gasBgMap); + resultMap.put("gasBg", gasBgMap); } } - if(StringUtils.isNotBlank(dbSpectrumFilePath.getDetBgFilePath())){ + if (StringUtils.isNotBlank(dbSpectrumFilePath.getDetBgFilePath())) { String detBgFilePath = dbSpectrumFilePath.getDetBgFilePath(); filePath.add(detBgFilePath); GardsSampleData detBg = spectrumAnalysisMapper.findSampleByFilePath(detBgFilePath); String pathName = StringPool.SLASH + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + detBgFilePath.substring(0, detBgFilePath.lastIndexOf(StringPool.SLASH)); - String fileName = detBgFilePath.substring(detBgFilePath.lastIndexOf(StringPool.SLASH)+1); + String fileName = detBgFilePath.substring(detBgFilePath.lastIndexOf(StringPool.SLASH) + 1); if (Objects.nonNull(detBg)) { detBgMap = this.fenxi(pathName, fileName, detBg.getSampleId(), detBg.getStatus()); - resultMap.put("detBg",detBgMap); + resultMap.put("detBg", detBgMap); } } String collectStartStr = DateUtils.formatDate(dbSpectrumFilePath.getCollectStart(), "yyyy/MM/dd HH:mm:ss"); String dbQcFilePath = spectrumAnalysisMapper.getQCFilePath(dbSpectrumFilePath.getSiteDetCode(), collectStartStr); - if(StringUtils.isNotBlank(dbQcFilePath)){ + if (StringUtils.isNotBlank(dbQcFilePath)) { dbSpectrumFilePath.setQcFilePath(dbQcFilePath); filePath.add(dbQcFilePath); GardsSampleData qc = spectrumAnalysisMapper.findSampleByFilePath(dbQcFilePath); String pathName = StringPool.SLASH + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + dbQcFilePath.substring(0, dbQcFilePath.lastIndexOf(StringPool.SLASH)); - String fileName = dbQcFilePath.substring(dbQcFilePath.lastIndexOf(StringPool.SLASH)+1); + String fileName = dbQcFilePath.substring(dbQcFilePath.lastIndexOf(StringPool.SLASH) + 1); if (Objects.nonNull(qc)) { qcMap = this.fenxi(pathName, fileName, qc.getSampleId(), qc.getStatus()); - resultMap.put("qc",qcMap); + resultMap.put("qc", qcMap); } } - phdFileUtil.getLightColor(sampleMap,gasBgMap,detBgMap,qcMap); + phdFileUtil.getLightColor(sampleMap, gasBgMap, detBgMap, qcMap); String sampleFileName = String.valueOf(sampleMap.get("fileName")); Cache> cache = betaCache.getBetaCache(); Map xeDataMap = cache.getIfPresent(sampleFileName + "-" + userName + "-xeData"); @@ -401,15 +405,15 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { 
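The sample/gasBg/detBg/qc branches above all derive the FTP working directory and the bare file name from a stored path with the same pair of substring calls. Below is a minimal, self-contained sketch of that split; the helper name, the "savefile" root and the example path are illustrative only, with "/" standing in for StringPool.SLASH and the root argument for spectrumPathProperties.getSaveFilePath().

// Sketch only: splits a stored relative path into FTP working directory and file name,
// mirroring the repeated substring/lastIndexOf pattern in the hunk above.
final class FtpPathSplitter {

    static String[] split(String root, String storedPath) {
        int lastSlash = storedPath.lastIndexOf('/');
        // Working directory on the FTP server: "/<root>/<directory part of the stored path>"
        String pathName = "/" + root + "/" + storedPath.substring(0, lastSlash);
        // Bare file name later passed to retrieveFileStream(...)
        String fileName = storedPath.substring(lastSlash + 1);
        return new String[]{pathName, fileName};
    }

    public static void main(String[] args) {
        // Hypothetical stored path shaped like the sample-file regex earlier in this diff.
        String[] parts = split("savefile", "AUX09/samplephd/AUX09_001-20231001_1200_S_FULL_40184.PHD");
        System.out.println(parts[0]); // /savefile/AUX09/samplephd
        System.out.println(parts[1]); // AUX09_001-20231001_1200_S_FULL_40184.PHD
    }
}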
qcMap.put("Boundary", qcBoundary); } } - //Xe - if (CollectionUtils.isNotEmpty(xeResultsSpectrumList)){ - for (GardsXeResultsSpectrum xeData:xeResultsSpectrumList) { + // Xe + if (CollectionUtils.isNotEmpty(xeResultsSpectrumList)) { + for (GardsXeResultsSpectrum xeData : xeResultsSpectrumList) { Double conc = xeData.getConc(); Double mdc = xeData.getMdc(); - if (conc < 0){ + if (conc < 0) { xeData.setColor("red"); xeData.setNidFlag(0); - } else if (0 mdc) { @@ -422,7 +426,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { resultMap.put("bProcessed", bProcessed); resultMap.put("savedAnalysisResult", true); } else { - resultMap.clear(); + resultMap.clear(); } result.setSuccess(true); result.setResult(resultMap); @@ -447,36 +451,36 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { Map qcMap = new HashMap<>(); boolean bProcessed = false; boolean savedAnalysisResult = false; - if(StringUtils.isNotBlank(sampleFileName)){ + if (StringUtils.isNotBlank(sampleFileName)) { String sampleFilePath = path + sampleFileName; filePath.add(sampleFilePath); sampleMap = this.fenxi(path, sampleFileName, null, null); resultMap.put("sample", sampleMap); } - if(StringUtils.isNotBlank(gasFileName)){ + if (StringUtils.isNotBlank(gasFileName)) { String gasBgFilePath = path + gasFileName; filePath.add(gasBgFilePath); gasBgMap = this.fenxi(path, gasFileName, null, null); resultMap.put("gasBg", gasBgMap); } - if(StringUtils.isNotBlank(detFileName)){ + if (StringUtils.isNotBlank(detFileName)) { String detBgFilePath = path + detFileName; filePath.add(detBgFilePath); detBgMap = this.fenxi(path, detFileName, null, null); resultMap.put("detBg", detBgMap); } - if(StringUtils.isNotBlank(qcFileName)){ + if (StringUtils.isNotBlank(qcFileName)) { String dbQcFilePath = path + qcFileName; filePath.add(dbQcFilePath); qcMap = this.fenxi(path, qcFileName, null, null); resultMap.put("qc", qcMap); } - phdFileUtil.getLightColor(sampleMap,gasBgMap,detBgMap,qcMap); - //获取本地缓存信息 + phdFileUtil.getLightColor(sampleMap, gasBgMap, detBgMap, qcMap); + // 获取本地缓存信息 Cache> cache = betaCache.getBetaCache(); - //根据key获取缓存数据 + // 根据key获取缓存数据 Map xeDataMap = cache.getIfPresent(sampleFileName + "-" + userName + "-xeData"); - //如果缓存数据存在 则将缓存数据取出使用 + // 如果缓存数据存在 则将缓存数据取出使用 if (CollectionUtils.isNotEmpty(xeDataMap)) { xeResultsSpectrumList = (List) xeDataMap.get("XeData"); sampleBoundary = (List) xeDataMap.get("SampleBoundary"); @@ -485,15 +489,15 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { qcBoundary = (List) xeDataMap.get("QCBoundary"); bProcessed = true; savedAnalysisResult = true; - //Xe - if (CollectionUtils.isNotEmpty(xeResultsSpectrumList)){ - for (GardsXeResultsSpectrum xeData:xeResultsSpectrumList) { + // Xe + if (CollectionUtils.isNotEmpty(xeResultsSpectrumList)) { + for (GardsXeResultsSpectrum xeData : xeResultsSpectrumList) { Double conc = xeData.getConc(); Double mdc = xeData.getMdc(); - if (conc < 0){ + if (conc < 0) { xeData.setColor("red"); xeData.setNidFlag(0); - } else if (0 mdc) { @@ -526,13 +530,13 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { private Map fenxi(String pathName, String fileName, Integer sampleId, String status) { Map map = new HashMap<>(); FTPClient ftpClient = ftpUtil.LoginFTP(); - if (Objects.isNull(ftpClient)){ + if (Objects.isNull(ftpClient)) { throw new RuntimeException("ftp连接失败"); } InputStream inputStream = null; File file = null; try { - //切换被动模式 + // 切换被动模式 
ftpClient.enterLocalPassiveMode(); ftpClient.setFileType(FTPClient.BINARY_FILE_TYPE); // 设置编码,当文件中存在中文且上传后文件乱码时可使用此配置项 @@ -540,10 +544,10 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { ftpClient.setFileTransferMode(FTPClient.STREAM_TRANSFER_MODE); ftpClient.changeWorkingDirectory(pathName); inputStream = ftpClient.retrieveFileStream(fileName); - if (Objects.nonNull(inputStream)){ - //声明一个临时文件 + if (Objects.nonNull(inputStream)) { + // 声明一个临时文件 file = File.createTempFile("betaGamma", null); - //将ftp文件的输入流复制给临时文件 + // 将ftp文件的输入流复制给临时文件 FileUtils.copyInputStreamToFile(inputStream, file); map = phdFileUtil.getSourceData(file.getAbsolutePath(), sampleId, status); map.put("fileName", fileName); @@ -553,10 +557,10 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { throw new RuntimeException(e); } finally { try { - if (Objects.nonNull(ftpClient)){ + if (Objects.nonNull(ftpClient)) { ftpClient.disconnect(); } - if (Objects.nonNull(inputStream)){ + if (Objects.nonNull(inputStream)) { inputStream.close(); } if (Objects.nonNull(file)) { @@ -586,37 +590,37 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { public Result viewComment(Integer sampleId, String sampleFileName, HttpServletRequest request) { Result result = new Result(); CommentInfo commentInfo = new CommentInfo(); - //根据请求体获取当前登录用户名 + // 根据请求体获取当前登录用户名 String userName = JwtUtil.getUserNameByToken(request); - //上传路径 + // 上传路径 String pathName = StringPool.SLASH + spectrumPathProperties.getUploadPath() + StringPool.SLASH + userName; - //判断sampleId是否为空 如果不为空 则当前操作数据来源是数据库 文件路径从数据库中查询 + // 判断sampleId是否为空 如果不为空 则当前操作数据来源是数据库 文件路径从数据库中查询 if (Objects.nonNull(sampleId)) { String sampleFilePath = spectrumAnalysisMapper.getSampleFilePath(sampleId); pathName = StringPool.SLASH + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + sampleFilePath.substring(0, sampleFilePath.lastIndexOf(StringPool.SLASH)); CommentData commentData = spectrumAnalysisMapper.viewComment(sampleId); - if (Objects.nonNull(commentData)){ - if (!commentData.getAnalyst().equals(userName)){ - if (StringUtils.isNotBlank(commentData.getComment())){ + if (Objects.nonNull(commentData)) { + if (!commentData.getAnalyst().equals(userName)) { + if (StringUtils.isNotBlank(commentData.getComment())) { String line = "------------------------------------------------------------"; - String comment = line+"\n[ User "+commentData.getAnalyst()+" Comment ] : \n"+commentData.getComment()+"\n"+line+" \n"; + String comment = line + "\n[ User " + commentData.getAnalyst() + " Comment ] : \n" + commentData.getComment() + "\n" + line + " \n"; commentInfo.setSpectrumOtherCommentInfo(comment); } - }else { + } else { commentInfo.setSpectrumAnalysisCommentInfo(commentData.getComment()); } } } - //连接ftp + // 连接ftp FTPClient ftpClient = ftpUtil.LoginFTP(); - if (Objects.isNull(ftpClient)){ + if (Objects.isNull(ftpClient)) { result.error500("ftp connection failed"); return result; } InputStream inputStream = null; File file = null; try { - //切换被动模式 + // 切换被动模式 ftpClient.enterLocalPassiveMode(); ftpClient.setFileType(FTPClient.BINARY_FILE_TYPE); // 设置编码,当文件中存在中文且上传后文件乱码时可使用此配置项 @@ -624,10 +628,10 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { ftpClient.setFileTransferMode(FTPClient.STREAM_TRANSFER_MODE); ftpClient.changeWorkingDirectory(pathName); inputStream = ftpClient.retrieveFileStream(sampleFileName); - if (Objects.nonNull(inputStream)){ - //声明一个临时文件 + if 
(Objects.nonNull(inputStream)) { + // 声明一个临时文件 file = File.createTempFile("betaGamma", null); - //将ftp文件的输入流复制给临时文件 + // 将ftp文件的输入流复制给临时文件 FileUtils.copyInputStreamToFile(inputStream, file); EnergySpectrumStruct struct = EnergySpectrumHandler.getSourceData(file.getAbsolutePath()); commentInfo.setSpectrumCommentInfo(struct.comment); @@ -636,10 +640,10 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { throw new RuntimeException(e); } finally { try { - if (Objects.nonNull(ftpClient)){ + if (Objects.nonNull(ftpClient)) { ftpClient.disconnect(); } - if (Objects.nonNull(inputStream)){ + if (Objects.nonNull(inputStream)) { inputStream.close(); } if (Objects.nonNull(file)) { @@ -656,22 +660,22 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { @Override public void viewARR(Integer sampleId, HttpServletResponse response) { - //获取自动处理生成的报告地址 + // 获取自动处理生成的报告地址 String reportPath = spectrumAnalysisMapper.viewARR(sampleId); - if (StringUtils.isBlank(reportPath)){ + if (StringUtils.isBlank(reportPath)) { throw new RuntimeException("自动处理程序生成报告不存在!"); } String pathName = StringPool.SLASH + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + reportPath.substring(0, reportPath.lastIndexOf(StringPool.SLASH)); - String fileName = reportPath.substring(reportPath.lastIndexOf(StringPool.SLASH)+1)+".txt"; - //连接ftp + String fileName = reportPath.substring(reportPath.lastIndexOf(StringPool.SLASH) + 1) + ".txt"; + // 连接ftp FTPClient ftpClient = ftpUtil.LoginFTP(); - if (Objects.isNull(ftpClient)){ + if (Objects.isNull(ftpClient)) { throw new RuntimeException("ftp连接失败"); } InputStream inputStream = null; ServletOutputStream outputStream = null; try { - //切换被动模式 + // 切换被动模式 ftpClient.enterLocalPassiveMode(); ftpClient.setFileType(FTPClient.BINARY_FILE_TYPE); // 设置编码,当文件中存在中文且上传后文件乱码时可使用此配置项 @@ -679,7 +683,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { ftpClient.setFileTransferMode(FTPClient.STREAM_TRANSFER_MODE); ftpClient.changeWorkingDirectory(pathName); inputStream = ftpClient.retrieveFileStream(fileName); - if (Objects.nonNull(inputStream)){ + if (Objects.nonNull(inputStream)) { outputStream = response.getOutputStream(); byte[] buffer = new byte[1024]; int bytesRead; @@ -692,13 +696,13 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { throw new RuntimeException(e); } finally { try { - if (Objects.nonNull(ftpClient)){ + if (Objects.nonNull(ftpClient)) { ftpClient.disconnect(); } - if (ObjectUtil.isNotNull(inputStream)){ + if (ObjectUtil.isNotNull(inputStream)) { inputStream.close(); } - if (ObjectUtil.isNotNull(outputStream)){ + if (ObjectUtil.isNotNull(outputStream)) { outputStream.close(); } } catch (IOException e) { @@ -726,16 +730,16 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { String dbName = rrrLogInfo.getDbName(); Integer sampleId = rrrLogInfo.getSampleId(); Integer analysisID = null; - //GetAnalysisID sample_id - if (dbName.equalsIgnoreCase("auto")){ + // GetAnalysisID sample_id + if (dbName.equalsIgnoreCase("auto")) { dbName = "RNAUTO"; analysisID = spectrumAnalysisMapper.getAnalysisID(dbName, sampleId, "RNAUTO"); - }else if (dbName.equalsIgnoreCase("man")){ + } else if (dbName.equalsIgnoreCase("man")) { dbName = "RNMAN"; analysisID = spectrumAnalysisMapper.getAnalysisID(dbName, sampleId, userName); } SpectrumFileRecord dbSpectrumFilePath = spectrumAnalysisMapper.getDBSpectrumFilePath(dbName, sampleId, analysisID); - if 
(Objects.nonNull(dbSpectrumFilePath)){ + if (Objects.nonNull(dbSpectrumFilePath)) { sampleFilePath = StringPool.SLASH + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + dbSpectrumFilePath.getSampleFilePath().substring(0, dbSpectrumFilePath.getSampleFilePath().lastIndexOf(StringPool.SLASH)); gasFilePath = StringPool.SLASH + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + dbSpectrumFilePath.getGasBgFilePath().substring(0, dbSpectrumFilePath.getGasBgFilePath().lastIndexOf(StringPool.SLASH)); detFilePath = StringPool.SLASH + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + dbSpectrumFilePath.getDetBgFilePath().substring(0, dbSpectrumFilePath.getDetBgFilePath().lastIndexOf(StringPool.SLASH)); @@ -748,24 +752,24 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { rrrLogInfo.setSampleFilePath(sampleFilePath); rrrLogInfo.setGasFilePath(gasFilePath); rrrLogInfo.setDetFilePath(detFilePath); - //从本地缓存获取beta gamma的数组 + // 从本地缓存获取beta gamma的数组 Cache> cache = betaCache.getBetaCache(); List betaFittingPara = new LinkedList<>(); List gammaFittingPara = new LinkedList<>(); if (Objects.nonNull(cache)) { - //根据qc文件名称-用户名-beta的方式获取beta的内容 + // 根据qc文件名称-用户名-beta的方式获取beta的内容 Map betaMap = cache.getIfPresent(rrrLogInfo.getQcFileName() + "-" + userName + "-beta"); if (CollectionUtils.isNotEmpty(betaMap)) { betaFittingPara = (List) betaMap.get("fittingPara"); } - //根据qc文件名称-用户名-gamma的方式获取gamma的内容 + // 根据qc文件名称-用户名-gamma的方式获取gamma的内容 Map gammaMap = cache.getIfPresent(rrrLogInfo.getQcFileName() + "-" + userName + "-gamma"); if (CollectionUtils.isNotEmpty(gammaMap)) { gammaFittingPara = (List) gammaMap.get("fittingPara"); } - //根据key获取重新分析的参数 + // 根据key获取重新分析的参数 Map reAnalyseParam = cache.getIfPresent(rrrLogInfo.getSampleFileName() + "-" + userName + "-reAnalyseParam"); - //判断重新分析的参数信息是否为空 + // 判断重新分析的参数信息是否为空 if (CollectionUtils.isNotEmpty(reAnalyseParam)) { BgCalPara = (BgCalibratePara) reAnalyseParam.get("reAnalyseParam"); Boolean bGammaEnergyValidSample = (Boolean) reAnalyseParam.get("bGammaEnergyValidSample"); @@ -794,15 +798,15 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { } } } - //对当前文件内容进行分析 + // 对当前文件内容进行分析 analyzeRRR(rrrLogInfo, BgCalPara, betaFittingPara, gammaFittingPara); - //对分析后的内容进行数据获取 - List channelsSpectrums = rrrLogInfo.getRoiChannelsSpectrumList(); - List resultsSpectrums = rrrLogInfo.getRoiResultsSpectrumList(); - List xeResultsSpectrums = rrrLogInfo.getXeResultsSpectrumList(); + // 对分析后的内容进行数据获取 + List channelsSpectrums = rrrLogInfo.getRoiChannelsSpectrumList(); + List resultsSpectrums = rrrLogInfo.getRoiResultsSpectrumList(); + List xeResultsSpectrums = rrrLogInfo.getXeResultsSpectrumList(); //#SAMPLE: LIMITS PER ROI GAMMA List gammaCalibrationParamList = rrrLogInfo.getGammaCalibrationParamList(); - for (int i=0; i betaCalibrationParamList = rrrLogInfo.getBetaCalibrationParamList(); - for (int i=0; i xeResultsSpectrumList = new LinkedList<>(); GardsXeResultsSpectrum xe131m = new GardsXeResultsSpectrum(); xe131m.setNuclideName(XeNuclideName.XE_131m.getType()); @@ -1052,7 +1056,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { xe135.setNidFlag(bgAnalyseResult.XE_135_NID_FLAG); xeResultsSpectrumList.add(xe135); rrrLogInfo.setXeResultsSpectrumList(xeResultsSpectrumList); - //处理GammaCalibration的数据 + // 处理GammaCalibration的数据 List gammaCalibrationSpectrumList = new LinkedList<>(); if (rrrLogInfo.isBGammaEnergyValidSample()) { GardsCalibrationSpectrum 
gammaCalibrationS = new GardsCalibrationSpectrum(); @@ -1100,7 +1104,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { gammaCalibrationSpectrumList.add(gammaCalibrationD); } rrrLogInfo.setGammaCalibrationParamList(gammaCalibrationSpectrumList); - //处理BetaCalibration数据 + // 处理BetaCalibration数据 List betaCalibrationSpectrumList = new LinkedList<>(); if (rrrLogInfo.isBBetaEnergyValidSample()) { GardsCalibrationSpectrum betaCalibrationS = new GardsCalibrationSpectrum(); @@ -1148,32 +1152,32 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { betaCalibrationSpectrumList.add(betaCalibrationD); } rrrLogInfo.setBetaCalibrationParamList(betaCalibrationSpectrumList); - //存储roiChannel数据 + // 存储roiChannel数据 List roiChannelsSpectrumList = new LinkedList<>(); - for (int i=0; i roiResultsSpectrumList = new LinkedList<>(); - for (int i=0; ibgAnalyseResult.MDC.get(i)) { + if (bgAnalyseResult.ROI_con_uncer.get(i) > bgAnalyseResult.MDC.get(i)) { roiResults.setNidFlag(1); } else { roiResults.setNidFlag(0); @@ -1230,16 +1234,16 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { SpectrumFileRecord dbSpectrumFilePath = new SpectrumFileRecord(); if (Objects.nonNull(sampleId) && StringUtils.isNotBlank(dbName)) { Integer analysisID = null; - if (dbName.equalsIgnoreCase("auto")){ + if (dbName.equalsIgnoreCase("auto")) { dbName = "RNAUTO"; analysisID = spectrumAnalysisMapper.getAnalysisID(dbName, sampleId, "RNAUTO"); - }else if (dbName.equalsIgnoreCase("man")){ + } else if (dbName.equalsIgnoreCase("man")) { dbName = "RNMAN"; analysisID = spectrumAnalysisMapper.getAnalysisID(dbName, sampleId, userName); } dbSpectrumFilePath = spectrumAnalysisMapper.getDBSpectrumFilePath(dbName, sampleId, analysisID); } - if(StringUtils.isNotBlank(sampleFileName)) { + if (StringUtils.isNotBlank(sampleFileName)) { String sampleFilePath = StringPool.SLASH + spectrumPathProperties.getUploadPath() + StringPool.SLASH + userName + StringPool.SLASH + sampleFileName; if (StringUtils.isNotBlank(dbSpectrumFilePath.getSampleFilePath())) { sampleFilePath = StringPool.SLASH + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + dbSpectrumFilePath.getSampleFilePath(); @@ -1247,7 +1251,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { List lines = phdFileUtil.readLine(sampleFilePath); map.put("sample", lines); } - if(StringUtils.isNotBlank(gasFileName)) { + if (StringUtils.isNotBlank(gasFileName)) { String gasBgFilePath = StringPool.SLASH + spectrumPathProperties.getUploadPath() + StringPool.SLASH + userName + StringPool.SLASH + gasFileName; if (StringUtils.isNotBlank(dbSpectrumFilePath.getGasBgFilePath())) { gasBgFilePath = StringPool.SLASH + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + dbSpectrumFilePath.getGasBgFilePath(); @@ -1255,7 +1259,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { List lines = phdFileUtil.readLine(gasBgFilePath); map.put("gasBg", lines); } - if(StringUtils.isNotBlank(detFileName)) { + if (StringUtils.isNotBlank(detFileName)) { String detBgFilePath = StringPool.SLASH + spectrumPathProperties.getUploadPath() + StringPool.SLASH + userName + StringPool.SLASH + detFileName; if (StringUtils.isNotBlank(dbSpectrumFilePath.getDetBgFilePath())) { detBgFilePath = StringPool.SLASH + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + dbSpectrumFilePath.getDetBgFilePath(); @@ -1265,7 +1269,7 @@ public class SpectrumAnalysisServiceImpl 
implements ISpectrumAnalysisService { } if (StringUtils.isNotBlank(qcFileName)) { String dbQcFilePath = StringPool.SLASH + spectrumPathProperties.getUploadPath() + StringPool.SLASH + userName + StringPool.SLASH + qcFileName; - if ( Objects.nonNull(dbSpectrumFilePath.getCollectStart()) && StringUtils.isNotBlank(dbSpectrumFilePath.getSiteDetCode()) ) { + if (Objects.nonNull(dbSpectrumFilePath.getCollectStart()) && StringUtils.isNotBlank(dbSpectrumFilePath.getSiteDetCode())) { String collectStartStr = DateUtils.formatDate(dbSpectrumFilePath.getCollectStart(), "yyyy/MM/dd HH:mm:ss"); dbQcFilePath = StringPool.SLASH + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + spectrumAnalysisMapper.getQCFilePath(dbSpectrumFilePath.getSiteDetCode(), collectStartStr); } @@ -1286,16 +1290,16 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { String sampleFilePath = spectrumAnalysisMapper.getSampleFilePath(sampleId); pathName = StringPool.SLASH + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + sampleFilePath.substring(0, sampleFilePath.lastIndexOf(StringPool.SLASH)); } - if (StringUtils.isNotBlank(sampleFileName)){ + if (StringUtils.isNotBlank(sampleFileName)) { FTPClient ftpClient = ftpUtil.LoginFTP(); - if (Objects.isNull(ftpClient)){ + if (Objects.isNull(ftpClient)) { result.error500("ftp connection failed"); return result; } InputStream inputStream = null; File file = null; try { - //切换被动模式 + // 切换被动模式 ftpClient.enterLocalPassiveMode(); ftpClient.setFileType(FTPClient.BINARY_FILE_TYPE); // 设置编码,当文件中存在中文且上传后文件乱码时可使用此配置项 @@ -1303,10 +1307,10 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { ftpClient.setFileTransferMode(FTPClient.STREAM_TRANSFER_MODE); ftpClient.changeWorkingDirectory(pathName); inputStream = ftpClient.retrieveFileStream(sampleFileName); - if (Objects.nonNull(inputStream)){ - //声明一个临时文件 + if (Objects.nonNull(inputStream)) { + // 声明一个临时文件 file = File.createTempFile("betaGamma", null); - //将ftp文件的输入流复制给临时文件 + // 将ftp文件的输入流复制给临时文件 FileUtils.copyInputStreamToFile(inputStream, file); EnergySpectrumStruct struct = EnergySpectrumHandler.getSourceData(file.getAbsolutePath()); String measurementID = struct.measurement_id; @@ -1315,12 +1319,12 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { String sampleRefId = struct.sample_ref_id; String sit_det_code = struct.site_code; String detect_code = struct.detector_code; - String sample_id = Objects.nonNull(sampleId)?sampleId.toString():""; + String sample_id = Objects.nonNull(sampleId) ? 
sampleId.toString() : ""; Date collectStartDate = DateUtils.parseDate(struct.collection_start_date + StringPool.SPACE + struct.collection_start_time.substring(0, struct.collection_start_time.indexOf(StringPool.DOT)), "yyyy/MM/dd HH:mm:ss"); Date collectStopDate = DateUtils.parseDate(struct.collection_stop_date + StringPool.SPACE + struct.collection_stop_time.substring(0, struct.collection_stop_time.indexOf(StringPool.DOT)), "yyyy/MM/dd HH:mm:ss"); String collect_start = DateUtils.formatDate(collectStartDate, "yyyy/MM/dd HH:mm:ss"); String collect_stop = DateUtils.formatDate(collectStopDate, "yyyy/MM/dd HH:mm:ss"); - String collection_time_value = String.format ("%.2f",Double.valueOf(collectStopDate.getTime()/1000 - collectStartDate.getTime()/ 1000)); + String collection_time_value = String.format("%.2f", Double.valueOf(collectStopDate.getTime() / 1000 - collectStartDate.getTime() / 1000)); String s_volume_of_Xe = String.valueOf(struct.air_volume); String s_xe_stable_volume = String.valueOf(struct.sample_volume_of_Xe); Date acquisitionStartDate = DateUtils.parseDate(struct.acquisition_start_date + StringPool.SPACE + struct.acquisition_start_time.substring(0, struct.acquisition_start_time.indexOf(StringPool.DOT)), "yyyy/MM/dd HH:mm:ss"); @@ -1330,25 +1334,25 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { StringBuffer strBuffer = new StringBuffer(); strBuffer.append("\n"); strBuffer.append("#FILE INFORMATION\n"); - strBuffer.append(" SampleMeasID: "+measurementID+"\n"); - strBuffer.append(" GASBKMeasID: "+gasBkgdMeasurementID+"\n"); - strBuffer.append(" DETBKMeasID: "+bkgdMeasurementID+"\n"); - strBuffer.append(" SRID: "+sampleRefId+"\n"); + strBuffer.append(" SampleMeasID: " + measurementID + "\n"); + strBuffer.append(" GASBKMeasID: " + gasBkgdMeasurementID + "\n"); + strBuffer.append(" DETBKMeasID: " + bkgdMeasurementID + "\n"); + strBuffer.append(" SRID: " + sampleRefId + "\n"); strBuffer.append("\n"); strBuffer.append("#COLLECTION INFORMATION\n"); - strBuffer.append(" Station CODE: "+sit_det_code+"\n"); - strBuffer.append(" Detector CODE: "+detect_code+"\n"); - strBuffer.append(" Sample ID: "+sample_id+"\n"); - strBuffer.append(" Collection Start: "+collect_start+"\n"); - strBuffer.append(" Collection Stop: "+collect_stop+"\n"); - strBuffer.append(" Collection TIME: "+collection_time_value+"\n"); - strBuffer.append(" Sample Volume[m3]: "+s_volume_of_Xe+"\n"); - strBuffer.append(" Xe Volume[cm3]: "+s_xe_stable_volume+"\n"); + strBuffer.append(" Station CODE: " + sit_det_code + "\n"); + strBuffer.append(" Detector CODE: " + detect_code + "\n"); + strBuffer.append(" Sample ID: " + sample_id + "\n"); + strBuffer.append(" Collection Start: " + collect_start + "\n"); + strBuffer.append(" Collection Stop: " + collect_stop + "\n"); + strBuffer.append(" Collection TIME: " + collection_time_value + "\n"); + strBuffer.append(" Sample Volume[m3]: " + s_volume_of_Xe + "\n"); + strBuffer.append(" Xe Volume[cm3]: " + s_xe_stable_volume + "\n"); strBuffer.append("\n"); strBuffer.append("#ACQUISITION INFORMATION\n"); - strBuffer.append(" Acquisition Start: "+acquisition_start+"\n"); - strBuffer.append(" Acq Real Time: "+acquisition_real_sec+"\n"); - strBuffer.append(" Acq Live Time: "+acquisition_live_sec+"\n"); + strBuffer.append(" Acquisition Start: " + acquisition_start + "\n"); + strBuffer.append(" Acq Real Time: " + acquisition_real_sec + "\n"); + strBuffer.append(" Acq Live Time: " + acquisition_live_sec + "\n"); strBuffer.append("\n"); result.setSuccess(true); 
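The #COLLECTION INFORMATION block above derives its Collection TIME value from the PHD struct's separate date and time fields, trimming the fractional seconds before parsing and printing the elapsed seconds with two decimals. A minimal sketch of that arithmetic follows, with SimpleDateFormat standing in for the project's DateUtils and made-up sample values.

import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;

final class CollectionTime {

    static String collectionTimeSeconds(String startDate, String startTime,
                                        String stopDate, String stopTime) throws ParseException {
        SimpleDateFormat fmt = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss");
        // Drop fractional seconds ("12:00:00.0" -> "12:00:00"), as the hunk above does with indexOf(".")
        Date start = fmt.parse(startDate + " " + startTime.substring(0, startTime.indexOf('.')));
        Date stop = fmt.parse(stopDate + " " + stopTime.substring(0, stopTime.indexOf('.')));
        long seconds = stop.getTime() / 1000 - start.getTime() / 1000;
        return String.format("%.2f", (double) seconds);   // e.g. "86400.00" for a 24 h collection
    }

    public static void main(String[] args) throws ParseException {
        System.out.println(collectionTimeSeconds("2023/10/01", "12:00:00.0", "2023/10/02", "12:00:00.0"));
    }
}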
result.setResult(strBuffer.toString()); @@ -1359,10 +1363,10 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { throw new RuntimeException(e); } finally { try { - if (Objects.nonNull(ftpClient)){ + if (Objects.nonNull(ftpClient)) { ftpClient.disconnect(); } - if (Objects.nonNull(inputStream)){ + if (Objects.nonNull(inputStream)) { inputStream.close(); } if (Objects.nonNull(file)) { @@ -1387,18 +1391,18 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { String line = SymbolConstant.LINE; List sampleInfos = ListUtil.toList(sampleInfo.split(line)).stream() .filter(StrUtil::isNotBlank) - .filter(item -> StrUtil.contains(item,colon)) + .filter(item -> StrUtil.contains(item, colon)) .collect(Collectors.toList()); - Map dataMap = new HashMap<>(); + Map dataMap = new HashMap<>(); for (String s : sampleInfos) { - String[] split = s.split(colon,2);// 只分割为两部分 - String key = StrUtil.removeAll(split[0],space); + String[] split = s.split(colon, 2);// 只分割为两部分 + String key = StrUtil.removeAll(split[0], space); String value = split[1].trim(); - dataMap.put(key,value); + dataMap.put(key, value); } String export = "SampleInformation-Beta.xls"; String template = SampleInfo_B.getName(); - ExportUtil.exportXls(response,template,dataMap,export); + ExportUtil.exportXls(response, template, dataMap, export); } @Override @@ -1406,7 +1410,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { Result result = new Result(); String userName = JwtUtil.getUserNameByToken(request); QCResult qcResult = new QCResult(); - //获取各数据的范围信息 + // 获取各数据的范围信息 Sections sections = new Sections(); List collectionTimeSections = sections.getCollectionTimeSections(); String collectionMerits = collectionTimeSections.get(1) + "~" + collectionTimeSections.get(4); @@ -1419,7 +1423,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { qcResult.setXenonVolumeEvaluationMetrics(xeMerits); String xe133MDCEvaluationMetrics = "0.001 ~ 5"; qcResult.setXe133MDCEvaluationMetrics(xe133MDCEvaluationMetrics); - //获取数据信息 + // 获取数据信息 SpectrumFileRecord dbSpectrumFilePath = new SpectrumFileRecord(); GardsXeResultsSpectrum gardsXeResults = new GardsXeResultsSpectrum(); String sampleFilePath = ""; @@ -1430,13 +1434,13 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { File gasTmp = null; File detTmp = null; try { - //判断是否有sampleId和dbName 如果有则说明数据来源DB + // 判断是否有sampleId和dbName 如果有则说明数据来源DB if (Objects.nonNull(sampleId) && StringUtils.isNotBlank(dbName)) { Integer analysisID = null; - if (dbName.equalsIgnoreCase("auto")){ + if (dbName.equalsIgnoreCase("auto")) { dbName = "RNAUTO"; analysisID = spectrumAnalysisMapper.getAnalysisID(dbName, sampleId, "RNAUTO"); - }else if (dbName.equalsIgnoreCase("man")){ + } else if (dbName.equalsIgnoreCase("man")) { dbName = "RNMAN"; analysisID = spectrumAnalysisMapper.getAnalysisID(dbName, sampleId, userName); } @@ -1444,11 +1448,11 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { sampleFilePath = StringPool.SLASH + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + dbSpectrumFilePath.getSampleFilePath().substring(0, dbSpectrumFilePath.getSampleFilePath().lastIndexOf(StringPool.SLASH)); gasFilePath = StringPool.SLASH + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + dbSpectrumFilePath.getGasBgFilePath().substring(0, dbSpectrumFilePath.getGasBgFilePath().lastIndexOf(StringPool.SLASH)); detFilePath = StringPool.SLASH + 
spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + dbSpectrumFilePath.getDetBgFilePath().substring(0, dbSpectrumFilePath.getDetBgFilePath().lastIndexOf(StringPool.SLASH)); - //查询数据库中结果的Xe数据 + // 查询数据库中结果的Xe数据 xeDataList = spectrumAnalysisMapper.getXeDataList(sampleId); xeDataList = xeDataList.stream().filter(item -> item.getNuclideName().equals(XeNuclideName.XE_133.getType())).collect(Collectors.toList()); gardsXeResults = xeDataList.get(0); - } else {//如果没有数据来源就是File + } else {// 如果没有数据来源就是File String path = StringPool.SLASH + spectrumPathProperties.getUploadPath() + StringPool.SLASH + userName; sampleFilePath = path; gasFilePath = path; @@ -1456,7 +1460,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { sampleTmp = phdFileUtil.analyzeFile(path, sampleFileName); gasTmp = phdFileUtil.analyzeFile(path, gasFileName); detTmp = phdFileUtil.analyzeFile(path, detFileName); - if ( Objects.isNull(sampleTmp) || Objects.isNull(gasTmp) || Objects.isNull(detTmp) ) { + if (Objects.isNull(sampleTmp) || Objects.isNull(gasTmp) || Objects.isNull(detTmp)) { result.error500("The file under ftp does not exist"); return result; } @@ -1464,7 +1468,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { xeDataList = xeDataList.stream().filter(item -> item.getNuclideName().equals(XeNuclideName.XE_133.getType())).collect(Collectors.toList()); gardsXeResults = xeDataList.get(0); } - //解析sample,gas,det文件并判断数据状态 + // 解析sample,gas,det文件并判断数据状态 if (StringUtils.isNotBlank(sampleFileName) && StringUtils.isNotBlank(gasFileName) && StringUtils.isNotBlank(detFileName)) { EnergySpectrumStruct sampleSourceData = phdFileUtil.analyzeFileSourceData(sampleFilePath, sampleFileName); EnergySpectrumStruct gasSourceData = phdFileUtil.analyzeFileSourceData(gasFilePath, gasFileName); @@ -1474,39 +1478,39 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { Date collectStartDate = DateUtils.parseDate(sampleSourceData.collection_start_date + StringPool.SPACE + sampleSourceData.collection_start_time); Date collectStopDate = DateUtils.parseDate(sampleSourceData.collection_stop_date + StringPool.SPACE + sampleSourceData.collection_stop_time); Double collection_time = Double.valueOf(collectStopDate.getTime() / 1000 - collectStartDate.getTime() / 1000); - String collection_time_value = String.format("%.2f",Double.valueOf(collectStopDate.getTime()/1000 - collectStartDate.getTime()/ 1000)/3600.0); + String collection_time_value = String.format("%.2f", Double.valueOf(collectStopDate.getTime() / 1000 - collectStartDate.getTime() / 1000) / 3600.0); qcResult.setCollectTimeValue(collection_time_value); - if (collectionTimeSections.get(1) < collection_time/3600 && collection_time/3600 < collectionTimeSections.get(4)){ + if (collectionTimeSections.get(1) < collection_time / 3600 && collection_time / 3600 < collectionTimeSections.get(4)) { qcResult.setCollectTimeStatus("Pass"); - }else { + } else { qcResult.setCollectTimeStatus("Failed"); } - String acquisition_live_sec = String.format("%.2f",sampleSourceData.acquisition_live_time/3600.0); + String acquisition_live_sec = String.format("%.2f", sampleSourceData.acquisition_live_time / 3600.0); qcResult.setAcquisitionTimeValue(acquisition_live_sec); - if (acquisitionTimeSections.get(1) < sampleSourceData.acquisition_live_time/3600.0 && sampleSourceData.acquisition_live_time/3600.0 < acquisitionTimeSections.get(4)){ + if (acquisitionTimeSections.get(1) < sampleSourceData.acquisition_live_time / 3600.0 && 
sampleSourceData.acquisition_live_time / 3600.0 < acquisitionTimeSections.get(4)) { qcResult.setAcquisitionTimeStatus("Pass"); - }else { + } else { qcResult.setAcquisitionTimeStatus("Failed"); } String s_xe_stable_volume = String.valueOf(sampleSourceData.sample_volume_of_Xe); qcResult.setXenonVolumeValue(s_xe_stable_volume); - if (xeVolumeSections.get(1) < sampleSourceData.sample_volume_of_Xe){ + if (xeVolumeSections.get(1) < sampleSourceData.sample_volume_of_Xe) { qcResult.setXenonVolumeStatus("Pass"); - }else { + } else { qcResult.setXenonVolumeStatus("Failed"); } // String gasMeasurementID = gasSourceData.measurement_id; - if (gasMeasurementID.equals(sampleSourceData.gas_bk_measurement_id)){ + if (gasMeasurementID.equals(sampleSourceData.gas_bk_measurement_id)) { qcResult.setGasBgValueAndStatus(true); - }else { + } else { qcResult.setGasBgValueAndStatus(false); } // String detMeasurementID = detSourceData.measurement_id; - if (detMeasurementID.equals(sampleSourceData.detector_bk_measurement_id)){ + if (detMeasurementID.equals(sampleSourceData.detector_bk_measurement_id)) { qcResult.setDetBgValueAndStatus(true); - }else { + } else { qcResult.setDetBgValueAndStatus(false); } } @@ -1514,11 +1518,11 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { throw new RuntimeException(e); } } - if (Objects.nonNull(gardsXeResults)){ + if (Objects.nonNull(gardsXeResults)) { qcResult.setXe133MDCValue(String.valueOf(gardsXeResults.getMdc())); - if ( 0.001 < gardsXeResults.getMdc() && gardsXeResults.getMdc() < 5.0 ){ + if (0.001 < gardsXeResults.getMdc() && gardsXeResults.getMdc() < 5.0) { qcResult.setXe133MDCStatus("Pass"); - }else { + } else { qcResult.setXe133MDCStatus("Failed"); } } @@ -1558,7 +1562,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { dataMap.replaceAll((key, value) -> ObjectUtil.isNull(value) ? 
"" : value); String export = "QCResult-Beta.xls"; String template = QcResult_B.getName(); - ExportUtil.exportXls(response,template,dataMap,export); + ExportUtil.exportXls(response, template, dataMap, export); } @Override @@ -1569,7 +1573,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { String sampleFilePath = StringPool.SLASH + spectrumPathProperties.getUploadPath() + StringPool.SLASH + userName; List xeDataList = new LinkedList<>(); FTPClient ftpClient = ftpUtil.LoginFTP(); - if (Objects.isNull(ftpClient)){ + if (Objects.isNull(ftpClient)) { result.error500("ftp connection failed"); return result; } @@ -1579,7 +1583,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { File gasTmp = null; File detTmp = null; try { - //切换被动模式 + // 切换被动模式 ftpClient.enterLocalPassiveMode(); ftpClient.setFileType(FTPClient.BINARY_FILE_TYPE); // 设置编码,当文件中存在中文且上传后文件乱码时可使用此配置项 @@ -1588,26 +1592,26 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { if (Objects.nonNull(sampleId)) { sampleFilePath = spectrumAnalysisMapper.getSampleFilePath(sampleId); sampleFilePath = StringPool.SLASH + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + sampleFilePath.substring(0, sampleFilePath.lastIndexOf(StringPool.SLASH)); - //查询数据库中结果的Xe数据 + // 查询数据库中结果的Xe数据 xeDataList = spectrumAnalysisMapper.getXeDataList(sampleId); } else { String path = StringPool.SLASH + spectrumPathProperties.getUploadPath() + StringPool.SLASH + userName; sampleTmp = phdFileUtil.analyzeFile(path, sampleFileName); gasTmp = phdFileUtil.analyzeFile(path, gasFileName); detTmp = phdFileUtil.analyzeFile(path, detFileName); - if ( Objects.isNull(sampleTmp) || Objects.isNull(gasTmp) || Objects.isNull(detTmp) ) { + if (Objects.isNull(sampleTmp) || Objects.isNull(gasTmp) || Objects.isNull(detTmp)) { result.error500("The file under ftp does not exist"); return result; } xeDataList = phdFileUtil.analyzeQCResultXe(sampleTmp, gasTmp, detTmp); } - if (StringUtils.isNotBlank(sampleFilePath)){ + if (StringUtils.isNotBlank(sampleFilePath)) { ftpClient.changeWorkingDirectory(sampleFilePath); inputStream = ftpClient.retrieveFileStream(sampleFileName); - if (Objects.nonNull(inputStream)){ - //声明一个临时文件 + if (Objects.nonNull(inputStream)) { + // 声明一个临时文件 file = File.createTempFile("betaGamma", null); - //将ftp文件的输入流复制给临时文件 + // 将ftp文件的输入流复制给临时文件 FileUtils.copyInputStreamToFile(inputStream, file); EnergySpectrumStruct struct = EnergySpectrumHandler.getSourceData(file.getAbsolutePath()); rlrDataValues.setSrid(struct.sample_ref_id); @@ -1618,7 +1622,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { rlrDataValues.setAcq_start_date(struct.acquisition_start_date); rlrDataValues.setAcq_start_time(struct.acquisition_start_time); rlrDataValues.setAcq_live_time(String.valueOf(struct.acquisition_live_time)); - if (CollectionUtils.isNotEmpty(xeDataList)){ + if (CollectionUtils.isNotEmpty(xeDataList)) { List xe131mDataList = xeDataList.stream().filter(item -> item.getNuclideName().equals(XeNuclideName.XE_131m.getType())).collect(Collectors.toList()); GardsXeResultsSpectrum xe131m = xe131mDataList.get(0); rlrDataValues.setXe131m_conc(NumUtil.keepStr(xe131m.getConc(), 5)); @@ -1653,10 +1657,10 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { throw new RuntimeException(e); } finally { try { - if (Objects.nonNull(ftpClient)){ + if (Objects.nonNull(ftpClient)) { ftpClient.disconnect(); } - if (Objects.nonNull(inputStream)){ + 
if (Objects.nonNull(inputStream)) { inputStream.close(); } if (Objects.nonNull(file)) { @@ -1683,12 +1687,12 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { @Override public void exportRLR(BetaRLR betaRLR, HttpServletResponse response) { if (ObjectUtil.isNull(betaRLR)) return; - Map dataMap = BeanUtil.beanToMap(betaRLR); + Map dataMap = BeanUtil.beanToMap(betaRLR); // 将Null值替换为"",避免空指针异常(或者在模板中进行判断) dataMap.replaceAll((key, value) -> ObjectUtil.isNull(value) ? "" : value); String export = "RLR-Beta.xls"; String template = RLR_B.getName(); - ExportUtil.exportXls(response, template, dataMap,export); + ExportUtil.exportXls(response, template, dataMap, export); } @Override @@ -1699,7 +1703,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { Cache> cache = betaCache.getBetaCache(); Map cacheMap = new HashMap<>(); String qcPath = StringPool.SLASH + spectrumPathProperties.getUploadPath() + StringPool.SLASH + userName; - //如果sampleId不为空 + // 如果sampleId不为空 if (Objects.nonNull(sampleId)) { GardsSampleData sampleData = spectrumAnalysisMapper.getSampleData(sampleId); String collectStartStr = DateUtils.formatDate(sampleData.getCollectStart(), "yyyy/MM/dd HH:mm:ss"); @@ -1709,15 +1713,15 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { } } FTPClient ftpClient = ftpUtil.LoginFTP(); - if (Objects.isNull(ftpClient)){ + if (Objects.isNull(ftpClient)) { result.error500("ftp connection failed"); return result; } InputStream inputStream = null; File file = null; try { - if (StringUtils.isNotBlank(qcPath)){ - //切换被动模式 + if (StringUtils.isNotBlank(qcPath)) { + // 切换被动模式 ftpClient.enterLocalPassiveMode(); ftpClient.setFileType(FTPClient.BINARY_FILE_TYPE); // 设置编码,当文件中存在中文且上传后文件乱码时可使用此配置项 @@ -1725,22 +1729,22 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { ftpClient.setFileTransferMode(FTPClient.STREAM_TRANSFER_MODE); ftpClient.changeWorkingDirectory(qcPath); inputStream = ftpClient.retrieveFileStream(qcFileName); - if (Objects.nonNull(inputStream)){ - //声明一个临时文件 + if (Objects.nonNull(inputStream)) { + // 声明一个临时文件 file = File.createTempFile("betaGamma", null); - //将ftp文件的输入流复制给临时文件 + // 将ftp文件的输入流复制给临时文件 FileUtils.copyInputStreamToFile(inputStream, file); EnergySpectrumStruct struct = EnergySpectrumHandler.getSourceData(file.getAbsolutePath()); - //读取Gamma Detector Calibration所需要的参数 + // 读取Gamma Detector Calibration所需要的参数 long numGChannel = struct.num_g_channel; List gCentroidChannel = struct.g_centroid_channel; List gEnergy = struct.g_energy; int gRecordCount = struct.g_record_count; List gCounts = struct.g_counts; List gammaParam = EnergySpectrumHandler.GetFileFittingPara(gCentroidChannel, gEnergy); - //计算Calibration Parameter Of Gamma Detector + // 计算Calibration Parameter Of Gamma Detector List oldScatterSeries = new LinkedList<>(); - for (int i=0; i xs = new LinkedList<>(); List ys = new LinkedList<>(); - for (int i=0; i< oldScatterSeries.size(); i++){ + for (int i = 0; i < oldScatterSeries.size(); i++) { xs.add(oldScatterSeries.get(i).getX()); ys.add(oldScatterSeries.get(i).getY()); } - //C to E + // C to E List fittingPara = EnergySpectrumHandler.GetFileFittingPara(xs, ys); List fittingParaStr = new LinkedList<>(); - for (Double para:fittingPara) { + for (Double para : fittingPara) { fittingParaStr.add(String.valueOf(para)); } map.put("CToE", fittingParaStr); - //E to C + // E to C List fittingParaToUi = EnergySpectrumHandler.GetFileFittingPara(ys, xs); List fittingParaToUiStr 
= new LinkedList<>(); - for (Double para:fittingParaToUi) { + for (Double para : fittingParaToUi) { fittingParaToUiStr.add(String.valueOf(para)); } map.put("EToC", fittingParaToUiStr); - //Figure of Gamma Detector Calibration + // Figure of Gamma Detector Calibration List gchannels = new ArrayList<>(); List> gammaEnergyList = new LinkedList<>(); - for (int i=0; i gammaEnergy = EnergySpectrumHandler.GetFileFittingData(gchannels, gammaParam); gammaEnergyList.add(gammaEnergy); } map.put("gammaEnergy", gammaEnergyList); - //Gamma Spectrum: QC + // Gamma Spectrum: QC List seriseDataList = new LinkedList<>(); - long min =0; - double max =0; - for ( int i=1; imax){ + if (count > max) { max = count; } } - if (max<10) { - max += max*0.5; - } else if (max<50) { - max += max*0.2; - } else if (max<100) { - max += max*0.1; + if (max < 10) { + max += max * 0.5; + } else if (max < 50) { + max += max * 0.2; + } else if (max < 100) { + max += max * 0.1; } else { - max += max*0.05; + max += max * 0.05; } map.put("min", min); map.put("max", max); @@ -1812,7 +1816,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { cacheMap.put("Series", oldScatterSeries); cacheMap.put("fittingPara", fittingParaStr); cacheMap.put("fittingParaToUi", fittingParaToUiStr); - cache.put(qcFileName+"-"+userName+"-gamma", cacheMap); + cache.put(qcFileName + "-" + userName + "-gamma", cacheMap); betaCache.setBetaCache(cache); } } @@ -1820,10 +1824,10 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { throw new RuntimeException(e); } finally { try { - if (Objects.nonNull(ftpClient)){ + if (Objects.nonNull(ftpClient)) { ftpClient.disconnect(); } - if (Objects.nonNull(inputStream)){ + if (Objects.nonNull(inputStream)) { inputStream.close(); } if (Objects.nonNull(file)) { @@ -1851,19 +1855,19 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { String collectStartStr = DateUtils.formatDate(sampleData.getCollectStart(), "yyyy/MM/dd HH:mm:ss"); String dbQcFilePath = spectrumAnalysisMapper.getQCFilePath(sampleData.getSiteDetCode(), collectStartStr); if (StringUtils.isNotBlank(dbQcFilePath)) { - qcPath = StringPool.SLASH + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + dbQcFilePath.substring(0, dbQcFilePath.lastIndexOf(StringPool.SLASH)); + qcPath = StringPool.SLASH + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + dbQcFilePath.substring(0, dbQcFilePath.lastIndexOf(StringPool.SLASH)); } } FTPClient ftpClient = ftpUtil.LoginFTP(); - if (Objects.isNull(ftpClient)){ + if (Objects.isNull(ftpClient)) { result.error500("ftp connection failed"); return result; } InputStream inputStream = null; File file = null; try { - if (StringUtils.isNotBlank(qcPath)){ - //切换被动模式 + if (StringUtils.isNotBlank(qcPath)) { + // 切换被动模式 ftpClient.enterLocalPassiveMode(); ftpClient.setFileType(FTPClient.BINARY_FILE_TYPE); // 设置编码,当文件中存在中文且上传后文件乱码时可使用此配置项 @@ -1871,50 +1875,50 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { ftpClient.setFileTransferMode(FTPClient.STREAM_TRANSFER_MODE); ftpClient.changeWorkingDirectory(qcPath); inputStream = ftpClient.retrieveFileStream(qcFileName); - if (Objects.nonNull(inputStream)){ - //声明一个临时文件 + if (Objects.nonNull(inputStream)) { + // 声明一个临时文件 file = File.createTempFile("betaGamma", null); - //将ftp文件的输入流复制给临时文件 + // 将ftp文件的输入流复制给临时文件 FileUtils.copyInputStreamToFile(inputStream, file); EnergySpectrumStruct struct = EnergySpectrumHandler.getSourceData(file.getAbsolutePath()); - 
//Figure of Beta Detector Calibration + // Figure of Beta Detector Calibration long numBChannel = struct.num_b_channel; List> betaEnergyList = new LinkedList<>(); List bChannel = struct.b_channel; List bElectronEnergy = struct.b_electron_energy; List betaParam = EnergySpectrumHandler.GetFileFittingPara(bChannel, bElectronEnergy); List bchannels = new ArrayList<>(); - for (int i=0; i betaEnergy = EnergySpectrumHandler.GetFileFittingData(bchannels, betaParam); betaEnergyList.add(betaEnergy); } map.put("betaEnergy", betaEnergyList); - //gamma Energy + // gamma Energy long numGChannel = struct.num_g_channel; List gCentroidChannel = struct.g_centroid_channel; List gEnergy = struct.g_energy; List gammaParam = EnergySpectrumHandler.GetFileFittingPara(gCentroidChannel, gEnergy); List gchannels = new ArrayList<>(); List> gammaEnergyList = new LinkedList<>(); - for (int i=0; i gammaEnergy = EnergySpectrumHandler.GetFileFittingData(gchannels, gammaParam); gammaEnergyList.add(gammaEnergy); } map.put("gammaEnergy", gammaEnergyList); - //Beta-Gamma Spectrum: QC + // Beta-Gamma Spectrum: QC long bChannels = struct.b_channels; long gChannels = struct.g_channels; List hCounts = struct.h_counts; List histogramDataList = new LinkedList<>(); - for (int column=0; column0) { + if (count > 0) { HistogramData histogramData = new HistogramData(); histogramData.setB(row); histogramData.setG(column); @@ -1923,12 +1927,12 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { } } map.put("histogramData", histogramDataList); - //计算Calibration Parameter Of Gamma Detector + // 计算Calibration Parameter Of Gamma Detector List bChannelList = struct.b_channel; List bEnergy = struct.b_electron_energy; int bRecordCount = struct.b_record_count; List oldScatterSeries = new LinkedList<>(); - for (int i=0; i xs = new LinkedList<>(); List ys = new LinkedList<>(); - for (int i=0; i< oldScatterSeries.size(); i++){ + for (int i = 0; i < oldScatterSeries.size(); i++) { xs.add(oldScatterSeries.get(i).getX()); ys.add(oldScatterSeries.get(i).getY()); } - //C to E + // C to E List fittingPara = EnergySpectrumHandler.GetFileFittingPara(xs, ys); List fittingParaStr = new LinkedList<>(); - for (Double para:fittingPara) { + for (Double para : fittingPara) { fittingParaStr.add(String.valueOf(para)); } map.put("CToE", fittingParaStr); - //E to C + // E to C List fittingParaToUi = EnergySpectrumHandler.GetFileFittingPara(ys, xs); List fittingParaToUiStr = new LinkedList<>(); - for (Double para:fittingParaToUi) { + for (Double para : fittingParaToUi) { fittingParaToUiStr.add(String.valueOf(para)); } map.put("EToC", fittingParaToUiStr); @@ -1961,7 +1965,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { cacheMap.put("Series", oldScatterSeries); cacheMap.put("fittingPara", fittingParaStr); cacheMap.put("fittingParaToUi", fittingParaToUiStr); - cache.put(qcFileName+"-"+userName+"-beta", cacheMap); + cache.put(qcFileName + "-" + userName + "-beta", cacheMap); betaCache.setBetaCache(cache); } } @@ -1969,10 +1973,10 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { throw new RuntimeException(e); } finally { try { - if (Objects.nonNull(ftpClient)){ + if (Objects.nonNull(ftpClient)) { ftpClient.disconnect(); } - if (Objects.nonNull(inputStream)){ + if (Objects.nonNull(inputStream)) { inputStream.close(); } if (Objects.nonNull(file)) { @@ -1993,12 +1997,12 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { Map map = new HashMap<>(); 
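For reference, the beta-gamma QC histogram handling in the hunks above reduces the flat h_counts list to its occupied cells (beta channel, gamma channel, count). A minimal, self-contained sketch of that reduction follows; the beta-major index layout (index = column * bChannels + row) and the local HistogramCell holder are assumptions for illustration, not the project's HistogramData class.

import java.util.ArrayList;
import java.util.List;

public class HistogramSketch {

    /** Stand-in holder for one occupied histogram cell. */
    record HistogramCell(int beta, int gamma, long count) { }

    /** Keep only non-zero cells of a flat beta-gamma coincidence histogram. */
    static List<HistogramCell> nonZeroCells(List<Long> hCounts, int bChannels, int gChannels) {
        List<HistogramCell> cells = new ArrayList<>();
        for (int column = 0; column < gChannels; column++) {      // gamma axis
            for (int row = 0; row < bChannels; row++) {           // beta axis
                // Assumed layout: beta-major flat list, index = column * bChannels + row.
                long count = hCounts.get(column * bChannels + row);
                if (count > 0) {
                    cells.add(new HistogramCell(row, column, count));
                }
            }
        }
        return cells;
    }
}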
String userName = JwtUtil.getUserNameByToken(request); String samplePathName = StringPool.SLASH + spectrumPathProperties.getUploadPath() + StringPool.SLASH + userName; - if (Objects.nonNull(sampleId)){ + if (Objects.nonNull(sampleId)) { String sampleFilePath = spectrumAnalysisMapper.getSampleFilePath(sampleId); samplePathName = StringPool.SLASH + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + sampleFilePath.substring(0, sampleFilePath.lastIndexOf(StringPool.SLASH)); } FTPClient ftpClient = ftpUtil.LoginFTP(); - if (Objects.isNull(ftpClient)){ + if (Objects.isNull(ftpClient)) { result.error500("ftp connection failed"); return result; } @@ -2006,34 +2010,34 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { File file = null; try { if (StringUtils.isNotBlank(samplePathName)) { - //切换被动模式 + // 切换被动模式 ftpClient.enterLocalPassiveMode(); ftpClient.setFileType(FTPClient.BINARY_FILE_TYPE); // 设置编码,当文件中存在中文且上传后文件乱码时可使用此配置项 ftpClient.setControlEncoding("UTF-8"); ftpClient.setFileTransferMode(FTPClient.STREAM_TRANSFER_MODE); ftpClient.changeWorkingDirectory(samplePathName); - //获取ftp文件的输入流 + // 获取ftp文件的输入流 inputStream = ftpClient.retrieveFileStream(sampleFileName); - if (Objects.nonNull(inputStream)){ - //声明一个临时文件 + if (Objects.nonNull(inputStream)) { + // 声明一个临时文件 file = File.createTempFile("betaGamma", null); - //ftp文件输入流复制给临时文件 + // ftp文件输入流复制给临时文件 FileUtils.copyInputStreamToFile(inputStream, file); - //读取文件信息 + // 读取文件信息 EnergySpectrumStruct struct = EnergySpectrumHandler.getSourceData(file.getAbsolutePath()); - //Acquisition + // Acquisition String acquisitionStart = struct.acquisition_start_date + StringPool.SPACE + struct.acquisition_start_time; String acquisitionRealTime = String.valueOf(struct.acquisition_real_time); String acquisitionLiveTime = String.valueOf(struct.acquisition_live_time); map.put("acquisitionStart", acquisitionStart); map.put("acquisitionRealTime", acquisitionRealTime); map.put("acquisitionLiveTime", acquisitionLiveTime); - //Gamma Spectrum: Sample - //蓝色线 + // Gamma Spectrum: Sample + // 蓝色线 List gCounts = struct.g_counts; List gammaOriginSeriseData = new LinkedList<>(); - for (int i=0; i< struct.num_g_channel; i++){ + for (int i = 0; i < struct.num_g_channel; i++) { long count = gCounts.get(i); SeriseData temp = new SeriseData(); temp.setX(i); @@ -2044,20 +2048,20 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { List chanelEnergy = new LinkedList<>(); List gCentroidChannel = struct.g_centroid_channel; List gEnergy = struct.g_energy; - List fittingPara = EnergySpectrumHandler.GetFileFittingPara(gCentroidChannel,gEnergy); - for (int i=0; i fittingPara = EnergySpectrumHandler.GetFileFittingPara(gCentroidChannel, gEnergy); + for (int i = 0; i < struct.num_g_channel; i++) { List channel = new LinkedList<>(); channel.add(Double.valueOf(i)); List energy = EnergySpectrumHandler.GetFileFittingData(channel, fittingPara); - if(CollectionUtils.isNotEmpty(energy)) { + if (CollectionUtils.isNotEmpty(energy)) { chanelEnergy.add(energy.get(0)); } } map.put("gammaChannelEnergy", chanelEnergy); - //绿色线 + // 绿色线 List gammaProjectedSeriseData = new LinkedList<>(); List gammaProjectedDataValue = this.handleHistogram(struct.h_counts, struct.b_channels, struct.g_channels, "Vertical"); - for (int i=0; i bCounts = struct.b_counts; List betaOriginSeriseData = new LinkedList<>(); - for (int i=0; i< struct.num_b_channel; i++){ + for (int i = 0; i < struct.num_b_channel; i++) { long count = bCounts.get(i); SeriseData temp 
= new SeriseData(); temp.setX(i); @@ -2079,7 +2083,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { map.put("betaOriginSeriseData", betaOriginSeriseData); List betaProjectedSeriseData = new LinkedList<>(); List betaProjectedDataValue = this.handleHistogram(struct.h_counts, struct.b_channels, struct.g_channels, "Horizontal"); - for (int i=0; i sample_spectrum = new LinkedList<>(); List sample_b_spectrum = new LinkedList<>(); @@ -2160,7 +2164,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { if (CollectionUtils.isNotEmpty(sampleTmp.h_counts)) { sample_histogram = sampleTmp.h_counts; } - //获取det文件的文件读取信息 + // 获取det文件的文件读取信息 EnergySpectrumStruct detTmp = phdFileUtil.analyzeFileSourceData(detFilePath, extInfo.getDetFileName()); double detbgrRealTime = 0.0; List detbgr_spectrum = new LinkedList<>(); @@ -2178,92 +2182,92 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { if (CollectionUtils.isNotEmpty(detTmp.h_counts)) { detbgr_historgram = detTmp.h_counts; } - //计算 - //Ng + // 计算 + // Ng double s_Ng = 0.0; double d_Ng = 0.0; int begin = extInfo.getGammaBegin(); int end = extInfo.getGammaEnd(); - for(int i=begin;i Eb = new LinkedList<>(); List Ny = new LinkedList<>(); List Nx = new LinkedList<>(); - while(index<=51&&EbT>min_e) { - double s_Nb=0.0; - double d_Nb=0.0; - for(int k = index*5;k min_e) { + double s_Nb = 0.0; + double d_Nb = 0.0; + for (int k = index * 5; k < sample_b_spectrum.size() && k < detbgr_b_spectrum.size() && k < 255; k++) { s_Nb += sample_b_spectrum.get(k); d_Nb += detbgr_b_spectrum.get(k); } - double NbR = (s_Nb/sampleRealTime)-(d_Nb/detbgrRealTime); + double NbR = (s_Nb / sampleRealTime) - (d_Nb / detbgrRealTime); double s_Nc = 0.0; double d_Nc = 0.0; - for(int l = index*5;l<=255;l++) { - for(int m=begin;m<=end&&(256*m+l) scatter_serise_data = new LinkedList<>(); - double maxLeft=0.0; - double maxBottom=0.0; - double minLeft=0.0; - for(int i=0;iNx.get(i)) { + if (minLeft > Nx.get(i)) { minLeft = Nx.get(i); } - if(maxLeftNy.get(i)) { + if (minLeft > Ny.get(i)) { minLeft = Ny.get(i); } } - for(int i=0;i extDataList = new LinkedList<>(); - for(int i=0;i map = new HashMap<>(); - //计算 + // 计算 List Eb = extInfo.getEb(); List Ny = extInfo.getNy(); List Nx = extInfo.getNx(); - int pos=0; + int pos = 0; String fittype = extInfo.getFitType(); - if(fittype.equals("liner")) { - pos=1; - } else if(fittype.equals("poly2")) { - pos=0; - } else if(fittype.equals("poly3")) { + if (fittype.equals("liner")) { + pos = 1; + } else if (fittype.equals("poly2")) { + pos = 0; + } else if (fittype.equals("poly3")) { } - double lamadaXe = Math.log(2)/(extInfo.getHalfLife()*24*60*60); + double lamadaXe = Math.log(2) / (extInfo.getHalfLife() * 24 * 60 * 60); List fittingPara = new LinkedList<>(); - fittingPara = EnergySpectrumHandler.GetFittingPara(Nx,Ny,fittype); - if(CollectionUtils.isEmpty(fittingPara) && pos>fittingPara.size()-1) { + fittingPara = EnergySpectrumHandler.GetFittingPara(Nx, Ny, fittype); + if (CollectionUtils.isEmpty(fittingPara) && pos > fittingPara.size() - 1) { return result; } double acqRealTime = extInfo.getAcqRealTime(); - double Xe_activity = fittingPara.get(pos)*(lamadaXe*acqRealTime)/(1-Math.exp(-lamadaXe*acqRealTime)); + double Xe_activity = fittingPara.get(pos) * (lamadaXe * acqRealTime) / (1 - Math.exp(-lamadaXe * acqRealTime)); List line_serise_data = new LinkedList<>(); - double maxLeft=0.0; - double maxBottom=0.0; - double minLeft=0.0; - for(int i=0;iNx.get(i)) { + if 
(minLeft > Nx.get(i)) { minLeft = Nx.get(i); } - if(maxLeftNy.get(i)) { + if (minLeft > Ny.get(i)) { minLeft = Ny.get(i); } } String functionFit = ""; - if(fittype.equals("liner")) { + if (fittype.equals("liner")) { SeriseData left = new SeriseData(); List data = new LinkedList<>(); List rData = new LinkedList<>(); @@ -2328,38 +2332,38 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { data.clear(); rData.clear(); - data.add(Nx.get(Nx.size()-1)); - rData = EnergySpectrumHandler.GetFittingData(data,fittype,fittingPara); + data.add(Nx.get(Nx.size() - 1)); + rData = EnergySpectrumHandler.GetFittingData(data, fittype, fittingPara); SeriseData right = new SeriseData(); - right.setX(Nx.get(Nx.size()-1)); + right.setX(Nx.get(Nx.size() - 1)); right.setY(rData.get(0)); line_serise_data.add(left); line_serise_data.add(right); functionFit = "y = a*x + b "; - if (Nx.size()>1) { + if (Nx.size() > 1) { functionFit = String.format("y = %f*x + %f ", fittingPara.get(0), fittingPara.get(1)); } - } else if(fittype.equals("poly2")) { - for(int i=1;i<51;i++) { + } else if (fittype.equals("poly2")) { + for (int i = 1; i < 51; i++) { SeriseData temp = new SeriseData(); List data = new LinkedList<>(); List rData = new LinkedList<>(); - data.add(i*maxBottom/50); - rData = EnergySpectrumHandler.GetFittingData(data,fittype,fittingPara); - temp.setX(i*maxBottom/50); + data.add(i * maxBottom / 50); + rData = EnergySpectrumHandler.GetFittingData(data, fittype, fittingPara); + temp.setX(i * maxBottom / 50); temp.setY(rData.get(0)); line_serise_data.add(temp); } functionFit = "y = a+ b*x + c*x^2 "; - if (Nx.size()>1) { - functionFit = String.format("y = %f+ %f*x + %f*x^2 ",fittingPara.get(0),fittingPara.get(1),fittingPara.get(2)); + if (Nx.size() > 1) { + functionFit = String.format("y = %f+ %f*x + %f*x^2 ", fittingPara.get(0), fittingPara.get(1), fittingPara.get(2)); } - } else if(fittype.equals("poly3")) { + } else if (fittype.equals("poly3")) { functionFit = "y = a+ b*x + c*x^2 +d*x^3 "; } String refTime = ""; String xeAct = ""; - if (Nx.size()>1) { + if (Nx.size() > 1) { refTime = extInfo.getAcqStartTime(); xeAct = String.valueOf(Xe_activity); } @@ -2374,19 +2378,19 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { private List handleHistogram(List hcounts, long bChannels, long gChannels, String type) { List projected_data_value = new LinkedList<>(); - if (type.equals("Vertical")){ - for (int i=0; i map = new HashMap<>(); String userName = JwtUtil.getUserNameByToken(request); String filePath = StringPool.SLASH + spectrumPathProperties.getUploadPath() + StringPool.SLASH + userName; - //判断sampleId是否存在 + // 判断sampleId是否存在 if (Objects.nonNull(sampleId)) { String sampleFilePath = spectrumAnalysisMapper.getSampleFilePath(sampleId); filePath = StringPool.SLASH + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + sampleFilePath.substring(0, sampleFilePath.lastIndexOf(StringPool.SLASH)); } FTPClient ftpClient = ftpUtil.LoginFTP(); - if (Objects.isNull(ftpClient)){ + if (Objects.isNull(ftpClient)) { result.error500("ftp connection failed"); return result; } @@ -2416,32 +2420,32 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { File file = null; try { if (StringUtils.isNotBlank(filePath)) { - //切换被动模式 + // 切换被动模式 ftpClient.enterLocalPassiveMode(); ftpClient.setFileType(FTPClient.BINARY_FILE_TYPE); // 设置编码,当文件中存在中文且上传后文件乱码时可使用此配置项 ftpClient.setControlEncoding("UTF-8"); 
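The xenon activity computed in the hunks above applies a decay correction to the fitted value before it is reported. A minimal sketch of that correction, assuming the half-life is given in days and the acquisition real time in seconds (consistent with the 24*60*60 conversion shown):

public class DecayCorrectionSketch {

    /** Decay-correct a fitted count-rate-like value over the acquisition interval. */
    static double decayCorrectedActivity(double fittedValue, double halfLifeDays, double acqRealTime) {
        double lambda = Math.log(2) / (halfLifeDays * 24 * 60 * 60);   // decay constant, 1/s
        // Correction factor lambda*t / (1 - e^(-lambda*t)) approaches 1 when lambda*t is small.
        return fittedValue * (lambda * acqRealTime) / (1 - Math.exp(-lambda * acqRealTime));
    }
}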
ftpClient.setFileTransferMode(FTPClient.STREAM_TRANSFER_MODE); ftpClient.changeWorkingDirectory(filePath); - //获取ftp文件的输入流 + // 获取ftp文件的输入流 inputStream = ftpClient.retrieveFileStream(sampleFileName); - if (Objects.nonNull(inputStream)){ - //声明一个临时文件 + if (Objects.nonNull(inputStream)) { + // 声明一个临时文件 file = File.createTempFile("betaGamma", null); - //ftp文件输入流复制给临时文件 + // ftp文件输入流复制给临时文件 FileUtils.copyInputStreamToFile(inputStream, file); - //读取文件信息 + // 读取文件信息 EnergySpectrumStruct struct = EnergySpectrumHandler.getSourceData(file.getAbsolutePath()); String stationName = struct.site_code; - //根据台站id查询台站名称 - Map stationMap = (Map)redisUtil.get("stationMap"); - if (CollectionUtils.isEmpty(stationMap)){ + // 根据台站id查询台站名称 + Map stationMap = (Map) redisUtil.get("stationMap"); + if (CollectionUtils.isEmpty(stationMap)) { result.error500("The station cache information is empty!"); return result; } List stationNames = stationMap.values().stream().collect(Collectors.toList()); int index = stationNames.indexOf(stationName); - if (index<0){ + if (index < 0) { result.error500("The station information does not exist!"); return result; } @@ -2449,7 +2453,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { String stationId = String.valueOf(stationIds.get(index)); map.put("stationId", stationId); map.put("stationName", stationName); - //根据台站id查询探测器集合 + // 根据台站id查询探测器集合 List detectorList = new LinkedList<>(); GardsDetectors detectors = new GardsDetectors(); detectors.setDetectorCode("All"); @@ -2463,10 +2467,10 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { throw new RuntimeException(e); } finally { try { - if (Objects.nonNull(ftpClient)){ + if (Objects.nonNull(ftpClient)) { ftpClient.disconnect(); } - if (Objects.nonNull(inputStream)){ + if (Objects.nonNull(inputStream)) { inputStream.close(); } if (Objects.nonNull(file)) { @@ -2485,15 +2489,15 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { public Result changeDetector(String stationName) { Result result = new Result(); Map map = new HashMap<>(); - //根据台站id查询台站名称 - Map stationMap = (Map)redisUtil.get("stationMap"); - if (CollectionUtils.isEmpty(stationMap)){ + // 根据台站id查询台站名称 + Map stationMap = (Map) redisUtil.get("stationMap"); + if (CollectionUtils.isEmpty(stationMap)) { result.error500("The station cache information is empty!"); return result; } List stationNames = stationMap.values().stream().collect(Collectors.toList()); int index = stationNames.indexOf(stationName); - if (index<0){ + if (index < 0) { result.error500("The station information does not exist!"); return result; } @@ -2501,7 +2505,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { String stationId = String.valueOf(stationIds.get(index)); map.put("stationId", stationId); map.put("stationName", stationName); - //根据台站id查询探测器集合 + // 根据台站id查询探测器集合 List detectorList = new LinkedList<>(); GardsDetectors detectors = new GardsDetectors(); detectors.setDetectorCode("All"); @@ -2517,43 +2521,43 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { @Override public Result statisticsQuery(StatisticsQueryData statisticsQueryData) { Result result = new Result(); - if (Objects.isNull(statisticsQueryData.getStartTime())){ + if (Objects.isNull(statisticsQueryData.getStartTime())) { result.error500("The start time cannot be empty"); return result; } String startDate = DateUtils.formatDate(statisticsQueryData.getStartTime(), "yyyy-MM-dd"); 
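The station resolution above flattens the Redis-cached id-to-name map into two parallel lists and recovers the station id by the position of the matched name. A minimal sketch of that pattern, assuming both lists are built from the same map instance and iterate in the same order (for example a LinkedHashMap); otherwise the indices would not line up:

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

public class StationLookupSketch {

    /** Resolve a station id from its name via parallel key/value lists. */
    static String findStationId(Map<Integer, String> stationMap, String stationName) {
        List<String> stationNames = new ArrayList<>(stationMap.values());
        List<Integer> stationIds = new ArrayList<>(stationMap.keySet());
        int index = stationNames.indexOf(stationName);            // position of the matched name
        return index < 0 ? null : String.valueOf(stationIds.get(index));
    }
}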
statisticsQueryData.setStartDate(startDate); - if (Objects.isNull(statisticsQueryData.getEndTime())){ + if (Objects.isNull(statisticsQueryData.getEndTime())) { result.error500("The end time cannot be empty"); return result; } String endDate = DateUtils.formatDate(statisticsQueryData.getEndTime(), "yyyy-MM-dd"); statisticsQueryData.setEndDate(endDate); List detectorIdList = new LinkedList<>(); - if (StringUtils.isNotBlank(statisticsQueryData.getDetectorName())){ - if (statisticsQueryData.getDetectorName().equals("All")){ + if (StringUtils.isNotBlank(statisticsQueryData.getDetectorName())) { + if (statisticsQueryData.getDetectorName().equals("All")) { List detectorList = spectrumAnalysisMapper.getDetectorList(statisticsQueryData.getStationId()); detectorIdList = detectorList.stream().map(GardsDetectors::getDetectorId).collect(Collectors.toList()); - }else { + } else { detectorIdList.add(statisticsQueryData.getDetectorId()); } } statisticsQueryData.setDetectorList(detectorIdList); List items = new LinkedList<>(); - if (statisticsQueryData.isMDC()){ + if (statisticsQueryData.isMDC()) { items.add("GXR.MDC"); } - if (statisticsQueryData.isActivity()){ + if (statisticsQueryData.isActivity()) { items.add("GXR.CONC"); } statisticsQueryData.setItems(items); - if (CollectionUtils.isEmpty(statisticsQueryData.getNuclidesList())){ + if (CollectionUtils.isEmpty(statisticsQueryData.getNuclidesList())) { statisticsQueryData.setNuclidesList(new LinkedList<>()); } - //根据类型判断查询对应数据 + // 根据类型判断查询对应数据 List statisticsData = spectrumAnalysisMapper.statisticsQueryNuclides(statisticsQueryData); List stcGraphList = new LinkedList<>(); - if (statisticsQueryData.getNuclidesList().contains(XeNuclideName.XE_131m.getType())){ + if (statisticsQueryData.getNuclidesList().contains(XeNuclideName.XE_131m.getType())) { StcGraph stcGraphMDC = new StcGraph(); stcGraphMDC.setM_strGraphName("Xe131m MDC"); stcGraphMDC.setM_GraphPen("#00cafd"); @@ -2564,21 +2568,21 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { if (CollectionUtils.isNotEmpty(statisticsData)) { xe131m = statisticsData.stream().filter(item -> item.getNuclideName().equals(XeNuclideName.XE_131m.getType())).collect(Collectors.toList()); } - if (CollectionUtils.isNotEmpty(xe131m)){ - if (statisticsQueryData.isMDC()){ + if (CollectionUtils.isNotEmpty(xe131m)) { + if (statisticsQueryData.isMDC()) { List keys = new LinkedList<>(); List values = new LinkedList<>(); - for (StatisticsData data:xe131m) { + for (StatisticsData data : xe131m) { keys.add(data.getCollectStart()); values.add(data.getMdc()); } stcGraphMDC.setM_Keys(keys); stcGraphMDC.setM_Values(values); } - if (statisticsQueryData.isActivity()){ + if (statisticsQueryData.isActivity()) { List keys = new LinkedList<>(); List values = new LinkedList<>(); - for (StatisticsData data:xe131m) { + for (StatisticsData data : xe131m) { keys.add(data.getCollectStart()); values.add(data.getConc()); } @@ -2589,7 +2593,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { stcGraphList.add(stcGraphMDC); stcGraphList.add(stcGraphAct); } - if (statisticsQueryData.getNuclidesList().contains(XeNuclideName.XE_133m.getType())){ + if (statisticsQueryData.getNuclidesList().contains(XeNuclideName.XE_133m.getType())) { StcGraph stcGraphMDC = new StcGraph(); stcGraphMDC.setM_strGraphName("Xe133m MDC"); stcGraphMDC.setM_GraphPen("#119e4f"); @@ -2600,21 +2604,21 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { if 
(CollectionUtils.isNotEmpty(statisticsData)) { xe133m = statisticsData.stream().filter(item -> item.getNuclideName().equals(XeNuclideName.XE_133m.getType())).collect(Collectors.toList()); } - if (CollectionUtils.isNotEmpty(xe133m)){ - if (statisticsQueryData.isMDC()){ + if (CollectionUtils.isNotEmpty(xe133m)) { + if (statisticsQueryData.isMDC()) { List keys = new LinkedList<>(); List values = new LinkedList<>(); - for (StatisticsData data:xe133m) { + for (StatisticsData data : xe133m) { keys.add(data.getCollectStart()); values.add(data.getMdc()); } stcGraphMDC.setM_Keys(keys); stcGraphMDC.setM_Values(values); } - if (statisticsQueryData.isActivity()){ + if (statisticsQueryData.isActivity()) { List keys = new LinkedList<>(); List values = new LinkedList<>(); - for (StatisticsData data:xe133m) { + for (StatisticsData data : xe133m) { keys.add(data.getCollectStart()); values.add(data.getConc()); } @@ -2625,7 +2629,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { stcGraphList.add(stcGraphMDC); stcGraphList.add(stcGraphAct); } - if (statisticsQueryData.getNuclidesList().contains(XeNuclideName.XE_133.getType())){ + if (statisticsQueryData.getNuclidesList().contains(XeNuclideName.XE_133.getType())) { StcGraph stcGraphMDC = new StcGraph(); stcGraphMDC.setM_strGraphName("Xe133 MDC"); stcGraphMDC.setM_GraphPen("#00cafd"); @@ -2636,21 +2640,21 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { if (CollectionUtils.isNotEmpty(statisticsData)) { xe133 = statisticsData.stream().filter(item -> item.getNuclideName().equals(XeNuclideName.XE_133.getType())).collect(Collectors.toList()); } - if (CollectionUtils.isNotEmpty(xe133)){ - if (statisticsQueryData.isMDC()){ + if (CollectionUtils.isNotEmpty(xe133)) { + if (statisticsQueryData.isMDC()) { List keys = new LinkedList<>(); List values = new LinkedList<>(); - for (StatisticsData data:xe133) { + for (StatisticsData data : xe133) { keys.add(data.getCollectStart()); values.add(data.getMdc()); } stcGraphMDC.setM_Keys(keys); stcGraphMDC.setM_Values(values); } - if (statisticsQueryData.isActivity()){ + if (statisticsQueryData.isActivity()) { List keys = new LinkedList<>(); List values = new LinkedList<>(); - for (StatisticsData data:xe133) { + for (StatisticsData data : xe133) { keys.add(data.getCollectStart()); values.add(data.getConc()); } @@ -2661,7 +2665,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { stcGraphList.add(stcGraphMDC); stcGraphList.add(stcGraphAct); } - if (statisticsQueryData.getNuclidesList().contains(XeNuclideName.XE_135.getType())){ + if (statisticsQueryData.getNuclidesList().contains(XeNuclideName.XE_135.getType())) { StcGraph stcGraphMDC = new StcGraph(); stcGraphMDC.setM_strGraphName("Xe135 MDC"); stcGraphMDC.setM_GraphPen("#1775e7"); @@ -2672,21 +2676,21 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { if (CollectionUtils.isNotEmpty(statisticsData)) { xe135 = statisticsData.stream().filter(item -> item.getNuclideName().equals(XeNuclideName.XE_135.getType())).collect(Collectors.toList()); } - if (CollectionUtils.isNotEmpty(xe135)){ - if (statisticsQueryData.isMDC()){ + if (CollectionUtils.isNotEmpty(xe135)) { + if (statisticsQueryData.isMDC()) { List keys = new LinkedList<>(); List values = new LinkedList<>(); - for (StatisticsData data:xe135) { + for (StatisticsData data : xe135) { keys.add(data.getCollectStart()); values.add(data.getMdc()); } stcGraphMDC.setM_Keys(keys); stcGraphMDC.setM_Values(values); } 
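Each nuclide block above repeats the same series assembly: filter the query result by nuclide name, then collect the collect-start timestamps as keys and either the MDC or the concentration as values. A minimal, self-contained sketch with stand-in Row and Series types; the project's StatisticsData and StcGraph accessors are only inferred from the getters and setters shown, and the key type is assumed to be a timestamp.

import java.util.ArrayList;
import java.util.Date;
import java.util.List;

public class StatisticsSeriesSketch {

    /** Stand-in for one statistics row; only the fields used here. */
    record Row(String nuclideName, Date collectStart, Double mdc, Double conc) { }

    /** Parallel keys/values lists, as set on the graph via setM_Keys / setM_Values. */
    record Series(List<Date> keys, List<Double> values) { }

    static Series buildSeries(List<Row> all, String nuclideName, boolean useMdc) {
        List<Date> keys = new ArrayList<>();
        List<Double> values = new ArrayList<>();
        for (Row r : all) {
            if (!r.nuclideName().equals(nuclideName)) continue;   // filter by nuclide
            keys.add(r.collectStart());
            values.add(useMdc ? r.mdc() : r.conc());              // MDC or concentration series
        }
        return new Series(keys, values);
    }
}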
- if (statisticsQueryData.isActivity()){ + if (statisticsQueryData.isActivity()) { List keys = new LinkedList<>(); List values = new LinkedList<>(); - for (StatisticsData data:xe135) { + for (StatisticsData data : xe135) { keys.add(data.getCollectStart()); values.add(data.getConc()); } @@ -2705,22 +2709,22 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { @Override public Result statisticsQueryBtn(Integer detectorId, String detectorName, Integer stationId, String statisticsType, Date startTime, Date endTime) { Result result = new Result(); - if (Objects.isNull(startTime)){ + if (Objects.isNull(startTime)) { result.error500("The start time cannot be empty"); return result; } String startDate = DateUtils.formatDate(startTime, "yyyy-MM-dd"); - if (Objects.isNull(endTime)){ + if (Objects.isNull(endTime)) { result.error500("The end time cannot be empty"); return result; } String endDate = DateUtils.formatDate(endTime, "yyyy-MM-dd"); List detectorIdList = new LinkedList<>(); - if (StringUtils.isNotBlank(detectorName)){ - if (detectorName.equals("All")){ + if (StringUtils.isNotBlank(detectorName)) { + if (detectorName.equals("All")) { List detectorList = spectrumAnalysisMapper.getDetectorList(stationId); detectorIdList = detectorList.stream().map(GardsDetectors::getDetectorId).collect(Collectors.toList()); - }else { + } else { detectorIdList.add(detectorId); } } @@ -2731,7 +2735,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { stcGraph.setM_GraphPen("green"); List keys = new LinkedList<>(); List values = new LinkedList<>(); - for (StatisticsData data:statisticsData) { + for (StatisticsData data : statisticsData) { keys.add(data.getDateTime()); values.add(data.getDataValue()); } @@ -2746,7 +2750,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { stcGraph.setM_GraphPen("green"); List keys = new LinkedList<>(); List values = new LinkedList<>(); - for (StatisticsData data:statisticsData) { + for (StatisticsData data : statisticsData) { keys.add(data.getDateTime()); values.add(data.getDataValue()); } @@ -2761,7 +2765,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { stcGraph.setM_GraphPen("green"); List keys = new LinkedList<>(); List values = new LinkedList<>(); - for (StatisticsData data:statisticsData) { + for (StatisticsData data : statisticsData) { keys.add(data.getDateTime()); values.add(data.getDataValue()); } @@ -2776,7 +2780,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { stcGraph.setM_GraphPen("green"); List keys = new LinkedList<>(); List values = new LinkedList<>(); - for (StatisticsData data:statisticsData) { + for (StatisticsData data : statisticsData) { keys.add(data.getDateTime()); values.add(data.getDataValue()); } @@ -2796,38 +2800,38 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { Map cacheMap = new HashMap<>(); Map map = new HashMap<>(); if ((CollectionUtils.isNotEmpty(tempPoints) && Objects.nonNull(count) && tempPoints.size() != count) || (Objects.isNull(paramA) || StringUtils.isBlank(String.valueOf(paramA))) - || (Objects.isNull(paramB) || StringUtils.isBlank(String.valueOf(paramB))) || (Objects.isNull(paramC) || StringUtils.isBlank(String.valueOf(paramC))) ){ + || (Objects.isNull(paramB) || StringUtils.isBlank(String.valueOf(paramB))) || (Objects.isNull(paramC) || StringUtils.isBlank(String.valueOf(paramC)))) { List xs = new LinkedList<>(); List ys = new LinkedList<>(); - for 
(int i=0; i fittingPara = EnergySpectrumHandler.GetFileFittingPara(xs, ys); List fittingParaStr = new LinkedList<>(); - for (Double para:fittingPara) { + for (Double para : fittingPara) { fittingParaStr.add(String.valueOf(para)); } map.put("CToE", fittingParaStr); List channels = new LinkedList<>(); - for (int i=0; i<255; i++) { + for (int i = 0; i < 255; i++) { channels.add(Double.valueOf(i)); } - List energys = EnergySpectrumHandler.GetFileFittingData(channels,fittingPara); + List energys = EnergySpectrumHandler.GetFileFittingData(channels, fittingPara); List newLineSeries = new LinkedList<>(); - for (int i=0; i fittingParaToUi = EnergySpectrumHandler.GetFileFittingPara(ys, xs); List fittingParaToUiStr = new LinkedList<>(); - for (Double para:fittingParaToUi) { + for (Double para : fittingParaToUi) { fittingParaToUiStr.add(String.valueOf(para)); } map.put("EToC", fittingParaToUiStr); @@ -2835,7 +2839,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { cacheMap.put("Series", tempPoints); cacheMap.put("fittingPara", fittingParaStr); cacheMap.put("fittingParaToUi", fittingParaToUiStr); - cache.put(qcFileName+"-"+userName+"-"+tabName, cacheMap); + cache.put(qcFileName + "-" + userName + "-" + tabName, cacheMap); betaCache.setBetaCache(cache); } else { List fittingPara = new LinkedList<>(); @@ -2847,20 +2851,20 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { fittingParaStr.add(String.valueOf(paramB)); fittingParaStr.add(String.valueOf(paramC)); List xs = new LinkedList<>(); - for (int i=0; i ys = EnergySpectrumHandler.GetFileFittingData(xs, fittingPara); List seriseDataList = new LinkedList<>(); List tableWidgets = new LinkedList<>(); - for (int i=0; i channels = new LinkedList<>(); - for (int i=0; i<255; i++) { + for (int i = 0; i < 255; i++) { channels.add(Double.valueOf(i)); } - List energys = EnergySpectrumHandler.GetFileFittingData(channels,fittingPara); + List energys = EnergySpectrumHandler.GetFileFittingData(channels, fittingPara); List newLineSeries = new LinkedList<>(); - for (int i=0; i fittingParaToUi = EnergySpectrumHandler.GetFileFittingPara(ys, xs); List fittingParaToUiStr = new LinkedList<>(); - for (Double para:fittingParaToUi) { + for (Double para : fittingParaToUi) { fittingParaToUiStr.add(String.valueOf(para)); } map.put("EToC", fittingParaToUiStr); @@ -2893,7 +2897,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { cacheMap.put("Series", seriseDataList); cacheMap.put("fittingPara", fittingParaStr); cacheMap.put("fittingParaToUi", fittingParaToUiStr); - cache.put(qcFileName+"-"+userName+"-"+tabName, cacheMap); + cache.put(qcFileName + "-" + userName + "-" + tabName, cacheMap); betaCache.setBetaCache(cache); } result.setSuccess(true); @@ -2903,15 +2907,15 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { @Override public Result getGammaGated(Integer chartHeight, Integer channelWidth, Integer gammaChannel, Integer sampleId, String qcFileName, HttpServletRequest request) { - //选择矩形框高度 - Integer flagHeight = channelWidth * (chartHeight/256); + // 选择矩形框高度 + Integer flagHeight = channelWidth * (chartHeight / 256); int value = Double.valueOf(flagHeight / 2).intValue(); - //计算得到最高值 + // 计算得到最高值 int up = gammaChannel - value; - if (up<0){ + if (up < 0) { up = 0; } - //计算得到最低值 + // 计算得到最低值 int down = up + value; Result result = new Result(); Map map = new HashMap<>(); @@ -2922,19 +2926,19 @@ public class SpectrumAnalysisServiceImpl implements 
ISpectrumAnalysisService { String collectStartStr = DateUtils.formatDate(sampleData.getCollectStart(), "yyyy/MM/dd HH:mm:ss"); String dbQcFilePath = spectrumAnalysisMapper.getQCFilePath(sampleData.getSiteDetCode(), collectStartStr); if (StringUtils.isNotBlank(dbQcFilePath)) { - qcPathName=StringPool.SLASH + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + dbQcFilePath.substring(0, dbQcFilePath.lastIndexOf(StringPool.SLASH)); + qcPathName = StringPool.SLASH + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + dbQcFilePath.substring(0, dbQcFilePath.lastIndexOf(StringPool.SLASH)); } } FTPClient ftpClient = ftpUtil.LoginFTP(); - if (Objects.isNull(ftpClient)){ + if (Objects.isNull(ftpClient)) { result.error500("ftp connection failed"); return result; } InputStream inputStream = null; File file = null; try { - if (StringUtils.isNotBlank(qcPathName)){ - //切换被动模式 + if (StringUtils.isNotBlank(qcPathName)) { + // 切换被动模式 ftpClient.enterLocalPassiveMode(); ftpClient.setFileType(FTPClient.BINARY_FILE_TYPE); // 设置编码,当文件中存在中文且上传后文件乱码时可使用此配置项 @@ -2942,19 +2946,19 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { ftpClient.setFileTransferMode(FTPClient.STREAM_TRANSFER_MODE); ftpClient.changeWorkingDirectory(qcPathName); inputStream = ftpClient.retrieveFileStream(qcFileName); - if (Objects.nonNull(inputStream)){ - //声明一个临时文件 + if (Objects.nonNull(inputStream)) { + // 声明一个临时文件 file = File.createTempFile("betaGamma", null); - //将ftp文件的输入流复制给临时文件 + // 将ftp文件的输入流复制给临时文件 FileUtils.copyInputStreamToFile(inputStream, file); EnergySpectrumStruct struct = EnergySpectrumHandler.getSourceData(file.getAbsolutePath()); - //Beta-Gamma Spectrum: QC + // Beta-Gamma Spectrum: QC long bChannels = struct.b_channels; List hCounts = struct.h_counts; List serise_data = new LinkedList<>(); - for ( int i=0; i map = new HashMap<>(); - //获取当前登陆的用户名 + // 获取当前登陆的用户名 String userName = JwtUtil.getUserNameByToken(request); if ("CurrentSpectrum".equals(analyseData.getApplyType())) { String dbName = analyseData.getDbNames().get(0); Integer sampleId = analyseData.getSampleIds().get(0); - if (Objects.nonNull(sampleId)){ + if (Objects.nonNull(sampleId)) { Integer analysisID = null; - if (dbName.equalsIgnoreCase("auto")){ + if (dbName.equalsIgnoreCase("auto")) { dbName = "RNAUTO"; analysisID = spectrumAnalysisMapper.getAnalysisID(dbName, sampleId, "RNAUTO"); - }else if (dbName.equalsIgnoreCase("man")){ + } else if (dbName.equalsIgnoreCase("man")) { dbName = "RNMAN"; analysisID = spectrumAnalysisMapper.getAnalysisID(dbName, sampleId, userName); } - SpectrumFileRecord dbSpectrumFilePath = spectrumAnalysisMapper.getDBSpectrumFilePath(dbName, sampleId,analysisID); + SpectrumFileRecord dbSpectrumFilePath = spectrumAnalysisMapper.getDBSpectrumFilePath(dbName, sampleId, analysisID); if (Objects.nonNull(dbSpectrumFilePath)) { String sampleFilePath = dbSpectrumFilePath.getSampleFilePath().substring(0, dbSpectrumFilePath.getSampleFilePath().lastIndexOf(StringPool.SLASH)); String gasFilePath = dbSpectrumFilePath.getGasBgFilePath().substring(0, dbSpectrumFilePath.getGasBgFilePath().lastIndexOf(StringPool.SLASH)); @@ -3017,7 +3021,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { String collectStartStr = DateUtils.formatDate(sampleData.getCollectStart(), "yyyy/MM/dd HH:mm:ss"); String dbQcFilePath = spectrumAnalysisMapper.getQCFilePath(sampleData.getSiteDetCode(), collectStartStr); if (StringUtils.isNotBlank(dbQcFilePath)) { - qcFilePath=StringPool.SLASH + 
spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + dbQcFilePath.substring(0, dbQcFilePath.lastIndexOf(StringPool.SLASH)); + qcFilePath = StringPool.SLASH + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + dbQcFilePath.substring(0, dbQcFilePath.lastIndexOf(StringPool.SLASH)); } List analyseResult = BetaGammaAnalyzeCurrentProcess(analyseData, sampleFilePath, gasFilePath, detFilePath, qcFilePath, userName); map.put("xeData", analyseResult); @@ -3036,10 +3040,10 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { result.setResult(map); } } else if ("AllSpectrum".equals(analyseData.getApplyType())) { - //获取当前选中的文件名称 + // 获取当前选中的文件名称 String currentFileName = analyseData.getCurrentFileName(); List> loadDataList = new LinkedList<>(); - for (int i=0; i m_loadData = new HashMap<>(); Integer sampleId = analyseData.getSampleIds().get(i); String dbName = analyseData.getDbNames().get(i); @@ -3047,16 +3051,16 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { String gasFileName = analyseData.getGasFileNames().get(i); String detFileName = analyseData.getDetFileNames().get(i); String qcFileName = analyseData.getQcFileNames().get(i); - if (Objects.nonNull(sampleId)){ + if (Objects.nonNull(sampleId)) { Integer analysisID = null; - if (dbName.equalsIgnoreCase("auto")){ + if (dbName.equalsIgnoreCase("auto")) { dbName = "RNAUTO"; analysisID = spectrumAnalysisMapper.getAnalysisID(dbName, sampleId, "RNAUTO"); - }else if (dbName.equalsIgnoreCase("man")){ + } else if (dbName.equalsIgnoreCase("man")) { dbName = "RNMAN"; analysisID = spectrumAnalysisMapper.getAnalysisID(dbName, sampleId, userName); } - SpectrumFileRecord dbSpectrumFilePath = spectrumAnalysisMapper.getDBSpectrumFilePath(dbName, sampleId,analysisID); + SpectrumFileRecord dbSpectrumFilePath = spectrumAnalysisMapper.getDBSpectrumFilePath(dbName, sampleId, analysisID); if (Objects.nonNull(dbSpectrumFilePath)) { String sampleFilePath = dbSpectrumFilePath.getSampleFilePath().substring(0, dbSpectrumFilePath.getSampleFilePath().lastIndexOf(StringPool.SLASH)); String gasFilePath = dbSpectrumFilePath.getGasBgFilePath().substring(0, dbSpectrumFilePath.getGasBgFilePath().lastIndexOf(StringPool.SLASH)); @@ -3066,7 +3070,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { String collectStartStr = DateUtils.formatDate(sampleData.getCollectStart(), "yyyy/MM/dd HH:mm:ss"); String dbQcFilePath = spectrumAnalysisMapper.getQCFilePath(sampleData.getSiteDetCode(), collectStartStr); if (StringUtils.isNotBlank(dbQcFilePath)) { - qcFilePath=StringPool.SLASH + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + dbQcFilePath.substring(0, dbQcFilePath.lastIndexOf(StringPool.SLASH)); + qcFilePath = StringPool.SLASH + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + dbQcFilePath.substring(0, dbQcFilePath.lastIndexOf(StringPool.SLASH)); } m_loadData.put("sampleFilePath", sampleFilePath); m_loadData.put("gasFilePath", gasFilePath); @@ -3100,54 +3104,54 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { } private List BetaGammaAnalyzeCurrentProcess(AnalyseData analyseData, String sampleFilePath, String gasFilePath, String detFilePath, String qcFilePath, String userName) { - //根据文件路径 文件名称获取对应的临时文件 + // 根据文件路径 文件名称获取对应的临时文件 File sampleTmp = null; File gasTmp = null; File detTmp = null; - //需要返回到前端的XeData数据 + // 需要返回到前端的XeData数据 List xeResultsSpectrumList = new LinkedList<>(); try { - //sample文件名称 + // sample文件名称 List 
sampleFileNames = analyseData.getSampleFileNames(); String sampleFileName = sampleFileNames.get(0); - //gas文件名称 + // gas文件名称 List gasFileNames = analyseData.getGasFileNames(); String gasFileName = gasFileNames.get(0); - //det文件名称 + // det文件名称 List detFileNames = analyseData.getDetFileNames(); String detFileName = detFileNames.get(0); - //qc文件名称 + // qc文件名称 List qcFileNames = analyseData.getQcFileNames(); String qcFileName = qcFileNames.get(0); - //存储重新分析字段的实体类 + // 存储重新分析字段的实体类 SpectrumGroup spectrum_group = new SpectrumGroup(); - //声明一个map 缓存重新分析用到的数据 + // 声明一个map 缓存重新分析用到的数据 Map map = new HashMap<>(); - //声明一个map 缓存xeData计算后的结果 + // 声明一个map 缓存xeData计算后的结果 Map xeMap = new HashMap<>(); - //从本地缓存获取beta gamma的数组 + // 从本地缓存获取beta gamma的数组 Cache> cache = betaCache.getBetaCache(); - //根据qc文件名称-用户名-beta的方式获取beta的内容 + // 根据qc文件名称-用户名-beta的方式获取beta的内容 Map betaMap = cache.getIfPresent(qcFileName + "-" + userName + "-beta"); List betaList = new LinkedList<>(); List betaFittingPara = new LinkedList<>(); List betaFittingParaToUi = new LinkedList<>(); if (CollectionUtils.isNotEmpty(betaMap)) { - betaList = (List)betaMap.get("Series"); + betaList = (List) betaMap.get("Series"); betaFittingPara = (List) betaMap.get("fittingPara"); betaFittingParaToUi = (List) betaMap.get("fittingParaToUi"); } - //根据qc文件名称-用户名-gamma的方式获取gamma的内容 + // 根据qc文件名称-用户名-gamma的方式获取gamma的内容 Map gammaMap = cache.getIfPresent(qcFileName + "-" + userName + "-gamma"); List gammaList = new LinkedList<>(); List gammaFittingPara = new LinkedList<>(); List gammaFittingParaToUi = new LinkedList<>(); if (CollectionUtils.isNotEmpty(gammaMap)) { - gammaList = (List)gammaMap.get("Series"); + gammaList = (List) gammaMap.get("Series"); gammaFittingPara = (List) gammaMap.get("fittingPara"); gammaFittingParaToUi = (List) gammaMap.get("fittingParaToUi"); } - //判断是否对beta页面进行过分析 + // 判断是否对beta页面进行过分析 if (analyseData.isBetaEnergyValid()) { List beCal = new LinkedList<>(); beCal.add(Double.valueOf(betaFittingParaToUi.get(0))); @@ -3175,7 +3179,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { map.put("bBetaEnergyValidDet", false); } } - //判断是否对gamma页面进行过分析 + // 判断是否对gamma页面进行过分析 if (analyseData.isGammaEnergyValid()) { List geCal = new LinkedList<>(); geCal.add(Double.valueOf(gammaFittingParaToUi.get(0))); @@ -3203,25 +3207,25 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { map.put("bGammaEnergyValidDet", false); } } - //判断是否勾选了sample + // 判断是否勾选了sample spectrum_group.BgCalPara.bApplyNewCalicSample = analyseData.isSampleData(); - //判断是否勾选了gas + // 判断是否勾选了gas spectrum_group.BgCalPara.bApplyNewCalicGasBg = analyseData.isGasBgData(); - //判断是否勾选了det + // 判断是否勾选了det spectrum_group.BgCalPara.bApplyNewCalicDetBg = analyseData.isDetBgData(); - //判断是否勾选了qc + // 判断是否勾选了qc spectrum_group.BgCalPara.bApplyNewCalicQc = analyseData.isQcData(); - //获取sample文件 + // 获取sample文件 sampleTmp = phdFileUtil.analyzeFile(sampleFilePath, sampleFileName); - //获取gas文件 + // 获取gas文件 gasTmp = phdFileUtil.analyzeFile(gasFilePath, gasFileName); - //获取det文件 + // 获取det文件 detTmp = phdFileUtil.analyzeFile(detFilePath, detFileName); - //调用重新分析算法 获取算法结果 + // 调用重新分析算法 获取算法结果 BgAnalyseResult analyseResult = EnergySpectrumHandler.bgReAnalyse(sampleTmp.getAbsolutePath(), gasTmp.getAbsolutePath(), detTmp.getAbsolutePath(), spectrum_group.BgCalPara); - //存入分析用到的数据 + // 存入分析用到的数据 map.put("reAnalyseParam", spectrum_group.BgCalPara); - //存入计算后得到的xeData数据 + // 存入计算后得到的xeData数据 GardsXeResultsSpectrum xe131m = new 
GardsXeResultsSpectrum(); xe131m.setNuclideName(XeNuclideName.XE_131m.getType()); xe131m.setConc(analyseResult.Xe131m_con); @@ -3255,10 +3259,10 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { xe135.setNidFlag(analyseResult.XE_135_NID_FLAG); xeResultsSpectrumList.add(xe135); xeMap.put("XeData", xeResultsSpectrumList); - //新计算得到的边界值 + // 新计算得到的边界值 if (CollectionUtils.isNotEmpty(analyseResult.S_ROI_B_Boundary_start)) { List boundaryList = new LinkedList<>(); - for (int i=0; i boundaryList = new LinkedList<>(); - for (int i=0; i boundaryList = new LinkedList<>(); - for (int i=0; i BetaGammaAnalyzeAllProcess(List> loadDataList, AnalyseData analyseData, String userName, String currentFileName){ + private List BetaGammaAnalyzeAllProcess(List> loadDataList, AnalyseData analyseData, String userName, String currentFileName) { Map> analyseResultMap = new HashMap<>(); File sampleTmp = null; File gasTmp = null; File detTmp = null; try { - for (Map m_loadData: loadDataList){ + for (Map m_loadData : loadDataList) { String sampleFileName = m_loadData.get("sampleFileName"); String gasFileName = m_loadData.get("gasFileName"); String detFileName = m_loadData.get("detFileName"); @@ -3335,31 +3339,31 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { String gasFilePath = m_loadData.get("gasFilePath"); String detFilePath = m_loadData.get("detFilePath"); String qcFilePath = m_loadData.get("qcFilePath"); - //存储重新分析字段的实体类 + // 存储重新分析字段的实体类 SpectrumGroup spectrum_group = new SpectrumGroup(); - //声明一个map 缓存重新分析用到的数据 + // 声明一个map 缓存重新分析用到的数据 Map map = new HashMap<>(); - //声明一个map 缓存xeData计算后的结果 + // 声明一个map 缓存xeData计算后的结果 Map xeMap = new HashMap<>(); - //从本地缓存获取beta gamma的数组 + // 从本地缓存获取beta gamma的数组 Cache> cache = betaCache.getBetaCache(); - //根据qc文件名称-用户名-beta的方式获取beta的内容 + // 根据qc文件名称-用户名-beta的方式获取beta的内容 Map betaMap = cache.getIfPresent(qcFileName + "-" + userName + "-beta"); List betaList = new LinkedList<>(); List betaFittingPara = new LinkedList<>(); List betaFittingParaToUi = new LinkedList<>(); if (CollectionUtils.isNotEmpty(betaMap)) { - betaList = (List)betaMap.get("Series"); + betaList = (List) betaMap.get("Series"); betaFittingPara = (List) betaMap.get("fittingPara"); betaFittingParaToUi = (List) betaMap.get("fittingParaToUi"); } - //根据qc文件名称-用户名-gamma的方式获取gamma的内容 + // 根据qc文件名称-用户名-gamma的方式获取gamma的内容 Map gammaMap = cache.getIfPresent(qcFileName + "-" + userName + "-gamma"); List gammaList = new LinkedList<>(); List gammaFittingPara = new LinkedList<>(); List gammaFittingParaToUi = new LinkedList<>(); if (CollectionUtils.isNotEmpty(gammaMap)) { - gammaList = (List)gammaMap.get("Series"); + gammaList = (List) gammaMap.get("Series"); gammaFittingPara = (List) gammaMap.get("fittingPara"); gammaFittingParaToUi = (List) gammaMap.get("fittingParaToUi"); } @@ -3426,9 +3430,9 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { gasTmp = phdFileUtil.analyzeFile(gasFilePath, gasFileName); detTmp = phdFileUtil.analyzeFile(detFilePath, detFileName); BgAnalyseResult analyseResult = EnergySpectrumHandler.bgReAnalyse(sampleTmp.getAbsolutePath(), gasTmp.getAbsolutePath(), detTmp.getAbsolutePath(), spectrum_group.BgCalPara); - //存入重新分析的参数 + // 存入重新分析的参数 map.put("reAnalyseParam", spectrum_group.BgCalPara); - //存入xeData数据 + // 存入xeData数据 List xeResultsSpectrumList = new LinkedList<>(); GardsXeResultsSpectrum xe131m = new GardsXeResultsSpectrum(); xe131m.setNuclideName(XeNuclideName.XE_131m.getType()); @@ -3463,10 +3467,10 @@ 
public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { xe135.setNidFlag(analyseResult.XE_135_NID_FLAG); xeResultsSpectrumList.add(xe135); xeMap.put("XeData", xeResultsSpectrumList); - //新计算得到的边界值 + // 新计算得到的边界值 if (CollectionUtils.isNotEmpty(analyseResult.S_ROI_B_Boundary_start)) { List boundaryList = new LinkedList<>(); - for (int i=0; i boundaryList = new LinkedList<>(); - for (int i=0; i boundaryList = new LinkedList<>(); - for (int i=0; i map = new HashMap<>(); - //拼接ftp上传临时文件路径 + // 拼接ftp上传临时文件路径 String path = StringPool.SLASH + spectrumPathProperties.getUploadPath() + StringPool.SLASH + userName; - //声明一个实体类获取数据库中文件路径 + // 声明一个实体类获取数据库中文件路径 SpectrumFileRecord dbSpectrumFilePath = new SpectrumFileRecord(); Integer analysisID = null; String samplePath = ""; @@ -3550,12 +3554,12 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { File detTmp = null; BgCalibratePara BgCalPara = null; try { - //如果sampleId不为空 说明数据来源数据库 查询出对应的文件路径 - if (Objects.nonNull(sampleId) && StringUtils.isNotBlank(dbName)){ - if (dbName.equalsIgnoreCase("auto")){ + // 如果sampleId不为空 说明数据来源数据库 查询出对应的文件路径 + if (Objects.nonNull(sampleId) && StringUtils.isNotBlank(dbName)) { + if (dbName.equalsIgnoreCase("auto")) { dbName = "RNAUTO"; analysisID = spectrumAnalysisMapper.getAnalysisID(dbName, sampleId, "RNAUTO"); - } else if (dbName.equalsIgnoreCase("man")){ + } else if (dbName.equalsIgnoreCase("man")) { dbName = "RNMAN"; analysisID = spectrumAnalysisMapper.getAnalysisID(dbName, sampleId, userName); } @@ -3567,18 +3571,18 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { String collectStartStr = DateUtils.formatDate(sampleData.getCollectStart(), "yyyy/MM/dd HH:mm:ss"); String dbQcFilePath = spectrumAnalysisMapper.getQCFilePath(sampleData.getSiteDetCode(), collectStartStr); if (StringUtils.isNotBlank(dbQcFilePath)) { - qcPath=StringPool.SLASH + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + dbQcFilePath.substring(0, dbQcFilePath.lastIndexOf(StringPool.SLASH)); + qcPath = StringPool.SLASH + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + dbQcFilePath.substring(0, dbQcFilePath.lastIndexOf(StringPool.SLASH)); } - } else if ( (Objects.isNull(sampleId) && StringUtils.isNotBlank(dbName)) || (Objects.nonNull(sampleId) && StringUtils.isBlank(dbName)) ){ + } else if ((Objects.isNull(sampleId) && StringUtils.isNotBlank(dbName)) || (Objects.nonNull(sampleId) && StringUtils.isBlank(dbName))) { result.error500("Data load From DB need to pass in sampleId and dbName"); return result; - } else {//如果sampleId,dbNamed都为空 就指向ftp上传文件临时路径 + } else {// 如果sampleId,dbNamed都为空 就指向ftp上传文件临时路径 samplePath = path; gasPath = path; detPath = path; qcPath = path; } - //根据文件路径 文件名称获取对应的临时文件 + // 根据文件路径 文件名称获取对应的临时文件 sampleTmp = phdFileUtil.analyzeFile(samplePath, sampleFileName); gasTmp = phdFileUtil.analyzeFile(gasPath, gasFileName); detTmp = phdFileUtil.analyzeFile(detPath, detFileName); @@ -3587,33 +3591,33 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { return result; } map.put("sampleFileName", sampleFileName); - //获取本地缓存的数据信息 + // 获取本地缓存的数据信息 Cache> cache = betaCache.getBetaCache(); - //根据qc文件名称-用户名-beta的方式获取beta的内容 + // 根据qc文件名称-用户名-beta的方式获取beta的内容 Map betaMap = cache.getIfPresent(qcFileName + "-" + userName + "-beta"); List betaList = new LinkedList<>(); List betaFittingParaToUi = new LinkedList<>(); if (CollectionUtils.isNotEmpty(betaMap)) { - betaList = (List)betaMap.get("Series"); + betaList = (List) 
betaMap.get("Series"); betaFittingParaToUi = (List) betaMap.get("fittingParaToUi"); } - //根据qc文件名称-用户名-gamma的方式获取gamma的内容 + // 根据qc文件名称-用户名-gamma的方式获取gamma的内容 Map gammaMap = cache.getIfPresent(qcFileName + "-" + userName + "-gamma"); List gammaList = new LinkedList<>(); List gammaFittingParaToUi = new LinkedList<>(); if (CollectionUtils.isNotEmpty(gammaMap)) { - gammaList = (List)gammaMap.get("Series"); + gammaList = (List) gammaMap.get("Series"); gammaFittingParaToUi = (List) gammaMap.get("fittingParaToUi"); } - //根据key获取重新分析的参数 + // 根据key获取重新分析的参数 Map reAnalyseParam = cache.getIfPresent(sampleFileName + "-" + userName + "-reAnalyseParam"); - //判断重新分析的参数信息是否为空 + // 判断重新分析的参数信息是否为空 if (CollectionUtils.isNotEmpty(reAnalyseParam)) { BgCalPara = (BgCalibratePara) reAnalyseParam.get("reAnalyseParam"); } - //调用分析方法 + // 调用分析方法 phdFileUtil.analyzeSpectrum(sampleTmp, gasTmp, detTmp, BgCalPara, map); - //分析qc文件信息并重新计算qc边界值 + // 分析qc文件信息并重新计算qc边界值 if (StringUtils.isNotBlank(qcPath)) { EnergySpectrumStruct struct = phdFileUtil.analyzeFileSourceData(qcPath, qcFileName); if (Objects.nonNull(struct)) { @@ -3622,14 +3626,14 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { } if (CollectionUtils.isNotEmpty(map)) { List xeDataList = (List) map.get("XeData"); - if (CollectionUtils.isNotEmpty(xeDataList)){ - for (GardsXeResultsSpectrum xeData:xeDataList) { + if (CollectionUtils.isNotEmpty(xeDataList)) { + for (GardsXeResultsSpectrum xeData : xeDataList) { Double conc = xeData.getConc(); Double mdc = xeData.getMdc(); - if (conc < 0){ + if (conc < 0) { xeData.setColor("red"); xeData.setNidFlag(0); - } else if (0 mdc) { @@ -3638,7 +3642,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { } } } - //更新分析后的缓存信息 + // 更新分析后的缓存信息 cache.put(sampleFileName + "-" + userName + "-xeData", map); betaCache.setBetaCache(cache); map.put("bProcessed", true); @@ -3672,9 +3676,9 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { File detTmp = null; try { if (CollectionUtils.isNotEmpty(sampleFileNames)) { - for (int i=0; i map = new HashMap<>(); - //声明一个实体类获取数据库中文件路径 + // 声明一个实体类获取数据库中文件路径 SpectrumFileRecord dbSpectrumFilePath = new SpectrumFileRecord(); Integer analysisID = null; - //如果sampleId不为空 说明数据来源数据库 查询出对应的文件路径 - if (Objects.nonNull(sampleId) && StringUtils.isNotBlank(dbName)){ - if (dbName.equalsIgnoreCase("auto")){ + // 如果sampleId不为空 说明数据来源数据库 查询出对应的文件路径 + if (Objects.nonNull(sampleId) && StringUtils.isNotBlank(dbName)) { + if (dbName.equalsIgnoreCase("auto")) { dbName = "RNAUTO"; analysisID = spectrumAnalysisMapper.getAnalysisID(dbName, sampleId, "RNAUTO"); - } else if (dbName.equalsIgnoreCase("man")){ + } else if (dbName.equalsIgnoreCase("man")) { dbName = "RNMAN"; analysisID = spectrumAnalysisMapper.getAnalysisID(dbName, sampleId, userName); } - dbSpectrumFilePath = spectrumAnalysisMapper.getDBSpectrumFilePath(dbName, sampleId,analysisID); - if (StringUtils.isNotBlank(dbSpectrumFilePath.getSampleFilePath())){ + dbSpectrumFilePath = spectrumAnalysisMapper.getDBSpectrumFilePath(dbName, sampleId, analysisID); + if (StringUtils.isNotBlank(dbSpectrumFilePath.getSampleFilePath())) { samplePath = StringPool.SLASH + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + dbSpectrumFilePath.getSampleFilePath().substring(0, dbSpectrumFilePath.getSampleFilePath().lastIndexOf(StringPool.SLASH)); } if (StringUtils.isNotBlank(dbSpectrumFilePath.getGasBgFilePath())) { @@ -3713,49 +3717,49 @@ public class SpectrumAnalysisServiceImpl 
implements ISpectrumAnalysisService { String collectStartStr = DateUtils.formatDate(sampleData.getCollectStart(), "yyyy/MM/dd HH:mm:ss"); String dbQcFilePath = spectrumAnalysisMapper.getQCFilePath(sampleData.getSiteDetCode(), collectStartStr); if (StringUtils.isNotBlank(dbQcFilePath)) { - qcPath=StringPool.SLASH + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + dbQcFilePath.substring(0, dbQcFilePath.lastIndexOf(StringPool.SLASH)); + qcPath = StringPool.SLASH + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + dbQcFilePath.substring(0, dbQcFilePath.lastIndexOf(StringPool.SLASH)); } - } else if ( (Objects.isNull(sampleId) && StringUtils.isNotBlank(dbName)) || (Objects.nonNull(sampleId) && StringUtils.isBlank(dbName)) ){ + } else if ((Objects.isNull(sampleId) && StringUtils.isNotBlank(dbName)) || (Objects.nonNull(sampleId) && StringUtils.isBlank(dbName))) { result.error500("Data load From DB need to pass in sampleId and dbName"); return result; - } else {//如果sampleId,dbNamed都为空 就指向ftp上传文件临时路径 + } else {// 如果sampleId,dbNamed都为空 就指向ftp上传文件临时路径 samplePath = path; gasPath = path; detPath = path; qcPath = path; } - //根据文件路径 文件名称获取对应的临时文件 + // 根据文件路径 文件名称获取对应的临时文件 sampleTmp = phdFileUtil.analyzeFile(samplePath, sampleFileName); gasTmp = phdFileUtil.analyzeFile(gasPath, gasFileName); detTmp = phdFileUtil.analyzeFile(detPath, detFileName); map.put("sampleFileName", sampleFileName); - //获取本地缓存的数据信息 + // 获取本地缓存的数据信息 Cache> cache = betaCache.getBetaCache(); - //根据qc文件名称-用户名-beta的方式获取beta的内容 + // 根据qc文件名称-用户名-beta的方式获取beta的内容 Map betaMap = cache.getIfPresent(qcFileName + "-" + userName + "-beta"); List betaList = new LinkedList<>(); List betaFittingParaToUi = new LinkedList<>(); if (CollectionUtils.isNotEmpty(betaMap)) { - betaList = (List)betaMap.get("Series"); + betaList = (List) betaMap.get("Series"); betaFittingParaToUi = (List) betaMap.get("fittingParaToUi"); } - //根据qc文件名称-用户名-gamma的方式获取gamma的内容 + // 根据qc文件名称-用户名-gamma的方式获取gamma的内容 Map gammaMap = cache.getIfPresent(qcFileName + "-" + userName + "-gamma"); List gammaList = new LinkedList<>(); List gammaFittingParaToUi = new LinkedList<>(); if (CollectionUtils.isNotEmpty(gammaMap)) { - gammaList = (List)gammaMap.get("Series"); + gammaList = (List) gammaMap.get("Series"); gammaFittingParaToUi = (List) gammaMap.get("fittingParaToUi"); } - //根据key获取重新分析的参数 + // 根据key获取重新分析的参数 Map reAnalyseParam = cache.getIfPresent(sampleFileName + "-" + userName + "-reAnalyseParam"); - //判断重新分析的参数信息是否为空 + // 判断重新分析的参数信息是否为空 if (CollectionUtils.isNotEmpty(reAnalyseParam)) { BgCalPara = (BgCalibratePara) reAnalyseParam.get("reAnalyseParam"); } - //调用分析方法 + // 调用分析方法 phdFileUtil.analyzeSpectrum(sampleTmp, gasTmp, detTmp, BgCalPara, map); - //分析qc文件信息并重新计算qc边界值 + // 分析qc文件信息并重新计算qc边界值 if (StringUtils.isNotBlank(qcPath)) { EnergySpectrumStruct struct = phdFileUtil.analyzeFileSourceData(qcPath, qcFileName); if (Objects.nonNull(struct)) { @@ -3763,14 +3767,14 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { } } List xeDataList = (List) map.get("XeData"); - if (CollectionUtils.isNotEmpty(xeDataList)){ - for (GardsXeResultsSpectrum xeData:xeDataList) { + if (CollectionUtils.isNotEmpty(xeDataList)) { + for (GardsXeResultsSpectrum xeData : xeDataList) { Double conc = xeData.getConc(); Double mdc = xeData.getMdc(); - if (conc < 0){ + if (conc < 0) { xeData.setColor("red"); xeData.setNidFlag(0); - } else if (0 mdc) { @@ -3779,7 +3783,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { } } } - 
//更新分析后的缓存信息 + // 更新分析后的缓存信息 cache.put(sampleFileName + "-" + userName + "-xeData", map); betaCache.setBetaCache(cache); map.put("bProcessed", true); @@ -3809,29 +3813,29 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { @Transactional public Result saveToDB(BgDataAnlyseResultIn anlyseResultIn, HttpServletRequest request) { Result result = new Result(); - //根据请求体获取用户名 + // 根据请求体获取用户名 String userName = JwtUtil.getUserNameByToken(request); - //用户名赋值到 分析员 + // 用户名赋值到 分析员 anlyseResultIn.setUserName(userName); - //从本地缓存获取beta gamma的数组 + // 从本地缓存获取beta gamma的数组 Cache> cache = betaCache.getBetaCache(); - //根据qc文件名称-用户名-beta的方式获取beta的内容 + // 根据qc文件名称-用户名-beta的方式获取beta的内容 Map betaMap = cache.getIfPresent(anlyseResultIn.getQcFileName() + "-" + userName + "-beta"); List betaList = new LinkedList<>(); List betaFittingPara = new LinkedList<>(); if (CollectionUtils.isNotEmpty(betaMap)) { - betaList = (List)betaMap.get("Series"); + betaList = (List) betaMap.get("Series"); betaFittingPara = (List) betaMap.get("fittingPara"); } - //根据qc文件名称-用户名-gamma的方式获取gamma的内容 + // 根据qc文件名称-用户名-gamma的方式获取gamma的内容 Map gammaMap = cache.getIfPresent(anlyseResultIn.getQcFileName() + "-" + userName + "-gamma"); List gammaList = new LinkedList<>(); List gammaFittingPara = new LinkedList<>(); if (CollectionUtils.isNotEmpty(gammaMap)) { - gammaList = (List)gammaMap.get("Series"); + gammaList = (List) gammaMap.get("Series"); gammaFittingPara = (List) gammaMap.get("fittingPara"); } - //根据qc文件名称-用户名-reAnalyseParam获取重新分析用到的参数 + // 根据qc文件名称-用户名-reAnalyseParam获取重新分析用到的参数 Map reAnalyseParam = cache.getIfPresent(anlyseResultIn.getSampleFileName() + "-" + userName + "-reAnalyseParam"); BgCalibratePara BgCalPara = null; if (CollectionUtils.isNotEmpty(reAnalyseParam)) { @@ -3861,21 +3865,21 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { anlyseResultIn.setBBetaEnergyValidDet(bBetaEnergyValidDet); } } - //获取当前时间作为人工分析开始时间 + // 获取当前时间作为人工分析开始时间 String beginDate = DateUtils.formatDate(new Date(), "yyyy-MM-dd HH:mm:ss"); - //获取当前时间作为人工分析结束时间 + // 获取当前时间作为人工分析结束时间 String endDate = DateUtils.formatDate(new Date(), "yyyy-MM-dd HH:mm:ss"); - //判断当前分析员是否有过排班任务 + // 判断当前分析员是否有过排班任务 Integer stationId = spectrumAnalysisMapper.getStationId(anlyseResultIn.getStationName()); - //判断当前分析员是否有过当前台站的排班任务 + // 判断当前分析员是否有过当前台站的排班任务 boolean bAnalysisResultWriteAuthority = userTaskUtil.CheckUserWriteAuthorityForStation(anlyseResultIn.getUserName(), stationId); - if ( !bAnalysisResultWriteAuthority ){ + if (!bAnalysisResultWriteAuthority) { result.error500("This user has no right to store the results of the analysis to the database."); return result; } - //获取ROI Limit数据 + // 获取ROI Limit数据 // getROILimit(anlyseResultIn); - //根据sample文件名称模糊查询sampleId + // 根据sample文件名称模糊查询sampleId if (StringUtils.isNotBlank(anlyseResultIn.getDbName())) { Integer sampleId = spectrumAnalysisMapper.getSampleIdLikeFileName(anlyseResultIn.getSampleFileName()); Integer analysisID = null; @@ -3888,7 +3892,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { } if (Objects.nonNull(sampleId)) { SpectrumFileRecord dbSpectrumFilePath = spectrumAnalysisMapper.getDBSpectrumFilePath(anlyseResultIn.getDbName(), sampleId, analysisID); - if (StringUtils.isNotBlank(dbSpectrumFilePath.getSampleFilePath())){ + if (StringUtils.isNotBlank(dbSpectrumFilePath.getSampleFilePath())) { anlyseResultIn.setSampleFilePath(StringPool.SLASH + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + 
dbSpectrumFilePath.getSampleFilePath().substring(0, dbSpectrumFilePath.getSampleFilePath().lastIndexOf(StringPool.SLASH))); } if (StringUtils.isNotBlank(dbSpectrumFilePath.getGasBgFilePath())) { @@ -3897,7 +3901,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { if (StringUtils.isNotBlank(dbSpectrumFilePath.getDetBgFilePath())) { anlyseResultIn.setDetFilePath(StringPool.SLASH + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + dbSpectrumFilePath.getDetBgFilePath().substring(0, dbSpectrumFilePath.getDetBgFilePath().lastIndexOf(StringPool.SLASH))); } - if ( Objects.nonNull(dbSpectrumFilePath.getCollectStart()) && StringUtils.isNotBlank(dbSpectrumFilePath.getSiteDetCode()) ) { + if (Objects.nonNull(dbSpectrumFilePath.getCollectStart()) && StringUtils.isNotBlank(dbSpectrumFilePath.getSiteDetCode())) { String collectStartStr = DateUtils.formatDate(dbSpectrumFilePath.getCollectStart(), "yyyy/MM/dd HH:mm:ss"); String qcFilePath = spectrumAnalysisMapper.getQCFilePath(dbSpectrumFilePath.getSiteDetCode(), collectStartStr); if (StringUtils.isNotBlank(qcFilePath)) { @@ -3912,41 +3916,41 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { anlyseResultIn.setQcFilePath(StringPool.SLASH + spectrumPathProperties.getUploadPath() + StringPool.SLASH + userName); } - //处理数据 获取对应的channel/energy值 + // 处理数据 获取对应的channel/energy值 getChannelAndEnergy(anlyseResultIn, betaList, gammaList); - //分析文件内容 + // 分析文件内容 analyzePHDFile(anlyseResultIn, BgCalPara, betaFittingPara, gammaFittingPara); - //处理文件名称 + // 处理文件名称 String sampleFilePathName = phdFileUtil.NameStandardBy(anlyseResultIn.getSampleFilePath(), anlyseResultIn.getSampleFileName()); String gasFilePathName = phdFileUtil.NameStandardBy(anlyseResultIn.getGasFilePath(), anlyseResultIn.getGasFileName()); String detFilePathName = phdFileUtil.NameStandardBy(anlyseResultIn.getDetFilePath(), anlyseResultIn.getDetFileName()); String qcFilePathName = phdFileUtil.NameStandardBy(anlyseResultIn.getQcFilePath(), anlyseResultIn.getQcFileName()); - //判断文件是否存储过 如果没有则解析文件并进行存储 - if ( !OriginalDataStore(anlyseResultIn.getGasFilePath(), anlyseResultIn.getGasFileName(), gasFilePathName, anlyseResultIn.getUserName()) ){ + // 判断文件是否存储过 如果没有则解析文件并进行存储 + if (!OriginalDataStore(anlyseResultIn.getGasFilePath(), anlyseResultIn.getGasFileName(), gasFilePathName, anlyseResultIn.getUserName())) { result.error500("gasFile save failed"); return result; } - if ( !OriginalDataStore(anlyseResultIn.getDetFilePath(), anlyseResultIn.getGasFileName(), detFilePathName, anlyseResultIn.getUserName()) ){ + if (!OriginalDataStore(anlyseResultIn.getDetFilePath(), anlyseResultIn.getGasFileName(), detFilePathName, anlyseResultIn.getUserName())) { result.error500("detFile save failed"); return result; } if (StringUtils.isNotBlank(anlyseResultIn.getQcFileName())) { - if ( !OriginalDataStore(anlyseResultIn.getQcFilePath(), anlyseResultIn.getQcFileName(), qcFilePathName, anlyseResultIn.getUserName()) ){ + if (!OriginalDataStore(anlyseResultIn.getQcFilePath(), anlyseResultIn.getQcFileName(), qcFilePathName, anlyseResultIn.getUserName())) { result.error500("qcFile save failed"); return result; } } - if ( !OriginalDataStore(anlyseResultIn.getSampleFilePath(), anlyseResultIn.getSampleFileName(), sampleFilePathName, anlyseResultIn.getUserName()) ){ + if (!OriginalDataStore(anlyseResultIn.getSampleFilePath(), anlyseResultIn.getSampleFileName(), sampleFilePathName, anlyseResultIn.getUserName())) { result.error500("sampleFile save failed"); return 
result; } - //判断当前分析员是否有过历史分析当前文件 + // 判断当前分析员是否有过历史分析当前文件 Integer isExist = spectrumAnalysisMapper.SampleIsExist(sampleFilePathName, anlyseResultIn.getUserName()); - //根据文件名称查询对应的sampleId--- sampleFile gasFile detFile + // 根据文件名称查询对应的sampleId--- sampleFile gasFile detFile Integer sampleId = spectrumAnalysisMapper.getSampleId(sampleFilePathName); Integer gasId = spectrumAnalysisMapper.getSampleId(gasFilePathName); Integer detId = spectrumAnalysisMapper.getSampleId(detFilePathName); - //如果分析过就修改原记录--GARDS_ANALYSES + // 如果分析过就修改原记录--GARDS_ANALYSES GardsAnalysesSpectrum gardsAnalyses = new GardsAnalysesSpectrum(); gardsAnalyses.setSampleId(sampleId); gardsAnalyses.setAnalysisBeginStr(beginDate); @@ -3961,96 +3965,96 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { gardsAnalyses.setUsedgasphdId(gasId); gardsAnalyses.setUseddetphdId(detId); gardsAnalyses.setAnalyst(anlyseResultIn.getUserName()); - if (Objects.nonNull(isExist)){ + if (Objects.nonNull(isExist)) { spectrumAnalysisMapper.updateGardsAnalyses(gardsAnalyses); - }else {//如果没有分析过就新增--GARDS_ANALYSES + } else {// 如果没有分析过就新增--GARDS_ANALYSES spectrumAnalysisMapper.insertGardsAnalyses(gardsAnalyses); } - //查询analysisId根据sampleId 分析员名称--GARDS_ANALYSES + // 查询analysisId根据sampleId 分析员名称--GARDS_ANALYSES GardsAnalysesSpectrum analysis = spectrumAnalysisMapper.getAnalysis("RNMAN.GARDS_ANALYSES", sampleId, userName); Integer idAnalysis = analysis.getIdAnalysis(); - //获取sample,gas,det三个文件分别对应的beta,gamma数据集合 + // 获取sample,gas,det三个文件分别对应的beta,gamma数据集合 List calibrationPairsList = getCalibrationPairs(anlyseResultIn, sampleId, gasId, detId, idAnalysis); - //如果分析过--删除GARDS_CALIBRATION_PAIRS表数据 新增GARDS_CALIBRATION_PAIRS表数据 - if (Objects.nonNull(isExist)){ - //根据idAnalysis删除GARDS_CALIBRATION_PAIRS表数据 + // 如果分析过--删除GARDS_CALIBRATION_PAIRS表数据 新增GARDS_CALIBRATION_PAIRS表数据 + if (Objects.nonNull(isExist)) { + // 根据idAnalysis删除GARDS_CALIBRATION_PAIRS表数据 spectrumAnalysisMapper.deleteCalibrationPairs(idAnalysis); - //遍历新增GARDS_CALIBRATION_PAIRS表数据 - for (GardsCalibrationPairs calibrationPairs:calibrationPairsList) { + // 遍历新增GARDS_CALIBRATION_PAIRS表数据 + for (GardsCalibrationPairs calibrationPairs : calibrationPairsList) { spectrumAnalysisMapper.insertCalibrationPairs(calibrationPairs); } - }else {//如果没有分析过--新增GARDS_CALIBRATION_PAIRS表数据 - //遍历新增GARDS_CALIBRATION_PAIRS表数据 - for (GardsCalibrationPairs calibrationPairs:calibrationPairsList) { + } else {// 如果没有分析过--新增GARDS_CALIBRATION_PAIRS表数据 + // 遍历新增GARDS_CALIBRATION_PAIRS表数据 + for (GardsCalibrationPairs calibrationPairs : calibrationPairsList) { spectrumAnalysisMapper.insertCalibrationPairs(calibrationPairs); } } - //gards_calibration 数据表 + // gards_calibration 数据表 List calibrationList = getCalibration(anlyseResultIn, sampleId, gasId, detId, idAnalysis); - //判断是否分析过 - if (Objects.nonNull(isExist)) { - //删除Gards_Calibration表数据 + // 判断是否分析过 + if (Objects.nonNull(isExist)) { + // 删除Gards_Calibration表数据 spectrumAnalysisMapper.deleteCalibration(idAnalysis); - //新增Gards_Calibration表数据数据 - for (GardsCalibration calibration:calibrationList) { + // 新增Gards_Calibration表数据数据 + for (GardsCalibration calibration : calibrationList) { spectrumAnalysisMapper.insertCalibration(calibration); } } else { - //新增Gards_Calibration表数据数据 - for (GardsCalibration calibration:calibrationList) { + // 新增Gards_Calibration表数据数据 + for (GardsCalibration calibration : calibrationList) { spectrumAnalysisMapper.insertCalibration(calibration); } } - //gards_roi_channels数据表 + // gards_roi_channels数据表 List 
roiChannelsList = new LinkedList<>(); getROIChannel(sampleId, idAnalysis, anlyseResultIn.getRoiChannelsSpectrumList(), roiChannelsList, DataTypeAbbr.SAMPLEPHD.getType()); getROIChannel(gasId, idAnalysis, anlyseResultIn.getRoiChannelsSpectrumList(), roiChannelsList, DataTypeAbbr.GASBKPHD.getType()); getROIChannel(detId, idAnalysis, anlyseResultIn.getRoiChannelsSpectrumList(), roiChannelsList, DataTypeAbbr.DETBKPHD.getType()); - //如果分析过数据 - if (Objects.nonNull(isExist)){ - //删除gards_roi_channels数据表数据 + // 如果分析过数据 + if (Objects.nonNull(isExist)) { + // 删除gards_roi_channels数据表数据 spectrumAnalysisMapper.deleteROIChannels(idAnalysis); - //新增gards_roi_channels数据表数据 - for (GardsRoiChannels roiChannels:roiChannelsList) { + // 新增gards_roi_channels数据表数据 + for (GardsRoiChannels roiChannels : roiChannelsList) { spectrumAnalysisMapper.insertROIChannels(roiChannels); } - }else {//没有分析过 - //新增gards_roi_channels数据表数据 - for (GardsRoiChannels roiChannels:roiChannelsList) { + } else {// 没有分析过 + // 新增gards_roi_channels数据表数据 + for (GardsRoiChannels roiChannels : roiChannelsList) { spectrumAnalysisMapper.insertROIChannels(roiChannels); } } - //gards_Xe_results数据表 + // gards_Xe_results数据表 List xeResultsList = getXeResults(anlyseResultIn, sampleId, idAnalysis); - if(Objects.nonNull(isExist)) { - //删除gards_Xe_results数据表数据 + if (Objects.nonNull(isExist)) { + // 删除gards_Xe_results数据表数据 spectrumAnalysisMapper.deleteXeResult(idAnalysis); - //新增gards_Xe_results数据表数据 - for (GardsXeResults xeResults:xeResultsList) { + // 新增gards_Xe_results数据表数据 + for (GardsXeResults xeResults : xeResultsList) { spectrumAnalysisMapper.insertXeResult(xeResults); } } else { - //新增gards_Xe_results数据表数据 - for (GardsXeResults xeResults:xeResultsList) { + // 新增gards_Xe_results数据表数据 + for (GardsXeResults xeResults : xeResultsList) { spectrumAnalysisMapper.insertXeResult(xeResults); } } - //gards_roi_results数据表 + // gards_roi_results数据表 List roiResultsSpectrumList = getROIResult(anlyseResultIn.getRoiResultsSpectrumList(), sampleId, idAnalysis); - if(Objects.nonNull(isExist)) { - //删除gards_roi_results数据表数据 + if (Objects.nonNull(isExist)) { + // 删除gards_roi_results数据表数据 spectrumAnalysisMapper.deleteROIResults(idAnalysis); - //新增gards_roi_results数据表数据 - for (GardsRoiResults roiResults:roiResultsSpectrumList) { + // 新增gards_roi_results数据表数据 + for (GardsRoiResults roiResults : roiResultsSpectrumList) { spectrumAnalysisMapper.insertROIResults(roiResults); } } else { - //新增gards_roi_results数据表数据 - for (GardsRoiResults roiResults:roiResultsSpectrumList) { + // 新增gards_roi_results数据表数据 + for (GardsRoiResults roiResults : roiResultsSpectrumList) { spectrumAnalysisMapper.insertROIResults(roiResults); } } - //上传本次文件到ftp人工交互存储路径下 + // 上传本次文件到ftp人工交互存储路径下 File sampleTmp = null; File gasTmp = null; File detTmp = null; @@ -4073,37 +4077,37 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { } /** - * 获取需要导出的数据 (saveToHtml|saveToTxt|saveToExcel) + * 获取需要导出的数据 (saveToHtml|saveToTxt|saveToExcel) */ - private BgDataAnlyseResultIn getSaveData(BgDataAnlyseResultIn anlyseResultIn){ - //根据请求体获取用户名 - LoginUser user= (LoginUser) SecurityUtils.getSubject().getPrincipal(); + private BgDataAnlyseResultIn getSaveData(BgDataAnlyseResultIn anlyseResultIn) { + // 根据请求体获取用户名 + LoginUser user = (LoginUser) SecurityUtils.getSubject().getPrincipal(); String userName = user.getUsername(); - //用户名赋值到 分析员 + // 用户名赋值到 分析员 anlyseResultIn.setUserName(userName); - //从本地缓存获取beta gamma的数组 + // 从本地缓存获取beta gamma的数组 Cache> cache = betaCache.getBetaCache(); - 
//根据qc文件名称-用户名-beta的方式获取beta的内容 + // 根据qc文件名称-用户名-beta的方式获取beta的内容 Map betaMap = cache.getIfPresent(anlyseResultIn.getQcFileName() + "-" + userName + "-beta"); List betaList = new LinkedList<>(); List betaFittingPara = new LinkedList<>(); List betaFittingParaToUi = new LinkedList<>(); if (CollectionUtils.isNotEmpty(betaMap)) { - betaList = (List)betaMap.get("Series"); + betaList = (List) betaMap.get("Series"); betaFittingPara = (List) betaMap.get("fittingPara"); betaFittingParaToUi = (List) betaMap.get("fittingParaToUi"); } - //根据qc文件名称-用户名-gamma的方式获取gamma的内容 + // 根据qc文件名称-用户名-gamma的方式获取gamma的内容 Map gammaMap = cache.getIfPresent(anlyseResultIn.getQcFileName() + "-" + userName + "-gamma"); List gammaList = new LinkedList<>(); List gammaFittingPara = new LinkedList<>(); List gammaFittingParaToUi = new LinkedList<>(); if (CollectionUtils.isNotEmpty(gammaMap)) { - gammaList = (List)gammaMap.get("Series"); + gammaList = (List) gammaMap.get("Series"); gammaFittingPara = (List) gammaMap.get("fittingPara"); gammaFittingParaToUi = (List) gammaMap.get("fittingParaToUi"); } - //根据qc文件名称-用户名-reAnalyseParam获取重新分析用到的参数 + // 根据qc文件名称-用户名-reAnalyseParam获取重新分析用到的参数 Map reAnalyseParam = cache.getIfPresent(anlyseResultIn.getSampleFileName() + "-" + userName + "-reAnalyseParam"); BgCalibratePara BgCalPara = null; if (CollectionUtils.isNotEmpty(reAnalyseParam)) { @@ -4145,7 +4149,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { } if (Objects.nonNull(sampleId)) { SpectrumFileRecord dbSpectrumFilePath = spectrumAnalysisMapper.getDBSpectrumFilePath(anlyseResultIn.getDbName(), sampleId, analysisID); - if (StringUtils.isNotBlank(dbSpectrumFilePath.getSampleFilePath())){ + if (StringUtils.isNotBlank(dbSpectrumFilePath.getSampleFilePath())) { anlyseResultIn.setSampleFilePath(StringPool.SLASH + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + dbSpectrumFilePath.getSampleFilePath().substring(0, dbSpectrumFilePath.getSampleFilePath().lastIndexOf(StringPool.SLASH))); } if (StringUtils.isNotBlank(dbSpectrumFilePath.getGasBgFilePath())) { @@ -4154,7 +4158,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { if (StringUtils.isNotBlank(dbSpectrumFilePath.getDetBgFilePath())) { anlyseResultIn.setDetFilePath(StringPool.SLASH + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + dbSpectrumFilePath.getDetBgFilePath().substring(0, dbSpectrumFilePath.getDetBgFilePath().lastIndexOf(StringPool.SLASH))); } - if ( Objects.nonNull(dbSpectrumFilePath.getCollectStart()) && StringUtils.isNotBlank(dbSpectrumFilePath.getSiteDetCode()) ) { + if (Objects.nonNull(dbSpectrumFilePath.getCollectStart()) && StringUtils.isNotBlank(dbSpectrumFilePath.getSiteDetCode())) { String collectStartStr = DateUtils.formatDate(dbSpectrumFilePath.getCollectStart(), "yyyy/MM/dd HH:mm:ss"); String qcFilePath = spectrumAnalysisMapper.getQCFilePath(dbSpectrumFilePath.getSiteDetCode(), collectStartStr); anlyseResultIn.setQcFilePath(StringPool.SLASH + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + qcFilePath.substring(0, qcFilePath.lastIndexOf(StringPool.SLASH))); @@ -4166,18 +4170,18 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { anlyseResultIn.setDetFilePath(StringPool.SLASH + spectrumPathProperties.getUploadPath() + StringPool.SLASH + userName); anlyseResultIn.setQcFilePath(StringPool.SLASH + spectrumPathProperties.getUploadPath() + StringPool.SLASH + userName); } - //处理数据 获取对应的channel/energy值 + // 处理数据 获取对应的channel/energy值 
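The path handling above (and the same pattern in saveToDB) keeps only the directory part of the stored file path and prefixes it with the configured root. In isolation it amounts to the following sketch; the names are illustrative, and StringPool.SLASH is simply "/".

    // Sketch: build the directory prefix used for the sample/gas/det/qc paths.
    // saveRoot stands in for spectrumPathProperties.getSaveFilePath() (or getUploadPath()).
    static String dirPrefix(String saveRoot, String storedFilePath) {
        String dir = storedFilePath.substring(0, storedFilePath.lastIndexOf("/"));
        return "/" + saveRoot + "/" + dir;
    }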
getChannelAndEnergy(anlyseResultIn, betaList, gammaList); - //分析文件内容 + // 分析文件内容 analyzeSavePHDFile(anlyseResultIn, BgCalPara, betaFittingPara, gammaFittingPara, betaFittingParaToUi, gammaFittingParaToUi); // 返回需要Save的数据 return anlyseResultIn; } /* - * 对数据进行处理,变成真正可直接导出的数据 - * */ - private Map shiftSaveData(BgDataAnlyseResultIn anlyseResultIn){ + * 对数据进行处理,变成真正可直接导出的数据 + * */ + private Map shiftSaveData(BgDataAnlyseResultIn anlyseResultIn) { Map result = new HashMap<>(); List roiChannels = anlyseResultIn.getRoiChannelsSpectrumList(); List roiResults = anlyseResultIn.getRoiResultsSpectrumList(); @@ -4194,8 +4198,8 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { BeanUtil.copyProperties(roiChannel, roiChannelsDto); roiChannelsDto.shift(); String dataType = roiChannel.getDataType(); - if (StrUtil.isNotBlank(dataType)){ - switch (dataType){ + if (StrUtil.isNotBlank(dataType)) { + switch (dataType) { case "S": roiChannelsDtosS.add(roiChannelsDto); break; @@ -4247,8 +4251,8 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { } /* - * 使用模板引擎解析html模板 - * */ + * 使用模板引擎解析html模板 + * */ private String parseHtml(Map variables) { TemplateEngine templateEngine = new SpringTemplateEngine(); ClassLoaderTemplateResolver resolver = new ClassLoaderTemplateResolver(); @@ -4260,9 +4264,9 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { } /* - * 提取(saveToHtml|saveToTxt|saveToExcel)公共部分 - * */ - private Map exportData(BgDataAnlyseResultIn anlyseResultIn){ + * 提取(saveToHtml|saveToTxt|saveToExcel)公共部分 + * */ + private Map exportData(BgDataAnlyseResultIn anlyseResultIn) { // 解析文件,生成导出数据 Map analyze = shiftSaveData(getSaveData(anlyseResultIn)); // 转换能谱结构数据 @@ -4289,7 +4293,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { writer.write(result); } catch (IOException e) { e.printStackTrace(); - }finally { + } finally { if (ObjectUtil.isNotNull(writer)) writer.close(); } @@ -4303,7 +4307,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { analyze.replaceAll((key, value) -> ObjectUtil.isNull(value) ? 
"" : value); String export = "SaveToExcel.xls"; String template = SAVETOEXCEL.getName(); - ExportUtil.exportXls(response, template, analyze,export); + ExportUtil.exportXls(response, template, analyze, export); } @Override @@ -4339,40 +4343,42 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { analyze.put("gasRoi", roiChannels(roiChannelsDtosG)); analyze.putAll(roiResults(roiResultsDtos)); analyze.put("resultSummary", xeResults(xeResultsDtos)); - // 使数据适配导出模板 - String pathPrefix = "excelTemplate/"; - String path = pathPrefix + SAVETOTXT.getName(); - String template = ClassUtil.classPath(path); - List lines = FileUtil.readUtf8Lines(template); - // 正则表达式,匹配${}中的内容 - String regex = "\\$\\{([^}]+)}"; - List newLines = new ArrayList<>(); - List list = ListUtil.toList("sampleNewCalibration","sampleRoi", "detNewCalibration", "detRoi", "gasNewCalibration", "gasRoi","grossRoi", - "netRoi", "concRoi", "resultSummary"); - List skip = ListUtil.toList("${sampleNewCalibration}","${sampleRoi}", "${detNewCalibration}", "${detRoi}", "${gasNewCalibration}", "${gasRoi}", - "${grossRoi}", "${netRoi}", "${concRoi}", "${resultSummary}"); - for (String line : lines) { - List fieldNames = ReUtil.findAllGroup1(regex, line); - if (CollUtil.isEmpty(fieldNames)){ - newLines.add(line); - continue; - } - for (String fieldName : fieldNames) { - Object value = analyze.get(fieldName); - if (CollUtil.contains(list, fieldName)){ - newLines.addAll((List)value); - }else { - String search = "${" + fieldName + "}"; - String replacement = StrUtil.toString(value); - replacement = StrUtil.isBlank(replacement) ? "null" : replacement; - line = StrUtil.replace(line, search, replacement); - } - } - if (!CollUtil.contains(skip, line)) - newLines.add(line); - } PrintWriter writer = null; try { + // 使数据适配导出模板 + String pathPrefix = "excelTemplate/"; + String path = pathPrefix + SAVETOTXT.getName(); + InputStream inputStream = ClassUtil.classPathStream(path); + List lines = IOUtils.readLines(inputStream, "UTF-8"); + + // 正则表达式,匹配${}中的内容 + String regex = "\\$\\{([^}]+)}"; + List newLines = new ArrayList<>(); + List list = ListUtil.toList("sampleNewCalibration", "sampleRoi", "detNewCalibration", "detRoi", "gasNewCalibration", "gasRoi", "grossRoi", + "netRoi", "concRoi", "resultSummary"); + List skip = ListUtil.toList("${sampleNewCalibration}", "${sampleRoi}", "${detNewCalibration}", "${detRoi}", "${gasNewCalibration}", "${gasRoi}", + "${grossRoi}", "${netRoi}", "${concRoi}", "${resultSummary}"); + for (String line : lines) { + List fieldNames = ReUtil.findAllGroup1(regex, line); + if (CollUtil.isEmpty(fieldNames)) { + newLines.add(line); + continue; + } + for (String fieldName : fieldNames) { + Object value = analyze.get(fieldName); + if (CollUtil.contains(list, fieldName)) { + newLines.addAll((List) value); + } else { + String search = "${" + fieldName + "}"; + String replacement = StrUtil.toString(value); + replacement = StrUtil.isBlank(replacement) ? 
"null" : replacement; + line = StrUtil.replace(line, search, replacement); + } + } + if (!CollUtil.contains(skip, line)) + newLines.add(line); + } + String export = "SaveToTxt.txt"; writer = ExportUtil.streamWriter(response, export); for (String newLine : newLines) { @@ -4380,20 +4386,40 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { } } catch (IOException e) { e.printStackTrace(); - }finally { + } finally { if (ObjectUtil.isNotNull(writer)) writer.close(); } } - private List calibrations(GardsCalibrationSpectrum gammaCalibrationCE,GardsCalibrationSpectrum gammaCalibrationEC, GardsCalibrationSpectrum betaCalibrationCE, GardsCalibrationSpectrum betaCalibrationEC) { + @Override + public void viewAutomaticAnalysisLog(Integer sampleId, HttpServletResponse response) { + try { + String path = "C:\\Users\\a\\Desktop\\demo.log"; + InputStream inputStream = new FileInputStream(new File(path)); + OutputStream outputStream = ExportUtil.stream(response, "demo.log"); + + // 缓冲区大小 + byte[] buffer = new byte[4096]; + int bytesRead; + + // 将文件输出流写入到输出流中 + while ((bytesRead = inputStream.read(buffer)) != -1) { + outputStream.write(buffer, 0, bytesRead); + } + } catch (IOException e) { + e.printStackTrace(); + } + } + + private List calibrations(GardsCalibrationSpectrum gammaCalibrationCE, GardsCalibrationSpectrum gammaCalibrationEC, GardsCalibrationSpectrum betaCalibrationCE, GardsCalibrationSpectrum betaCalibrationEC) { int min = 79; String space = StrUtil.SPACE; List calibrationStrs = new ArrayList<>(); - String betaCH = " CH(x) = ("+betaCalibrationEC.getCoeff1()+")+("+betaCalibrationEC.getCoeff2()+")*x+("+betaCalibrationEC.getCoeff3()+")x*x"; - String gammaCH = " CH(x) = ("+gammaCalibrationEC.getCoeff1()+")+("+gammaCalibrationEC.getCoeff2()+")*x+("+gammaCalibrationEC.getCoeff3()+")x*x"; - String betaE = " E(x) = ("+betaCalibrationCE.getCoeff1()+")+("+betaCalibrationCE.getCoeff2()+")*x+("+betaCalibrationCE.getCoeff3()+")x*x"; - String gammaE = " E(x) = ("+gammaCalibrationCE.getCoeff1()+")+("+gammaCalibrationCE.getCoeff2()+")*x+("+gammaCalibrationCE.getCoeff3()+")x*x"; + String betaCH = " CH(x) = (" + betaCalibrationEC.getCoeff1() + ")+(" + betaCalibrationEC.getCoeff2() + ")*x+(" + betaCalibrationEC.getCoeff3() + ")x*x"; + String gammaCH = " CH(x) = (" + gammaCalibrationEC.getCoeff1() + ")+(" + gammaCalibrationEC.getCoeff2() + ")*x+(" + gammaCalibrationEC.getCoeff3() + ")x*x"; + String betaE = " E(x) = (" + betaCalibrationCE.getCoeff1() + ")+(" + betaCalibrationCE.getCoeff2() + ")*x+(" + betaCalibrationCE.getCoeff3() + ")x*x"; + String gammaE = " E(x) = (" + gammaCalibrationCE.getCoeff1() + ")+(" + gammaCalibrationCE.getCoeff2() + ")*x+(" + gammaCalibrationCE.getCoeff3() + ")x*x"; String calibrationCHStr = StrUtil.padAfter(betaCH, min, space) + StrUtil.padAfter(gammaCH, min, space); String calibrationEStr = StrUtil.padAfter(betaE, min, space) + StrUtil.padAfter(gammaE, min, space); calibrationStrs.add(calibrationCHStr); @@ -4401,7 +4427,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { return calibrationStrs; } - private List roiChannels(List roiChannels){ + private List roiChannels(List roiChannels) { int min = 49; String space = StrUtil.SPACE; List channelStrs = new ArrayList<>(); @@ -4420,7 +4446,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { return channelStrs; } - private Map roiResults(List roiResults){ + private Map roiResults(List roiResults) { int min1 = 49; int min2 = 22; String space = 
StrUtil.SPACE; @@ -4428,7 +4454,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { List netStrs = new ArrayList<>(); List concStrs = new ArrayList<>(); Map roiResultsStr = new HashMap<>(); - if (CollUtil.isEmpty(roiResults)){ + if (CollUtil.isEmpty(roiResults)) { roiResultsStr.put("grossRoi", grossStrs); roiResultsStr.put("netRoi", netStrs); roiResultsStr.put("concRoi", concStrs); @@ -4464,7 +4490,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { return roiResultsStr; } - private List xeResults(List xeResults){ + private List xeResults(List xeResults) { int min1 = 49; int min2 = 22; String space = StrUtil.SPACE; @@ -4493,16 +4519,16 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { File gasTmp = null; File detTmp = null; try { - //获取ftp文件路径下临时文件 + // 获取ftp文件路径下临时文件 sampleTmp = phdFileUtil.analyzeFile(anlyseResultIn.getSampleFilePath(), anlyseResultIn.getSampleFileName()); gasTmp = phdFileUtil.analyzeFile(anlyseResultIn.getGasFilePath(), anlyseResultIn.getGasFileName()); detTmp = phdFileUtil.analyzeFile(anlyseResultIn.getDetFilePath(), anlyseResultIn.getDetFileName()); - //如果勾选了Energy Calibration页面下sample Data + // 如果勾选了Energy Calibration页面下sample Data if (Objects.nonNull(sampleTmp)) { - if(anlyseResultIn.isCheckSample()) { + if (anlyseResultIn.isCheckSample()) { EnergySpectrumStruct sourceData = EnergySpectrumHandler.getSourceData(sampleTmp.getAbsolutePath()); - //没有点击过Energy Calibration页面下Gamma Detector Calibration的fitting按钮 channel/energy数据读取文件 如果点击过数据来源页面 - if (!anlyseResultIn.isBGammaEnergyValidSample()){ + // 没有点击过Energy Calibration页面下Gamma Detector Calibration的fitting按钮 channel/energy数据读取文件 如果点击过数据来源页面 + if (!anlyseResultIn.isBGammaEnergyValidSample()) { anlyseResultIn.setG_channel_sample(sourceData.g_centroid_channel); anlyseResultIn.setG_energy_sample(sourceData.g_energy); } else { @@ -4511,8 +4537,8 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { List energys = gammaList.stream().map(SeriseData::getY).collect(Collectors.toList()); anlyseResultIn.setG_energy_sample(energys); } - //没有点击过Energy Calibration页面下Beta Detector Calibration的fitting按钮 channel/energy数据读取文件 如果点击过数据来源页面 - if (!anlyseResultIn.isBBetaEnergyValidSample()){ + // 没有点击过Energy Calibration页面下Beta Detector Calibration的fitting按钮 channel/energy数据读取文件 如果点击过数据来源页面 + if (!anlyseResultIn.isBBetaEnergyValidSample()) { anlyseResultIn.setB_channel_sample(sourceData.b_channel); anlyseResultIn.setB_energy_sample(sourceData.b_electron_energy); } else { @@ -4521,7 +4547,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { List energys = betaList.stream().map(SeriseData::getY).collect(Collectors.toList()); anlyseResultIn.setB_energy_sample(energys); } - } else {//如果没有勾选Energy Calibration页面下sample Data + } else {// 如果没有勾选Energy Calibration页面下sample Data EnergySpectrumStruct sourceData = EnergySpectrumHandler.getSourceData(sampleTmp.getAbsolutePath()); anlyseResultIn.setG_channel_sample(sourceData.g_centroid_channel); anlyseResultIn.setG_energy_sample(sourceData.g_energy); @@ -4532,7 +4558,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { if (Objects.nonNull(gasTmp)) { if (anlyseResultIn.isCheckGas()) { EnergySpectrumStruct sourceData = EnergySpectrumHandler.getSourceData(gasTmp.getAbsolutePath()); - if (!anlyseResultIn.isBGammaEnergyValidGas()){ + if (!anlyseResultIn.isBGammaEnergyValidGas()) { 
anlyseResultIn.setG_channel_gas(sourceData.g_centroid_channel); anlyseResultIn.setG_energy_gas(sourceData.g_energy); } else { @@ -4541,7 +4567,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { List energys = gammaList.stream().map(SeriseData::getY).collect(Collectors.toList()); anlyseResultIn.setG_energy_gas(energys); } - if (!anlyseResultIn.isBBetaEnergyValidGas()){ + if (!anlyseResultIn.isBBetaEnergyValidGas()) { anlyseResultIn.setB_channel_gas(sourceData.b_channel); anlyseResultIn.setB_energy_gas(sourceData.b_electron_energy); } else { @@ -4561,7 +4587,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { if (Objects.nonNull(detTmp)) { if (anlyseResultIn.isCheckDet()) { EnergySpectrumStruct sourceData = EnergySpectrumHandler.getSourceData(detTmp.getAbsolutePath()); - if (!anlyseResultIn.isBGammaEnergyValidDet()){ + if (!anlyseResultIn.isBGammaEnergyValidDet()) { anlyseResultIn.setG_channel_det(sourceData.g_centroid_channel); anlyseResultIn.setG_energy_det(sourceData.g_energy); } else { @@ -4570,7 +4596,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { List energys = gammaList.stream().map(SeriseData::getY).collect(Collectors.toList()); anlyseResultIn.setG_energy_det(energys); } - if (!anlyseResultIn.isBBetaEnergyValidDet()){ + if (!anlyseResultIn.isBBetaEnergyValidDet()) { anlyseResultIn.setB_channel_det(sourceData.b_channel); anlyseResultIn.setB_energy_det(sourceData.b_electron_energy); } else { @@ -4602,18 +4628,18 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { } } - public void analyzeSavePHDFile(BgDataAnlyseResultIn anlyseResultIn, BgCalibratePara BgCalPara, List betaFittingPara, List gammaFittingPara,List betaFittingParaToUi, List gammaFittingParaToUi) { + public void analyzeSavePHDFile(BgDataAnlyseResultIn anlyseResultIn, BgCalibratePara BgCalPara, List betaFittingPara, List gammaFittingPara, List betaFittingParaToUi, List gammaFittingParaToUi) { File sampleTmp = null; File gasTmp = null; File detTmp = null; try { - //根据文件路径 文件名称获取对应的临时文件 + // 根据文件路径 文件名称获取对应的临时文件 sampleTmp = phdFileUtil.analyzeFile(anlyseResultIn.getSampleFilePath(), anlyseResultIn.getSampleFileName()); gasTmp = phdFileUtil.analyzeFile(anlyseResultIn.getGasFilePath(), anlyseResultIn.getGasFileName()); detTmp = phdFileUtil.analyzeFile(anlyseResultIn.getDetFilePath(), anlyseResultIn.getDetFileName()); - //调用动态库解析文件 - //Gamma Energy Calibration页面 如果点击过fitting使BGammaEnergyValid并且有勾选 - //如果三个sampleData,GasData,DetData数据都是被勾选状态 则需要传递新的参数重新分析 否则不需要改变数据分析当前文件内容 + // 调用动态库解析文件 + // Gamma Energy Calibration页面 如果点击过fitting使BGammaEnergyValid并且有勾选 + // 如果三个sampleData,GasData,DetData数据都是被勾选状态 则需要传递新的参数重新分析 否则不需要改变数据分析当前文件内容 BgAnalyseResult bgAnalyseResult = null; if (Objects.isNull(BgCalPara)) { bgAnalyseResult = EnergySpectrumHandler.bgAnalyse(sampleTmp.getAbsolutePath(), gasTmp.getAbsolutePath(), detTmp.getAbsolutePath()); @@ -4793,30 +4819,30 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { anlyseResultIn.setBetaCalibrationDEC(betaCalibrationDEC); } List roiChannelsSpectrumList = new LinkedList<>(); - for (int i=0; i roiResultsSpectrumList = new LinkedList<>(); - for (int i=0; ibgAnalyseResult.MDC.get(i)) { + if (bgAnalyseResult.ROI_con_uncer.get(i) > bgAnalyseResult.MDC.get(i)) { roiResults.setNidFlag(1); } else { roiResults.setNidFlag(0); @@ -4869,20 +4895,20 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { File gasTmp = null; 
File detTmp = null; try { - //根据文件路径 文件名称获取对应的临时文件 + // 根据文件路径 文件名称获取对应的临时文件 sampleTmp = phdFileUtil.analyzeFile(anlyseResultIn.getSampleFilePath(), anlyseResultIn.getSampleFileName()); gasTmp = phdFileUtil.analyzeFile(anlyseResultIn.getGasFilePath(), anlyseResultIn.getGasFileName()); detTmp = phdFileUtil.analyzeFile(anlyseResultIn.getDetFilePath(), anlyseResultIn.getDetFileName()); - //调用动态库解析文件 - //Gamma Energy Calibration页面 如果点击过fitting使BGammaEnergyValid并且有勾选 - //如果三个sampleData,GasData,DetData数据都是被勾选状态 则需要传递新的参数重新分析 否则不需要改变数据分析当前文件内容 + // 调用动态库解析文件 + // Gamma Energy Calibration页面 如果点击过fitting使BGammaEnergyValid并且有勾选 + // 如果三个sampleData,GasData,DetData数据都是被勾选状态 则需要传递新的参数重新分析 否则不需要改变数据分析当前文件内容 BgAnalyseResult bgAnalyseResult = null; if (Objects.isNull(BgCalPara)) { bgAnalyseResult = EnergySpectrumHandler.bgAnalyse(sampleTmp.getAbsolutePath(), gasTmp.getAbsolutePath(), detTmp.getAbsolutePath()); } else { bgAnalyseResult = EnergySpectrumHandler.bgReAnalyse(sampleTmp.getAbsolutePath(), gasTmp.getAbsolutePath(), detTmp.getAbsolutePath(), BgCalPara); } - //处理XeData的数据 + // 处理XeData的数据 List xeResultsSpectrumList = new LinkedList<>(); GardsXeResults xe131m = new GardsXeResults(); xe131m.setNuclideName(XeNuclideName.XE_131m.getType()); @@ -4917,7 +4943,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { xe135.setNidFlag(anlyseResultIn.getXe135Flag()); xeResultsSpectrumList.add(xe135); anlyseResultIn.setXeData(xeResultsSpectrumList); - //处理GammaCalibration的数据 + // 处理GammaCalibration的数据 List gammaCalibrationSpectrumList = new LinkedList<>(); if (anlyseResultIn.isBGammaEnergyValidSample()) { GardsCalibrationSpectrum gammaCalibrationS = new GardsCalibrationSpectrum(); @@ -4965,7 +4991,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { gammaCalibrationSpectrumList.add(gammaCalibrationD); } anlyseResultIn.setGammaCalibrationSpectrumList(gammaCalibrationSpectrumList); - //处理BetaCalibration数据 + // 处理BetaCalibration数据 List betaCalibrationSpectrumList = new LinkedList<>(); if (anlyseResultIn.isBBetaEnergyValidSample()) { GardsCalibrationSpectrum betaCalibrationS = new GardsCalibrationSpectrum(); @@ -5013,32 +5039,32 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { betaCalibrationSpectrumList.add(betaCalibrationD); } anlyseResultIn.setBetaCalibrationSpectrumList(betaCalibrationSpectrumList); - //存储roiChannel数据 + // 存储roiChannel数据 List roiChannelsSpectrumList = new LinkedList<>(); - for (int i=0; i roiResultsSpectrumList = new LinkedList<>(); - for (int i=0; ibgAnalyseResult.MDC.get(i)) { + if (bgAnalyseResult.ROI_con_uncer.get(i) > bgAnalyseResult.MDC.get(i)) { roiResults.setNidFlag(1); } else { roiResults.setNidFlag(0); @@ -5089,7 +5115,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { public List getCalibrationPairs(BgDataAnlyseResultIn anlyseResultIn, Integer sampleId, Integer gasId, Integer detId, Integer idAnalysis) { List calibrationPairsList = new LinkedList<>(); - for (int i=0; i< anlyseResultIn.getB_channel_sample().size(); i++){ + for (int i = 0; i < anlyseResultIn.getB_channel_sample().size(); i++) { GardsCalibrationPairs calibrationPairs = new GardsCalibrationPairs(); calibrationPairs.setSampleId(sampleId); calibrationPairs.setIdAnalysis(idAnalysis); @@ -5101,7 +5127,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { calibrationPairs.setYValue(anlyseResultIn.getB_energy_sample().get(i)); calibrationPairsList.add(calibrationPairs); } - 
for (int i=0; i< anlyseResultIn.getG_channel_sample().size(); i++){ + for (int i = 0; i < anlyseResultIn.getG_channel_sample().size(); i++) { GardsCalibrationPairs calibrationPairs = new GardsCalibrationPairs(); calibrationPairs.setSampleId(sampleId); calibrationPairs.setIdAnalysis(idAnalysis); @@ -5114,7 +5140,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { calibrationPairsList.add(calibrationPairs); } - for (int i=0; i< anlyseResultIn.getB_channel_gas().size(); i++){ + for (int i = 0; i < anlyseResultIn.getB_channel_gas().size(); i++) { GardsCalibrationPairs calibrationPairs = new GardsCalibrationPairs(); calibrationPairs.setSampleId(gasId); calibrationPairs.setIdAnalysis(idAnalysis); @@ -5127,7 +5153,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { calibrationPairsList.add(calibrationPairs); } - for (int i=0; i< anlyseResultIn.getG_channel_gas().size(); i++){ + for (int i = 0; i < anlyseResultIn.getG_channel_gas().size(); i++) { GardsCalibrationPairs calibrationPairs = new GardsCalibrationPairs(); calibrationPairs.setSampleId(gasId); calibrationPairs.setIdAnalysis(idAnalysis); @@ -5140,7 +5166,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { calibrationPairsList.add(calibrationPairs); } - for (int i=0; i< anlyseResultIn.getB_channel_det().size(); i++){ + for (int i = 0; i < anlyseResultIn.getB_channel_det().size(); i++) { GardsCalibrationPairs calibrationPairs = new GardsCalibrationPairs(); calibrationPairs.setSampleId(detId); calibrationPairs.setIdAnalysis(idAnalysis); @@ -5153,7 +5179,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { calibrationPairsList.add(calibrationPairs); } - for (int i=0; i< anlyseResultIn.getG_channel_det().size(); i++){ + for (int i = 0; i < anlyseResultIn.getG_channel_det().size(); i++) { GardsCalibrationPairs calibrationPairs = new GardsCalibrationPairs(); calibrationPairs.setSampleId(detId); calibrationPairs.setIdAnalysis(idAnalysis); @@ -5171,64 +5197,64 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { public void getROILimit(BgDataAnlyseResultIn anlyseResultIn) { File sampleTmp = null; try { - //获取ftp文件路径下临时文件 + // 获取ftp文件路径下临时文件 sampleTmp = phdFileUtil.analyzeFile(anlyseResultIn.getSampleFilePath(), anlyseResultIn.getSampleFileName()); - if (Objects.nonNull(sampleTmp)){ + if (Objects.nonNull(sampleTmp)) { EnergySpectrumStruct sourceData = EnergySpectrumHandler.getSourceData(sampleTmp.getAbsolutePath()); List poiBX1 = sourceData.POI_B_x1; List poiBX2 = sourceData.POI_B_x2; List poiGY1 = sourceData.POI_G_y1; List poiGY2 = sourceData.POI_G_y2; - //ROI Limit + // ROI Limit List report_limit_roi = new LinkedList<>(); - for(int pos=1;pos<=poiBX1.size();pos++) { + for (int pos = 1; pos <= poiBX1.size(); pos++) { report_limit_roi.add(String.valueOf(pos)); } List beginB = new LinkedList<>(); - for(int pos=1;pos<=poiBX1.size();pos++) { + for (int pos = 1; pos <= poiBX1.size(); pos++) { beginB.add(String.valueOf(poiBX1.get(pos))); } List endB = new LinkedList<>(); - for(int pos=0;pos beginG = new LinkedList<>(); - for(int pos=1;pos<=poiGY1.size();pos++) { + for (int pos = 1; pos <= poiGY1.size(); pos++) { beginG.add(String.valueOf(poiGY1.get(pos))); } List endG = new LinkedList<>(); - for(int pos=0;pos ratio_id = sourceData.ratio_id; List ROI_num_highter_G_energy_ROI = sourceData.ROI_num_highter_G_energy_ROI; List ROI_num_lower_G_energy_ROI = sourceData.ROI_num_lower_G_energy_ROI; List 
count_ratio = sourceData.count_ratio; List count_ratio_uncertainty = sourceData.count_ratio_uncertainty; List countRatioList = new LinkedList<>(); - for(int pos=0;pos countRatioUncertaintyList = new LinkedList<>(); - for(int pos=0;pos bg_efficiency = sourceData.bg_efficiency; List nuclide_name = sourceData.nuclide_name; List ROI_number = sourceData.ROI_number; List uncertainty = sourceData.uncertainty; List efficiencyList = new LinkedList<>(); - for(int pos=0;pos uncertaintyList = new LinkedList<>(); - for(int pos=0;pos roiChannelsSpectrumList, List roiChannelsList, String dataType) { - roiChannelsSpectrumList = roiChannelsSpectrumList.stream().filter(item-> item.getDataType().equals(dataType)).collect(Collectors.toList()); - for (int i=0; i item.getDataType().equals(dataType)).collect(Collectors.toList()); + for (int i = 0; i < roiChannelsSpectrumList.size(); i++) { GardsRoiChannels roiChannels = new GardsRoiChannels(); roiChannels.setSampleId(sampleId); roiChannels.setIdAnalysis(idAnalysis); - roiChannels.setRoi(i+1); + roiChannels.setRoi(i + 1); roiChannels.setBChanStart(roiChannelsSpectrumList.get(i).getBChanStart()); roiChannels.setBChanStop(roiChannelsSpectrumList.get(i).getBChanStop()); roiChannels.setGChanStart(roiChannelsSpectrumList.get(i).getGChanStart()); @@ -5360,7 +5386,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { public List getXeDataList(BgDataAnlyseResultIn anlyseResultIn, Integer sampleId, Integer idAnalysis) { List xeResultsList = new LinkedList<>(); - //Xe131m + // Xe131m GardsXeResultsSpectrum xe131m = new GardsXeResultsSpectrum(); xe131m.setSampleId(sampleId); xe131m.setIdAnalysis(idAnalysis); @@ -5371,7 +5397,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { xe131m.setLc(anlyseResultIn.getLc_Xe131m()); xe131m.setNidFlag(anlyseResultIn.getXe131mFlag()); xeResultsList.add(xe131m); - //Xe133 + // Xe133 GardsXeResultsSpectrum xe133 = new GardsXeResultsSpectrum(); xe133.setSampleId(sampleId); xe133.setIdAnalysis(idAnalysis); @@ -5382,7 +5408,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { xe133.setLc(anlyseResultIn.getLc_Xe133()); xe133.setNidFlag(anlyseResultIn.getXe133Flag()); xeResultsList.add(xe133); - //Xe133m + // Xe133m GardsXeResultsSpectrum xe133m = new GardsXeResultsSpectrum(); xe133m.setSampleId(sampleId); xe133m.setIdAnalysis(idAnalysis); @@ -5393,7 +5419,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { xe133m.setLc(anlyseResultIn.getLc_Xe133m()); xe133m.setNidFlag(anlyseResultIn.getXe133mFlag()); xeResultsList.add(xe133m); - //Xe135 + // Xe135 GardsXeResultsSpectrum xe135 = new GardsXeResultsSpectrum(); xe135.setSampleId(sampleId); xe135.setIdAnalysis(idAnalysis); @@ -5404,95 +5430,95 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { xe135.setLc(anlyseResultIn.getLc_Xe135()); xe135.setNidFlag(anlyseResultIn.getXe135Flag()); xeResultsList.add(xe135); - return xeResultsList; + return xeResultsList; } public List getXeResults(BgDataAnlyseResultIn anlyseResultIn, Integer sampleId, Integer idAnalysis) { List xeResultsList = anlyseResultIn.getXeData(); - for (GardsXeResults xeResults:xeResultsList) { + for (GardsXeResults xeResults : xeResultsList) { xeResults.setIdAnalysis(idAnalysis); xeResults.setSampleId(sampleId); } - return xeResultsList; + return xeResultsList; } @Transactional public boolean OriginalDataStore(String filePath, String fileName, String filePathName, String userName) { - 
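One note on the getROILimit hunk above (the method is currently commented out at its call site): as copied here, the beginB/beginG loops run pos from 1 to size() and index the POI lists with that same pos, which skips the first element and reads one past the end. If that is not intentional, a 0-based sketch of the same extraction would be:

    import java.util.*;

    // Sketch only: 0-based extraction of a begin/end channel list from the source data.
    // The element type of sourceData.POI_B_x1 etc. is not visible here, so the parameter is
    // left as a wildcard list.
    static List<String> asStrings(List<?> values) {
        List<String> out = new LinkedList<>();
        for (int pos = 0; pos < values.size(); pos++) {
            out.add(String.valueOf(values.get(pos)));
        }
        return out;
    }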
//根据新的文件路径名称查询数据是否存在 + // 根据新的文件路径名称查询数据是否存在 GardsSampleData isExist = spectrumAnalysisMapper.findSampleByFile(filePathName); - //如果数据已经存入过数据库 则 修改状态后返回 - if (Objects.nonNull(isExist)){ - //如果文件已经存入过数据库则修改状态 + // 如果数据已经存入过数据库 则 修改状态后返回 + if (Objects.nonNull(isExist)) { + // 如果文件已经存入过数据库则修改状态 spectrumAnalysisMapper.updateAnalysesStatus(filePathName); return true; } - //连接ftp + // 连接ftp FTPClient ftpClient = ftpUtil.LoginFTP(); InputStream inputStream = null; File file = null; try { - //被动模式 + // 被动模式 ftpClient.enterLocalPassiveMode(); - //设置文件类型--二进制文件 + // 设置文件类型--二进制文件 ftpClient.setFileType(FTP.BINARY_FILE_TYPE); // ftpClient.setControlEncoding("UTF-8"); ftpClient.setFileTransferMode(FTPClient.STREAM_TRANSFER_MODE); - //切换文件路径 + // 切换文件路径 ftpClient.changeWorkingDirectory(filePath); inputStream = ftpClient.retrieveFileStream(fileName); - if (Objects.nonNull(inputStream)){ + if (Objects.nonNull(inputStream)) { file = File.createTempFile("betaGamma", null); - //将ftp文件的输入流复制给临时文件 + // 将ftp文件的输入流复制给临时文件 FileUtils.copyInputStreamToFile(inputStream, file); - //读取文件内容 + // 读取文件内容 EnergySpectrumStruct sourceData = EnergySpectrumHandler.getSourceData(file.getAbsolutePath()); - //获取文件中块名信息 + // 获取文件中块名信息 List readLines = getFileBlockList(file); - //查询台站id + // 查询台站id Integer stationId = spectrumAnalysisMapper.getStationId(sourceData.site_code); Integer detectorId = spectrumAnalysisMapper.getDetectorId(sourceData.detector_code); - if(Objects.isNull(stationId) || Objects.isNull(detectorId)) { + if (Objects.isNull(stationId) || Objects.isNull(detectorId)) { String error = "get station_id or detect_id error"; return false; } - //新增Gards_Sample_Data表数据 + // 新增Gards_Sample_Data表数据 sampleDataSpectrumService.saveSampleData(sourceData, stationId, detectorId, filePathName, readLines); - //获取sampleId + // 获取sampleId Integer sampleId = spectrumAnalysisMapper.getSampleId(filePathName); - //存储Gards_Sample_Aux表数据 + // 存储Gards_Sample_Aux表数据 sampleAuxSpectrumService.saveSampleAux(sourceData, sampleId, readLines); - //判断文件是否包含Comment块 新增Gards_Description数据 - if (readLines.contains(SampleFileHeader.COMMENT.getMessage())){ + // 判断文件是否包含Comment块 新增Gards_Description数据 + if (readLines.contains(SampleFileHeader.COMMENT.getMessage())) { sampleDescriptionSpectrumService.saveSampleDescription(sourceData, sampleId); } - //判断文件是否包含Certificate块 新增Gards_Sample_Cert数据 - if (readLines.contains(SampleFileHeader.CERTIFICATE.getMessage())){ + // 判断文件是否包含Certificate块 新增Gards_Sample_Cert数据 + if (readLines.contains(SampleFileHeader.CERTIFICATE.getMessage())) { sampleCertSpectrumService.saveSampleCert(sourceData, sampleId); sampleCertLineSpectrumService.saveSampleCertLine(sourceData, sampleId); } - //新增Gards_Calibration_Pairs_Orig数据 + // 新增Gards_Calibration_Pairs_Orig数据 calibrationPairsOrigSpectrumService.saveGardsCalibrationPairsOrig(sourceData, sampleId, readLines); - //判断文件是否包含b-gEfficiency块 新增Gards_Sample_Cert数据 - if (readLines.contains(SampleFileHeader.BGEFFICIENCY.getMessage())){ + // 判断文件是否包含b-gEfficiency块 新增Gards_Sample_Cert数据 + if (readLines.contains(SampleFileHeader.BGEFFICIENCY.getMessage())) { bgEfficiencyPairsSpectrumService.saveBgEfficiencyPairs(sourceData, sampleId); } - //判断文件是否包含TotalEff块 新增Gards_Sample_Cert数据 - if (readLines.contains(SampleFileHeader.TOTALEFF.getMessage())){ + // 判断文件是否包含TotalEff块 新增Gards_Sample_Cert数据 + if (readLines.contains(SampleFileHeader.TOTALEFF.getMessage())) { totalEfficiencyPairsSpectrumService.saveTotalEfficiencyPairs(sourceData, sampleId); } - //判断文件是否包含Ratios块 新增Gards_Sample_Ratios数据 - if 
(readLines.contains(SampleFileHeader.RATIOS.getMessage())){ + // 判断文件是否包含Ratios块 新增Gards_Sample_Ratios数据 + if (readLines.contains(SampleFileHeader.RATIOS.getMessage())) { sampleRatiosSpectrumService.saveSampleRatios(sourceData, sampleId); } - //判断是否包含ROI_Limits块 新增Gards_ROI_Limits数据 - if (readLines.contains(SampleFileHeader.ROILIMITS.getMessage())){ + // 判断是否包含ROI_Limits块 新增Gards_ROI_Limits数据 + if (readLines.contains(SampleFileHeader.ROILIMITS.getMessage())) { roiLimitsSpectrumService.saveRoiLimits(sourceData, sampleId); } - //新增Gards_Spectrum数据 + // 新增Gards_Spectrum数据 spectrumService.saveSpectrum(sourceData, sampleId, readLines, filePathName); - //判断是否包含Histogram块 新增Gards_Histogram数据 - if (readLines.contains(SampleFileHeader.HISTOGRAM.getMessage())){ + // 判断是否包含Histogram块 新增Gards_Histogram数据 + if (readLines.contains(SampleFileHeader.HISTOGRAM.getMessage())) { histogramService.saveHistogram(sourceData, sampleId, filePathName); } } @@ -5501,10 +5527,10 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { throw new RuntimeException(e); } finally { try { - if (Objects.nonNull(ftpClient)){ + if (Objects.nonNull(ftpClient)) { ftpClient.disconnect(); } - if (Objects.nonNull(inputStream)){ + if (Objects.nonNull(inputStream)) { inputStream.close(); } if (Objects.nonNull(file)) { @@ -5520,8 +5546,8 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { List readLines = new LinkedList<>(); try { List allLines = FileUtils.readLines(file, "UTF-8"); - for (String line:allLines) { - if (line.contains("#")){ + for (String line : allLines) { + if (line.contains("#")) { readLines.add(line); } } @@ -5533,11 +5559,11 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService { public List getROIResult(List roiResultsSpectrumList, Integer sampleId, Integer idAnalysis) { List roiResultsList = new LinkedList<>(); - for (int i=0; i