自动处理,人工交互中acq对应的结束时间计算方式live_time改为real_time

This commit is contained in:
qiaoqinzheng 2023-12-28 09:36:42 +08:00
parent d0e015e57d
commit 1ad1d6c1c9
9 changed files with 397 additions and 24 deletions

View File

@ -85,8 +85,8 @@ public class SpectrumBaseBlockServiceImpl implements ISpectrumBaseBlockService {
if(StringUtils.isNotBlank(struct.acquisition_start_date) && StringUtils.isNotBlank(struct.acquisition_start_time)){
gardsSampleData.setAcquisitionStart(DateUtils.parseDate(struct.acquisition_start_date+StringConstant.SPACE+struct.acquisition_start_time));
}
if(Objects.nonNull(gardsSampleData.getAcquisitionStart()) && struct.acquisition_live_time > 0){
Date acquisitionStop = new Date((long) (gardsSampleData.getAcquisitionStart().getTime()+(struct.acquisition_live_time*1000)));
if(Objects.nonNull(gardsSampleData.getAcquisitionStart()) && struct.acquisition_real_time > 0){
Date acquisitionStop = new Date((long) (gardsSampleData.getAcquisitionStart().getTime()+(struct.acquisition_real_time*1000)));
gardsSampleData.setAcquisitionStop(acquisitionStop);
}
gardsSampleData.setAcquisitionRealSec(struct.acquisition_real_time);

View File

@ -3681,7 +3681,7 @@ public class GammaFileUtil extends AbstractLogOrReport {
middleData.sample_acquisiton_start = fileAnlyse.getAcq().getAcquisition_start_date()+StringPool.SPACE+fileAnlyse.getAcq().getAcquisition_start_time();
String acquisition_start = middleData.sample_acquisiton_start;
Date dataTime = DateUtils.parseDate(acquisition_start);
middleData.sample_acquistion_stop = DateUtils.formatDate(new Date((long) (dataTime.getTime() + fileAnlyse.getAcq().getAcquisition_live_time()*1000)), "yyyy/MM/dd HH:mm:ss");
middleData.sample_acquistion_stop = DateUtils.formatDate(new Date((long) (dataTime.getTime() + fileAnlyse.getAcq().getAcquisition_real_time()*1000)), "yyyy/MM/dd HH:mm:ss");
middleData.sample_acquistion_time = String.format("%.2f", fileAnlyse.getAcq().getAcquisition_real_time()) ;
middleData.sample_stationID = fileAnlyse.getHeader().getSite_code();
middleData.sample_detectID = fileAnlyse.getHeader().getDetector_code();

View File

@ -537,4 +537,18 @@ public class GammaController {
/**
 * Export the currently analysed spectrum as a PHD file.
 * Thin delegate: all work (including writing the file to the response
 * stream) happens in the gamma service.
 *
 * @param fileName name of the spectrum file to export
 * @param request  current HTTP request (carries the user's token)
 * @param response HTTP response the PHD content is streamed into
 */
public void saveToPHD(String fileName, HttpServletRequest request, HttpServletResponse response) {
gammaService.saveToPHD(fileName, request, response);
}
/**
 * Peak Information page: return the comparison peak data for the
 * spectrum previously parsed and cached for the requesting user.
 * Thin delegate to the gamma service.
 *
 * @param fileName name of the parsed spectrum file (part of the cache key)
 * @param request  current HTTP request (identifies the user via JWT)
 * @return Result wrapping the list of comparison peaks
 */
@GetMapping("peakComparison")
@ApiOperation(value = "Peak Information页面查看Comparison数据", notes = "Peak Information页面查看Comparison数据")
public Result peakComparison(String fileName, HttpServletRequest request) {
return gammaService.peakComparison(fileName, request);
}
/**
 * Radionuclide Activity page: return the comparison nuclide-activity data
 * for the spectrum previously parsed and cached for the requesting user.
 *
 * Fix: the service method returns a {@code Result} (success flag plus the
 * queried data, or an error-500 when no parsed file is cached), but this
 * endpoint discarded it and always returned {@code null}, so the client
 * never received the comparison data. Propagate the service result instead,
 * mirroring the sibling {@code peakComparison} endpoint.
 *
 * @param fileName name of the parsed spectrum file (part of the cache key)
 * @param request  current HTTP request (identifies the user via JWT)
 * @return Result wrapping the list of comparison nuclide activities
 */
@GetMapping("nuclComparison")
@ApiOperation(value = "Radionuclide Activity页面查看Comparison数据", notes = "Radionuclide Activity页面查看Comparison数据")
public Result nuclComparison(String fileName, HttpServletRequest request) {
return gammaService.nuclComparison(fileName, request);
}
}

View File

@ -0,0 +1,19 @@
package org.jeecg.modules.service;
import org.jeecg.modules.entity.vo.PeakInfo;
import org.jeecg.modules.entity.vo.TableNuclideActivity;
import java.sql.Connection;
import java.util.List;
/**
 * Low-level JDBC access to the IDC comparison databases.
 * Connections returned by the connect methods are owned by the caller,
 * who is responsible for closing them.
 */
public interface IDataService {
/** Connect to the locally synchronized (inland) IDC database. */
Connection connectInland();
/** Connect to the international (overseas) IDC database. */
Connection connectOverSea();
/**
 * Look up the sample matching the given detector code, spectral qualifier,
 * station id, collect-start and acquisition-stop timestamps, and return its
 * peak rows (with associated nuclide names attached).
 */
List<PeakInfo> viewPeaks(String siteDetCode, String spectralQualifier, Integer stationId, String collectStart, String acquisitionStop, Connection conn);
/**
 * Look up the sample matching the given identifiers and return its
 * identified-nuclide activity rows (energy/yield/efficiency filled in from
 * the key gamma lines).
 */
List<TableNuclideActivity> viewNucl(String siteDetCode, String spectralQualifier, Integer stationId, String collectStart, String acquisitionStop, Connection conn);
}

View File

@ -202,4 +202,9 @@ public interface IGammaService{
void IecToSpc(MultipartFile file, HttpServletResponse response);
void readMDCParameter();
Result peakComparison(String fileName, HttpServletRequest request);
Result nuclComparison(String fileName, HttpServletRequest request);
}

View File

@ -3,14 +3,12 @@ package org.jeecg.modules.service.impl;
import cn.hutool.core.collection.CollUtil;
import cn.hutool.core.collection.ListUtil;
import cn.hutool.core.date.DateUtil;
import cn.hutool.core.io.FileUtil;
import cn.hutool.core.map.MapUtil;
import cn.hutool.core.util.ObjectUtil;
import cn.hutool.core.util.ReUtil;
import cn.hutool.core.util.StrUtil;
import com.alibaba.fastjson.JSON;
import com.baomidou.dynamic.datasource.annotation.DS;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.toolkit.CollectionUtils;
import com.baomidou.mybatisplus.core.toolkit.StringPool;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
@ -21,13 +19,11 @@ import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import com.google.common.cache.Cache;
import org.apache.commons.net.ftp.FTPClient;
import org.apache.shiro.SecurityUtils;
import org.jeecg.common.api.QueryRequest;
import org.jeecg.common.api.vo.Result;
import org.jeecg.common.cache.LocalCache;
import org.jeecg.common.constant.DateConstant;
import org.jeecg.common.handler.HttpClient;
import org.jeecg.common.properties.ParameterProperties;
import org.jeecg.common.properties.SpectrumPathProperties;
import org.jeecg.common.system.util.JwtUtil;
@ -40,7 +36,6 @@ import org.jeecg.modules.base.dto.NuclideActMdaDto;
import org.jeecg.modules.base.dto.PeakInfoDto;
import org.jeecg.modules.base.entity.configuration.GardsNuclLib;
import org.jeecg.modules.base.entity.configuration.GardsNuclLinesLib;
import org.jeecg.modules.base.entity.postgre.SysUser;
import org.jeecg.modules.base.entity.rnman.GardsAnalySetting;
import org.jeecg.modules.base.enums.*;
import org.jeecg.modules.entity.vo.*;
@ -82,7 +77,6 @@ import java.util.*;
import java.util.function.Function;
import java.util.stream.Collectors;
import static io.netty.util.ResourceLeakDetector.setEnabled;
import static org.jeecg.modules.base.enums.ExportTemplate.*;
@Service(value = "gammaService")
@ -155,6 +149,8 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
private RedisStreamUtil redisStreamUtil;
@Autowired
private ISysDictSpectrumService sysDictService;
@Autowired
private IDataService dataService;
@Override
public Result initValue(Integer sampleId, String dbName, String analyst, String samfileName, HttpServletRequest request) {
@ -2334,16 +2330,6 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
return result;
}
// public List<String> InitList(double energy, double tolerance, List<String> nuclides) {
// if (nuclides.size() < 1) {
// return new LinkedList<>();
// }
// double min = energy - tolerance;
// double max = energy + tolerance;
// List<String> nuclideList = spectrumAnalysisMapper.findNuclideList(min, max, nuclides);
// return nuclideList;
// }
public void InitTable(String name, Map<String, Object> map, PHDFile phd, Map<String, String> colorMap, Map<String, NuclideLines> nuclideMap) {
InitNuclideInfo(name, map);
long span = phd.getSpec().getG_energy_span();
@ -4460,7 +4446,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
Date acq_start_dt = DateUtils.parseDate(acq_start);
double Decay_Time = ((acq_start_dt.getTime() - collect_stop_dt.getTime()) / 1000) / 3600.0;
long secs = (long) (acq_start_dt.getTime() + (phd.getAcq().getAcquisition_live_time() * 1000));
long secs = (long) (acq_start_dt.getTime() + (phd.getAcq().getAcquisition_real_time() * 1000));
String acq_stop = DateUtils.formatDate(new Date(secs), "yyyy/MM/dd HH:mm:ss.S");
map.put("stationId", phd.getHeader().getSite_code());
@ -5901,4 +5887,116 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
}
}
/**
 * Peak Information page: fetch the comparison peak rows from the IDC
 * database for the spectrum that was previously parsed and cached for
 * this user.
 *
 * @param fileName name of the spectrum file; combined with the user name
 *                 to form the cache key
 * @param request  HTTP request carrying the JWT token that identifies the user
 * @return Result whose payload is the list of PeakInfo rows (empty when no
 *         database connection could be established); error-500 when no
 *         parsed file is cached for this user
 */
@Override
public Result peakComparison(String fileName, HttpServletRequest request) {
Result result = new Result();
Connection conn = null;
List<PeakInfo> peakInfoList = new LinkedList<>();
// Resolve the requesting user's name from the JWT token
String userName = JwtUtil.getUserNameByToken(request);
// Read the parsed PHD file from the local cache (keyed by fileName-userName)
Cache<String, PHDFile> phdCache = localCache.getPHDCache();
PHDFile phd = phdCache.getIfPresent(fileName + StringPool.DASH + userName);
if (Objects.isNull(phd)) {
result.error500("Please select the parse file first");
return result;
}
try {
String collectStart = "";
// Drop the fractional-second part of the collection start time, if any,
// so the string matches the DB's to-the-second timestamp format
if (phd.getCollect().getCollection_start_time().indexOf(StringPool.DOT) > 0) {
collectStart = phd.getCollect().getCollection_start_date() + " " + phd.getCollect().getCollection_start_time().substring(0, phd.getCollect().getCollection_start_time().indexOf(StringPool.DOT));
} else {
collectStart = phd.getCollect().getCollection_start_date() + " " + phd.getCollect().getCollection_start_time();
}
// Acquisition start timestamp
Date acqStart = DateUtils.parseDate(phd.getAcq().getAcquisition_start_date() + " " + phd.getAcq().getAcquisition_start_time());
// Acquisition stop in epoch millis: start + acquisition real time (seconds).
// real_time deliberately replaces the former live_time in this computation.
long stopTime = (long) (acqStart.getTime() + (phd.getAcq().getAcquisition_real_time() * 1000));
// Acquisition stop as a Date
Date acquisitionStopDate = new Date(stopTime);
// Format the acquisition stop time for the DB query
String acquisitionStop = DateUtils.formatDate(acquisitionStopDate, "yyyy-MM-dd HH:mm:ss");
// Try the locally synchronized IDC database first
conn = dataService.connectInland();
// Fall back to the international IDC database when no local connection
if (Objects.isNull(conn)) {
conn = dataService.connectOverSea();
}
// Query only when a connection was established
if (Objects.nonNull(conn)) {
// Fetch the comparison rows from the Peaks table
peakInfoList = dataService.viewPeaks(phd.getHeader().getSite_code(), phd.getHeader().getSpectrum_quantity(), 0, collectStart, acquisitionStop, conn);
}
result.setSuccess(true);
result.setResult(peakInfoList);
} catch (ParseException e) {
throw new RuntimeException(e);
} finally {
// Always release the DB connection opened above
try {
if (Objects.nonNull(conn)) {
conn.close();
}
} catch (SQLException e) {
throw new RuntimeException(e);
}
}
return result;
}
/**
 * Radionuclide Activity page: fetch the comparison nuclide-activity rows
 * from the IDC database for the spectrum that was previously parsed and
 * cached for this user.
 *
 * @param fileName name of the spectrum file; combined with the user name
 *                 to form the cache key
 * @param request  HTTP request carrying the JWT token that identifies the user
 * @return Result whose payload is the list of TableNuclideActivity rows
 *         (empty when no database connection could be established);
 *         error-500 when no parsed file is cached for this user
 */
@Override
public Result nuclComparison(String fileName, HttpServletRequest request) {
Result result = new Result();
Connection conn = null;
List<TableNuclideActivity> nuclideActivityList = new LinkedList<>();
// Resolve the requesting user's name from the JWT token
String userName = JwtUtil.getUserNameByToken(request);
// Read the parsed PHD file from the local cache (keyed by fileName-userName)
Cache<String, PHDFile> phdCache = localCache.getPHDCache();
PHDFile phd = phdCache.getIfPresent(fileName + StringPool.DASH + userName);
if (Objects.isNull(phd)) {
result.error500("Please select the parse file first");
return result;
}
try {
String collectStart = "";
// Drop the fractional-second part of the collection start time, if any,
// so the string matches the DB's to-the-second timestamp format
if (phd.getCollect().getCollection_start_time().indexOf(StringPool.DOT) > 0) {
collectStart = phd.getCollect().getCollection_start_date() + " " + phd.getCollect().getCollection_start_time().substring(0, phd.getCollect().getCollection_start_time().indexOf(StringPool.DOT));
} else {
collectStart = phd.getCollect().getCollection_start_date() + " " + phd.getCollect().getCollection_start_time();
}
// Acquisition start timestamp
Date acqStart = DateUtils.parseDate(phd.getAcq().getAcquisition_start_date() + " " + phd.getAcq().getAcquisition_start_time());
// Acquisition stop in epoch millis: start + acquisition real time (seconds).
// real_time deliberately replaces the former live_time in this computation.
long stopTime = (long) (acqStart.getTime() + (phd.getAcq().getAcquisition_real_time() * 1000));
// Acquisition stop as a Date
Date acquisitionStopDate = new Date(stopTime);
// Format the acquisition stop time for the DB query
String acquisitionStop = DateUtils.formatDate(acquisitionStopDate, "yyyy-MM-dd HH:mm:ss");
// Try the locally synchronized IDC database first
conn = dataService.connectInland();
// Fall back to the international IDC database when no local connection
if (Objects.isNull(conn)) {
conn = dataService.connectOverSea();
}
// Query only when a connection was established
if (Objects.nonNull(conn)) {
// Fetch the comparison nuclide-activity rows
nuclideActivityList = dataService.viewNucl(phd.getHeader().getSite_code(), phd.getHeader().getSpectrum_quantity(), 0, collectStart, acquisitionStop, conn);
}
result.setSuccess(true);
result.setResult(nuclideActivityList);
} catch (ParseException e) {
throw new RuntimeException(e);
} finally {
// Always release the DB connection opened above
try {
if (Objects.nonNull(conn)) {
conn.close();
}
} catch (SQLException e) {
throw new RuntimeException(e);
}
}
return result;
}
}

View File

@ -77,7 +77,7 @@ public class GardsSampleDataSpectrumServiceImpl extends ServiceImpl<GardsSampleD
sampleData.setQuantity(quantity);
if (StringUtils.isNotBlank(acquisition_start)) {
Date acquisitionStart = DateUtils.parseDate(acquisition_start);
long timeSecs = (long) (acquisitionStart.getTime() + acquisition_live_sec);
long timeSecs = (long) (acquisitionStart.getTime() + acquisition_real_sec * 1000);
Date acquisitionStop = new Date(timeSecs);
sampleData.setAcquisitionStart(acquisitionStart);
sampleData.setAcquisitionStop(acquisitionStop);
@ -106,7 +106,7 @@ public class GardsSampleDataSpectrumServiceImpl extends ServiceImpl<GardsSampleD
long acq_stop_dt = 0;
try {
if (StringUtils.isNotBlank(acq_start)) {
acq_stop_dt = (long) (DateUtils.parseDate(acq_start).getTime() + phd.getAcq().getAcquisition_live_time()*1000);
acq_stop_dt = (long) (DateUtils.parseDate(acq_start).getTime() + phd.getAcq().getAcquisition_real_time()*1000);
}
} catch (ParseException e) {
throw new RuntimeException(e);

View File

@ -0,0 +1,237 @@
package org.jeecg.modules.service.impl;
import com.baomidou.mybatisplus.core.toolkit.CollectionUtils;
import com.baomidou.mybatisplus.core.toolkit.StringPool;
import org.jeecg.common.util.NumberFormatUtil;
import org.jeecg.modules.entity.vo.PeakInfo;
import org.jeecg.modules.entity.vo.TableNuclideActivity;
import org.jeecg.modules.service.IDataService;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import java.sql.*;
import java.util.Comparator;
import java.util.LinkedList;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
/**
 * JDBC access to the IDC comparison databases (local inland mirror and
 * overseas master).
 *
 * Fixes relative to the original implementation:
 * - All queries now use {@link PreparedStatement} bind parameters instead of
 *   string-concatenated SQL, closing an SQL-injection hole.
 * - {@code SPECTRAL_QUALIFIER} was previously concatenated UNQUOTED into the
 *   SQL text (producing e.g. {@code SPECTRAL_QUALIFIER = FULL}, which Oracle
 *   parses as an identifier and rejects with ORA-00904); it is now bound as a
 *   string value.
 * - Statements and result sets are closed via try-with-resources.
 * - The duplicated sample-id lookup is factored into a private helper.
 *
 * Connections returned by the connect methods are owned by the caller, who
 * must close them; the query methods close only what they create.
 */
@Service
public class IDataServiceImpl implements IDataService {

    @Value("${inland.url}")
    private String url; // local (inland) datasource url, i.e. the primary datasource
    @Value("${inland.username}")
    private String username;
    @Value("${inland.password}")
    private String password;
    @Value("${oversea.url}")
    private String urlO; // overseas datasource url, i.e. the secondary datasource
    @Value("${oversea.username}")
    private String usernameO;
    @Value("${oversea.password}")
    private String passwordO;

    /** Connect to the locally synchronized (inland) IDC database. */
    @Override
    public Connection connectInland() {
        return connect(url, username, password);
    }

    /** Connect to the international (overseas) IDC database. */
    @Override
    public Connection connectOverSea() {
        return connect(urlO, usernameO, passwordO);
    }

    /** Open a JDBC connection, wrapping any SQLException as unchecked. */
    private Connection connect(String jdbcUrl, String user, String pass) {
        try {
            return DriverManager.getConnection(jdbcUrl, user, pass);
        } catch (SQLException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Resolve the sample id from GARDS_SAMPLE_DATA by detector code, station
     * id, spectral qualifier, collect-start and acquisition-stop timestamps
     * (both given as "yyyy-MM-dd HH:mm:ss" strings).
     *
     * @return the matching sample id, or null when no row matches
     */
    private Integer findSampleId(String siteDetCode, String spectralQualifier, Integer stationId,
                                 String collectStart, String acquisitionStop, Connection conn) throws SQLException {
        String sql = "SELECT SAMPLE_ID FROM RMSMAN.GARDS_SAMPLE_DATA "
                + "WHERE TRIM( SITE_DET_CODE ) = ? "
                + "AND STATION_ID = ? "
                + "AND SPECTRAL_QUALIFIER = ? "
                + "AND COLLECT_START = TO_DATE( ?, 'YYYY-MM-DD HH24:MI:SS' ) "
                + "AND ACQUISITION_STOP = TO_DATE( ?, 'YYYY-MM-DD HH24:MI:SS' )";
        Integer sampleId = null;
        try (PreparedStatement ps = conn.prepareStatement(sql)) {
            ps.setString(1, siteDetCode);
            ps.setInt(2, stationId);
            ps.setString(3, spectralQualifier);
            ps.setString(4, collectStart);
            ps.setString(5, acquisitionStop);
            try (ResultSet rs = ps.executeQuery()) {
                // Keep the last row when (unexpectedly) several match, as before
                while (rs.next()) {
                    sampleId = rs.getInt("SAMPLE_ID");
                }
            }
        }
        return sampleId;
    }

    /**
     * Fetch the peak rows of the matching sample, sorted by peak index, with
     * the names of nuclides identified on each peak appended.
     *
     * @return the peaks found, or an empty list when no sample matches
     */
    @Override
    public List<PeakInfo> viewPeaks(String siteDetCode, String spectralQualifier, Integer stationId, String collectStart, String acquisitionStop, Connection conn) {
        List<PeakInfo> peakInfoList = new LinkedList<>();
        try {
            Integer sampleId = findSampleId(siteDetCode, spectralQualifier, stationId, collectStart, acquisitionStop, conn);
            if (Objects.isNull(sampleId)) {
                return peakInfoList;
            }
            // Read the peak table rows for this sample
            String peakSql = "SELECT AREA,AREA_ERR,CENTROID,ENERGY,FWHM,MULTIPLET,DETECTABILITY,PEAK_ID "
                    + "FROM RMSMAN.GARDS_PEAKS WHERE SAMPLE_ID = ?";
            try (PreparedStatement ps = conn.prepareStatement(peakSql)) {
                ps.setInt(1, sampleId);
                try (ResultSet rs = ps.executeQuery()) {
                    while (rs.next()) {
                        PeakInfo peak = new PeakInfo();
                        peak.index = rs.getInt("PEAK_ID");
                        peak.area = rs.getDouble("AREA");
                        peak.areaErr = rs.getDouble("AREA_ERR");
                        peak.peakCentroid = rs.getDouble("CENTROID");
                        peak.energy = rs.getDouble("ENERGY");
                        peak.fwhm = rs.getDouble("FWHM");
                        peak.multiIndex = rs.getInt("MULTIPLET");
                        peak.significance = rs.getDouble("DETECTABILITY");
                        peakInfoList.add(peak);
                    }
                }
            }
            if (CollectionUtils.isNotEmpty(peakInfoList)) {
                // Sort by peak index for a stable display order
                peakInfoList = peakInfoList.stream()
                        .sorted(Comparator.comparing(item -> item.index))
                        .collect(Collectors.toList());
                // Attach the names of nuclides identified on each peak
                String nuclIdedSql = "SELECT NAME,PEAK FROM RMSMAN.GARDS_NUCL_LINES_IDED WHERE SAMPLE_ID = ?";
                try (PreparedStatement ps = conn.prepareStatement(nuclIdedSql)) {
                    ps.setInt(1, sampleId);
                    try (ResultSet rs = ps.executeQuery()) {
                        while (rs.next()) {
                            String name = rs.getString("NAME");
                            int peak = rs.getInt("PEAK");
                            for (PeakInfo item : peakInfoList) {
                                if (item.index == peak) {
                                    item.nuclides.add(name + StringPool.SEMICOLON);
                                }
                            }
                        }
                    }
                }
            }
        } catch (SQLException e) {
            throw new RuntimeException(e);
        }
        return peakInfoList;
    }

    /**
     * Fetch the identified-nuclide activity rows of the matching sample,
     * sorted by nuclide name, with energy/yield/efficiency filled in from
     * each nuclide's key gamma line.
     *
     * @return the nuclide activities found, or an empty list when no sample matches
     */
    @Override
    public List<TableNuclideActivity> viewNucl(String siteDetCode, String spectralQualifier, Integer stationId, String collectStart, String acquisitionStop, Connection conn) {
        List<TableNuclideActivity> nuclideActivityList = new LinkedList<>();
        try {
            Integer sampleId = findSampleId(siteDetCode, spectralQualifier, stationId, collectStart, acquisitionStop, conn);
            if (Objects.isNull(sampleId)) {
                return nuclideActivityList;
            }
            // Read the identified-nuclide rows for this sample
            String nuclSql = "SELECT NAME,HALFLIFE,MDA,ACTIV_DECAY,ACTIV_KEY,ACTIV_KEY_ERR "
                    + "FROM RMSMAN.GARDS_NUCL_IDED WHERE SAMPLE_ID = ?";
            try (PreparedStatement ps = conn.prepareStatement(nuclSql)) {
                ps.setInt(1, sampleId);
                try (ResultSet rs = ps.executeQuery()) {
                    while (rs.next()) {
                        String name = rs.getString("NAME");
                        double halflife = rs.getDouble("HALFLIFE");
                        // Scale the half-life (stored in seconds) to the largest
                        // fitting unit: A=years, D=days, H=hours, S=seconds
                        char units = 'S';
                        if (halflife >= 31556736) { // 1 year = 365.24 * 24 * 60 * 60 = 31556736 s
                            halflife /= 31556736;
                            units = 'A';
                        } else if (halflife >= 86400) { // 1 day = 24 * 60 * 60 = 86400 s
                            halflife /= 86400;
                            units = 'D';
                        } else if (halflife >= 3600) { // 1 hour = 3600 s
                            halflife /= 3600;
                            units = 'H';
                        }
                        double mdc = rs.getDouble("MDA");
                        double conc = rs.getDouble("ACTIV_DECAY");
                        double activity = rs.getDouble("ACTIV_KEY");
                        double activityErr = rs.getDouble("ACTIV_KEY_ERR");
                        TableNuclideActivity nuclideActivity = new TableNuclideActivity();
                        nuclideActivity.setNuclide(name);
                        nuclideActivity.setHalfLife(NumberFormatUtil.numberFormat(String.valueOf(halflife)) + StringPool.SPACE + units);
                        nuclideActivity.setActivity(NumberFormatUtil.numberFormat(String.valueOf(activity)));
                        nuclideActivity.setActErr(NumberFormatUtil.numberFormat(String.valueOf(activityErr)));
                        nuclideActivity.setConc(NumberFormatUtil.numberFormat(String.valueOf(conc)));
                        nuclideActivity.setMdc(NumberFormatUtil.numberFormat(String.valueOf(mdc)));
                        nuclideActivityList.add(nuclideActivity);
                    }
                }
            }
            if (CollectionUtils.isNotEmpty(nuclideActivityList)) {
                nuclideActivityList = nuclideActivityList.stream()
                        .sorted(Comparator.comparing(TableNuclideActivity::getNuclide))
                        .collect(Collectors.toList());
                // Key gamma lines (KEY_FLAG = 1) supply energy, yield and efficiency
                String nuclLinesIdedSql = "SELECT NAME,ENERGY,ABUNDANCE,EFFIC FROM RMSMAN.GARDS_NUCL_LINES_IDED "
                        + "WHERE SAMPLE_ID = ? AND KEY_FLAG = 1";
                try (PreparedStatement ps = conn.prepareStatement(nuclLinesIdedSql)) {
                    ps.setInt(1, sampleId);
                    try (ResultSet rs = ps.executeQuery()) {
                        while (rs.next()) {
                            String name = rs.getString("NAME");
                            double energy = rs.getDouble("ENERGY");
                            double abundance = rs.getDouble("ABUNDANCE");
                            double effic = rs.getDouble("EFFIC");
                            for (TableNuclideActivity item : nuclideActivityList) {
                                if (item.getNuclide().equals(name)) {
                                    item.setEnergy(NumberFormatUtil.numberFormat(String.valueOf(energy)));
                                    item.setYield(NumberFormatUtil.numberFormat(String.valueOf(abundance)));
                                    item.setEfficiency(NumberFormatUtil.numberFormat(String.valueOf(effic)));
                                }
                            }
                        }
                    }
                }
            }
        } catch (SQLException e) {
            throw new RuntimeException(e);
        }
        return nuclideActivityList;
    }
}

View File

@ -974,7 +974,7 @@ public class SpectrumAnalysisServiceImpl extends AbstractLogOrReport implements
String acquisition_start_str = sampleSourceData.acquisition_start_date+StringPool.SPACE+sampleSourceData.acquisition_start_time;
Date acquisition_start = DateUtils.parseDate(acquisition_start_str);
information.setAcquisition_start(acquisition_start);
long dataTime = (long) (acquisition_start.getTime() + sampleSourceData.acquisition_live_time * 1000);
long dataTime = (long) (acquisition_start.getTime() + sampleSourceData.acquisition_real_time * 1000);
String acquisition_stop_str = DateUtils.formatDate(new Date(dataTime), "yyyy/MM/dd HH:mm:ss");
Date acquisition_stop = DateUtils.parseDate(acquisition_stop_str);
information.setAcquisition_stop(acquisition_stop);
@ -4308,7 +4308,7 @@ public class SpectrumAnalysisServiceImpl extends AbstractLogOrReport implements
String acquisition_start_str = sampleSourceData.acquisition_start_date+StringPool.SPACE+sampleSourceData.acquisition_start_time;
Date acquisition_start = DateUtils.parseDate(acquisition_start_str);
information.setAcquisition_start(acquisition_start);
long dataTime = (long) (acquisition_start.getTime() + sampleSourceData.acquisition_live_time * 1000);
long dataTime = (long) (acquisition_start.getTime() + sampleSourceData.acquisition_real_time * 1000);
String acquisition_stop_str = DateUtils.formatDate(new Date(dataTime), "yyyy/MM/dd HH:mm:ss");
Date acquisition_stop = DateUtils.parseDate(acquisition_stop_str);
information.setAcquisition_stop(acquisition_stop);