spectrum-analysis module: implement the endpoints for querying line-chart data

This commit is contained in:
qiaoqinzheng 2023-07-11 15:36:02 +08:00
parent c88b114ade
commit 0b286a9af2
11 changed files with 388 additions and 7 deletions

View File

@ -77,12 +77,6 @@ public class GardsSohData implements Serializable {
@DateTimeFormat(pattern = "yyyy-MM-dd HH:mm:ss")
private Date moddate;
- /**
-  * Station name
-  */
- @TableField(exist = false)
- private String stationName;
/**
 * Detector name
 */

View File

@ -0,0 +1,151 @@
package org.jeecg.common.util;
import com.baomidou.mybatisplus.core.toolkit.CollectionUtils;
import com.baomidou.mybatisplus.core.toolkit.StringPool;
import com.baomidou.mybatisplus.core.toolkit.StringUtils;
import org.jeecg.common.api.vo.Result;
import org.jeecg.modules.entity.vo.HistogramData;
import org.jeecg.modules.entity.vo.SeriseData;
import org.jeecg.modules.entity.vo.SpectrumData;
import org.jeecg.modules.native_jni.EnergySpectrumHandler;
import org.jeecg.modules.native_jni.struct.EnergySpectrumStruct;
import org.springframework.stereotype.Component;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.*;
@Component
public class PHDFileUtil {
public static Map<String, Object> getSourceData(String filePath){
//load the native library used to parse PHD spectrum files
System.loadLibrary("ReadPHDFile");
EnergySpectrumStruct struct = EnergySpectrumHandler.getSourceData(filePath);
Map<String, Object> map = new HashMap<>();
try {
SpectrumData spectrumData = new SpectrumData();
//assemble the basic spectrum information displayed with the charts
//Station Code
String stationCode = struct.site_code;
//Detector Code
String detectorCode = struct.detector_code;
//Data Type
String dataType = struct.data_type;
//Collection Start
Date CollectionStart = DateUtils.parseDate(struct.collection_start_date.replace(StringPool.SLASH,StringPool.DASH) + StringPool.SPACE + struct.collection_start_time.substring(0, struct.collection_start_time.indexOf(StringPool.DOT)), "yyyy-MM-dd HH:mm:ss");
//Collection Stop
Date CollectionStop = DateUtils.parseDate(struct.collection_stop_date.replace(StringPool.SLASH,StringPool.DASH) + StringPool.SPACE + struct.collection_stop_time.substring(0, struct.collection_stop_time.indexOf(StringPool.DOT)), "yyyy-MM-dd HH:mm:ss");
//Collection Time (elapsed seconds between collection start and stop)
String CollectionTime = String.format("%.2f", (CollectionStop.getTime() - CollectionStart.getTime()) / 1000.0);
//Acquisition Start
Date AcquisitionStart = DateUtils.parseDate(struct.acquisition_start_date.replace(StringPool.SLASH,StringPool.DASH) + StringPool.SPACE + struct.acquisition_start_time.substring(0, struct.acquisition_start_time.indexOf(StringPool.DOT)), "yyyy-MM-dd HH:mm:ss");
//Acq Real Time
double AcquisitionRealTime = struct.acquisition_real_time;
//Acq live Time
double AcquisitionLiveTime = struct.acquisition_live_time;
//Air Volume[m3]
double airVolume = struct.air_volume;
//Xe Volume[m3]
double xeVolume = struct.sample_volume_of_Xe;
spectrumData.setStationCode(stationCode);
spectrumData.setDetectorCode(detectorCode);
spectrumData.setDataType(dataType);
spectrumData.setCollectionStart(CollectionStart);
spectrumData.setCollectionStop(CollectionStop);
spectrumData.setCollectionTime(CollectionTime);
spectrumData.setAcquisitionStart(AcquisitionStart);
spectrumData.setAcquisitionRealTime(AcquisitionRealTime);
spectrumData.setAcquisitionLiveTime(AcquisitionLiveTime);
spectrumData.setAirVolume(airVolume);
spectrumData.setXeVolume(xeVolume);
map.put("spectrumData", spectrumData);
//build the beta-gamma coincidence histogram (scatter data)
//number of beta channels (x axis)
long bChannels = struct.b_channels;
//number of gamma channels (y axis)
long gChannels = struct.g_channels;
//flattened two-dimensional coincidence counts
List<Long> hCounts = struct.h_counts;
List<HistogramData> histogramDataList = new LinkedList<>();
for (int i=0; i<bChannels; i++){
//slice the flattened counts array into equal-sized rows
List<Long> list = null;
if (i != bChannels-1){
list = hCounts.subList((int) (i * bChannels), (int) ((i + 1) * bChannels));
}else {
list = hCounts.subList((int) (i * bChannels), hCounts.size());
}
if (CollectionUtils.isNotEmpty(list)){
for (int j=0; j< list.size(); j++){
HistogramData his = new HistogramData();
his.setB(i);
his.setG(j);
Long count = list.get(j);
his.setC(count);
histogramDataList.add(his);
}
}
}
map.put("histogramDataList", histogramDataList);
//Gamma Spectrum Original
List<Long> gammaOriginalData = new LinkedList<>();
for (int i=0; i<gChannels; i++) {
long i_count = 0;
for (int j=0; j<bChannels; j++) {
i_count += hCounts.get((int) (i*bChannels + j));
}
gammaOriginalData.add(i_count);
}
List<SeriseData> gammaSeriseData = new LinkedList<>();
for (int i=0; i<gammaOriginalData.size(); i++){
SeriseData seriseData = new SeriseData();
seriseData.setX(i);
seriseData.setY(gammaOriginalData.get(i));
gammaSeriseData.add(seriseData);
}
map.put("gammaOriginalData", gammaSeriseData);
//Gamma Spectrum Projected
List<Double> gCentroidChannel = struct.g_centroid_channel;
List<Double> gEnergy = struct.g_energy;
List<Double> gammaProjectedData = EnergySpectrumHandler.GetFileFittingPara(gCentroidChannel, gEnergy);
map.put("gammaProjectedData", gammaProjectedData);
//Beta Spectrum Original
List<Long> betaOriginalData = new LinkedList<>();
for (int j=0; j<bChannels; ++j) {
long j_count = 0;
for (int i=0; i<gChannels; ++i)
{
j_count += hCounts.get((int) (i*bChannels + j));
}
betaOriginalData.add(j_count);
}
List<SeriseData> betaSeriseData = new LinkedList<>();
for (int i=0; i<betaOriginalData.size(); i++){
SeriseData seriseData = new SeriseData();
seriseData.setX(i);
seriseData.setY(betaOriginalData.get(i));
betaSeriseData.add(seriseData);
}
map.put("betaOriginalData", betaSeriseData);
//Beta Spectrum Projected
List<Double> bChannel = struct.b_channel;
List<Double> bElectronEnergy = struct.b_electron_energy;
List<Double> betaProjectedData = EnergySpectrumHandler.GetFileFittingPara(bChannel, bElectronEnergy);
map.put("betaProjectedData", betaProjectedData);
} catch (ParseException e) {
throw new RuntimeException(e);
}
return map;
}
}
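For orientation, here is a minimal, self-contained sketch of how the projection loops above collapse the flattened h_counts histogram into the gamma and beta spectra. The 3x4 channel counts are invented purely for illustration; only the indexing scheme (row index = gamma channel, column index = beta channel, offset i*bChannels + j) mirrors the code above.

import java.util.Arrays;
import java.util.List;

public class ProjectionSketch {
    public static void main(String[] args) {
        //illustrative sizes only: 3 gamma channels (rows) x 4 beta channels (columns)
        int gChannels = 3, bChannels = 4;
        List<Long> hCounts = Arrays.asList(
                1L, 2L, 3L, 4L,
                5L, 6L, 7L, 8L,
                9L, 10L, 11L, 12L);
        //gamma projection: sum each row, as in the "Gamma Spectrum Original" loop
        long[] gamma = new long[gChannels];
        for (int i = 0; i < gChannels; i++)
            for (int j = 0; j < bChannels; j++)
                gamma[i] += hCounts.get(i * bChannels + j);
        //beta projection: sum each column, as in the "Beta Spectrum Original" loop
        long[] beta = new long[bChannels];
        for (int j = 0; j < bChannels; j++)
            for (int i = 0; i < gChannels; i++)
                beta[j] += hCounts.get(i * bChannels + j);
        System.out.println(Arrays.toString(gamma)); //[10, 26, 42]
        System.out.println(Arrays.toString(beta));  //[15, 18, 21, 24]
    }
}

Each projected spectrum is then wrapped into SeriseData points (x = channel index, y = summed count) before being returned to the caller.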

View File

@ -23,7 +23,7 @@ public class SpectrumAnalysesController {
private ISpectrumAnalysisService spectrumAnalysisService;
@GetMapping("getDBSearchList")
- @ApiOperation(value = "查询台站,探测器数据接口", notes = "查询台站,探测器数据接口")
+ @ApiOperation(value = "查询查询条件数据接口", notes = "查询查询条件数据接口")
public Result getDBSearchList(String[] menuTypes){
return spectrumAnalysisService.getDBSearchList(menuTypes);
}
@ -35,5 +35,10 @@ public class SpectrumAnalysesController {
return spectrumAnalysisService.getDBSpectrumList(queryRequest, gardsSampleData, dbName, menuTypes, CollectStop, AcqStart, startDate, endDate);
}
@GetMapping("getDBSpectrumChart")
@ApiOperation(value = "查询折线图相关信息接口", notes = "查询折线图相关信息接口")
public Result getDBSpectrumPie(String dbName, Integer[] sampleId){
return spectrumAnalysisService.getDBSpectrumPie(dbName, sampleId);
}
}
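For reference, a call to the new chart endpoint might look like the sketch below. The host, port, and the controller's base path are assumptions (the class-level request mapping is not part of this diff); dbName accepts "auto" or "man", and sampleId is repeated once per selected sample, as the service implementation further down expects.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class ChartRequestSketch {
    public static void main(String[] args) throws Exception {
        //host and base path are illustrative; the sample IDs are made up
        String url = "http://localhost:8080/spectrumAnalysis/getDBSpectrumChart"
                + "?dbName=auto&sampleId=101&sampleId=102";
        HttpRequest request = HttpRequest.newBuilder(URI.create(url)).GET().build();
        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.body()); //Result JSON wrapping the per-file chart data
    }
}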

View File

@ -0,0 +1,16 @@
package org.jeecg.modules.entity.vo;
import lombok.Data;
import java.io.Serializable;
@Data
public class HistogramData implements Serializable {
private Integer b;
private Integer g;
private Long c;
}

View File

@ -0,0 +1,14 @@
package org.jeecg.modules.entity.vo;
import lombok.Data;
import java.io.Serializable;
@Data
public class SeriseData implements Serializable {
private int x;
private long y;
}

View File

@ -0,0 +1,41 @@
package org.jeecg.modules.entity.vo;
import com.fasterxml.jackson.annotation.JsonFormat;
import lombok.Data;
import org.springframework.format.annotation.DateTimeFormat;
import java.io.Serializable;
import java.util.Date;
@Data
public class SpectrumData implements Serializable {
private String stationCode;
private String detectorCode;
private String dataType;
@DateTimeFormat(pattern = "yyyy-MM-dd HH:mm:ss")
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
private Date CollectionStart;
@DateTimeFormat(pattern = "yyyy-MM-dd HH:mm:ss")
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
private Date CollectionStop;
private String CollectionTime;
@DateTimeFormat(pattern = "yyyy-MM-dd HH:mm:ss")
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
private Date AcquisitionStart;
private double AcquisitionRealTime;
private double AcquisitionLiveTime;
private double AirVolume;
private double XeVolume;
}

View File

@ -0,0 +1,29 @@
package org.jeecg.modules.entity.vo;
import lombok.Data;
import java.io.Serializable;
import java.util.Date;
@Data
public class SpectrumFileRecord implements Serializable {
private Integer sampleId;
private String sampleFilePath;
private String gasBgFilePath;
private String detBgFilePath;
private String qcFilePath;
private String logFilePath;
private String reportFilePath;
private String siteDetCode;
private Date collectStart;
}

View File

@ -3,6 +3,7 @@ package org.jeecg.modules.mapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import org.jeecg.modules.base.entity.GardsSampleData;
import org.jeecg.modules.entity.vo.SpectrumFileRecord;
import java.util.List;
@ -10,4 +11,6 @@ public interface SpectrumAnalysisMapper {
Page<GardsSampleData> getDBSpectrumList(IPage<GardsSampleData> page, GardsSampleData gardsSampleData, String dbName, List<String> stationTypes, boolean CollectStop, boolean AcqStart, String startTime, String endTime);
SpectrumFileRecord getDBSpectrumFilePath(String dbName, List<Integer> sampleIds);
}

View File

@ -65,4 +65,26 @@
ORDER BY c.collect_stop DESC
</select>
<select id="getDBSpectrumFilePath" resultType="org.jeecg.modules.entity.vo.SpectrumFileRecord">
SELECT
org_sample.SAMPLE_ID sampleId,
org_sample.INPUT_FILE_NAME sampleFilePath,
analyses.USEDGASPHD gasBgFilePath,
analyses.USEDDETPHD detBgFilePath,
analyses.LOG_PATH logFilePath,
analyses.REPORT_PAHT reportFilePath,
TRIM(org_sample.SITE_DET_CODE) siteDetCode,
org_sample.COLLECT_START collectStart
FROM ORIGINAL.GARDS_SAMPLE_DATA org_sample,
${dbName} analyses
<where>
analyses.SAMPLE_ID IN
<foreach collection="sampleIds" item="sampleId" open="(" close=")" separator=",">
#{sampleId}
</foreach>
AND org_sample.SAMPLE_ID=analyses.SAMPLE_ID
</where>
</select>
</mapper>
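As a sanity check, with dbName resolved to RNAUTO.GARDS_ANALYSES and two selected samples, MyBatis would render roughly the statement below; the sample IDs are illustrative, and in practice the #{sampleId} placeholders are sent as bind parameters rather than inlined literals.

SELECT
    org_sample.SAMPLE_ID sampleId,
    org_sample.INPUT_FILE_NAME sampleFilePath,
    analyses.USEDGASPHD gasBgFilePath,
    analyses.USEDDETPHD detBgFilePath,
    analyses.LOG_PATH logFilePath,
    analyses.REPORT_PAHT reportFilePath,
    TRIM(org_sample.SITE_DET_CODE) siteDetCode,
    org_sample.COLLECT_START collectStart
FROM ORIGINAL.GARDS_SAMPLE_DATA org_sample,
     RNAUTO.GARDS_ANALYSES analyses
WHERE analyses.SAMPLE_ID IN (101, 102)
  AND org_sample.SAMPLE_ID = analyses.SAMPLE_ID

Note that no column is mapped to qcFilePath here, so the QC branch in the service below only runs if that field is populated elsewhere.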

View File

@ -12,4 +12,6 @@ public interface ISpectrumAnalysisService {
Result getDBSpectrumList(QueryRequest queryRequest, GardsSampleData gardsSampleData, String dbName, String[] menuTypes, boolean CollectStop, boolean AcqStart, Date startDate, Date endDate);
Result getDBSpectrumPie(String dbName, Integer[] sampleId);
}

View File

@ -3,13 +3,20 @@ package org.jeecg.modules.service.impl;
import com.baomidou.dynamic.datasource.annotation.DS;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.toolkit.CollectionUtils;
import com.baomidou.mybatisplus.core.toolkit.StringPool;
import com.baomidou.mybatisplus.core.toolkit.StringUtils;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import org.apache.commons.io.FileUtils;
import org.apache.commons.net.ftp.FTPClient;
import org.apache.commons.net.ftp.FTPFile;
import org.jeecg.common.api.QueryRequest;
import org.jeecg.common.api.vo.Result;
import org.jeecg.common.util.DateUtils;
import org.jeecg.common.util.FTPUtil;
import org.jeecg.common.util.PHDFileUtil;
import org.jeecg.common.util.RedisUtil;
import org.jeecg.modules.base.entity.GardsSampleData;
import org.jeecg.modules.entity.vo.SpectrumFileRecord;
import org.jeecg.modules.mapper.GardsDetectorsMapper;
import org.jeecg.modules.mapper.SpectrumAnalysisMapper;
import org.jeecg.modules.service.ISpectrumAnalysisService;
@ -19,6 +26,10 @@ import org.jeecg.modules.system.entity.GardsStations;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.*;
import java.util.stream.Collectors;
@ -34,6 +45,8 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService {
private ISysDictService sysDictService;
@Autowired
private GardsDetectorsMapper gardsDetectorsMapper;
@Autowired
private FTPUtil ftpUtil;
@Override
public Result getDBSearchList(String[] menuTypes) {
@ -115,4 +128,95 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService {
result.setResult(sampleDataPage);
return result;
}
@Override
public Result getDBSpectrumPie(String dbName, Integer[] sampleId) {
Result result = new Result();
Map<String, Map<String, Object>> resultMap = new HashMap<>();
if (Objects.isNull(sampleId) || sampleId.length == 0){
result.error500("至少需要选择一个数据");
return result;
}
List<Integer> sampleIds = Arrays.asList(sampleId);
if (dbName.equalsIgnoreCase("auto")){
dbName = "RNAUTO.GARDS_ANALYSES";
}else if (dbName.equalsIgnoreCase("man")){
dbName = "RNMAN.GARDS_ANALYSES";
}else {
result.error500("数据库类型不存在");
return result;
}
//query the spectrum file paths recorded in the database
SpectrumFileRecord dbSpectrumFilePath = spectrumAnalysisMapper.getDBSpectrumFilePath(dbName, sampleIds);
if(StringUtils.isNotBlank(dbSpectrumFilePath.getSampleFilePath())){
String sampleFilePath = dbSpectrumFilePath.getSampleFilePath();
String pathName = sampleFilePath.substring(0, sampleFilePath.lastIndexOf(StringPool.SLASH));
String fileName = sampleFilePath.substring(sampleFilePath.lastIndexOf(StringPool.SLASH)+1);
Map<String, Object> map = this.fenxi(pathName, fileName);
resultMap.put("sample",map);
}
if(StringUtils.isNotBlank(dbSpectrumFilePath.getGasBgFilePath())){
String gasBgFilePath = dbSpectrumFilePath.getGasBgFilePath();
String pathName = gasBgFilePath.substring(0, gasBgFilePath.lastIndexOf(StringPool.SLASH));
String fileName = gasBgFilePath.substring(gasBgFilePath.lastIndexOf(StringPool.SLASH)+1);
Map<String, Object> map = this.fenxi(pathName, fileName);
//store each spectrum's chart data under its own key ("gasBg"/"detBg"/"qc"), mirroring the "sample" entry above
resultMap.put("gasBg", map);
}
if(StringUtils.isNotBlank(dbSpectrumFilePath.getDetBgFilePath())){
String detBgFilePath = dbSpectrumFilePath.getDetBgFilePath();
String pathName = detBgFilePath.substring(0, detBgFilePath.lastIndexOf(StringPool.SLASH));
String fileName = detBgFilePath.substring(detBgFilePath.lastIndexOf(StringPool.SLASH)+1);
Map<String, Object> map = this.fenxi(pathName, fileName);
resultMap.put("detBg", map);
}
if(StringUtils.isNotBlank(dbSpectrumFilePath.getQcFilePath())){
String qcFilePath = dbSpectrumFilePath.getQcFilePath();
String pathName = qcFilePath.substring(0, qcFilePath.lastIndexOf(StringPool.SLASH));
String fileName = qcFilePath.substring(qcFilePath.lastIndexOf(StringPool.SLASH)+1);
Map<String, Object> map = this.fenxi(pathName, fileName);
resultMap.put("qc", map);
}
result.setSuccess(true);
result.setResult(resultMap);
return result;
}
private Map<String, Object> fenxi(String pathName, String fileName) {
Map<String, Object> map = new HashMap<>();
FTPClient ftpClient = ftpUtil.LoginFTP();
if (Objects.isNull(ftpClient)){
throw new RuntimeException("ftp连接失败");
}
try {
//switch to local passive mode
ftpClient.enterLocalPassiveMode();
ftpClient.setFileType(FTPClient.BINARY_FILE_TYPE);
//set the control encoding; helps when paths or file names contain Chinese characters
ftpClient.setControlEncoding("UTF-8");
ftpClient.setFileTransferMode(FTPClient.STREAM_TRANSFER_MODE);
List<String> paths = Arrays.asList(pathName.split(StringPool.SLASH));
for (String path:paths) {
ftpClient.changeWorkingDirectory(path);
}
List<FTPFile> ftpFiles = Arrays.asList(ftpClient.listFiles());
ftpFiles = ftpFiles.stream().filter(item -> item.getName().equals(fileName)).collect(Collectors.toList());
//guard against the file not being found on the FTP server
if (CollectionUtils.isNotEmpty(ftpFiles)){
FTPFile ftpFile = ftpFiles.get(0);
InputStream inputStream = ftpClient.retrieveFileStream(ftpFile.getName());
//create a temporary local file
File file = File.createTempFile("tmp", null);
//copy the FTP stream into the temporary file (copyInputStreamToFile closes the stream)
FileUtils.copyInputStreamToFile(inputStream, file);
map = PHDFileUtil.getSourceData(file.getAbsolutePath());
}
} catch (IOException e) {
throw new RuntimeException(e);
} finally {
try {
if (Objects.nonNull(ftpClient)){
ftpClient.disconnect();
}
} catch (IOException e) {
throw new RuntimeException(e);
}
}
return map;
}
}
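Putting the pieces together, the result field of the Result object returned by getDBSpectrumChart ends up shaped roughly like the sketch below. The top-level keys come from getDBSpectrumPie, the nested keys from PHDFileUtil.getSourceData; every value shown is illustrative.

{
  "sample": {
    "spectrumData":       { "stationCode": "...", "detectorCode": "...", "dataType": "...", "collectionTime": "..." },
    "histogramDataList":  [ { "b": 0, "g": 0, "c": 12 }, ... ],
    "gammaOriginalData":  [ { "x": 0, "y": 345 }, ... ],
    "gammaProjectedData": [ ... ],
    "betaOriginalData":   [ { "x": 0, "y": 287 }, ... ],
    "betaProjectedData":  [ ... ]
  },
  "gasBg": { ... },
  "detBg": { ... },
  "qc":    { ... }
}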