FTPUtil: add methods to get the FTP login username, the FTP login password, and the FTP character set
PHDFileUtil: implement the status-light color method and the line-reading method, and update the data-retrieval method. Add the UserTaskUtil utility class to check whether the current user has scheduling-task permission for the current station. Implement the interfaces for deleting cached line-chart data, viewing comment data, adding a comment, viewing the ARR report, viewing the RRR report, viewing Spectrum data, viewing SampleInformation data, viewing QC Result data, and viewing RLR data. Add several entity classes. Rename detector-related files to resolve naming conflicts. Change how ReadLineUtil switches the file path. Add an interface for querying a user's scheduled tasks.
parent 45f4df3ab6
commit 09902eaec3
@@ -2,7 +2,6 @@ package org.jeecg.common.util;
 import com.baomidou.mybatisplus.core.toolkit.CollectionUtils;
 import lombok.extern.slf4j.Slf4j;
-import net.sf.saxon.trans.SymbolicName;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.net.ftp.FTP;
 import org.apache.commons.net.ftp.FTPClient;
@@ -39,6 +38,18 @@ public class FTPUtil {
     @Value("${ftp.encoding}")
     private String encoding;
 
+    public String getUserName(){
+        return this.userName;
+    }
+
+    public String getPassword(){
+        return this.password;
+    }
+
+    public String getEncoding(){
+        return this.encoding;
+    }
+
     /**
      * 登录ftp
      * @return
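Note: the new getUserName/getPassword/getEncoding accessors expose the @Value-injected FTP settings to other beans instead of forcing each caller to re-inject the properties. A minimal sketch of the intended call pattern (hypothetical caller, assuming a Spring-managed FTPUtil bean and Lombok @Slf4j logging):

    @Autowired
    private FTPUtil ftpUtil;

    public void logFtpTarget() {
        // reuse the configured account and charset rather than re-reading ftp.* properties
        log.info("ftp user={}, encoding={}", ftpUtil.getUserName(), ftpUtil.getEncoding());
    }

PHDFileUtil.readLine below is the first in-repo consumer of getEncoding().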
@@ -3,26 +3,32 @@ package org.jeecg.common.util;
 import com.baomidou.mybatisplus.core.toolkit.CollectionUtils;
 import com.baomidou.mybatisplus.core.toolkit.StringPool;
 import com.baomidou.mybatisplus.core.toolkit.StringUtils;
-import org.jeecg.common.api.vo.Result;
-import org.jeecg.modules.entity.vo.HistogramData;
-import org.jeecg.modules.entity.vo.SeriseData;
-import org.jeecg.modules.entity.vo.SpectrumData;
-import org.jeecg.modules.entity.vo.XeData;
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.net.ftp.FTPClient;
+import org.apache.commons.net.ftp.FTPFile;
+import org.jeecg.modules.entity.GardsXeResults;
+import org.jeecg.modules.entity.vo.*;
 import org.jeecg.modules.native_jni.EnergySpectrumHandler;
+import org.jeecg.modules.native_jni.struct.BgBoundary;
+import org.jeecg.modules.native_jni.struct.CalcBgBoundaryParam;
 import org.jeecg.modules.native_jni.struct.EnergySpectrumStruct;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Value;
 import org.springframework.stereotype.Component;
 
-import java.text.DateFormat;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
 import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.time.LocalDateTime;
-import java.time.format.DateTimeFormatter;
 import java.util.*;
 
 @Component
 public class PHDFileUtil {
 
-    public static Map<String, Object> getSourceData(String filePath){
+    @Autowired
+    private FTPUtil ftpUtil;
+
+    public Map<String, Object> getSourceData(String filePath, String fileName, List<GardsXeResults> xeDataList){
         //加载dll工具库
         System.loadLibrary("ReadPHDFile");
         EnergySpectrumStruct struct = EnergySpectrumHandler.getSourceData(filePath);
@@ -37,13 +43,25 @@ public class PHDFileUtil {
         //Data Type
         String dataType = struct.data_type;
         //Collection Start
-        Date CollectionStart = DateUtils.parseDate(struct.collection_start_date.replace(StringPool.SLASH,StringPool.DASH) + StringPool.SPACE + struct.collection_start_time.substring(0, struct.collection_start_time.indexOf(StringPool.DOT)), "yyyy-MM-dd HH:mm:ss");
+        Date CollectionStart = null;
+        if ( StringUtils.isNotBlank(struct.collection_start_date) && StringUtils.isNotBlank(struct.collection_start_time) ){
+            CollectionStart = DateUtils.parseDate(struct.collection_start_date.replace(StringPool.SLASH,StringPool.DASH) + StringPool.SPACE + struct.collection_start_time.substring(0, struct.collection_start_time.indexOf(StringPool.DOT)), "yyyy-MM-dd HH:mm:ss");
+        }
         //Collection Stop
-        Date CollectionStop = DateUtils.parseDate(struct.collection_stop_date.replace(StringPool.SLASH,StringPool.DASH) + StringPool.SPACE + struct.collection_stop_time.substring(0, struct.collection_stop_time.indexOf(StringPool.DOT)), "yyyy-MM-dd HH:mm:ss");
+        Date CollectionStop = null;
+        if ( StringUtils.isNotBlank(struct.collection_stop_date) && StringUtils.isNotBlank(struct.collection_stop_time) ){
+            CollectionStop = DateUtils.parseDate(struct.collection_stop_date.replace(StringPool.SLASH,StringPool.DASH) + StringPool.SPACE + struct.collection_stop_time.substring(0, struct.collection_stop_time.indexOf(StringPool.DOT)), "yyyy-MM-dd HH:mm:ss");
+        }
         //Collection Time
-        String CollectionTime = String.format ("%.2f",Double.valueOf(CollectionStop.getTime()/1000 - CollectionStart.getTime()/ 1000));
+        String CollectionTime = "";
+        if ( Objects.nonNull(CollectionStart) && Objects.nonNull(CollectionStop) ){
+            CollectionTime = String.format ("%.2f",Double.valueOf(CollectionStop.getTime()/1000 - CollectionStart.getTime()/ 1000));
+        }
         //Acquisition Start
-        Date AcquisitionStart = DateUtils.parseDate(struct.acquisition_start_date.replace(StringPool.SLASH,StringPool.DASH) + StringPool.SPACE + struct.acquisition_start_time.substring(0, struct.acquisition_start_time.indexOf(StringPool.DOT)), "yyyy-MM-dd HH:mm:ss");
+        Date AcquisitionStart = null;
+        if ( StringUtils.isNotBlank(struct.collection_start_date) && StringUtils.isNotBlank(struct.collection_start_time) ){
+            AcquisitionStart = DateUtils.parseDate(struct.acquisition_start_date.replace(StringPool.SLASH,StringPool.DASH) + StringPool.SPACE + struct.acquisition_start_time.substring(0, struct.acquisition_start_time.indexOf(StringPool.DOT)), "yyyy-MM-dd HH:mm:ss");
+        }
         //Acq Real Time
         double AcquisitionRealTime = struct.acquisition_real_time;
         //Acq live Time
@@ -52,6 +70,14 @@ public class PHDFileUtil {
         double airVolume = struct.air_volume;
         //Xe Volume[m3]
         double xeVolume = struct.sample_volume_of_Xe;
+        //xeCollectionYield
+        double xeCollectionYield = struct.Xe_collection_yield;
+        //gasBkMeasurementId
+        String gasBkMeasurementId = struct.gas_bk_measurement_id;
+        //detectorBkMeasurementId
+        String detectorBkMeasurementId = struct.detector_bk_measurement_id;
+        //measurementId
+        String measurementId = struct.measurement_id;
         spectrumData.setStationCode(stationCode);
         spectrumData.setDetectorCode(detectorCode);
         spectrumData.setDataType(dataType);
@@ -63,8 +89,11 @@ public class PHDFileUtil {
         spectrumData.setAcquisitionLiveTime(AcquisitionLiveTime);
         spectrumData.setAirVolume(airVolume);
         spectrumData.setXeVolume(xeVolume);
+        spectrumData.setYield(xeCollectionYield);
+        spectrumData.setGasBkMeasurementId(gasBkMeasurementId);
+        spectrumData.setDetectorBkMeasurementId(detectorBkMeasurementId);
+        spectrumData.setMeasurementId(measurementId);
         map.put("spectrumData", spectrumData);
 
         //统计散点图
         //横坐标 beta-gamma
         long bChannels = struct.b_channels;
@@ -73,6 +102,7 @@ public class PHDFileUtil {
         //值
         List<Long> hCounts = struct.h_counts;
         List<HistogramData> histogramDataList = new LinkedList<>();
+        List<HistogramData> histogramDataDList = new LinkedList<>();
         for (int i=0; i<bChannels; i++){
             //按照大小切割数组
             List<Long> list = null;
@@ -90,13 +120,19 @@ public class PHDFileUtil {
                     his.setG(j);
                     his.setC(count);
                     histogramDataList.add(his);
+                    histogramDataDList.add(his);
+                }else {
+                    HistogramData his = new HistogramData();
+                    his.setB(i);
+                    his.setG(j);
+                    his.setC(count);
+                    histogramDataDList.add(his);
                 }
 
             }
         }
         }
         map.put("histogramDataList", histogramDataList);
+        map.put("histogramDataDList", histogramDataDList);
         //Gamma Spectrum Original
         long numGChannel = struct.num_g_channel;
         List<Long> gCounts = struct.g_counts;
@@ -109,7 +145,6 @@ public class PHDFileUtil {
             gammaOriginalSeriseData.add(seriseData);
         }
         map.put("gammaOriginalData", gammaOriginalSeriseData);
-
         //Gamma Spectrum Projected
         List<Long> gammaProjectedData = new LinkedList<>();
         for (int i=0; i<gChannels; i++) {
|
||||||
gammaProjectedSeriseData.add(seriseData);
|
gammaProjectedSeriseData.add(seriseData);
|
||||||
}
|
}
|
||||||
map.put("gammaProjectedData", gammaProjectedSeriseData);
|
map.put("gammaProjectedData", gammaProjectedSeriseData);
|
||||||
|
|
||||||
//Gamma Energy
|
//Gamma Energy
|
||||||
List<List<Double>> gammaEnergyList = new LinkedList<>();
|
List<List<Double>> gammaEnergyList = new LinkedList<>();
|
||||||
List<Double> gCentroidChannel = struct.g_centroid_channel;
|
List<Double> gCentroidChannel = struct.g_centroid_channel;
|
||||||
List<Double> gEnergy = struct.g_energy;
|
List<Double> gEnergy = struct.g_energy;
|
||||||
List<Double> gammaParam = EnergySpectrumHandler.GetFileFittingPara(gCentroidChannel, gEnergy);
|
List<Double> gammaParam = EnergySpectrumHandler.GetFileFittingPara(gEnergy, gCentroidChannel);
|
||||||
List<Double> gchannels = new ArrayList<>();
|
List<Double> gchannels = new ArrayList<>();
|
||||||
for (int i=0; i<numGChannel; i++){
|
for (int i=0; i<numGChannel; i++){
|
||||||
gchannels.clear();
|
gchannels.clear();
|
||||||
|
@@ -141,8 +175,6 @@ public class PHDFileUtil {
             gammaEnergyList.add(gammaEnergy);
         }
         map.put("gammaEnergyData", gammaEnergyList);
-
-
         //Beta Spectrum Original
         long numBChannel = struct.num_b_channel;
         List<Long> bCounts = struct.b_counts;
@@ -155,7 +187,6 @@ public class PHDFileUtil {
             betaOriginalSeriseData.add(seriseData);
         }
         map.put("betaOriginalData", betaOriginalSeriseData);
-
         //Beta Spectrum Projected
         List<Long> betaProjectedData = new LinkedList<>();
         for (int j=0; j<bChannels; ++j) {
@@ -178,7 +209,7 @@ public class PHDFileUtil {
         List<List<Double>> betaEnergyList = new LinkedList<>();
         List<Double> bChannel = struct.b_channel;
         List<Double> bElectronEnergy = struct.b_electron_energy;
-        List<Double> betaParam = EnergySpectrumHandler.GetFileFittingPara(bChannel, bElectronEnergy);
+        List<Double> betaParam = EnergySpectrumHandler.GetFileFittingPara(bElectronEnergy, bChannel);
         List<Double> bchannels = new ArrayList<>();
         for (int i=0; i<numGChannel; i++){
             bchannels.clear();
@@ -187,22 +218,35 @@ public class PHDFileUtil {
             betaEnergyList.add(betaEnergy);
         }
         map.put("betaEnergyData", betaEnergyList);
 
         //Xe
-        List<XeData> xeDataList = new LinkedList<>();
-        List<String> bgNuclideName = struct.bg_nuclide_name;
-        List<String> bgRoiNumber = struct.bg_ROI_number;
-        List<Double> bgEfficiency = struct.bg_efficiency;
-        List<Double> bgUncertainty = struct.bg_uncertainty;
-        for (int i=0; i< bgNuclideName.size(); i++){
-            XeData xeData = new XeData();
-            xeData.setIsotope(bgNuclideName.get(i));
-            xeData.setConcentration(bgRoiNumber.get(i));
-            xeData.setUncertainty(bgUncertainty.get(i));
-            xeData.setMDC(bgEfficiency.get(i));
-            xeDataList.add(xeData);
-        }
         map.put("XeData", xeDataList);
+        //计算边界值
+        CalcBgBoundaryParam calcBgBoundaryParam = new CalcBgBoundaryParam();
+        calcBgBoundaryParam.g_e_cal = gammaParam;
+        calcBgBoundaryParam.b_e_cal = betaParam;
+        calcBgBoundaryParam.b_energy = struct.b_electron_energy;
+        calcBgBoundaryParam.b_channel = struct.b_channel;
+        calcBgBoundaryParam.g_channel = struct.g_centroid_channel;
+        calcBgBoundaryParam.g_energy = struct.g_energy;
+        calcBgBoundaryParam.ROI_B_start_x1 = struct.POI_B_x1;
+        calcBgBoundaryParam.ROI_B_stop_x2 = struct.POI_B_x2;
+        calcBgBoundaryParam.ROI_G_start_y1 = struct.POI_G_y1;
+        calcBgBoundaryParam.ROI_G_stop_y2 = struct.POI_G_y2;
+        BgBoundary bgBoundary = EnergySpectrumHandler.CalcBgBoundary(calcBgBoundaryParam);
+        List<Boundary> boundaryList = new LinkedList<>();
+        List<Integer> roiBBoundaryStart = bgBoundary.ROI_B_Boundary_start;
+        List<Integer> roiBBoundaryStop = bgBoundary.ROI_B_Boundary_stop;
+        List<Integer> roiGBoundaryStart = bgBoundary.ROI_G_Boundary_start;
+        List<Integer> roiGBoundaryStop = bgBoundary.ROI_G_Boundary_stop;
+        for (int i=0; i<roiBBoundaryStart.size(); i++){
+            Boundary boundary = new Boundary();
+            boundary.setMinX(roiBBoundaryStart.get(i));
+            boundary.setMaxX(roiBBoundaryStop.get(i));
+            boundary.setMinY(roiGBoundaryStart.get(i));
+            boundary.setMaxY(roiGBoundaryStop.get(i));
+            boundaryList.add(boundary);
+        }
+        map.put("Boundary", boundaryList);
+
         } catch (ParseException e) {
             throw new RuntimeException(e);
@@ -210,4 +254,191 @@ public class PHDFileUtil {
         return map;
     }
 
+    public List<String> readLine(String filePath){
+        //连接ftp
+        FTPClient ftpClient = ftpUtil.LoginFTP();
+        //判断ftp是否连接成功
+        if (Objects.isNull(ftpClient)){
+            throw new RuntimeException("ftp连接失败!");
+        }
+        try {
+            ftpClient.enterLocalPassiveMode();
+            String fileName = filePath.substring(filePath.lastIndexOf(StringPool.SLASH) + 1);
+            String parameterFilePath = filePath.substring(0, filePath.lastIndexOf(StringPool.SLASH));
+            //判断文件路径是否为空
+            if (StringUtils.isNotBlank(parameterFilePath)){
+                //在当前工作路径下读取文件
+                ftpClient.changeWorkingDirectory(parameterFilePath);
+                ftpClient.setFileType(FTPClient.BINARY_FILE_TYPE);
+                // 设置编码,当文件中存在中文且上传后文件乱码时可使用此配置项
+                ftpClient.setControlEncoding(ftpUtil.getEncoding());
+                ftpClient.setFileTransferMode(FTPClient.STREAM_TRANSFER_MODE);
+                List<FTPFile> ftpFiles = Arrays.asList(ftpClient.listFiles());
+                if (CollectionUtils.isNotEmpty(ftpFiles)){
+                    for (FTPFile ftpFile:ftpFiles) {
+                        if (ftpFile.getName().equals(fileName)){
+                            //读取ftp文件的输入流
+                            InputStream iStream=ftpClient.retrieveFileStream(ftpFile.getName());
+                            //声明一个临时文件
+                            File file = File.createTempFile("tmp", null);
+                            //将ftp文件的输入流复制给临时文件
+                            FileUtils.copyInputStreamToFile(iStream, file);
+                            List<String> allLines = FileUtils.readLines(file, ftpUtil.getEncoding());
+                            if (Objects.nonNull(iStream)){
+                                iStream.close();
+                            }
+                            return allLines;
+                        }
+                    }
+                }
+            }
+        } catch (IOException e) {
+            throw new RuntimeException(e);
+        } finally {
+            try {
+                ftpClient.disconnect();
+            } catch (IOException e) {
+                throw new RuntimeException(e);
+            }
+        }
+        return Collections.emptyList();
+    }
+
+    public void getLightColor(Map<String, Object> sampleMap, Map<String, Object> gasBgMap, Map<String, Object> detBgMap, Map<String, Object> qcMap){
+        SpectrumData spectrumData = (SpectrumData)sampleMap.get("spectrumData");
+        SpectrumData gasBgSpectrumData = (SpectrumData)gasBgMap.get("spectrumData");
+        SpectrumData detBgSpectrumData = (SpectrumData)detBgMap.get("spectrumData");
+        //灯颜色
+        Sections sections = new Sections();
+        List<Double> airVolumeSections = sections.getAirVolumeSections();
+        List<Double> collectionTimeSections = sections.getCollectionTimeSections();
+        List<Double> acquisitionTimeSections = sections.getAcquisitionTimeSections();
+        List<Double> xeVolumeSections = sections.getXeVolumeSections();
+        double airVolume = spectrumData.getAirVolume();
+        if (Objects.nonNull(airVolume)){
+            // air volume check
+            if (airVolumeSections.get(0) < airVolume && airVolume <= airVolumeSections.get(1)) { // red
+                sampleMap.put("SampleVolumeBtn", "RedLight");
+                gasBgMap.put("SampleVolumeBtn", "RedLight");
+                detBgMap.put("SampleVolumeBtn", "RedLight");
+                qcMap.put("SampleVolumeBtn", "RedLight");
+            } else if (airVolumeSections.get(1) < airVolume && airVolume <= airVolumeSections.get(2)) { // yellow
+                sampleMap.put("SampleVolumeBtn", "YellowLight");
+                gasBgMap.put("SampleVolumeBtn", "YellowLight");
+                detBgMap.put("SampleVolumeBtn", "YellowLight");
+                qcMap.put("SampleVolumeBtn", "YellowLight");
+            } else if (airVolumeSections.get(2) < airVolume) { // green
+                sampleMap.put("SampleVolumeBtn", "GreenLight");
+                gasBgMap.put("SampleVolumeBtn", "GreenLight");
+                detBgMap.put("SampleVolumeBtn", "GreenLight");
+                qcMap.put("SampleVolumeBtn", "GreenLight");
+            }
+        }
+        String collectionTime = spectrumData.getCollectionTime();
+        if (StringUtils.isNotBlank(collectionTime)){
+            double collection_time = Double.parseDouble(collectionTime);
+            // collection time check
+            if (collectionTimeSections.get(0) < collection_time && collection_time <= collectionTimeSections.get(1)) { // red
+                sampleMap.put("CollectTimeBtn", "RedLight");
+                gasBgMap.put("CollectTimeBtn", "RedLight");
+                detBgMap.put("CollectTimeBtn", "RedLight");
+                qcMap.put("CollectTimeBtn", "RedLight");
+            } else if (collectionTimeSections.get(1) < collection_time && collection_time <= collectionTimeSections.get(2)) { // yellow
+                sampleMap.put("CollectTimeBtn", "YellowLight");
+                gasBgMap.put("CollectTimeBtn", "YellowLight");
+                detBgMap.put("CollectTimeBtn", "YellowLight");
+                qcMap.put("CollectTimeBtn", "YellowLight");
+            } else if (collectionTimeSections.get(2) < collection_time && collection_time <= collectionTimeSections.get(3)) { // green
+                sampleMap.put("CollectTimeBtn", "GreenLight");
+                gasBgMap.put("CollectTimeBtn", "GreenLight");
+                detBgMap.put("CollectTimeBtn", "GreenLight");
+                qcMap.put("CollectTimeBtn", "GreenLight");
+            } else if (collectionTimeSections.get(3) < collection_time && collection_time <= collectionTimeSections.get(4)) { // yellow
+                sampleMap.put("CollectTimeBtn", "YellowLight");
+                gasBgMap.put("CollectTimeBtn", "YellowLight");
+                detBgMap.put("CollectTimeBtn", "YellowLight");
+                qcMap.put("CollectTimeBtn", "YellowLight");
+            } else if (collectionTimeSections.get(4) < collection_time) { // red
+                sampleMap.put("CollectTimeBtn", "RedLight");
+                gasBgMap.put("CollectTimeBtn", "RedLight");
+                detBgMap.put("CollectTimeBtn", "RedLight");
+                qcMap.put("CollectTimeBtn", "RedLight");
+            }
+        }
+        double acquisitionLiveTime = spectrumData.getAcquisitionLiveTime();
+        if (Objects.nonNull(acquisitionLiveTime)){
+            // acquisition time check
+            if (acquisitionTimeSections.get(0) < acquisitionLiveTime && acquisitionLiveTime <= acquisitionTimeSections.get(1)) { // red
+                sampleMap.put("AcqTimeBtn", "RedLight");
+                gasBgMap.put("AcqTimeBtn", "RedLight");
+                detBgMap.put("AcqTimeBtn", "RedLight");
+                qcMap.put("AcqTimeBtn", "RedLight");
+            } else if (acquisitionTimeSections.get(1) < acquisitionLiveTime && acquisitionLiveTime <= acquisitionTimeSections.get(2)) { // yellow
+                sampleMap.put("AcqTimeBtn", "YellowLight");
+                gasBgMap.put("AcqTimeBtn", "YellowLight");
+                detBgMap.put("AcqTimeBtn", "YellowLight");
+                qcMap.put("AcqTimeBtn", "YellowLight");
+            } else if (acquisitionTimeSections.get(2) < acquisitionLiveTime && acquisitionLiveTime <= acquisitionTimeSections.get(3)) { // green
+                sampleMap.put("AcqTimeBtn", "GreenLight");
+                gasBgMap.put("AcqTimeBtn", "GreenLight");
+                detBgMap.put("AcqTimeBtn", "GreenLight");
+                qcMap.put("AcqTimeBtn", "GreenLight");
+            } else if (acquisitionTimeSections.get(3) < acquisitionLiveTime && acquisitionLiveTime <= acquisitionTimeSections.get(4)) { // yellow
+                sampleMap.put("AcqTimeBtn", "YellowLight");
+                gasBgMap.put("AcqTimeBtn", "YellowLight");
+                detBgMap.put("AcqTimeBtn", "YellowLight");
+                qcMap.put("AcqTimeBtn", "YellowLight");
+            } else if (acquisitionTimeSections.get(4) < acquisitionLiveTime) { // red
+                sampleMap.put("AcqTimeBtn", "RedLight");
+                gasBgMap.put("AcqTimeBtn", "RedLight");
+                detBgMap.put("AcqTimeBtn", "RedLight");
+                qcMap.put("AcqTimeBtn", "RedLight");
+            }
+        }
+        double yield = spectrumData.getYield();
+        if (Objects.nonNull(yield)) {
+            if (xeVolumeSections.get(0) < yield && yield <= xeVolumeSections.get(1)) { // red
+                sampleMap.put("XeVolumeBtn", "RedLight");
+                gasBgMap.put("XeVolumeBtn", "RedLight");
+                detBgMap.put("XeVolumeBtn", "RedLight");
+                qcMap.put("XeVolumeBtn", "RedLight");
+            } else if (xeVolumeSections.get(1) < yield && yield <= xeVolumeSections.get(2)) { // yellow
+                sampleMap.put("XeVolumeBtn", "YellowLight");
+                gasBgMap.put("XeVolumeBtn", "YellowLight");
+                detBgMap.put("XeVolumeBtn", "YellowLight");
+                qcMap.put("XeVolumeBtn", "YellowLight");
+            } else if (xeVolumeSections.get(2) < yield) { // green
+                sampleMap.put("XeVolumeBtn", "GreenLight");
+                gasBgMap.put("XeVolumeBtn", "GreenLight");
+                detBgMap.put("XeVolumeBtn", "GreenLight");
+                qcMap.put("XeVolumeBtn", "GreenLight");
+            }
+        }
+
+        if (gasBgSpectrumData.getMeasurementId().equals(spectrumData.getGasBkMeasurementId())){
+            sampleMap.put("GasBgBtn", "GreenLight");
+            gasBgMap.put("GasBgBtn", "GreenLight");
+            detBgMap.put("GasBgBtn", "GreenLight");
+            qcMap.put("GasBgBtn", "GreenLight");
+        }else {
+            sampleMap.put("GasBgBtn", "RedLight");
+            gasBgMap.put("GasBgBtn", "RedLight");
+            detBgMap.put("GasBgBtn", "RedLight");
+            qcMap.put("GasBgBtn", "RedLight");
+        }
+
+        if (detBgSpectrumData.getMeasurementId().equals(spectrumData.getDetectorBkMeasurementId())){
+            sampleMap.put("DetBgBtn", "GreenLight");
+            gasBgMap.put("DetBgBtn", "GreenLight");
+            detBgMap.put("DetBgBtn", "GreenLight");
+            qcMap.put("DetBgBtn", "GreenLight");
+        }else {
+            sampleMap.put("DetBgBtn", "RedLight");
+            gasBgMap.put("DetBgBtn", "RedLight");
+            detBgMap.put("DetBgBtn", "RedLight");
+            qcMap.put("DetBgBtn", "RedLight");
+        }
+
+    }
+
 }
@@ -0,0 +1,41 @@
+package org.jeecg.common.util;
+
+import org.jeecg.modules.service.IUserTaskService;
+import org.jeecg.modules.system.entity.SysUser;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Component;
+
+import java.util.Date;
+import java.util.List;
+import java.util.Objects;
+
+@Component
+public class UserTaskUtil {
+
+    @Autowired
+    private IUserTaskService userTaskService;
+
+    /**
+     * 根据用户名称查询当前用户是否有权限操作当前台站的信息
+     * @return
+     */
+    public boolean checkUserStation(Integer stationId, String userName){
+        boolean flag = false;
+        //根据用户名称查询用户id
+        SysUser user = userTaskService.findUserByName(userName);
+        if (Objects.isNull(user)){
+            return flag;
+        }
+        String userId = user.getId();
+        //声明一个当前日期
+        Date nowDate = new Date();
+        String dateStr = DateUtils.formatDate(nowDate, "yyyy-MM-dd");
+        List<String> stationIds = userTaskService.findUserTaskStations(userId, dateStr);
+        if (stationIds.contains(stationId)){
+            flag = true;
+        }
+        return flag;
+    }
+
+}
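UserTaskUtil.checkUserStation resolves the user by name through the Feign client and then checks whether the stations scheduled for that user today include the given station. A hedged usage sketch (hypothetical caller; whether the username really comes from JwtUtil here is an assumption, although the service layer later in this commit injects both UserTaskUtil and JwtUtil):

    @Autowired
    private UserTaskUtil userTaskUtil;

    // username extracted from the request token is assumed, not shown in this commit
    boolean canOperate = userTaskUtil.checkUserStation(stationId, userName);

One observation on the code as committed: findUserTaskStations returns a List<String> while stationId is an Integer, so stationIds.contains(stationId) compares mismatched types and will not match unless the id is converted to a String first.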
@@ -35,9 +35,6 @@ public class GammaController {
         return spectrumAnalysisService.getDBSpectrumChart(dbName, sampleId);
     }
 
-    public static void main(String[] args) {
-        String filePath = "C:\\Users/qiaoqinzheng/Desktop/核素/AUX09_003-20151224_1855_S_FULL_40184.5.PHD";
-        File file = new File(filePath);
-        System.out.println(file.getParent());
-    }
 }
@@ -8,10 +8,10 @@ import org.jeecg.modules.base.entity.GardsSampleData;
 import org.jeecg.modules.service.ISpectrumAnalysisService;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.format.annotation.DateTimeFormat;
-import org.springframework.web.bind.annotation.GetMapping;
-import org.springframework.web.bind.annotation.RequestMapping;
-import org.springframework.web.bind.annotation.RestController;
+import org.springframework.web.bind.annotation.*;
 
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
 import java.util.Date;
 
 @RestController
@@ -41,4 +41,58 @@ public class SpectrumAnalysesController {
         return spectrumAnalysisService.getDBSpectrumChart(dbName, sampleId);
     }
 
+    @DeleteMapping("deleteDBSpectrumChartData")
+    @ApiOperation(value = "删除折线图缓存数据",notes = "删除折线图缓存数据")
+    public Result deleteDBSpectrumChartData(Integer[] sampleIds){
+        return spectrumAnalysisService.deleteDBSpectrumChartData(sampleIds);
+    }
+
+    @GetMapping("viewComment")
+    @ApiOperation(value = "查看comment数据", notes = "查看comment数据")
+    public Result viewComment(Integer sampleId, HttpServletRequest request){
+        return spectrumAnalysisService.viewComment(sampleId, request);
+    }
+
+    @PostMapping("addComment")
+    @ApiOperation(value = "新增comment", notes = "新增comment")
+    public Result addComment(Integer sampleId, HttpServletRequest request, String comment){
+        return spectrumAnalysisService.addComment(sampleId, request, comment);
+    }
+
+    @GetMapping("viewARR")
+    @ApiOperation(value = "查看ARR报告", notes = "查看ARR报告")
+    public void viewARR(Integer sampleId, HttpServletResponse response){
+        spectrumAnalysisService.viewARR(sampleId, response);
+    }
+
+    @GetMapping("viewRRR")
+    @ApiOperation(value = "查看RRR报告", notes = "查看RRR报告")
+    public void viewRRR(Integer sampleId, HttpServletResponse response){
+        spectrumAnalysisService.viewRRR(sampleId, response);
+    }
+
+    @GetMapping("viewSpectrum")
+    @ApiOperation(value = "查看Spectrum数据", notes = "查看Spectrum数据")
+    public Result viewSpectrum(Integer sampleId){
+        return spectrumAnalysisService.viewSpectrum(sampleId);
+    }
+
+    @GetMapping("viewSampleInformation")
+    @ApiOperation(value = "查看SampleInformation数据", notes = "查看SampleInformation数据")
+    public Result viewSampleInformation(Integer sampleId){
+        return spectrumAnalysisService.viewSampleInformation(sampleId);
+    }
+
+    @GetMapping("viewQCResult")
+    @ApiOperation(value = "查看QC Result数据", notes = "查看QC Result数据")
+    public Result viewQCResult(Integer sampleId){
+        return spectrumAnalysisService.viewQCResult(sampleId);
+    }
+
+    @GetMapping("viewRLR")
+    @ApiOperation(value = "查看RLR数据", notes = "查看RLR数据")
+    public Result viewRLR(Integer sampleId){
+        return spectrumAnalysisService.viewRLR(sampleId);
+    }
+
 }
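All of the new endpoints take the sample id as a plain request parameter; the controller's base @RequestMapping path is defined outside this hunk, so the concrete URL prefix is not shown. A hedged example request against the comment-viewing endpoint (prefix assumed):

    GET {controller-base-path}/viewComment?sampleId=12345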
@@ -10,8 +10,8 @@ import java.io.Serializable;
 import java.util.Date;
 
 @Data
-@TableName("GARDS_ANALYSES")
-public class GardsAnalyses implements Serializable {
+@TableName("RNMAN.GARDS_ANALYSES")
+public class GardsAnalysesMan implements Serializable {
 
     @TableField(value = "IDANALYSIS")
     private Integer IDANALYSIS;
@@ -0,0 +1,45 @@
+package org.jeecg.modules.entity;
+
+
+import com.baomidou.mybatisplus.annotation.TableField;
+import com.baomidou.mybatisplus.annotation.TableName;
+import com.fasterxml.jackson.annotation.JsonFormat;
+import lombok.Data;
+import org.springframework.format.annotation.DateTimeFormat;
+
+import java.util.Date;
+
+@Data
+@TableName(value = "GARDS_XE_RESULTS")
+public class GardsXeResults {
+
+    @TableField(value = "SAMPLE_ID")
+    private Integer sampleId;
+
+    @TableField(value = "IDANALYSIS")
+    private Integer idanalysis;
+
+    @TableField(value = "NUCLIDE_NAME")
+    private String nuclideName;
+
+    @TableField(value = "CONC")
+    private Double conc;
+
+    @TableField(value = "CONC_ERR")
+    private Double concErr;
+
+    @TableField(value = "MDC")
+    private Double mdc;
+
+    @TableField(value = "LC")
+    private Double lc;
+
+    @TableField(value = "NID_FLAG")
+    private Integer nidFlag;
+
+    @TableField(value = "MODDATE")
+    @DateTimeFormat(pattern = "yyyy-MM-dd HH:mm:ss")
+    @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss")
+    private Date moddate;
+
+}
@@ -0,0 +1,18 @@
+package org.jeecg.modules.entity.vo;
+
+import lombok.Data;
+
+import java.io.Serializable;
+
+@Data
+public class Boundary implements Serializable {
+
+    private Integer minX;
+
+    private Integer maxX;
+
+    private Integer minY;
+
+    private Integer maxY;
+
+}
@@ -0,0 +1,14 @@
+package org.jeecg.modules.entity.vo;
+
+import lombok.Data;
+
+import java.io.Serializable;
+
+@Data
+public class CommentData implements Serializable {
+
+    private String analyst;
+
+    private String comment;
+
+}
@@ -0,0 +1,16 @@
+package org.jeecg.modules.entity.vo;
+
+import lombok.Data;
+
+import java.io.Serializable;
+
+@Data
+public class CommentInfo implements Serializable {
+
+    private String spectrumCommentInfo;
+
+    private String spectrumOtherCommentInfo;
+
+    private String spectrumAnalysisCommentInfo;
+
+}
@@ -0,0 +1,38 @@
+package org.jeecg.modules.entity.vo;
+
+import lombok.Data;
+
+import java.io.Serializable;
+
+@Data
+public class QCResult implements Serializable {
+
+    private String collectTimeEvaluationMetrics;
+
+    private String acquisitionTimeEvaluationMetrics;
+
+    private String xenonVolumeEvaluationMetrics;
+
+    private String xe133MDCEvaluationMetrics;
+
+    private String collectTimeValue;
+
+    private String collectTimeStatus;
+
+    private String acquisitionTimeValue;
+
+    private String acquisitionTimeStatus;
+
+    private String xenonVolumeValue;
+
+    private String xenonVolumeStatus;
+
+    private String xe133MDCValue;
+
+    private String xe133MDCStatus;
+
+    private boolean gasBgValueAndStatus;
+
+    private boolean detBgValueAndStatus;
+
+}
@@ -0,0 +1,58 @@
+package org.jeecg.modules.entity.vo;
+
+import lombok.Data;
+
+import java.io.Serializable;
+
+@Data
+public class RlrDataValues implements Serializable {
+
+    private String srid;
+
+    private String colloct_start_date;
+
+    private String colloct_start_time;
+
+    private String colloct_stop_date;
+
+    private String colloct_stop_time;
+
+    private String acq_start_date;
+
+    private String acq_start_time;
+
+    private String acq_live_time;
+
+    private String xe131m_conc;
+
+    private String xe131m_uncert_conc;
+
+    private String xe131m_MDC;
+
+    private String xe131m_LC;
+
+    private String xe133m_conc;
+
+    private String xe133m_uncert_conc;
+
+    private String xe133m_MDC;
+
+    private String xe133m_LC;
+
+    private String xe133_conc;
+
+    private String xe133_uncert_conc;
+
+    private String xe133_MDC;
+
+    private String xe133_LC;
+
+    private String xe135_conc;
+
+    private String xe135_uncert_conc;
+
+    private String xe135_MDC;
+
+    private String xe135_LC;
+
+}
@@ -0,0 +1,46 @@
+package org.jeecg.modules.entity.vo;
+
+import lombok.Data;
+
+import java.io.Serializable;
+import java.util.LinkedList;
+import java.util.List;
+
+@Data
+public class Sections implements Serializable {
+
+    private List<Double> collectionTimeSections;
+
+    private List<Double> acquisitionTimeSections;
+
+    private List<Double> xeVolumeSections;
+
+    private List<Double> airVolumeSections;
+
+    public Sections(){
+        collectionTimeSections = new LinkedList<>();
+        collectionTimeSections.add(0.0);
+        collectionTimeSections.add(6.0);
+        collectionTimeSections.add(10.8);
+        collectionTimeSections.add(13.2);
+        collectionTimeSections.add(24.0);
+
+        acquisitionTimeSections = new LinkedList<>();
+        acquisitionTimeSections.add(0.0);
+        acquisitionTimeSections.add(6.0);
+        acquisitionTimeSections.add(10.8);
+        acquisitionTimeSections.add(13.2);
+        acquisitionTimeSections.add(24.0);
+
+        xeVolumeSections = new LinkedList<>();
+        xeVolumeSections.add(0.0);
+        xeVolumeSections.add(0.2);
+        xeVolumeSections.add(0.87);
+
+        airVolumeSections = new LinkedList<>();
+        airVolumeSections.add(0.0);
+        airVolumeSections.add(2.3);
+        airVolumeSections.add(10.0);
+    }
+
+}
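The Sections defaults above are the threshold bands consumed by PHDFileUtil.getLightColor earlier in this commit: a value in the lowest band maps to a red light, the next band to yellow, and everything above the last threshold (for air volume and xenon yield) to green, while collection and acquisition times use the symmetric red/yellow/green/yellow/red bands around 10.8–13.2. A small worked example under that reading: a collection time of 12.0 falls in the (10.8, 13.2] band, so CollectTimeBtn is set to GreenLight; an air volume of 1.5 falls in the (0.0, 2.3] band, so SampleVolumeBtn is set to RedLight.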
@@ -38,4 +38,12 @@ public class SpectrumData implements Serializable {
 
     private double xeVolume;
 
+    private double yield;
+
+    private String measurementId;
+
+    private String gasBkMeasurementId;
+
+    private String detectorBkMeasurementId;
+
 }
@@ -0,0 +1,7 @@
+package org.jeecg.modules.mapper;
+
+import com.baomidou.mybatisplus.core.mapper.BaseMapper;
+import org.jeecg.modules.entity.GardsAnalysesMan;
+
+public interface GardsAnalysesManMapper extends BaseMapper<GardsAnalysesMan> {
+}
@@ -3,5 +3,5 @@ package org.jeecg.modules.mapper;
 import com.baomidou.mybatisplus.core.mapper.BaseMapper;
 import org.jeecg.modules.system.entity.GardsDetectors;
 
-public interface GardsDetectorsMapper extends BaseMapper<GardsDetectors> {
+public interface GardsDetectorsSpectrumMapper extends BaseMapper<GardsDetectors> {
 }
@@ -2,7 +2,10 @@ package org.jeecg.modules.mapper;
 
 import com.baomidou.mybatisplus.core.metadata.IPage;
 import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
+import org.apache.ibatis.annotations.Param;
 import org.jeecg.modules.base.entity.GardsSampleData;
+import org.jeecg.modules.entity.GardsXeResults;
+import org.jeecg.modules.entity.vo.CommentData;
 import org.jeecg.modules.entity.vo.SpectrumFileRecord;
 import org.springframework.format.annotation.DateTimeFormat;
 
@@ -15,6 +18,18 @@ public interface SpectrumAnalysisMapper {
 
     SpectrumFileRecord getDBSpectrumFilePath(String dbName, Integer sampleId);
 
+    List<GardsXeResults> getXeDataList(Integer sampleId);
+
     String getQCFilePath(String siteDetCode, String collectStartStr);
 
+    CommentData viewComment(@Param(value = "sampleId") Integer sampleId);
+
+    String getSampleFilePath(@Param(value = "sampleId") Integer sampleId);
+
+    Integer findStationIdBySampleId(@Param(value = "sampleId") Integer sampleId);
+
+    String viewARR(@Param(value = "sampleId") Integer sampleId);
+
+    String viewRRR(@Param(value = "sampleId") Integer sampleId);
+
 }
@@ -1,7 +0,0 @@
-package org.jeecg.modules.mapper;
-
-import com.baomidou.mybatisplus.core.mapper.BaseMapper;
-import org.jeecg.modules.entity.SysDictItem;
-
-public interface SysDictItemMapper extends BaseMapper<SysDictItem> {
-}
@@ -3,5 +3,5 @@ package org.jeecg.modules.mapper;
 import com.baomidou.mybatisplus.core.mapper.BaseMapper;
 import org.jeecg.modules.entity.SysDictItem;
 
-public interface SysDictItemMapper extends BaseMapper<SysDictItem> {
+public interface SysDictItemSpectrumMapper extends BaseMapper<SysDictItem> {
 }
@@ -1,7 +0,0 @@
-package org.jeecg.modules.mapper;
-
-import com.baomidou.mybatisplus.core.mapper.BaseMapper;
-import org.jeecg.modules.entity.SysDict;
-
-public interface SysDictMapper extends BaseMapper<SysDict> {
-}
@@ -3,5 +3,5 @@ package org.jeecg.modules.mapper;
 import com.baomidou.mybatisplus.core.mapper.BaseMapper;
 import org.jeecg.modules.entity.SysDict;
 
-public interface SysDictMapper extends BaseMapper<SysDict> {
+public interface SysDictSpectrumMapper extends BaseMapper<SysDict> {
 }
@@ -65,6 +65,10 @@
         ORDER BY c.collect_stop DESC
     </select>
 
+    <select id="getXeDataList" resultType="org.jeecg.modules.entity.GardsXeResults">
+        SELECT SAMPLE_ID,NUCLIDE_NAME,CONC,CONC_ERR,MDC,LC FROM RNAUTO.GARDS_XE_RESULTS where SAMPLE_ID = #{sampleId}
+    </select>
+
     <select id="getDBSpectrumFilePath" resultType="org.jeecg.modules.entity.vo.SpectrumFileRecord">
         SELECT
             org_sample.SAMPLE_ID sampleId,
@@ -96,4 +100,24 @@
         </where>
     </select>
 
+    <select id="viewComment" resultType="org.jeecg.modules.entity.vo.CommentData">
+        SELECT A.ANALYST, A.COMMENTS FROM RNMAN.GARDS_ANALYSES A WHERE A.SAMPLE_ID= #{sampleId}
+    </select>
+
+    <select id="getSampleFilePath" resultType="java.lang.String">
+        SELECT INPUT_FILE_NAME FROM ORIGINAL.GARDS_SAMPLE_DATA where SAMPLE_ID = #{sampleId}
+    </select>
+
+    <select id="findStationIdBySampleId" resultType="java.lang.Integer">
+        SELECT STATION_ID FROM ORIGINAL.GARDS_SAMPLE_DATA where SAMPLE_ID = #{sampleId}
+    </select>
+
+    <select id="viewARR" resultType="java.lang.String">
+        SELECT REPORT_PAHT FROM RNAUTO.GARDS_ANALYSES where SAMPLE_ID = #{sampleId}
+    </select>
+
+    <select id="viewRRR" resultType="java.lang.String">
+        SELECT REPORT_PAHT FROM RNMAN.GARDS_ANALYSES where SAMPLE_ID = #{sampleId}
+    </select>
+
 </mapper>
@@ -32,7 +32,7 @@ public class EnergySpectrumHandler {
      * @param g_energy
      * @return
      */
-    public static native List<Double> GetFileFittingPara(List<Double> centroid_channel, List<Double> g_energy);
+    public static native List<Double> GetFileFittingPara(List<Double> g_energy, List<Double> centroid_channel);
 
     /**
      * 根据channel 获取 Energy
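The native GetFileFittingPara signature now takes the energy list first and the channel list second; the PHDFileUtil call sites earlier in this commit were flipped to match (GetFileFittingPara(gEnergy, gCentroidChannel) and GetFileFittingPara(bElectronEnergy, bChannel)). A minimal sketch of the updated call order, with illustrative variable names only:

    // assumes the native library was already loaded via System.loadLibrary("ReadPHDFile")
    List<Double> energy  = struct.g_energy;            // calibration energies
    List<Double> channel = struct.g_centroid_channel;  // matching centroid channels
    List<Double> fittingParams = EnergySpectrumHandler.GetFileFittingPara(energy, channel);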
@@ -0,0 +1,89 @@
+package org.jeecg.modules.native_jni.struct;
+
+import java.util.List;
+
+public class AllGenerate {
+
+    //BgGasGenerate BgGas;
+    private List<Integer> g_ROI_B_Boundary_start;
+    private List<Integer> g_ROI_B_Boundary_stop;
+    private List<Integer> g_ROI_G_Boundary_start;
+    private List<Integer> g_ROI_G_Boundary_stop;
+    private List<Double> g_roi_cts;          //气体本底谱感兴趣区计数
+    private List<Double> g_deduct_d_cts;     //气体本底谱扣除探测器本底谱数据
+    private int g_b_fitting_type;
+    private int g_g_fitting_type;
+    private List<Double> g_b_fitting_e_c;
+    private List<Double> g_g_fitting_e_c;
+    private List<Double> g_b_fitting_c_e;
+    private List<Double> g_g_fitting_c_e;
+
+    //BgSampleGenerate BgSample;
+    // private BgBoundary s_boungdary;
+    private List<Integer> s_ROI_B_Boundary_start;
+    private List<Integer> s_ROI_B_Boundary_stop;
+    private List<Integer> s_ROI_G_Boundary_start;
+    private List<Integer> s_ROI_G_Boundary_stop;
+    private List<Double> s_roi_cts;          //样品普感兴趣区计数
+    private List<Double> s_deduct_d_cts;     //样品谱扣除探测器本底谱数据
+    private int s_b_fitting_type;
+    private int s_g_fitting_type;
+    private List<Double> s_b_fitting_e_c;
+    private List<Double> s_g_fitting_e_c;
+    private List<Double> s_b_fitting_c_e;
+    private List<Double> s_g_fitting_c_e;
+    private String s_collection_time;        //采集时间
+
+    //BgOtherGenerate BgOther;
+    private List<Double> ROI_net_coutns;     //感兴趣区净计数
+    private List<Double> ROI_net_err;
+    private List<Double> ROI_con_uncer;      //感兴趣区浓度和不确定度 [n..0]浓度 [n..1]不确定度
+    private List<Double> ROI_con_counts_factor; //感兴趣区浓度计数系数 [n..0]系数
+    //enum XeType{both,_131m,_133m,none};
+    private int XeType;
+    private double LC_Xe135;     //LC XE135
+    private double LC_Xe131m;    //LC XE131m
+    private double LC_Xe133m;    //LC XE133m
+    private double LC_Xe133;     //LC XE133
+    private List<Double> LC;
+    private List<Double> LC_CTS;
+    private double MDC_Xe135;    //MDC XE135
+    private double MDC_Xe131m;   //MDC XE131m
+    private double MDC_Xe133m;   //MDC XE133m
+    private double MDC_Xe133;    //MDC XE133
+    private List<Double> MDC;
+    private List<Double> MDC_CTS;
+    private double Xe135_con;    //135不浓度
+    private double Xe135_uncer;  //135不确定度
+    private double Xe131m_con;
+    private double Xe131m_uncer;
+    private double Xe133m_con;
+    private double Xe133m_uncer;
+    private double Xe133_con;
+    private double Xe133_uncer;
+    private List<Integer> ROI_B_Boundary_start;
+    private List<Integer> ROI_B_Boundary_stop;
+    private List<Integer> ROI_G_Boundary_start;
+    private List<Integer> ROI_G_Boundary_stop;
+    private List<Double> d_roi_cts;  //探测器本底谱感兴趣区计数
+    // 拟合后值
+    private int b_fitting_type;
+    private int g_fitting_type;
+    private List<Double> b_fitting_e_c;
+    private List<Double> g_fitting_e_c;
+    private List<Double> b_fitting_c_e;
+    private List<Double> g_fitting_c_e;
+
+    //BgDetbgrGenerate BgDetbgr;
+    private List<Integer> d_ROI_B_Boundary_start;
+    private List<Integer> d_ROI_B_Boundary_stop;
+    private List<Integer> d_ROI_G_Boundary_start;
+    private List<Integer> d_ROI_G_Boundary_stop;
+    private List<Double> d_d_roi_cts;    //探测器本底谱感兴趣区计数
+    private int d_b_fitting_type;
+    private int d_g_fitting_type;
+    private List<Double> d_b_fitting_e_c;
+    private List<Double> d_g_fitting_e_c;
+    private List<Double> d_b_fitting_c_e;
+    private List<Double> d_g_fitting_c_e;
+
+}
@@ -4,6 +4,8 @@ import org.jeecg.common.api.QueryRequest;
 import org.jeecg.common.api.vo.Result;
 import org.jeecg.modules.base.entity.GardsSampleData;
 
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
 import java.util.Date;
 
 public interface ISpectrumAnalysisService {
@@ -14,4 +16,22 @@ public interface ISpectrumAnalysisService {
 
     Result getDBSpectrumChart(String dbName, Integer sampleId);
 
+    Result deleteDBSpectrumChartData(Integer[] sampleIds);
+
+    Result viewComment(Integer sampleId, HttpServletRequest request);
+
+    Result addComment(Integer sampleId, HttpServletRequest request, String comment);
+
+    void viewARR(Integer sampleId, HttpServletResponse response);
+
+    void viewRRR(Integer sampleId, HttpServletResponse response);
+
+    Result viewSpectrum(Integer sampleId);
+
+    Result viewSampleInformation(Integer sampleId);
+
+    Result viewQCResult(Integer sampleId);
+
+    Result viewRLR(Integer sampleId);
+
 }
@@ -5,7 +5,7 @@ import org.jeecg.modules.entity.SysDict;
 
 import java.util.List;
 
-public interface ISysDictService extends IService<SysDict> {
+public interface ISysDictSpectrumService extends IService<SysDict> {
 
     List<String> findStationType(List<String> menuTypeList);
@@ -0,0 +1,21 @@
+package org.jeecg.modules.service;
+
+import org.jeecg.modules.system.entity.SysUser;
+import org.springframework.cloud.openfeign.FeignClient;
+import org.springframework.stereotype.Component;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.RequestParam;
+
+import java.util.List;
+
+@Component
+@FeignClient("jeecg-system")
+public interface IUserTaskService {
+
+    @RequestMapping("/sys/user/findUserByName")
+    SysUser findUserByName(@RequestParam String userName);
+
+    @RequestMapping("/sysTask/findUserTaskStations")
+    List<String> findUserTaskStations(@RequestParam String userId, @RequestParam String nowDate);
+
+}
@@ -1,5 +1,6 @@
 package org.jeecg.modules.service.impl;
 
+import cn.hutool.core.util.ObjectUtil;
 import com.baomidou.dynamic.datasource.annotation.DS;
 import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
 import com.baomidou.mybatisplus.core.toolkit.CollectionUtils;
@@ -11,25 +12,32 @@ import org.apache.commons.net.ftp.FTPClient;
 import org.apache.commons.net.ftp.FTPFile;
 import org.jeecg.common.api.QueryRequest;
 import org.jeecg.common.api.vo.Result;
-import org.jeecg.common.util.DateUtils;
-import org.jeecg.common.util.FTPUtil;
-import org.jeecg.common.util.PHDFileUtil;
-import org.jeecg.common.util.RedisUtil;
+import org.jeecg.common.properties.SpectrumPathProperties;
+import org.jeecg.common.system.util.JwtUtil;
+import org.jeecg.common.util.*;
 import org.jeecg.modules.base.entity.GardsSampleData;
-import org.jeecg.modules.entity.vo.SpectrumFileRecord;
-import org.jeecg.modules.mapper.GardsDetectorsMapper;
+import org.jeecg.modules.entity.GardsAnalysesMan;
+import org.jeecg.modules.entity.GardsXeResults;
+import org.jeecg.modules.entity.vo.*;
+import org.jeecg.modules.mapper.GardsAnalysesManMapper;
+import org.jeecg.modules.mapper.GardsDetectorsSpectrumMapper;
 import org.jeecg.modules.mapper.SpectrumAnalysisMapper;
+import org.jeecg.modules.native_jni.EnergySpectrumHandler;
+import org.jeecg.modules.native_jni.struct.EnergySpectrumStruct;
 import org.jeecg.modules.service.ISpectrumAnalysisService;
-import org.jeecg.modules.service.ISysDictService;
+import org.jeecg.modules.service.ISysDictSpectrumService;
 import org.jeecg.modules.system.entity.GardsDetectors;
 import org.jeecg.modules.system.entity.GardsStations;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.stereotype.Service;
 
+import javax.servlet.ServletOutputStream;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
 import java.io.File;
 import java.io.IOException;
 import java.io.InputStream;
-import java.nio.charset.StandardCharsets;
+import java.text.ParseException;
 import java.util.*;
 import java.util.stream.Collectors;
@ -42,11 +50,19 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService {
|
||||||
@Autowired
|
@Autowired
|
||||||
private RedisUtil redisUtil;
|
private RedisUtil redisUtil;
|
||||||
@Autowired
|
@Autowired
|
||||||
private ISysDictService sysDictService;
|
private ISysDictSpectrumService sysDictService;
|
||||||
@Autowired
|
@Autowired
|
||||||
private GardsDetectorsMapper gardsDetectorsMapper;
|
private GardsDetectorsSpectrumMapper gardsDetectorsSpectrumMapper;
|
||||||
@Autowired
|
@Autowired
|
||||||
private FTPUtil ftpUtil;
|
private FTPUtil ftpUtil;
|
||||||
|
@Autowired
|
||||||
|
private SpectrumPathProperties spectrumPathProperties;
|
||||||
|
@Autowired
|
||||||
|
private UserTaskUtil userTaskUtil;
|
||||||
|
@Autowired
|
||||||
|
private GardsAnalysesManMapper gardsAnalysesManMapper;
|
||||||
|
@Autowired
|
||||||
|
private PHDFileUtil phdFileUtil;
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public Result getDBSearchList(String[] menuTypes) {
|
public Result getDBSearchList(String[] menuTypes) {
|
||||||
|
@ -76,7 +92,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService {
|
||||||
LambdaQueryWrapper<GardsDetectors> queryWrapper = new LambdaQueryWrapper<>();
|
LambdaQueryWrapper<GardsDetectors> queryWrapper = new LambdaQueryWrapper<>();
|
||||||
queryWrapper.in(GardsDetectors::getStationId, stationIds);
|
queryWrapper.in(GardsDetectors::getStationId, stationIds);
|
||||||
queryWrapper.orderByAsc(GardsDetectors::getDetectorId);
|
queryWrapper.orderByAsc(GardsDetectors::getDetectorId);
|
||||||
List<GardsDetectors> gardsDetectors = gardsDetectorsMapper.selectList(queryWrapper);
|
List<GardsDetectors> gardsDetectors = gardsDetectorsSpectrumMapper.selectList(queryWrapper);
|
||||||
List<String> detectorCodes = gardsDetectors.stream().map(GardsDetectors::getDetectorCode).collect(Collectors.toList());
|
List<String> detectorCodes = gardsDetectors.stream().map(GardsDetectors::getDetectorCode).collect(Collectors.toList());
|
||||||
map.put("stationCode", stationCodes);
|
map.put("stationCode", stationCodes);
|
||||||
map.put("detectorCode", detectorCodes);
|
map.put("detectorCode", detectorCodes);
|
||||||
|
@ -137,12 +153,6 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService {
|
||||||
public Result getDBSpectrumChart(String dbName, Integer sampleId) {
|
public Result getDBSpectrumChart(String dbName, Integer sampleId) {
|
||||||
Result result = new Result();
|
Result result = new Result();
|
||||||
Map<String, Map<String, Object>> resultMap = new HashMap<>();
|
Map<String, Map<String, Object>> resultMap = new HashMap<>();
|
||||||
if (redisUtil.hasKey("Spectrum_"+sampleId)){
|
|
||||||
resultMap = (Map<String, Map<String, Object>>) redisUtil.get("Spectrum_" + sampleId);
|
|
||||||
result.setSuccess(true);
|
|
||||||
result.setResult(resultMap);
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
if (Objects.isNull(sampleId)){
|
if (Objects.isNull(sampleId)){
|
||||||
result.error500("请选择一条数据");
|
result.error500("请选择一条数据");
|
||||||
return result;
|
return result;
|
||||||
|
@ -157,46 +167,51 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService {
|
||||||
}
|
}
|
||||||
//查询数据库文件信息
|
//查询数据库文件信息
|
||||||
SpectrumFileRecord dbSpectrumFilePath = spectrumAnalysisMapper.getDBSpectrumFilePath(dbName, sampleId);
|
SpectrumFileRecord dbSpectrumFilePath = spectrumAnalysisMapper.getDBSpectrumFilePath(dbName, sampleId);
|
||||||
|
//查询数据库中结果的Xe数据
|
||||||
|
List<GardsXeResults> xeDataList = spectrumAnalysisMapper.getXeDataList(sampleId);
|
||||||
if (Objects.nonNull(dbSpectrumFilePath)) {
|
if (Objects.nonNull(dbSpectrumFilePath)) {
|
||||||
|
Map<String, Object> sampleMap = new HashMap<>();
|
||||||
|
Map<String, Object> gasBgMap = new HashMap<>();
|
||||||
|
Map<String, Object> detBgMap = new HashMap<>();
|
||||||
|
Map<String, Object> qcMap = new HashMap<>();
|
||||||
if(StringUtils.isNotBlank(dbSpectrumFilePath.getSampleFilePath())){
|
if(StringUtils.isNotBlank(dbSpectrumFilePath.getSampleFilePath())){
|
||||||
String sampleFilePath = dbSpectrumFilePath.getSampleFilePath();
|
String sampleFilePath = dbSpectrumFilePath.getSampleFilePath();
|
||||||
String pathName = sampleFilePath.substring(0, sampleFilePath.lastIndexOf(StringPool.SLASH));
|
String pathName = sampleFilePath.substring(0, sampleFilePath.lastIndexOf(StringPool.SLASH));
|
||||||
String fileName = sampleFilePath.substring(sampleFilePath.lastIndexOf(StringPool.SLASH)+1);
|
String fileName = sampleFilePath.substring(sampleFilePath.lastIndexOf(StringPool.SLASH)+1);
|
||||||
Map<String, Object> map = this.fenxi(pathName, fileName);
|
sampleMap = this.fenxi(pathName, fileName, xeDataList);
|
||||||
resultMap.put("sample",map);
|
resultMap.put("sample",sampleMap);
|
||||||
}
|
}
|
||||||
if(StringUtils.isNotBlank(dbSpectrumFilePath.getGasBgFilePath())){
|
if(StringUtils.isNotBlank(dbSpectrumFilePath.getGasBgFilePath())){
|
||||||
String gasBgFilePath = dbSpectrumFilePath.getGasBgFilePath();
|
String gasBgFilePath = dbSpectrumFilePath.getGasBgFilePath();
|
||||||
String pathName = gasBgFilePath.substring(0, gasBgFilePath.lastIndexOf(StringPool.SLASH));
|
String pathName = gasBgFilePath.substring(0, gasBgFilePath.lastIndexOf(StringPool.SLASH));
|
||||||
String fileName = gasBgFilePath.substring(gasBgFilePath.lastIndexOf(StringPool.SLASH)+1);
|
String fileName = gasBgFilePath.substring(gasBgFilePath.lastIndexOf(StringPool.SLASH)+1);
|
||||||
Map<String, Object> map = this.fenxi(pathName, fileName);
|
gasBgMap = this.fenxi(pathName, fileName, xeDataList);
|
||||||
resultMap.put("gasBg",map);
|
resultMap.put("gasBg",gasBgMap);
|
||||||
}
|
}
|
||||||
if(StringUtils.isNotBlank(dbSpectrumFilePath.getDetBgFilePath())){
|
if(StringUtils.isNotBlank(dbSpectrumFilePath.getDetBgFilePath())){
|
||||||
String detBgFilePath = dbSpectrumFilePath.getDetBgFilePath();
|
String detBgFilePath = dbSpectrumFilePath.getDetBgFilePath();
|
||||||
String pathName = detBgFilePath.substring(0, detBgFilePath.lastIndexOf(StringPool.SLASH));
|
String pathName = detBgFilePath.substring(0, detBgFilePath.lastIndexOf(StringPool.SLASH));
|
||||||
String fileName = detBgFilePath.substring(detBgFilePath.lastIndexOf(StringPool.SLASH)+1);
|
String fileName = detBgFilePath.substring(detBgFilePath.lastIndexOf(StringPool.SLASH)+1);
|
||||||
Map<String, Object> map = this.fenxi(pathName, fileName);
|
detBgMap = this.fenxi(pathName, fileName, xeDataList);
|
||||||
resultMap.put("detBg",map);
|
resultMap.put("detBg",detBgMap);
|
||||||
}
|
}
|
||||||
String collectStartStr = DateUtils.formatDate(dbSpectrumFilePath.getCollectStart(), "yyyy-MM-dd HH:mm:ss");
|
String collectStartStr = DateUtils.formatDate(dbSpectrumFilePath.getCollectStart(), "yyyy-MM-dd HH:mm:ss");
|
||||||
String dbQcFilePath = spectrumAnalysisMapper.getQCFilePath(dbSpectrumFilePath.getSiteDetCode(), collectStartStr);
|
String dbQcFilePath = spectrumAnalysisMapper.getQCFilePath(dbSpectrumFilePath.getSiteDetCode(), collectStartStr);
|
||||||
dbSpectrumFilePath.setQcFilePath(dbQcFilePath);
|
dbSpectrumFilePath.setQcFilePath(dbQcFilePath);
|
||||||
if(StringUtils.isNotBlank(dbSpectrumFilePath.getQcFilePath())){
|
if(StringUtils.isNotBlank(dbQcFilePath)){
|
||||||
String qcFilePath = dbSpectrumFilePath.getQcFilePath();
|
String pathName = dbQcFilePath.substring(0, dbQcFilePath.lastIndexOf(StringPool.SLASH));
|
||||||
String pathName = qcFilePath.substring(0, qcFilePath.lastIndexOf(StringPool.SLASH));
|
String fileName = dbQcFilePath.substring(dbQcFilePath.lastIndexOf(StringPool.SLASH)+1);
|
||||||
String fileName = qcFilePath.substring(qcFilePath.lastIndexOf(StringPool.SLASH)+1);
|
qcMap = this.fenxi(pathName, fileName, xeDataList);
|
||||||
Map<String, Object> map = this.fenxi(pathName, fileName);
|
resultMap.put("qc",qcMap);
|
||||||
resultMap.put("qc",map);
|
|
||||||
}
|
}
|
||||||
redisUtil.set("Spectrum_"+sampleId, resultMap);
|
phdFileUtil.getLightColor(sampleMap,gasBgMap,detBgMap,qcMap);
|
||||||
}
|
}
|
||||||
result.setSuccess(true);
|
result.setSuccess(true);
|
||||||
result.setResult(resultMap);
|
result.setResult(resultMap);
|
||||||
return result;
|
return result;
|
||||||
}
|
}
|
||||||
|
|
||||||
private Map<String, Object> fenxi(String pathName, String fileName) {
|
private Map<String, Object> fenxi(String pathName, String fileName, List<GardsXeResults> xeDataList) {
|
||||||
Map<String, Object> map = new HashMap<>();
|
Map<String, Object> map = new HashMap<>();
|
||||||
FTPClient ftpClient = ftpUtil.LoginFTP();
|
FTPClient ftpClient = ftpUtil.LoginFTP();
|
||||||
if (Objects.isNull(ftpClient)){
|
if (Objects.isNull(ftpClient)){
|
||||||
|
@ -209,12 +224,10 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService {
|
||||||
// 设置编码,当文件中存在中文且上传后文件乱码时可使用此配置项
|
// 设置编码,当文件中存在中文且上传后文件乱码时可使用此配置项
|
||||||
ftpClient.setControlEncoding("UTF-8");
|
ftpClient.setControlEncoding("UTF-8");
|
||||||
ftpClient.setFileTransferMode(FTPClient.STREAM_TRANSFER_MODE);
|
ftpClient.setFileTransferMode(FTPClient.STREAM_TRANSFER_MODE);
|
||||||
List<String> paths = Arrays.asList(pathName.split(StringPool.SLASH));
|
pathName=StringPool.SLASH + spectrumPathProperties.getRootPath() + StringPool.SLASH + pathName;
|
||||||
for (String path:paths) {
|
ftpClient.changeWorkingDirectory(pathName);
|
||||||
ftpClient.changeWorkingDirectory(path);
|
|
||||||
}
|
|
||||||
List<FTPFile> ftpFiles = Arrays.asList(ftpClient.listFiles());
|
List<FTPFile> ftpFiles = Arrays.asList(ftpClient.listFiles());
|
||||||
ftpFiles = ftpFiles.stream().filter(item -> item.getName().equals(fileName)).collect(Collectors.toList());
|
ftpFiles=ftpFiles.stream().filter(item -> item.getName().equals(fileName)).collect(Collectors.toList());
|
||||||
FTPFile ftpFile = ftpFiles.get(0);
|
FTPFile ftpFile = ftpFiles.get(0);
|
||||||
if (Objects.nonNull(ftpFile)){
|
if (Objects.nonNull(ftpFile)){
|
||||||
InputStream inputStream = ftpClient.retrieveFileStream(ftpFile.getName());
|
InputStream inputStream = ftpClient.retrieveFileStream(ftpFile.getName());
|
||||||
|
@ -222,7 +235,10 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService {
|
||||||
File file = File.createTempFile("tmp", null);
|
File file = File.createTempFile("tmp", null);
|
||||||
//将ftp文件的输入流复制给临时文件
|
//将ftp文件的输入流复制给临时文件
|
||||||
FileUtils.copyInputStreamToFile(inputStream, file);
|
FileUtils.copyInputStreamToFile(inputStream, file);
|
||||||
map = PHDFileUtil.getSourceData(file.getAbsolutePath());
|
map = phdFileUtil.getSourceData(file.getAbsolutePath(), fileName, xeDataList);
|
||||||
|
if (Objects.nonNull(inputStream)){
|
||||||
|
inputStream.close();
|
||||||
|
}
|
||||||
}
|
}
|
||||||
} catch (IOException e) {
|
} catch (IOException e) {
|
||||||
throw new RuntimeException(e);
|
throw new RuntimeException(e);
|
||||||
|
@ -237,4 +253,658 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService {
|
||||||
}
|
}
|
||||||
return map;
|
return map;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public Result deleteDBSpectrumChartData(Integer[] sampleIds) {
|
||||||
|
Result result = new Result();
|
||||||
|
if (Objects.isNull(sampleIds)){
|
||||||
|
result.error500("删除失败!");
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
for (Integer sampleId:sampleIds) {
|
||||||
|
redisUtil.del("Spectrum_"+sampleId);
|
||||||
|
}
|
||||||
|
result.success("删除成功");
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public Result viewComment(Integer sampleId, HttpServletRequest request) {
|
||||||
|
Result result = new Result();
|
||||||
|
CommentInfo commentInfo = new CommentInfo();
|
||||||
|
String userName = JwtUtil.getUserNameByToken(request);
|
||||||
|
String sampleFilePath = spectrumAnalysisMapper.getSampleFilePath(sampleId);
|
||||||
|
String pathName = sampleFilePath.substring(0, sampleFilePath.lastIndexOf(StringPool.SLASH));
|
||||||
|
String fileName = sampleFilePath.substring(sampleFilePath.lastIndexOf(StringPool.SLASH)+1);
|
||||||
|
FTPClient ftpClient = ftpUtil.LoginFTP();
|
||||||
|
if (Objects.isNull(ftpClient)){
|
||||||
|
throw new RuntimeException("ftp连接失败");
|
||||||
|
}
|
||||||
|
try {
|
||||||
|
//切换被动模式
|
||||||
|
ftpClient.enterLocalPassiveMode();
|
||||||
|
ftpClient.setFileType(FTPClient.BINARY_FILE_TYPE);
|
||||||
|
// 设置编码,当文件中存在中文且上传后文件乱码时可使用此配置项
|
||||||
|
ftpClient.setControlEncoding("UTF-8");
|
||||||
|
ftpClient.setFileTransferMode(FTPClient.STREAM_TRANSFER_MODE);
|
||||||
|
pathName=StringPool.SLASH + spectrumPathProperties.getRootPath() + StringPool.SLASH + pathName;
|
||||||
|
ftpClient.changeWorkingDirectory(pathName);
|
||||||
|
List<FTPFile> ftpFiles = Arrays.asList(ftpClient.listFiles());
|
||||||
|
ftpFiles=ftpFiles.stream().filter(item -> item.getName().equals(fileName)).collect(Collectors.toList());
|
||||||
|
FTPFile ftpFile = ftpFiles.get(0);
|
||||||
|
if (Objects.nonNull(ftpFile)){
|
||||||
|
InputStream inputStream = ftpClient.retrieveFileStream(ftpFile.getName());
|
||||||
|
//声明一个临时文件
|
||||||
|
File file = File.createTempFile("tmp", null);
|
||||||
|
//将ftp文件的输入流复制给临时文件
|
||||||
|
FileUtils.copyInputStreamToFile(inputStream, file);
|
||||||
|
//加载dll工具库
|
||||||
|
System.loadLibrary("ReadPHDFile");
|
||||||
|
EnergySpectrumStruct struct = EnergySpectrumHandler.getSourceData(file.getAbsolutePath());
|
||||||
|
commentInfo.setSpectrumCommentInfo(struct.comment);
|
||||||
|
}
|
||||||
|
} catch (IOException e) {
|
||||||
|
throw new RuntimeException(e);
|
||||||
|
} finally {
|
||||||
|
try {
|
||||||
|
if (Objects.nonNull(ftpClient)){
|
||||||
|
ftpClient.disconnect();
|
||||||
|
}
|
||||||
|
} catch (IOException e) {
|
||||||
|
throw new RuntimeException(e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
CommentData commentData = spectrumAnalysisMapper.viewComment(sampleId);
|
||||||
|
if (!commentData.getAnalyst().equals(userName)){
|
||||||
|
if (StringUtils.isNotBlank(commentData.getComment())){
|
||||||
|
String line = "------------------------------------------------------------";
|
||||||
|
String comment = line+"\n[ User "+commentData.getAnalyst()+" Comment ] : \n"+commentData.getComment()+"\n"+line+" \n";
|
||||||
|
commentInfo.setSpectrumOtherCommentInfo(comment);
|
||||||
|
}
|
||||||
|
}else {
|
||||||
|
commentInfo.setSpectrumAnalysisCommentInfo(commentData.getComment());
|
||||||
|
}
|
||||||
|
result.setSuccess(true);
|
||||||
|
result.setResult(commentInfo);
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public Result addComment(Integer sampleId, HttpServletRequest request, String comment) {
|
||||||
|
Result result = new Result();
|
||||||
|
CommentData commentData = spectrumAnalysisMapper.viewComment(sampleId);
|
||||||
|
String userName = JwtUtil.getUserNameByToken(request);
|
||||||
|
if (StringUtils.isBlank(userName)){
|
||||||
|
result.error500("当前登录用户信息获取失败!");
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
//查询备注信息不为空
|
||||||
|
if (Objects.nonNull(commentData)){
|
||||||
|
//判断查询出的备注信息的操作用户是否是当前登录用户
|
||||||
|
if( userName.equals(commentData.getAnalyst()) ){
|
||||||
|
//如果是 则可以进行修改备注信息的操作
|
||||||
|
LambdaQueryWrapper<GardsAnalysesMan> queryWrapper = new LambdaQueryWrapper<>();
|
||||||
|
queryWrapper.eq(GardsAnalysesMan::getSampleId, sampleId);
|
||||||
|
GardsAnalysesMan analysesMan = gardsAnalysesManMapper.selectOne(queryWrapper);
|
||||||
|
if (Objects.isNull(analysesMan)){
|
||||||
|
result.error500("人工交互分析结果不存在!");
|
||||||
|
return result;
|
||||||
|
}else {
|
||||||
|
analysesMan.setComments(comment);
|
||||||
|
analysesMan.setModdate(new Date());
|
||||||
|
gardsAnalysesManMapper.updateById(analysesMan);
|
||||||
|
result.success("修改成功");
|
||||||
|
}
|
||||||
|
}else {
|
||||||
|
//如果不是 则提示 不可以进行操作
|
||||||
|
result.error500("当前信息操作人员不是当前登录用户,不允许操作!");
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
}else {
|
||||||
|
//根据样品id查询对应的台站id
|
||||||
|
Integer stationId = spectrumAnalysisMapper.findStationIdBySampleId(sampleId);
|
||||||
|
if (Objects.isNull(stationId)){
|
||||||
|
result.error500("当前信息对应的台站信息不存在,请核对当前基础数据!");
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
//检查当前用户是否在当前时间有当前台站的排班任务
|
||||||
|
boolean checkFlag = userTaskUtil.checkUserStation(stationId, userName);
|
||||||
|
//如果有排班任务 则可以新增 否则不可以新增
|
||||||
|
if (checkFlag){
|
||||||
|
GardsAnalysesMan analysesMan = new GardsAnalysesMan();
|
||||||
|
analysesMan.setSampleId(sampleId);
|
||||||
|
analysesMan.setAnalyst(userName);
|
||||||
|
analysesMan.setComments(comment);
|
||||||
|
analysesMan.setModdate(new Date());
|
||||||
|
gardsAnalysesManMapper.insert(analysesMan);
|
||||||
|
result.success("新增成功");
|
||||||
|
}else {
|
||||||
|
result.error500("当前操作人员没有对应台站的排班任务,不允许操作!");
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void viewARR(Integer sampleId, HttpServletResponse response) {
|
||||||
|
//获取自动处理生成的报告地址
|
||||||
|
String reportPath = spectrumAnalysisMapper.viewARR(sampleId);
|
||||||
|
if (StringUtils.isBlank(reportPath)){
|
||||||
|
throw new RuntimeException("自动处理程序生成报告不存在!");
|
||||||
|
}
|
||||||
|
String pathName = reportPath.substring(0, reportPath.lastIndexOf(StringPool.SLASH));
|
||||||
|
String fileName = reportPath.substring(reportPath.lastIndexOf(StringPool.SLASH)+1)+".txt";
|
||||||
|
//连接ftp
|
||||||
|
FTPClient ftpClient = ftpUtil.LoginFTP();
|
||||||
|
if (Objects.isNull(ftpClient)){
|
||||||
|
throw new RuntimeException("ftp连接失败");
|
||||||
|
}
|
||||||
|
try {
|
||||||
|
//切换被动模式
|
||||||
|
ftpClient.enterLocalPassiveMode();
|
||||||
|
ftpClient.setFileType(FTPClient.BINARY_FILE_TYPE);
|
||||||
|
// 设置编码,当文件中存在中文且上传后文件乱码时可使用此配置项
|
||||||
|
ftpClient.setControlEncoding("UTF-8");
|
||||||
|
ftpClient.setFileTransferMode(FTPClient.STREAM_TRANSFER_MODE);
|
||||||
|
pathName=StringPool.SLASH + spectrumPathProperties.getRootPath() + StringPool.SLASH + pathName;
|
||||||
|
ftpClient.changeWorkingDirectory(pathName);
|
||||||
|
List<FTPFile> ftpFiles = Arrays.asList(ftpClient.listFiles());
|
||||||
|
ftpFiles=ftpFiles.stream().filter(item -> item.getName().equals(fileName)).collect(Collectors.toList());
|
||||||
|
FTPFile ftpFile = ftpFiles.get(0);
|
||||||
|
if (Objects.nonNull(ftpFile)){
|
||||||
|
InputStream inputStream = ftpClient.retrieveFileStream(ftpFile.getName());
|
||||||
|
ServletOutputStream outputStream = response.getOutputStream();
|
||||||
|
byte[] buffer = new byte[1024];
|
||||||
|
int bytesRead;
|
||||||
|
// 将文件输出流写入到输出流中
|
||||||
|
while ((bytesRead = inputStream.read(buffer)) != -1) {
|
||||||
|
outputStream.write(buffer, 0, bytesRead);
|
||||||
|
}
|
||||||
|
if (ObjectUtil.isNotNull(inputStream)){
|
||||||
|
inputStream.close();
|
||||||
|
}
|
||||||
|
if (ObjectUtil.isNotNull(outputStream)){
|
||||||
|
outputStream.close();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (IOException e) {
|
||||||
|
throw new RuntimeException(e);
|
||||||
|
} finally {
|
||||||
|
try {
|
||||||
|
if (Objects.nonNull(ftpClient)){
|
||||||
|
ftpClient.disconnect();
|
||||||
|
}
|
||||||
|
} catch (IOException e) {
|
||||||
|
throw new RuntimeException(e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void viewRRR(Integer sampleId, HttpServletResponse response) {
|
||||||
|
//获取自动处理生成的报告地址
|
||||||
|
String reportPath = spectrumAnalysisMapper.viewRRR(sampleId);
|
||||||
|
if (StringUtils.isBlank(reportPath)){
|
||||||
|
throw new RuntimeException("人工交互分析生成报告不存在!");
|
||||||
|
}
|
||||||
|
String pathName = reportPath.substring(0, reportPath.lastIndexOf(StringPool.SLASH));
|
||||||
|
String fileName = reportPath.substring(reportPath.lastIndexOf(StringPool.SLASH)+1)+".txt";
|
||||||
|
//连接ftp
|
||||||
|
FTPClient ftpClient = ftpUtil.LoginFTP();
|
||||||
|
if (Objects.isNull(ftpClient)){
|
||||||
|
throw new RuntimeException("ftp连接失败");
|
||||||
|
}
|
||||||
|
try {
|
||||||
|
//切换被动模式
|
||||||
|
ftpClient.enterLocalPassiveMode();
|
||||||
|
ftpClient.setFileType(FTPClient.BINARY_FILE_TYPE);
|
||||||
|
// 设置编码,当文件中存在中文且上传后文件乱码时可使用此配置项
|
||||||
|
ftpClient.setControlEncoding("UTF-8");
|
||||||
|
ftpClient.setFileTransferMode(FTPClient.STREAM_TRANSFER_MODE);
|
||||||
|
pathName=StringPool.SLASH + spectrumPathProperties.getRootPath() + StringPool.SLASH + pathName;
|
||||||
|
ftpClient.changeWorkingDirectory(pathName);
|
||||||
|
List<FTPFile> ftpFiles = Arrays.asList(ftpClient.listFiles());
|
||||||
|
ftpFiles=ftpFiles.stream().filter(item -> item.getName().equals(fileName)).collect(Collectors.toList());
|
||||||
|
FTPFile ftpFile = ftpFiles.get(0);
|
||||||
|
if (Objects.nonNull(ftpFile)){
|
||||||
|
InputStream inputStream = ftpClient.retrieveFileStream(ftpFile.getName());
|
||||||
|
ServletOutputStream outputStream = response.getOutputStream();
|
||||||
|
byte[] buffer = new byte[1024];
|
||||||
|
int bytesRead;
|
||||||
|
// 将文件输出流写入到输出流中
|
||||||
|
while ((bytesRead = inputStream.read(buffer)) != -1) {
|
||||||
|
outputStream.write(buffer, 0, bytesRead);
|
||||||
|
}
|
||||||
|
if (ObjectUtil.isNotNull(inputStream)){
|
||||||
|
inputStream.close();
|
||||||
|
}
|
||||||
|
if (ObjectUtil.isNotNull(outputStream)){
|
||||||
|
outputStream.close();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (IOException e) {
|
||||||
|
throw new RuntimeException(e);
|
||||||
|
} finally {
|
||||||
|
try {
|
||||||
|
if (Objects.nonNull(ftpClient)){
|
||||||
|
ftpClient.disconnect();
|
||||||
|
}
|
||||||
|
} catch (IOException e) {
|
||||||
|
throw new RuntimeException(e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public Result viewSpectrum(Integer sampleId) {
|
||||||
|
Result result = new Result();
|
||||||
|
Map<String, List<String>> map = new HashMap<>();
|
||||||
|
String dbName = "RNAUTO.GARDS_ANALYSES";
|
||||||
|
SpectrumFileRecord dbSpectrumFilePath = spectrumAnalysisMapper.getDBSpectrumFilePath(dbName, sampleId);
|
||||||
|
if (Objects.nonNull(dbSpectrumFilePath)) {
|
||||||
|
if(StringUtils.isNotBlank(dbSpectrumFilePath.getSampleFilePath())){
|
||||||
|
String sampleFilePath = StringPool.SLASH+ spectrumPathProperties.getRootPath() +StringPool.SLASH+dbSpectrumFilePath.getSampleFilePath();
|
||||||
|
List<String> lines = phdFileUtil.readLine(sampleFilePath);
|
||||||
|
map.put("sample", lines);
|
||||||
|
}
|
||||||
|
if(StringUtils.isNotBlank(dbSpectrumFilePath.getGasBgFilePath())){
|
||||||
|
String gasBgFilePath = StringPool.SLASH+ spectrumPathProperties.getRootPath() +StringPool.SLASH+dbSpectrumFilePath.getGasBgFilePath();
|
||||||
|
List<String> lines = phdFileUtil.readLine(gasBgFilePath);
|
||||||
|
map.put("gasBg", lines);
|
||||||
|
}
|
||||||
|
if(StringUtils.isNotBlank(dbSpectrumFilePath.getDetBgFilePath())){
|
||||||
|
String detBgFilePath = StringPool.SLASH+ spectrumPathProperties.getRootPath() +StringPool.SLASH+dbSpectrumFilePath.getDetBgFilePath();
|
||||||
|
List<String> lines = phdFileUtil.readLine(detBgFilePath);
|
||||||
|
map.put("detBg", lines);
|
||||||
|
}
|
||||||
|
String collectStartStr = DateUtils.formatDate(dbSpectrumFilePath.getCollectStart(), "yyyy-MM-dd HH:mm:ss");
|
||||||
|
String dbQcFilePath = StringPool.SLASH+ spectrumPathProperties.getRootPath() +StringPool.SLASH+spectrumAnalysisMapper.getQCFilePath(dbSpectrumFilePath.getSiteDetCode(), collectStartStr);
|
||||||
|
if(StringUtils.isNotBlank(dbQcFilePath)){
|
||||||
|
List<String> lines = phdFileUtil.readLine(dbQcFilePath);
|
||||||
|
map.put("qc", lines);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
result.setSuccess(true);
|
||||||
|
result.setResult(map);
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public Result viewSampleInformation(Integer sampleId) {
|
||||||
|
Result result = new Result();
|
||||||
|
String sampleFilePath = spectrumAnalysisMapper.getSampleFilePath(sampleId);
|
||||||
|
if (StringUtils.isNotBlank(sampleFilePath)){
|
||||||
|
String pathName = sampleFilePath.substring(0, sampleFilePath.lastIndexOf(StringPool.SLASH));
|
||||||
|
String fileName = sampleFilePath.substring(sampleFilePath.lastIndexOf(StringPool.SLASH)+1);
|
||||||
|
FTPClient ftpClient = ftpUtil.LoginFTP();
|
||||||
|
if (Objects.isNull(ftpClient)){
|
||||||
|
throw new RuntimeException("ftp连接失败");
|
||||||
|
}
|
||||||
|
try {
|
||||||
|
//切换被动模式
|
||||||
|
ftpClient.enterLocalPassiveMode();
|
||||||
|
ftpClient.setFileType(FTPClient.BINARY_FILE_TYPE);
|
||||||
|
// 设置编码,当文件中存在中文且上传后文件乱码时可使用此配置项
|
||||||
|
ftpClient.setControlEncoding("UTF-8");
|
||||||
|
ftpClient.setFileTransferMode(FTPClient.STREAM_TRANSFER_MODE);
|
||||||
|
pathName=StringPool.SLASH + spectrumPathProperties.getRootPath() + StringPool.SLASH + pathName;
|
||||||
|
ftpClient.changeWorkingDirectory(pathName);
|
||||||
|
List<FTPFile> ftpFiles = Arrays.asList(ftpClient.listFiles());
|
||||||
|
ftpFiles=ftpFiles.stream().filter(item -> item.getName().equals(fileName)).collect(Collectors.toList());
|
||||||
|
FTPFile ftpFile = ftpFiles.get(0);
|
||||||
|
if (Objects.nonNull(ftpFile)){
|
||||||
|
InputStream inputStream = ftpClient.retrieveFileStream(ftpFile.getName());
|
||||||
|
//声明一个临时文件
|
||||||
|
File file = File.createTempFile("tmp", null);
|
||||||
|
//将ftp文件的输入流复制给临时文件
|
||||||
|
FileUtils.copyInputStreamToFile(inputStream, file);
|
||||||
|
//加载dll工具库
|
||||||
|
System.loadLibrary("ReadPHDFile");
|
||||||
|
EnergySpectrumStruct struct = EnergySpectrumHandler.getSourceData(file.getAbsolutePath());
|
||||||
|
String measurementID = struct.measurement_id;
|
||||||
|
String bkgdMeasurementID = struct.detector_bk_measurement_id;
|
||||||
|
String gasBkgdMeasurementID = struct.gas_bk_measurement_id;
|
||||||
|
String sampleRefId = struct.sample_ref_id;
|
||||||
|
String sit_det_code = struct.site_code;
|
||||||
|
String detect_code = struct.detector_code;
|
||||||
|
String sample_id = sampleId.toString();
|
||||||
|
Date collectStartDate = DateUtils.parseDate(struct.collection_start_date.replace(StringPool.SLASH, StringPool.DASH) + StringPool.SPACE + struct.collection_start_time.substring(0, struct.collection_start_time.indexOf(StringPool.DOT)), "yyyy-MM-dd HH:mm:ss");
|
||||||
|
Date collectStopDate = DateUtils.parseDate(struct.collection_stop_date.replace(StringPool.SLASH, StringPool.DASH) + StringPool.SPACE + struct.collection_stop_time.substring(0, struct.collection_stop_time.indexOf(StringPool.DOT)), "yyyy-MM-dd HH:mm:ss");
|
||||||
|
String collect_start = DateUtils.formatDate(collectStartDate, "yyyy-MM-dd HH:mm:ss");
|
||||||
|
String collect_stop = DateUtils.formatDate(collectStopDate, "yyyy-MM-dd HH:mm:ss");
|
||||||
|
String collection_time_value = String.format ("%.2f",Double.valueOf(collectStopDate.getTime()/1000 - collectStartDate.getTime()/ 1000));
|
||||||
|
String s_volume_of_Xe = String.valueOf(struct.air_volume);
|
||||||
|
String s_xe_stable_volume = String.valueOf(struct.sample_volume_of_Xe);
|
||||||
|
Date acquisitionStartDate = DateUtils.parseDate(struct.acquisition_start_date.replace(StringPool.SLASH, StringPool.DASH) + StringPool.SPACE + struct.acquisition_start_time.substring(0, struct.acquisition_start_time.indexOf(StringPool.DOT)), "yyyy-MM-dd HH:mm:ss");
|
||||||
|
String acquisition_start = DateUtils.formatDate(acquisitionStartDate, "yyyy-MM-dd HH:mm:ss");
|
||||||
|
String acquisition_real_sec = String.valueOf(struct.acquisition_real_time);
|
||||||
|
String acquisition_live_sec = String.valueOf(struct.acquisition_live_time);
|
||||||
|
StringBuffer strBuffer = new StringBuffer();
|
||||||
|
strBuffer.append("\n");
|
||||||
|
strBuffer.append("#FILE INFORMATION\n");
|
||||||
|
strBuffer.append(" SampleMeasID: "+measurementID+"\n");
|
||||||
|
strBuffer.append(" GASBKMeasID: "+gasBkgdMeasurementID+"\n");
|
||||||
|
strBuffer.append(" DETBKMeasID: "+bkgdMeasurementID+"\n");
|
||||||
|
strBuffer.append(" SRID: "+sampleRefId+"\n");
|
||||||
|
strBuffer.append("\n");
|
||||||
|
strBuffer.append("#COLLECTION INFORMATION\n");
|
||||||
|
strBuffer.append(" Station CODE: "+sit_det_code+"\n");
|
||||||
|
strBuffer.append(" Detector CODE: "+detect_code+"\n");
|
||||||
|
strBuffer.append(" Sample ID: "+sample_id+"\n");
|
||||||
|
strBuffer.append(" Collection Start: "+collect_start+"\n");
|
||||||
|
strBuffer.append(" Collection Stop: "+collect_stop+"\n");
|
||||||
|
strBuffer.append(" Collection TIME: "+collection_time_value+"\n");
|
||||||
|
strBuffer.append(" Sample Volume[m3]: "+s_volume_of_Xe+"\n");
|
||||||
|
strBuffer.append(" Xe Volume[cm3]: "+s_xe_stable_volume+"\n");
|
||||||
|
strBuffer.append("\n");
|
||||||
|
strBuffer.append("#ACQUISITION INFORMATION\n");
|
||||||
|
strBuffer.append(" Acquisition Start: "+acquisition_start+"\n");
|
||||||
|
strBuffer.append(" Acq Real Time: "+acquisition_real_sec+"\n");
|
||||||
|
strBuffer.append(" Acq Live Time: "+acquisition_live_sec+"\n");
|
||||||
|
strBuffer.append("\n");
|
||||||
|
if (Objects.nonNull(inputStream)){
|
||||||
|
inputStream.close();
|
||||||
|
}
|
||||||
|
result.setSuccess(true);
|
||||||
|
result.setResult(strBuffer);
|
||||||
|
}
|
||||||
|
} catch (IOException e) {
|
||||||
|
throw new RuntimeException(e);
|
||||||
|
} catch (ParseException e) {
|
||||||
|
throw new RuntimeException(e);
|
||||||
|
} finally {
|
||||||
|
try {
|
||||||
|
if (Objects.nonNull(ftpClient)){
|
||||||
|
ftpClient.disconnect();
|
||||||
|
}
|
||||||
|
} catch (IOException e) {
|
||||||
|
throw new RuntimeException(e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public Result viewQCResult(Integer sampleId) {
|
||||||
|
Result result = new Result();
|
||||||
|
QCResult qcResult = new QCResult();
|
||||||
|
Sections sections = new Sections();
|
||||||
|
List<Double> collectionTimeSections = sections.getCollectionTimeSections();
|
||||||
|
String collectionMerits = collectionTimeSections.get(1) + "~" + collectionTimeSections.get(4);
|
||||||
|
qcResult.setCollectTimeEvaluationMetrics(collectionMerits);
|
||||||
|
List<Double> acquisitionTimeSections = sections.getAcquisitionTimeSections();
|
||||||
|
String acquisitionMerits = acquisitionTimeSections.get(1) + "~" + acquisitionTimeSections.get(4);
|
||||||
|
qcResult.setAcquisitionTimeEvaluationMetrics(acquisitionMerits);
|
||||||
|
List<Double> xeVolumeSections = sections.getXeVolumeSections();
|
||||||
|
String xeMerits = xeVolumeSections.get(1) + "~ ";
|
||||||
|
qcResult.setXenonVolumeEvaluationMetrics(xeMerits);
|
||||||
|
String xe133MDCEvaluationMetrics = "0.001 ~ 5";
|
||||||
|
qcResult.setXe133MDCEvaluationMetrics(xe133MDCEvaluationMetrics);
|
||||||
|
String dbName = "RNAUTO.GARDS_ANALYSES";
|
||||||
|
SpectrumFileRecord dbSpectrumFilePath = spectrumAnalysisMapper.getDBSpectrumFilePath(dbName, sampleId);
|
||||||
|
//查询数据库中结果的Xe数据
|
||||||
|
List<GardsXeResults> xeDataList = spectrumAnalysisMapper.getXeDataList(sampleId);
|
||||||
|
if (Objects.nonNull(dbSpectrumFilePath)){
|
||||||
|
try {
|
||||||
|
String bkgdMeasurementID = "";
|
||||||
|
String gasBkgdMeasurementID = "";
|
||||||
|
if (StringUtils.isNotBlank(dbSpectrumFilePath.getSampleFilePath())){
|
||||||
|
FTPClient ftpClient = ftpUtil.LoginFTP();
|
||||||
|
if (Objects.isNull(ftpClient)){
|
||||||
|
throw new RuntimeException("ftp连接失败");
|
||||||
|
}
|
||||||
|
//切换被动模式
|
||||||
|
ftpClient.enterLocalPassiveMode();
|
||||||
|
ftpClient.setFileType(FTPClient.BINARY_FILE_TYPE);
|
||||||
|
// 设置编码,当文件中存在中文且上传后文件乱码时可使用此配置项
|
||||||
|
ftpClient.setControlEncoding("UTF-8");
|
||||||
|
ftpClient.setFileTransferMode(FTPClient.STREAM_TRANSFER_MODE);
|
||||||
|
String sampleFilePath = dbSpectrumFilePath.getSampleFilePath();
|
||||||
|
String samplePathName = sampleFilePath.substring(0, sampleFilePath.lastIndexOf(StringPool.SLASH));
|
||||||
|
String sampleFileName = sampleFilePath.substring(sampleFilePath.lastIndexOf(StringPool.SLASH)+1);
|
||||||
|
samplePathName=StringPool.SLASH + spectrumPathProperties.getRootPath() + StringPool.SLASH + samplePathName;
|
||||||
|
ftpClient.changeWorkingDirectory(samplePathName);
|
||||||
|
List<FTPFile> sampleFtpFiles = Arrays.asList(ftpClient.listFiles());
|
||||||
|
sampleFtpFiles=sampleFtpFiles.stream().filter(item -> item.getName().equals(sampleFileName)).collect(Collectors.toList());
|
||||||
|
FTPFile sampleFtpFile = sampleFtpFiles.get(0);
|
||||||
|
if (Objects.nonNull(sampleFtpFile)){
|
||||||
|
InputStream inputStream = ftpClient.retrieveFileStream(sampleFtpFile.getName());
|
||||||
|
//声明一个临时文件
|
||||||
|
File file = File.createTempFile("tmp", null);
|
||||||
|
//将ftp文件的输入流复制给临时文件
|
||||||
|
FileUtils.copyInputStreamToFile(inputStream, file);
|
||||||
|
//加载dll工具库
|
||||||
|
System.loadLibrary("ReadPHDFile");
|
||||||
|
EnergySpectrumStruct struct = EnergySpectrumHandler.getSourceData(file.getAbsolutePath());
|
||||||
|
bkgdMeasurementID = struct.detector_bk_measurement_id;
|
||||||
|
gasBkgdMeasurementID = struct.gas_bk_measurement_id;
|
||||||
|
Date collectStartDate = DateUtils.parseDate(struct.collection_start_date.replace(StringPool.SLASH, StringPool.DASH) + StringPool.SPACE + struct.collection_start_time.substring(0, struct.collection_start_time.indexOf(StringPool.DOT)), "yyyy-MM-dd HH:mm:ss");
|
||||||
|
Date collectStopDate = DateUtils.parseDate(struct.collection_stop_date.replace(StringPool.SLASH, StringPool.DASH) + StringPool.SPACE + struct.collection_stop_time.substring(0, struct.collection_stop_time.indexOf(StringPool.DOT)), "yyyy-MM-dd HH:mm:ss");
|
||||||
|
Double collection_time = Double.valueOf(collectStopDate.getTime() / 1000 - collectStartDate.getTime() / 1000);
|
||||||
|
String collection_time_value = String.format("%.2f",Double.valueOf(collectStopDate.getTime()/1000 - collectStartDate.getTime()/ 1000)/3600.0);
|
||||||
|
qcResult.setCollectTimeValue(collection_time_value);
|
||||||
|
if (collectionTimeSections.get(1) < collection_time && collection_time < collectionTimeSections.get(4)){
|
||||||
|
qcResult.setCollectTimeStatus("Pass");
|
||||||
|
}else {
|
||||||
|
qcResult.setCollectTimeStatus("Failed");
|
||||||
|
}
|
||||||
|
String acquisition_live_sec = String.format("%.2f",struct.acquisition_live_time/3600.0);
|
||||||
|
qcResult.setAcquisitionTimeValue(acquisition_live_sec);
|
||||||
|
if (acquisitionTimeSections.get(1) < struct.acquisition_live_time && struct.acquisition_live_time < acquisitionTimeSections.get(4)){
|
||||||
|
qcResult.setAcquisitionTimeStatus("Pass");
|
||||||
|
}else {
|
||||||
|
qcResult.setAcquisitionTimeStatus("Failed");
|
||||||
|
}
|
||||||
|
String s_xe_stable_volume = String.valueOf(struct.sample_volume_of_Xe);
|
||||||
|
qcResult.setXenonVolumeValue(s_xe_stable_volume);
|
||||||
|
if (xeVolumeSections.get(1) < struct.sample_volume_of_Xe){
|
||||||
|
qcResult.setXenonVolumeStatus("Pass");
|
||||||
|
}else {
|
||||||
|
qcResult.setXenonVolumeStatus("Failed");
|
||||||
|
}
|
||||||
|
if (Objects.nonNull(inputStream)){
|
||||||
|
inputStream.close();
|
||||||
|
}
|
||||||
|
ftpClient.disconnect();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (StringUtils.isNotBlank(dbSpectrumFilePath.getGasBgFilePath())){
|
||||||
|
FTPClient ftpClient = ftpUtil.LoginFTP();
|
||||||
|
if (Objects.isNull(ftpClient)){
|
||||||
|
throw new RuntimeException("ftp连接失败");
|
||||||
|
}
|
||||||
|
//切换被动模式
|
||||||
|
ftpClient.enterLocalPassiveMode();
|
||||||
|
ftpClient.setFileType(FTPClient.BINARY_FILE_TYPE);
|
||||||
|
// 设置编码,当文件中存在中文且上传后文件乱码时可使用此配置项
|
||||||
|
ftpClient.setControlEncoding("UTF-8");
|
||||||
|
ftpClient.setFileTransferMode(FTPClient.STREAM_TRANSFER_MODE);
|
||||||
|
String gasBgFilePath = dbSpectrumFilePath.getGasBgFilePath();
|
||||||
|
String gasBgPathName = gasBgFilePath.substring(0, gasBgFilePath.lastIndexOf(StringPool.SLASH));
|
||||||
|
String gasBgFileName = gasBgFilePath.substring(gasBgFilePath.lastIndexOf(StringPool.SLASH)+1);
|
||||||
|
gasBgPathName=StringPool.SLASH + spectrumPathProperties.getRootPath() + StringPool.SLASH + gasBgPathName;
|
||||||
|
ftpClient.changeWorkingDirectory(gasBgPathName);
|
||||||
|
List<FTPFile> gasBgFtpFiles = Arrays.asList(ftpClient.listFiles());
|
||||||
|
gasBgFtpFiles=gasBgFtpFiles.stream().filter(item -> item.getName().equals(gasBgFileName)).collect(Collectors.toList());
|
||||||
|
FTPFile gasBgFtpFile = gasBgFtpFiles.get(0);
|
||||||
|
if (Objects.nonNull(gasBgFtpFile)){
|
||||||
|
InputStream inputStream = ftpClient.retrieveFileStream(gasBgFtpFile.getName());
|
||||||
|
//声明一个临时文件
|
||||||
|
File file = File.createTempFile("tmp", null);
|
||||||
|
//将ftp文件的输入流复制给临时文件
|
||||||
|
FileUtils.copyInputStreamToFile(inputStream, file);
|
||||||
|
//加载dll工具库
|
||||||
|
System.loadLibrary("ReadPHDFile");
|
||||||
|
EnergySpectrumStruct struct = EnergySpectrumHandler.getSourceData(file.getAbsolutePath());
|
||||||
|
String measurementID = struct.measurement_id;
|
||||||
|
if (measurementID.equals(gasBkgdMeasurementID)){
|
||||||
|
qcResult.setGasBgValueAndStatus(true);
|
||||||
|
}else {
|
||||||
|
qcResult.setGasBgValueAndStatus(false);
|
||||||
|
}
|
||||||
|
if (Objects.nonNull(inputStream)){
|
||||||
|
inputStream.close();
|
||||||
|
}
|
||||||
|
ftpClient.disconnect();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (StringUtils.isNotBlank(dbSpectrumFilePath.getDetBgFilePath())){
|
||||||
|
FTPClient ftpClient = ftpUtil.LoginFTP();
|
||||||
|
if (Objects.isNull(ftpClient)){
|
||||||
|
throw new RuntimeException("ftp连接失败");
|
||||||
|
}
|
||||||
|
//切换被动模式
|
||||||
|
ftpClient.enterLocalPassiveMode();
|
||||||
|
ftpClient.setFileType(FTPClient.BINARY_FILE_TYPE);
|
||||||
|
// 设置编码,当文件中存在中文且上传后文件乱码时可使用此配置项
|
||||||
|
ftpClient.setControlEncoding("UTF-8");
|
||||||
|
ftpClient.setFileTransferMode(FTPClient.STREAM_TRANSFER_MODE);
|
||||||
|
String detBgFilePath = dbSpectrumFilePath.getDetBgFilePath();
|
||||||
|
String detBgPathName = detBgFilePath.substring(0, detBgFilePath.lastIndexOf(StringPool.SLASH));
|
||||||
|
String detBgFileName = detBgFilePath.substring(detBgFilePath.lastIndexOf(StringPool.SLASH)+1);
|
||||||
|
detBgPathName=StringPool.SLASH + spectrumPathProperties.getRootPath() + StringPool.SLASH + detBgPathName;
|
||||||
|
ftpClient.changeWorkingDirectory(detBgPathName);
|
||||||
|
List<FTPFile> detBgFtpFiles = Arrays.asList(ftpClient.listFiles());
|
||||||
|
detBgFtpFiles=detBgFtpFiles.stream().filter(item -> item.getName().equals(detBgFileName)).collect(Collectors.toList());
|
||||||
|
FTPFile detBgFtpFile = detBgFtpFiles.get(0);
|
||||||
|
if (Objects.nonNull(detBgFtpFile)){
|
||||||
|
InputStream inputStream = ftpClient.retrieveFileStream(detBgFtpFile.getName());
|
||||||
|
//声明一个临时文件
|
||||||
|
File file = File.createTempFile("tmp", null);
|
||||||
|
//将ftp文件的输入流复制给临时文件
|
||||||
|
FileUtils.copyInputStreamToFile(inputStream, file);
|
||||||
|
//加载dll工具库
|
||||||
|
System.loadLibrary("ReadPHDFile");
|
||||||
|
EnergySpectrumStruct struct = EnergySpectrumHandler.getSourceData(file.getAbsolutePath());
|
||||||
|
String measurementID = struct.measurement_id;
|
||||||
|
if (measurementID.equals(bkgdMeasurementID)){
|
||||||
|
qcResult.setDetBgValueAndStatus(true);
|
||||||
|
}else {
|
||||||
|
qcResult.setDetBgValueAndStatus(false);
|
||||||
|
}
|
||||||
|
if (Objects.nonNull(inputStream)){
|
||||||
|
inputStream.close();
|
||||||
|
}
|
||||||
|
ftpClient.disconnect();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (CollectionUtils.isNotEmpty(xeDataList)){
|
||||||
|
List<GardsXeResults> xeData = xeDataList.stream().filter(item -> item.getNuclideName().equals("Xe133")).collect(Collectors.toList());
|
||||||
|
GardsXeResults gardsXeResults = xeData.get(0);
|
||||||
|
qcResult.setXe133MDCValue(String.valueOf(gardsXeResults.getMdc()));
|
||||||
|
if ( 0.001 < gardsXeResults.getMdc() && gardsXeResults.getMdc() < 5.0 ){
|
||||||
|
qcResult.setXe133MDCStatus("Pass");
|
||||||
|
}else {
|
||||||
|
qcResult.setXe133MDCStatus("Failed");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (IOException e) {
|
||||||
|
throw new RuntimeException(e);
|
||||||
|
} catch (ParseException e) {
|
||||||
|
throw new RuntimeException(e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
result.setSuccess(true);
|
||||||
|
result.setResult(qcResult);
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public Result viewRLR(Integer sampleId) {
|
||||||
|
Result result = new Result();
|
||||||
|
RlrDataValues rlrDataValues = new RlrDataValues();
|
||||||
|
String sampleFilePath = spectrumAnalysisMapper.getSampleFilePath(sampleId);
|
||||||
|
FTPClient ftpClient = ftpUtil.LoginFTP();
|
||||||
|
if (Objects.isNull(ftpClient)){
|
||||||
|
throw new RuntimeException("ftp连接失败");
|
||||||
|
}
|
||||||
|
//查询数据库中结果的Xe数据
|
||||||
|
List<GardsXeResults> xeDataList = spectrumAnalysisMapper.getXeDataList(sampleId);
|
||||||
|
try {
|
||||||
|
//切换被动模式
|
||||||
|
ftpClient.enterLocalPassiveMode();
|
||||||
|
ftpClient.setFileType(FTPClient.BINARY_FILE_TYPE);
|
||||||
|
// 设置编码,当文件中存在中文且上传后文件乱码时可使用此配置项
|
||||||
|
ftpClient.setControlEncoding("UTF-8");
|
||||||
|
ftpClient.setFileTransferMode(FTPClient.STREAM_TRANSFER_MODE);
|
||||||
|
if (StringUtils.isNotBlank(sampleFilePath)){
|
||||||
|
String samplePathName = sampleFilePath.substring(0, sampleFilePath.lastIndexOf(StringPool.SLASH));
|
||||||
|
String sampleFileName = sampleFilePath.substring(sampleFilePath.lastIndexOf(StringPool.SLASH)+1);
|
||||||
|
samplePathName=StringPool.SLASH + spectrumPathProperties.getRootPath() + StringPool.SLASH + samplePathName;
|
||||||
|
ftpClient.changeWorkingDirectory(samplePathName);
|
||||||
|
List<FTPFile> sampleFtpFiles = Arrays.asList(ftpClient.listFiles());
|
||||||
|
sampleFtpFiles=sampleFtpFiles.stream().filter(item -> item.getName().equals(sampleFileName)).collect(Collectors.toList());
|
||||||
|
FTPFile sampleFtpFile = sampleFtpFiles.get(0);
|
||||||
|
if (Objects.nonNull(sampleFtpFile)){
|
||||||
|
InputStream inputStream = ftpClient.retrieveFileStream(sampleFtpFile.getName());
|
||||||
|
//声明一个临时文件
|
||||||
|
File file = File.createTempFile("tmp", null);
|
||||||
|
//将ftp文件的输入流复制给临时文件
|
||||||
|
FileUtils.copyInputStreamToFile(inputStream, file);
|
||||||
|
//加载dll工具库
|
||||||
|
System.loadLibrary("ReadPHDFile");
|
||||||
|
EnergySpectrumStruct struct = EnergySpectrumHandler.getSourceData(file.getAbsolutePath());
|
||||||
|
|
||||||
|
rlrDataValues.setSrid(struct.sample_ref_id);
|
||||||
|
rlrDataValues.setColloct_start_date(struct.collection_start_date);
|
||||||
|
rlrDataValues.setColloct_start_time(struct.collection_start_time);
|
||||||
|
rlrDataValues.setColloct_stop_date(struct.collection_stop_date);
|
||||||
|
rlrDataValues.setColloct_stop_time(struct.collection_stop_time);
|
||||||
|
|
||||||
|
rlrDataValues.setAcq_start_date(struct.acquisition_start_date);
|
||||||
|
rlrDataValues.setAcq_start_time(struct.acquisition_start_time);
|
||||||
|
rlrDataValues.setAcq_live_time(String.valueOf(struct.acquisition_live_time));
|
||||||
|
|
||||||
|
if (CollectionUtils.isNotEmpty(xeDataList)){
|
||||||
|
List<GardsXeResults> xe131mDataList = xeDataList.stream().filter(item -> item.getNuclideName().equals("Xe131m")).collect(Collectors.toList());
|
||||||
|
GardsXeResults xe131m = xe131mDataList.get(0);
|
||||||
|
rlrDataValues.setXe131m_conc(String.valueOf(xe131m.getConc()));
|
||||||
|
rlrDataValues.setXe131m_uncert_conc(String.valueOf(xe131m.getConcErr()));
|
||||||
|
rlrDataValues.setXe131m_MDC(String.valueOf(xe131m.getMdc()));
|
||||||
|
rlrDataValues.setXe131m_LC(String.valueOf(xe131m.getLc()));
|
||||||
|
|
||||||
|
List<GardsXeResults> xe133DataList = xeDataList.stream().filter(item -> item.getNuclideName().equals("Xe133")).collect(Collectors.toList());
|
||||||
|
GardsXeResults xe133 = xe133DataList.get(0);
|
||||||
|
rlrDataValues.setXe133_conc(String.valueOf(xe133.getConc()));
|
||||||
|
rlrDataValues.setXe133_uncert_conc(String.valueOf(xe133.getConcErr()));
|
||||||
|
rlrDataValues.setXe133_MDC(String.valueOf(xe133.getMdc()));
|
||||||
|
rlrDataValues.setXe133_LC(String.valueOf(xe133.getLc()));
|
||||||
|
|
||||||
|
List<GardsXeResults> xe133mDataList = xeDataList.stream().filter(item -> item.getNuclideName().equals("Xe133m")).collect(Collectors.toList());
|
||||||
|
GardsXeResults xe133m = xe133mDataList.get(0);
|
||||||
|
rlrDataValues.setXe133m_conc(String.valueOf(xe133m.getConc()));
|
||||||
|
rlrDataValues.setXe133m_uncert_conc(String.valueOf(xe133m.getConcErr()));
|
||||||
|
rlrDataValues.setXe133m_MDC(String.valueOf(xe133m.getMdc()));
|
||||||
|
rlrDataValues.setXe133m_LC(String.valueOf(xe133m.getLc()));
|
||||||
|
|
||||||
|
List<GardsXeResults> xe135DataList = xeDataList.stream().filter(item -> item.getNuclideName().equals("Xe135")).collect(Collectors.toList());
|
||||||
|
GardsXeResults xe135 = xe135DataList.get(0);
|
||||||
|
rlrDataValues.setXe135_conc(String.valueOf(xe135.getConc()));
|
||||||
|
rlrDataValues.setXe135_uncert_conc(String.valueOf(xe135.getConcErr()));
|
||||||
|
rlrDataValues.setXe135_MDC(String.valueOf(xe135.getMdc()));
|
||||||
|
rlrDataValues.setXe135_LC(String.valueOf(xe135.getLc()));
|
||||||
|
}
|
||||||
|
if (Objects.nonNull(inputStream)){
|
||||||
|
inputStream.close();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (IOException e) {
|
||||||
|
throw new RuntimeException(e);
|
||||||
|
} finally {
|
||||||
|
try {
|
||||||
|
if (Objects.nonNull(ftpClient)){
|
||||||
|
ftpClient.disconnect();
|
||||||
|
}
|
||||||
|
} catch (IOException e) {
|
||||||
|
throw new RuntimeException(e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
result.setSuccess(true);
|
||||||
|
result.setResult(rlrDataValues);
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -3,13 +3,12 @@ package org.jeecg.modules.service.impl;
|
||||||
import com.baomidou.dynamic.datasource.annotation.DS;
|
import com.baomidou.dynamic.datasource.annotation.DS;
|
||||||
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
|
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
|
||||||
import com.baomidou.mybatisplus.core.toolkit.CollectionUtils;
|
import com.baomidou.mybatisplus.core.toolkit.CollectionUtils;
|
||||||
import com.baomidou.mybatisplus.core.toolkit.StringPool;
|
|
||||||
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
|
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
|
||||||
import org.jeecg.modules.entity.SysDict;
|
import org.jeecg.modules.entity.SysDict;
|
||||||
import org.jeecg.modules.entity.SysDictItem;
|
import org.jeecg.modules.entity.SysDictItem;
|
||||||
import org.jeecg.modules.mapper.SysDictItemMapper;
|
import org.jeecg.modules.mapper.SysDictItemSpectrumMapper;
|
||||||
import org.jeecg.modules.mapper.SysDictMapper;
|
import org.jeecg.modules.mapper.SysDictSpectrumMapper;
|
||||||
import org.jeecg.modules.service.ISysDictService;
|
import org.jeecg.modules.service.ISysDictSpectrumService;
|
||||||
import org.springframework.beans.factory.annotation.Autowired;
|
import org.springframework.beans.factory.annotation.Autowired;
|
||||||
import org.springframework.stereotype.Service;
|
import org.springframework.stereotype.Service;
|
||||||
import org.springframework.transaction.annotation.Propagation;
|
import org.springframework.transaction.annotation.Propagation;
|
||||||
|
@ -18,15 +17,14 @@ import org.springframework.transaction.annotation.Transactional;
|
||||||
import java.util.Collections;
|
import java.util.Collections;
|
||||||
import java.util.LinkedList;
|
import java.util.LinkedList;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
import java.util.Objects;
|
|
||||||
import java.util.stream.Collectors;
|
import java.util.stream.Collectors;
|
||||||
|
|
||||||
@Service("sysDictService")
|
@Service("sysDictSpectrumService")
|
||||||
@DS("master")
|
@DS("master")
|
||||||
public class SysDictServiceImpl extends ServiceImpl<SysDictMapper, SysDict> implements ISysDictService {
|
public class SysDictSpectrumServiceImpl extends ServiceImpl<SysDictSpectrumMapper, SysDict> implements ISysDictSpectrumService {
|
||||||
|
|
||||||
@Autowired
|
@Autowired
|
||||||
private SysDictItemMapper sysDictItemMapper;
|
private SysDictItemSpectrumMapper sysDictItemSpectrumMapper;
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
@Transactional(propagation = Propagation.REQUIRES_NEW)
|
@Transactional(propagation = Propagation.REQUIRES_NEW)
|
||||||
|
@ -45,7 +43,7 @@ public class SysDictServiceImpl extends ServiceImpl<SysDictMapper, SysDict> impl
|
||||||
List<String> dictIds = sysDicts.stream().map(SysDict::getId).distinct().collect(Collectors.toList());
|
List<String> dictIds = sysDicts.stream().map(SysDict::getId).distinct().collect(Collectors.toList());
|
||||||
LambdaQueryWrapper<SysDictItem> dictItemQueryWrapper = new LambdaQueryWrapper<>();
|
LambdaQueryWrapper<SysDictItem> dictItemQueryWrapper = new LambdaQueryWrapper<>();
|
||||||
dictItemQueryWrapper.in(SysDictItem::getDictId, dictIds);
|
dictItemQueryWrapper.in(SysDictItem::getDictId, dictIds);
|
||||||
List<SysDictItem> sysDictItems = sysDictItemMapper.selectList(dictItemQueryWrapper);
|
List<SysDictItem> sysDictItems = sysDictItemSpectrumMapper.selectList(dictItemQueryWrapper);
|
||||||
if (CollectionUtils.isNotEmpty(sysDictItems)){
|
if (CollectionUtils.isNotEmpty(sysDictItems)){
|
||||||
List<String> result = sysDictItems.stream().map(SysDictItem::getItemValue).distinct().collect(Collectors.toList());
|
List<String> result = sysDictItems.stream().map(SysDictItem::getItemValue).distinct().collect(Collectors.toList());
|
||||||
return result;
|
return result;
|
|
@ -0,0 +1,205 @@
|
||||||
|
package org.jeecg.modules.system.entity;
|
||||||
|
|
||||||
|
import com.baomidou.mybatisplus.annotation.IdType;
|
||||||
|
import com.baomidou.mybatisplus.annotation.TableField;
|
||||||
|
import com.baomidou.mybatisplus.annotation.TableId;
|
||||||
|
import com.baomidou.mybatisplus.annotation.TableLogic;
|
||||||
|
import com.fasterxml.jackson.annotation.JsonFormat;
|
||||||
|
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||||
|
import lombok.Data;
|
||||||
|
import lombok.EqualsAndHashCode;
|
||||||
|
import lombok.experimental.Accessors;
|
||||||
|
import org.jeecg.common.aspect.annotation.Dict;
|
||||||
|
import org.jeecgframework.poi.excel.annotation.Excel;
|
||||||
|
import org.springframework.format.annotation.DateTimeFormat;
|
||||||
|
|
||||||
|
import java.io.Serializable;
|
||||||
|
import java.util.Date;
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* <p>
|
||||||
|
* 用户表
|
||||||
|
* </p>
|
||||||
|
*
|
||||||
|
* @Author scott
|
||||||
|
* @since 2018-12-20
|
||||||
|
*/
|
||||||
|
@Data
|
||||||
|
@EqualsAndHashCode(callSuper = false)
|
||||||
|
@Accessors(chain = true)
|
||||||
|
public class SysUser implements Serializable {
|
||||||
|
|
||||||
|
private static final long serialVersionUID = 1L;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* id
|
||||||
|
*/
|
||||||
|
@TableId(type = IdType.ASSIGN_ID)
|
||||||
|
private String id;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 登录账号
|
||||||
|
*/
|
||||||
|
@Excel(name = "登录账号", width = 15)
|
||||||
|
private String username;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 真实姓名
|
||||||
|
*/
|
||||||
|
@Excel(name = "真实姓名", width = 15)
|
||||||
|
private String realname;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 密码
|
||||||
|
*/
|
||||||
|
@JsonProperty(access = JsonProperty.Access.WRITE_ONLY)
|
||||||
|
private String password;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* md5密码盐
|
||||||
|
*/
|
||||||
|
@JsonProperty(access = JsonProperty.Access.WRITE_ONLY)
|
||||||
|
private String salt;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 头像
|
||||||
|
*/
|
||||||
|
@Excel(name = "头像", width = 15,type = 2)
|
||||||
|
private String avatar;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 生日
|
||||||
|
*/
|
||||||
|
@Excel(name = "生日", width = 15, format = "yyyy-MM-dd")
|
||||||
|
@JsonFormat(timezone = "GMT+8", pattern = "yyyy-MM-dd")
|
||||||
|
@DateTimeFormat(pattern = "yyyy-MM-dd")
|
||||||
|
private Date birthday;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 性别(1:男 2:女)
|
||||||
|
*/
|
||||||
|
@Excel(name = "性别", width = 15,dicCode="sex")
|
||||||
|
@Dict(dicCode = "sex")
|
||||||
|
private Integer sex;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 电子邮件
|
||||||
|
*/
|
||||||
|
@Excel(name = "电子邮件", width = 15)
|
||||||
|
private String email;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 电话
|
||||||
|
*/
|
||||||
|
@Excel(name = "电话", width = 15)
|
||||||
|
private String phone;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 登录选择部门编码
|
||||||
|
*/
|
||||||
|
private String orgCode;
|
||||||
|
/**
|
||||||
|
* 登录选择租户ID
|
||||||
|
*/
|
||||||
|
private Integer loginTenantId;
|
||||||
|
|
||||||
|
/**部门名称*/
|
||||||
|
private transient String orgCodeTxt;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 状态(1:正常 2:冻结 )
|
||||||
|
*/
|
||||||
|
@Excel(name = "状态", width = 15,dicCode="user_status")
|
||||||
|
@Dict(dicCode = "user_status")
|
||||||
|
private Integer status;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 删除状态(0,正常,1已删除)
|
||||||
|
*/
|
||||||
|
@Excel(name = "删除状态", width = 15,dicCode="del_flag")
|
||||||
|
@TableLogic
|
||||||
|
private Integer delFlag;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 工号,唯一键
|
||||||
|
*/
|
||||||
|
@Excel(name = "工号", width = 15)
|
||||||
|
private String workNo;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 职务,关联职务表
|
||||||
|
*/
|
||||||
|
@Excel(name = "职务", width = 15)
|
||||||
|
@Dict(dictTable ="sys_position",dicText = "name",dicCode = "code")
|
||||||
|
private String post;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 座机号
|
||||||
|
*/
|
||||||
|
@Excel(name = "座机号", width = 15)
|
||||||
|
private String telephone;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 创建人
|
||||||
|
*/
|
||||||
|
private String createBy;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 创建时间
|
||||||
|
*/
|
||||||
|
private Date createTime;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 更新人
|
||||||
|
*/
|
||||||
|
private String updateBy;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 更新时间
|
||||||
|
*/
|
||||||
|
private Date updateTime;
|
||||||
|
/**
|
||||||
|
* 同步工作流引擎1同步0不同步
|
||||||
|
*/
|
||||||
|
private Integer activitiSync;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 身份(0 普通成员 1 上级)
|
||||||
|
*/
|
||||||
|
@Excel(name="(1普通成员 2上级)",width = 15)
|
||||||
|
private Integer userIdentity;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 负责部门
|
||||||
|
*/
|
||||||
|
@Excel(name="负责部门",width = 15,dictTable ="sys_depart",dicText = "depart_name",dicCode = "id")
|
||||||
|
@Dict(dictTable ="sys_depart",dicText = "depart_name",dicCode = "id")
|
||||||
|
private String departIds;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 多租户ids临时用,不持久化数据库(数据库字段不存在)
|
||||||
|
*/
|
||||||
|
@TableField(exist = false)
|
||||||
|
private String relTenantIds;
|
||||||
|
|
||||||
|
/**设备id uniapp推送用*/
|
||||||
|
private String clientId;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 登录首页地址
|
||||||
|
*/
|
||||||
|
@TableField(exist = false)
|
||||||
|
private String homePath;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 职位名称
|
||||||
|
*/
|
||||||
|
@TableField(exist = false)
|
||||||
|
private String postText;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 流程状态
|
||||||
|
*/
|
||||||
|
private String bpmStatus;
|
||||||
|
}
|
|
@ -34,6 +34,7 @@ import java.io.UnsupportedEncodingException;
|
||||||
import java.net.URLEncoder;
|
import java.net.URLEncoder;
|
||||||
import java.text.ParseException;
|
import java.text.ParseException;
|
||||||
import java.text.SimpleDateFormat;
|
import java.text.SimpleDateFormat;
|
||||||
|
import java.time.LocalDate;
|
||||||
import java.util.*;
|
import java.util.*;
|
||||||
|
|
||||||
@Slf4j
|
@Slf4j
|
||||||
|
@ -113,4 +114,9 @@ public class SysTaskController {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@RequestMapping("findUserTaskStations")
|
||||||
|
public List<String> findUserTaskStations(@RequestParam String userId, @RequestParam String nowDate){
|
||||||
|
return sysTaskService.findUserTaskStations(userId, nowDate);
|
||||||
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
@@ -27,4 +27,13 @@ public interface SysTaskMapper extends BaseMapper<SysTask> {
     List<SysTask> selectTaskByDate(String firstDay, String lastDay);
 
     List<SysTaskStation> selectList(LambdaQueryWrapper<SysTaskStation> sysTaskStationQueryWrapper);
+
+    /**
+     * Query the station ids of the user's duty tasks on the given date
+     * @param userId
+     * @param nowDate
+     * @return
+     */
+    List<String> findUserTaskStations(String userId, String nowDate);
+
 }
@@ -28,4 +28,14 @@
         where t.scheduling_date BETWEEN to_date(#{firstDay}, 'YYYY-MM-DD') and to_date(#{lastDay}, 'YYYY-MM-DD')
     </select>
 
+    <select id="findUserTaskStations" resultType="java.lang.String">
+        SELECT
+            stt.station_id
+        FROM
+            sys_task st
+            LEFT JOIN sys_task_station stt on st.id = stt.task_id
+        where st.user_id = #{userId}
+          and st.scheduling_date = to_date(#{nowDate}, 'YYYY-MM-DD')
+    </select>
+
 </mapper>
@@ -11,6 +11,7 @@ import org.springframework.web.multipart.MultipartFile;
 
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
+import java.time.LocalDate;
 import java.util.Date;
 import java.util.List;
 import java.util.Map;

@@ -81,4 +82,12 @@ public interface ISysTaskService extends IService<SysTask> {
      */
     ImportViewVo importExcel(List<SysTaskExportVo> dataList, int headerRow);
 
+    /**
+     * Query the scheduling-task station ids for the given user
+     * @param userId
+     * @param nowDate
+     * @return
+     */
+    List<String> findUserTaskStations(String userId, String nowDate);
+
 }
@@ -39,6 +39,7 @@ import java.io.IOException;
 import java.lang.reflect.Field;
 import java.text.ParseException;
 import java.text.SimpleDateFormat;
+import java.time.LocalDate;
 import java.util.*;
 import java.util.stream.Collectors;
 

@@ -533,6 +534,11 @@ public class SysTaskServiceImpl extends ServiceImpl<SysTaskMapper, SysTask> impl
         return importViewVo;
     }
 
+    @Override
+    public List<String> findUserTaskStations(String userId, String nowDate) {
+        return this.baseMapper.findUserTaskStations(userId, nowDate);
+    }
+
     private void addOrUpdate(List<SysTask> sysTaskList, List<SysTask> taskList){
         // get the current request
         HttpServletRequest request = SpringContextUtils.getHttpServletRequest();
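A minimal sketch of how the new service method could back the kind of permission check the commit message describes for UserTaskUtil (whether the current user's schedule covers a given station). The class name and wiring below are assumptions for illustration; only the findUserTaskStations(userId, nowDate) call and its yyyy-MM-dd date format come from the code above.

import java.time.LocalDate;
import java.util.List;
import java.util.function.BiFunction;

// Hypothetical helper; the actual UserTaskUtil added by this commit may differ.
public class UserTaskPermissionChecker {

    // Stands in for ISysTaskService#findUserTaskStations(userId, nowDate).
    private final BiFunction<String, String, List<String>> findUserTaskStations;

    public UserTaskPermissionChecker(BiFunction<String, String, List<String>> findUserTaskStations) {
        this.findUserTaskStations = findUserTaskStations;
    }

    /** True if stationId is among the stations scheduled for the user on today's date. */
    public boolean canOperateStation(String userId, String stationId) {
        String today = LocalDate.now().toString(); // ISO yyyy-MM-dd, matching to_date(#{nowDate}, 'YYYY-MM-DD')
        List<String> stations = findUserTaskStations.apply(userId, today);
        return stations != null && stations.contains(stationId);
    }
}

In the real code this would presumably be wired to sysTaskService::findUserTaskStations or to the REST endpoint shown earlier.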
@@ -1321,6 +1321,7 @@ public class SysUserServiceImpl extends ServiceImpl<SysUserMapper, SysUser> impl
                     roles.add(role);
                 }
             }
+            roles = roles.stream().distinct().collect(Collectors.toList());
         }
         sysUser.setRoles(roles);
     }
@@ -5,6 +5,7 @@ import cn.hutool.core.util.ArrayUtil;
 import cn.hutool.core.util.ObjectUtil;
 import com.baomidou.mybatisplus.core.toolkit.CollectionUtils;
 import com.baomidou.mybatisplus.core.toolkit.StringPool;
+import com.baomidou.mybatisplus.core.toolkit.StringUtils;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.net.ftp.FTP;

@@ -13,6 +14,7 @@ import org.apache.commons.net.ftp.FTPFile;
 import org.jeecg.common.api.vo.Result;
 import org.jeecg.common.constant.SymbolConstant;
 import org.jeecg.common.enums.SampleFileHeader;
+import org.jeecg.common.properties.SpectrumPathProperties;
 import org.jeecg.modules.entity.data.HistogramData;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Value;

@@ -29,6 +31,8 @@ public class ReadLineUtil {
 
     @Autowired
     private FTPUtil ftpUtil;
+    @Autowired
+    private SpectrumPathProperties spectrumPathProperties;
 
     @Value("${ftp.encoding}")
     private String encoding;
@@ -41,19 +45,13 @@ public class ReadLineUtil {
             throw new RuntimeException("ftp连接失败!");
         }
         try {
-            String fileName = filePath.substring(filePath.lastIndexOf(StringPool.SLASH) + 1);
-            String parameterFilePath = filePath.substring(0, filePath.lastIndexOf(StringPool.SLASH));
-            // split the file path on the separator character
-            List<String> paths = Arrays.asList(parameterFilePath.split(StringPool.SLASH));
-            // check whether the path list is empty
-            if (CollectionUtils.isNotEmpty(paths)){
-                // iterate over the path segments
-                for (String path : paths) {
-                    // change the working directory one segment at a time
-                    ftpClient.changeWorkingDirectory(path);
-                }
-                // read the file from the current working directory
             ftpClient.enterLocalPassiveMode();
+            String fileName = filePath.substring(filePath.lastIndexOf(StringPool.SLASH) + 1);
+            String parameterFilePath = StringPool.SLASH + spectrumPathProperties.getRootPath() + StringPool.SLASH + filePath.substring(0, filePath.lastIndexOf(StringPool.SLASH));
+            // check whether the file path is blank
+            if (StringUtils.isNotBlank(parameterFilePath)){
+                // read the file from the current working directory
+                ftpClient.changeWorkingDirectory(parameterFilePath);
                 ftpClient.setFileType(FTPClient.BINARY_FILE_TYPE);
                 // set the encoding; use this option when the file contains Chinese and turns garbled after upload
                 ftpClient.setControlEncoding(encoding);
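The rewritten block above replaces the old segment-by-segment directory walk with a single absolute path rooted at the configured spectrum root directory. Below is a standalone sketch of that path handling; the root value "savefile", the sample file path, host, and credentials are assumptions for illustration, and only Apache Commons Net FTPClient methods that exist are used.

import org.apache.commons.net.ftp.FTPClient;

// Illustrative sketch, not the project code: build the parent directory as one
// absolute path under the configured root and issue a single CWD for it.
public class FtpWorkingDirectoryExample {

    // rootPath stands in for SpectrumPathProperties#getRootPath(); "savefile" is an assumed value.
    static String parameterFilePath(String rootPath, String filePath) {
        String parent = filePath.substring(0, filePath.lastIndexOf('/'));
        return "/" + rootPath + "/" + parent; // e.g. "/savefile/2023/07/station1"
    }

    public static void main(String[] args) throws Exception {
        String filePath = "2023/07/station1/SAMPLE.PHD"; // hypothetical relative path
        FTPClient ftpClient = new FTPClient();
        ftpClient.connect("ftp.example.com");  // hypothetical host
        ftpClient.login("user", "password");   // hypothetical credentials
        ftpClient.enterLocalPassiveMode();
        // One CWD with the absolute directory; no need to walk each path segment as before.
        boolean changed = ftpClient.changeWorkingDirectory(parameterFilePath("savefile", filePath));
        System.out.println("changed working directory: " + changed);
        ftpClient.disconnect();
    }
}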
@@ -89,7 +87,6 @@ public class ReadLineUtil {
             throw new RuntimeException(e);
         } finally {
             try {
-
                 ftpClient.disconnect();
             } catch (IOException e) {
                 throw new RuntimeException(e);
@@ -185,6 +182,7 @@ public class ReadLineUtil {
         OutputStream outputStream = null;
         InputStream inputStream = null;
         try {
+            filePath = StringPool.SLASH + spectrumPathProperties.getRootPath() + StringPool.SLASH + filePath;
             // switch the working directory to /
             ftpClient.changeWorkingDirectory(SymbolConstant.SINGLE_SLASH);