Merge remote-tracking branch 'origin/station' into station

nieziyan 2023-10-30 11:16:12 +08:00
commit 010f06f695
10 changed files with 113 additions and 42 deletions

View File

@ -55,6 +55,10 @@ public class FTPUtil {
return this.encoding;
}
public String getFtpRootPath() {
return this.ftpRootPath;
}
/**
* Log in to the FTP server
* @return
@ -88,7 +92,7 @@ public class FTPUtil {
ServletOutputStream out = null;
FTPClient ftpClient = this.LoginFTP();
if (Objects.isNull(ftpClient)){
throw new RuntimeException("ftp连接失败!");
throw new RuntimeException("ftp connection failed!");
}
//transfer mode
try {

View File

@ -26,7 +26,7 @@ public class LogManageServiceImpl implements ILogManageService {
List<LogManage> result = new ArrayList<>();
FTPClient ftpClient = ftpUtil.LoginFTP();
if(Objects.isNull(ftpClient)){
throw new RuntimeException("ftp连接失败!");
throw new RuntimeException("ftp connection failed!");
}
try {
//switch to passive mode
@ -36,6 +36,7 @@ public class LogManageServiceImpl implements ILogManageService {
ftpClient.setControlEncoding("UTF-8");
ftpClient.setFileTransferMode(FTPClient.STREAM_TRANSFER_MODE);
//switch the working directory path
workPath = ftpUtil.getFtpRootPath()+StringPool.SLASH+workPath;
ftpClient.changeWorkingDirectory(workPath);
List<FTPFile> ftpFiles = Arrays.asList(ftpClient.listDirectories());
if (CollectionUtils.isNotEmpty(ftpFiles)){
@ -79,7 +80,7 @@ public class LogManageServiceImpl implements ILogManageService {
List<FileInfo> result = new ArrayList<>();
FTPClient ftpClient = ftpUtil.LoginFTP();
if (Objects.isNull(ftpClient)){
throw new RuntimeException("ftp连接失败!");
throw new RuntimeException("ftp connection failed!");
}
try {
//switch to passive mode
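
For reference, a minimal sketch of the listing pattern these changes build on, combining LoginFTP(), passive mode, and the new getFtpRootPath() prefix (the helper name listLogDirectories and its use here are illustrative, not part of the diff; FTPClient/FTPFile are the Apache Commons Net types already used above):

// Illustrative helper only: assumes the FTPUtil bean and StringPool constant used in this diff.
private List<FTPFile> listLogDirectories(FTPUtil ftpUtil, String workPath) throws IOException {
    FTPClient ftpClient = ftpUtil.LoginFTP();
    if (Objects.isNull(ftpClient)) {
        throw new RuntimeException("ftp connection failed!");
    }
    try {
        ftpClient.enterLocalPassiveMode();                         // passive mode, as in the service code
        ftpClient.setControlEncoding("UTF-8");
        ftpClient.setFileTransferMode(FTPClient.STREAM_TRANSFER_MODE);
        // prefix the working directory with the configured FTP root
        String fullPath = ftpUtil.getFtpRootPath() + StringPool.SLASH + workPath;
        ftpClient.changeWorkingDirectory(fullPath);
        return Arrays.asList(ftpClient.listDirectories());
    } finally {
        if (ftpClient.isConnected()) {
            ftpClient.logout();
            ftpClient.disconnect();
        }
    }
}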

View File

@ -3512,14 +3512,14 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
// Get the path of the report generated by the automatic processing program
String reportPath = spectrumAnalysisMapper.viewARR(sampleId);
if (StringUtils.isBlank(reportPath)) {
throw new RuntimeException("自动处理程序生成报告不存在");
throw new RuntimeException("The automatic handler generated report does not exist");
}
String pathName = StringPool.SLASH + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + reportPath.substring(0, reportPath.lastIndexOf(StringPool.SLASH));
String fileName = reportPath.substring(reportPath.lastIndexOf(StringPool.SLASH) + 1) + ".txt";
// Connect to FTP
FTPClient ftpClient = ftpUtil.LoginFTP();
if (Objects.isNull(ftpClient)) {
throw new RuntimeException("ftp连接失败");
throw new RuntimeException("ftp connection failed");
}
InputStream inputStream = null;
ServletOutputStream outputStream = null;
@ -3569,7 +3569,7 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
// Connect to FTP
FTPClient ftpClient = ftpUtil.LoginFTP();
if (Objects.isNull(ftpClient)) {
throw new RuntimeException("ftp连接失败");
throw new RuntimeException("ftp connection failed");
}
InputStream inputStream = null;
ServletOutputStream outputStream = null;
@ -4100,14 +4100,14 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
public void viewAutomaticAnalysisLog(Integer sampleId, HttpServletResponse response) {
String logPath = spectrumAnalysisMapper.findAutomaticLogPath(sampleId);
if (StringUtils.isBlank(logPath)) {
throw new RuntimeException("自动处理程序生成日志不存在");
throw new RuntimeException("The log generated by the automatic processor does not exist");
}
String pathName = StringPool.SLASH + spectrumPathProperties.getLogPath() + StringPool.SLASH + logPath.substring(0, logPath.lastIndexOf(StringPool.SLASH));
String fileName = logPath.substring(logPath.lastIndexOf(StringPool.SLASH) + 1);
// Connect to FTP
FTPClient ftpClient = ftpUtil.LoginFTP();
if (Objects.isNull(ftpClient)) {
throw new RuntimeException("ftp连接失败");
throw new RuntimeException("ftp connection failed");
}
InputStream inputStream = null;
ServletOutputStream outputStream = null;
@ -4476,7 +4476,10 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
}
String efficiency = NumberFormatUtil.numberFormat(String.valueOf(nuc.getEfficiency()));
String activity = NumberFormatUtil.numberFormat(String.valueOf(nuc.getActivity()));
String actErr = NumberFormatUtil.numberFormat(String.valueOf(nuc.getAct_err() / nuc.getActivity() * 100));
String actErr = "";
if (nuc.getActivity() > 0) {
actErr = NumberFormatUtil.numberFormat(String.valueOf(nuc.getAct_err() / nuc.getActivity() * 100));
}
String mda = NumberFormatUtil.numberFormat(String.valueOf(nuc.getMda()));
String conc = NumberFormatUtil.numberFormat(String.valueOf(nuc.getConcentration()));
String mdc = NumberFormatUtil.numberFormat(String.valueOf(nuc.getMdc()));
@ -4603,7 +4606,10 @@ public class GammaServiceImpl extends AbstractLogOrReport implements IGammaServi
double activityValue = nuclideActMda.getActivity();
String activity = NumberFormatUtil.numberFormat(String.valueOf(activityValue));
nuclideActMdaDto.setActivity(activity);
String actErr = NumberFormatUtil.numberFormat(String.valueOf(actErrValue / activityValue * 100));
String actErr = "";
if (activityValue > 0) {
actErr = NumberFormatUtil.numberFormat(String.valueOf(actErrValue / activityValue * 100));
}
nuclideActMdaDto.setActErr(actErr);
String mda = NumberFormatUtil.numberFormat(String.valueOf(nuclideActMda.getMda()));
nuclideActMdaDto.setMda(mda);
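
Both hunks above apply the same guard so the relative error is only computed for a positive activity; a standalone sketch of the idea (the helper name relativeActErr is illustrative only, NumberFormatUtil is the utility already used above):

// Illustrative helper: relative activity error in percent, or an empty string when the
// activity is not positive, so the caller never divides by zero.
private static String relativeActErr(double actErr, double activity) {
    if (activity > 0) {
        return NumberFormatUtil.numberFormat(String.valueOf(actErr / activity * 100));
    }
    return "";
}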

View File

@ -183,6 +183,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService {
}
System.out.println(sbSql.toString());
List<GardsSampleDataSpectrum> sampleData = Lists.newArrayList();
long start = System.currentTimeMillis();
try (Connection connection = jdbcTemplate.getDataSource().getConnection();
PreparedStatement statement = connection.prepareStatement(sbSql.toString());
ResultSet resultSet = statement.executeQuery()) {
@ -202,6 +203,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService {
} catch (SQLException e) {
e.printStackTrace();
}
System.out.println(">>>>>>>>>>>>>"+(System.currentTimeMillis() - start));
// List<GardsSampleDataSpectrum> sampleData = spectrumAnalysisMapper.getDBSearchList(dbName, stationTypes, userStations, AllUsers);
//get the station codes
List<String> stationCodes = new LinkedList<>();
@ -587,7 +589,7 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService {
Map<String, Object> map = new HashMap<>();
FTPClient ftpClient = ftpUtil.LoginFTP();
if (Objects.isNull(ftpClient)){
throw new RuntimeException("ftp连接失败");
throw new RuntimeException("ftp connection failed");
}
InputStream inputStream = null;
File file = null;
@ -719,14 +721,14 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService {
//get the path of the report generated by the automatic processing program
String reportPath = spectrumAnalysisMapper.viewARR(sampleId);
if (StringUtils.isBlank(reportPath)){
throw new RuntimeException("自动处理程序生成报告不存在");
throw new RuntimeException("The automatic handler generated report does not exist");
}
String pathName = StringPool.SLASH + spectrumPathProperties.getSaveFilePath() + StringPool.SLASH + reportPath.substring(0, reportPath.lastIndexOf(StringPool.SLASH));
String fileName = reportPath.substring(reportPath.lastIndexOf(StringPool.SLASH)+1)+".txt";
//connect to FTP
FTPClient ftpClient = ftpUtil.LoginFTP();
if (Objects.isNull(ftpClient)){
throw new RuntimeException("ftp连接失败");
throw new RuntimeException("ftp connection failed");
}
InputStream inputStream = null;
ServletOutputStream outputStream = null;
@ -2855,8 +2857,11 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService {
Cache<String, Map<String, Object>> cache = betaCache.getBetaCache();
Map<String, Object> cacheMap = new HashMap<>();
Map<String, Object> map = new HashMap<>();
if ((CollectionUtils.isNotEmpty(tempPoints) && Objects.nonNull(count) && tempPoints.size() != count) || (Objects.isNull(paramA) || StringUtils.isBlank(String.valueOf(paramA)))
|| (Objects.isNull(paramB) || StringUtils.isBlank(String.valueOf(paramB))) || (Objects.isNull(paramC) || StringUtils.isBlank(String.valueOf(paramC))) ){
//the tempPoints list must contain more than 2 values
if ((CollectionUtils.isNotEmpty(tempPoints) && tempPoints.size() > 2 && Objects.nonNull(count) && tempPoints.size() != count)
|| (Objects.isNull(paramA) || StringUtils.isBlank(String.valueOf(paramA)))
|| (Objects.isNull(paramB) || StringUtils.isBlank(String.valueOf(paramB)))
|| (Objects.isNull(paramC) || StringUtils.isBlank(String.valueOf(paramC))) ){
List<Double> xs = new LinkedList<>();
List<Double> ys = new LinkedList<>();
for (int i=0; i<tempPoints.size(); i++){
@ -3869,14 +3874,14 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService {
public void viewAutomaticAnalysisLog(Integer sampleId, HttpServletResponse response) {
String logPath = spectrumAnalysisMapper.findAutomaticLogPath(sampleId);
if (StringUtils.isBlank(logPath)){
throw new RuntimeException("自动处理程序生成日志不存在");
throw new RuntimeException("The log generated by the automatic processor does not exist");
}
String pathName = StringPool.SLASH + spectrumPathProperties.getLogPath() + StringPool.SLASH + logPath.substring(0, logPath.lastIndexOf(StringPool.SLASH));
String fileName = logPath.substring(logPath.lastIndexOf(StringPool.SLASH) + 1);
//connect to FTP
FTPClient ftpClient = ftpUtil.LoginFTP();
if (Objects.isNull(ftpClient)){
throw new RuntimeException("ftp连接失败");
throw new RuntimeException("ftp connection failed");
}
InputStream inputStream = null;
ServletOutputStream outputStream = null;
@ -4560,12 +4565,11 @@ public class SpectrumAnalysisServiceImpl implements ISpectrumAnalysisService {
if (!CollUtil.contains(skip, line))
newLines.add(line);
}
String export = "SaveToTxt.txt";
writer = ExportUtil.streamWriter(response, export);
for (String newLine : newLines) {
writer.println(newLine);
}
String export = "SaveToTxt.txt";
writer = ExportUtil.streamWriter(response, export);
for (String newLine : newLines) {
writer.println(newLine);
}
} catch (IOException e) {
e.printStackTrace();
}finally {
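
The widened recalculation guard added earlier in this file (tempPoints must hold more than 2 points and differ from the cached count, or any of the three calibration parameters is missing) is hard to scan as one expression; a sketch of an equivalent decomposition (variable names are illustrative only):

// Equivalent restatement of the new guard, split into named conditions.
boolean pointsChanged = CollectionUtils.isNotEmpty(tempPoints)
        && tempPoints.size() > 2
        && Objects.nonNull(count)
        && tempPoints.size() != count;
boolean paramMissing = Objects.isNull(paramA) || StringUtils.isBlank(String.valueOf(paramA))
        || Objects.isNull(paramB) || StringUtils.isBlank(String.valueOf(paramB))
        || Objects.isNull(paramC) || StringUtils.isBlank(String.valueOf(paramC));
if (pointsChanged || paramMissing) {
    // ... existing curve-fitting branch ...
}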

View File

@ -60,10 +60,11 @@ public class StationOperationController {
@GetMapping("getDataReceivingStatus")
@ApiOperation(value = "查询台站监测数据信息", notes = "查询台站监测数据信息")
public Result getDataReceivingStatus(String userId){
return stationOperationService.getDataReceivingStatus(userId);
public Result getDataReceivingStatus(String userId, String oneStationId){
return stationOperationService.getDataReceivingStatus(userId, oneStationId);
}
@GetMapping("getDataProvisionEfficiency")
@ApiOperation(value = "查询台站数据提供率及有效率", notes = "查询台站数据提供率及有效率")
public Result getDataProvisionEfficiency(){
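
The extra oneStationId parameter is optional: when it is blank the service falls back to the user's focused stations (see StationOperationServiceImpl below). An illustrative call sketch (the ids are placeholders, not values from the codebase):

// Placeholders only: a real caller passes the logged-in user's id and an existing station id.
Result focusedStations = stationOperationService.getDataReceivingStatus("user-001", null);
Result singleStation   = stationOperationService.getDataReceivingStatus("user-001", "STATION_01");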

View File

@ -46,9 +46,10 @@ public interface IStationOperationService extends IService<StationOperation> {
/**
* Query station monitoring data
* @param userId
* @param oneStationId
* @return
*/
Result getDataReceivingStatus(String userId);
Result getDataReceivingStatus(String userId, String oneStationId);
/**
* Get the data provision rate and validity rate of the stations

View File

@ -251,16 +251,18 @@ public class StationOperationServiceImpl extends ServiceImpl<StationOperationMap
Point point = new Point();
point.setNuclearFacilityId(String.valueOf(facilityInfoValue.getFacilityId()));
point.setNuclearFacilityName(facilityInfoValue.getFacilityName());
String lon = "";
String lat = "";
if (StringUtils.isNotBlank(facilityInfoValue.getLongitude())){
String pointValue = PointUtil.calculate(facilityInfoValue.getLongitude());
// facilityInfoValue.setLatitude(pointValue);
point.setLat(pointValue);
lat = PointUtil.calculate(facilityInfoValue.getLongitude());
point.setLat(lat);
}
if (StringUtils.isNotBlank(facilityInfoValue.getLatitude())){
String pointValue = PointUtil.calculate(facilityInfoValue.getLatitude());
// facilityInfoValue.setLongitude(pointValue);
point.setLon(pointValue);
lon = PointUtil.calculate(facilityInfoValue.getLatitude());
point.setLon(lon);
}
facilityInfoValue.setLatitude(lat);
facilityInfoValue.setLongitude(lon);
nuclearPoints.add(point);
}
for (String stationId:stationIds) {
@ -384,7 +386,7 @@ public class StationOperationServiceImpl extends ServiceImpl<StationOperationMap
}
@Override
public Result getDataReceivingStatus(String userId) {
public Result getDataReceivingStatus(String userId, String oneStationId) {
Result result = new Result();
Map<String, StationData> stationDataMap = (Map<String, StationData>) redisUtil.get("stationDataMap");
// //get the contents corresponding to the four cached data items
@ -401,18 +403,24 @@ public class StationOperationServiceImpl extends ServiceImpl<StationOperationMap
// result.error500("The cache time cannot be empty");
// return result;
// }
//query the stations followed by the current user by user id
LambdaQueryWrapper<SysUserFocusStation> userFocusStationQueryWrapper = new LambdaQueryWrapper<>();
userFocusStationQueryWrapper.eq(SysUserFocusStation::getUserId, userId);
List<SysUserFocusStation> userFocusStations = sysUserFocusStationMapper.selectList(userFocusStationQueryWrapper);
List<String> stationIds = userFocusStations.stream().map(SysUserFocusStation::getStationId).collect(Collectors.toList());
//filter out the stations followed by the current user
List<StationData> stationDataList = new LinkedList<>();
for (String stationId:stationIds) {
if (Objects.nonNull(stationDataMap.get(stationId))) {
stationDataList.add(stationDataMap.get(stationId));
if (StringUtils.isNotBlank(oneStationId)) {
StationData stationData = stationDataMap.get(oneStationId);
stationDataList.add(stationData);
} else {
//query the stations followed by the current user by user id
LambdaQueryWrapper<SysUserFocusStation> userFocusStationQueryWrapper = new LambdaQueryWrapper<>();
userFocusStationQueryWrapper.eq(SysUserFocusStation::getUserId, userId);
List<SysUserFocusStation> userFocusStations = sysUserFocusStationMapper.selectList(userFocusStationQueryWrapper);
List<String> stationIds = userFocusStations.stream().map(SysUserFocusStation::getStationId).collect(Collectors.toList());
for (String stationId:stationIds) {
if (Objects.nonNull(stationDataMap.get(stationId))) {
stationDataList.add(stationDataMap.get(stationId));
}
}
}
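
Unlike the focus-list branch, the new single-station branch adds the map lookup without a null check; if that guard is wanted, a minimal sketch of a defensive variant (not what this commit does) would be:

if (StringUtils.isNotBlank(oneStationId)) {
    StationData stationData = stationDataMap.get(oneStationId);
    // mirror the Objects.nonNull check used in the focus-list branch
    if (Objects.nonNull(stationData)) {
        stationDataList.add(stationData);
    }
}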
// //get the station info from redis
// Map<Integer, String> stationInfoMap = (Map<Integer, String>)redisUtil.get("stationMap");
// //get the detector info from redis

View File

@ -41,7 +41,7 @@ public class ReadLineUtil {
FTPClient ftpClient = ftpUtil.LoginFTP();
//check whether the FTP connection succeeded
if (Objects.isNull(ftpClient)){
throw new RuntimeException("ftp连接失败!");
throw new RuntimeException("ftp connection failed!");
}
InputStream iStream = null;
File file = null;

View File

@ -15,4 +15,6 @@ public interface GardsSampleDataWebMapper extends BaseMapper<GardsSampleDataWeb>
Page<GardsSampleDataWeb> findReviewedPage(String startDate, String endDate, List<Integer> stationIdList, Page<GardsSampleDataWeb> page);
Page<GardsSampleDataWeb> findParticulatePage(String dataType, String spectralQualifie, String startDate, String endDate, List<Integer> stationIdList, Page<GardsSampleDataWeb> page);
}

View File

@ -82,4 +82,48 @@
ORDER BY sam.ACQUISITION_START DESC
</select>
<select id="findParticulatePage" resultType="org.jeecg.modules.entity.GardsSampleDataWeb">
SELECT
SAMPLE_ID,
SITE_DET_CODE,
STATION_ID,
DETECTOR_ID,
INPUT_FILE_NAME,
SAMPLE_TYPE,
DATA_TYPE,
GEOMETRY,
SPECTRAL_QUALIFIE,
TRANSMIT_DTG,
COLLECT_START,
COLLECT_STOP,
ACQUISITION_START,
ACQUISITION_STOP,
ACQUISITION_REAL_SEC,
ACQUISITION_LIVE_SEC,
QUANTITY,
STATUS,
MODDATE
FROM
ORIGINAL.GARDS_SAMPLE_DATA
<where>
DATA_TYPE = '${dataType}'
<if test=" spectralQualifie != '' and spectralQualifie != null ">
AND SPECTRAL_QUALIFIE = '${spectralQualifie}'
AND COLLECT_START >= TO_DATE( '${startDate}', 'YYYY-MM-DD hh24:mi:ss' )
AND COLLECT_STOP &lt;= TO_DATE( '${endDate}', 'YYYY-MM-DD hh24:mi:ss' )
</if>
<if test=" spectralQualifie == '' or spectralQualifie == null ">
AND ACQUISITION_START >= TO_DATE( '${startDate}', 'YYYY-MM-DD hh24:mi:ss' )
AND ACQUISITION_STOP &lt;= TO_DATE( '${endDate}', 'YYYY-MM-DD hh24:mi:ss' )
</if>
<if test="stationIdList.size>0 and stationIdList != null">
AND STATION_ID in
<foreach collection="stationIdList" item="stationId" open="(" close=")" separator=",">
${stationId}
</foreach>
</if>
</where>
ORDER BY ACQUISITION_START DESC
</select>
</mapper>
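
A short sketch of how the new findParticulatePage query might be invoked from a service (the mapper field, the date strings, and the station ids are placeholders; the dates must match the TO_DATE pattern 'YYYY-MM-DD hh24:mi:ss' used in the SQL, and an empty spectralQualifie switches the filter to the acquisition dates per the <if> blocks above):

// Illustrative call only; gardsSampleDataWebMapper is assumed to be an injected GardsSampleDataWebMapper.
Page<GardsSampleDataWeb> page = new Page<>(1, 10);
List<Integer> stationIdList = Arrays.asList(101, 102);
Page<GardsSampleDataWeb> result = gardsSampleDataWebMapper.findParticulatePage(
        "S",                        // dataType placeholder
        "FULL",                     // spectralQualifie placeholder; pass "" to filter by acquisition dates instead
        "2023-10-01 00:00:00",      // startDate
        "2023-10-30 23:59:59",      // endDate
        stationIdList,
        page);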