@@ -52,7 +52,7 @@ public class CrawlerManager {
     private AdapterSchemeService adapterSchemeService;
 
     private static DataCollectDispatcher dispatch = DataCollectDispatcher.getInstance();
-    private static SysConfig sysConfig=SysConfig.getInstance();
+    private static SysConfig sysConfig = SysConfig.getInstance();
     private static Map<String, Map<String, AdapterDataSet>> adapterDataSetVersionMap = new HashMap<>();
     private static Map<String, Map<String, StdDataSet>> stdDataSetVersionMap = new HashMap<>();
 
@@ -74,7 +74,7 @@ public class CrawlerManager {
         RsJobConfig rsJobConfig = datacollect.getJobById(jobId);
 
         String random = UUID.randomUUID().toString();
-        logger.info("档案采集上传开始,流水号:" + random + ",jobId:"+jobId);
+        logger.info("档案采集上传开始,流水号:" + random + ",jobId:" + jobId);
         Date begin = rsJobConfig.getRepeatStartTime();
         Date end = rsJobConfig.getRepeatEndTime();
 
@@ -82,12 +82,12 @@ public class CrawlerManager {
             int totalOffset = rsJobConfig.getDelayTime();
             Calendar instance = Calendar.getInstance();
             instance.setTime(new Date(DateUtil.getSysDateTime().getTime()));
-            instance.add(Calendar.SECOND, - totalOffset); //调整截止时间,当前时间-偏移量
+            instance.add(Calendar.SECOND, -totalOffset); //调整截止时间,当前时间-偏移量
             end = instance.getTime();
         }
 
         if ((end.getTime() - begin.getTime()) <= 0) {
-            return Result.error("错误:采集结束时间小于开始时间!"); //结束时间小于开始时间时,不获取
+            return Result.error("错误:采集上传结束时间小于开始时间!"); //结束时间小于开始时间时,不获取
         }
 
         end = DateUtil.fromatDateToTimestamp(end);
@@ -96,7 +96,7 @@ public class CrawlerManager {
             //调整截止时间,当前时间-偏移量
             end = DateUtil.addDate(-rsJobConfig.getDelayTime(), DateUtil.getSysDateTime());
             if ((end.getTime() - begin.getTime()) <= 0) {
-                return Result.success("错误:采集结束时间小于开始时间!"); //结束时间小于开始时间时,不获取
+                return Result.success("错误:采集上传结束时间小于开始时间!"); //结束时间小于开始时间时,不获取
             }
         }
 
@@ -108,13 +108,14 @@ public class CrawlerManager {
         DataGridResult dataGridResult = datacollect.getJobDatasetByJobId(jobId);
         this.datasetList = dataGridResult.getDetailModelList();
         this.schemeVersion = rsJobConfig.getSchemeVersion();
-        Result result = dataCrawler(begin, end);
         if (!rsJobConfig.getJobType().equals("0")) {
             rsJobConfig.setRepeatStartTime(end);
             // rsJobConfig.setRepeatEndTime(DateUtil.formatYMDToYMDHMS(DateUtil.getSysDateTime().toString()));
         }
         datacollect.updateJob(rsJobConfig);
-        logger.info("档案采集上传结束,流水号:" + random + ",jobId:" + jobId + ",message:" + result.getMessage());
+        Result result = dataCrawler(begin, end);
+
+        logger.info("档案采集上传结束,上传结果:" + result.isSuccessFlg() + "流水号:" + random + ",jobId:" + jobId + ",message:" + result.getMessage());
         return result;
     }
 
@@ -144,7 +145,7 @@ public class CrawlerManager {
                 }
             }
         }
-        message = "本次采集病人共" + totalCount + "条,成功采集信息"+ count + "条";
+        message = "本次采集上传病人共" + totalCount + "条,成功采集上传信息" + count + "条";
         return Result.success(message);
     }
 
@@ -181,7 +182,7 @@ public class CrawlerManager {
             }
         }
 
-        if(StringUtil.isEmpty(sysConfig.getVersionMap().get(patient.getOrgCode()))){
+        if (StringUtil.isEmpty(sysConfig.getVersionMap().get(patient.getOrgCode()))) {
             logger.error("版本获取失败");
             return false;
         }
@@ -211,12 +212,12 @@ public class CrawlerManager {
                 }
             }
         }
-        logger.info("采集病人成功,patient_id:" + patient.getPatientId() + ", event_no:" + patient.getEventNo());
+        logger.info("上传-采集病人信息成功,patient_id:" + patient.getPatientId() + ", event_no:" + patient.getEventNo());
         //上传档案
         logger.info("上传病人档案");
         try {
             if (!CollectionUtil.isEmpty(dataMap.keySet())) {
-                if (!dispatch.upload(dataMap, patient, dataSetMap,stdDataSetMap)) {
+                if (!dispatch.upload(dataMap, patient, dataSetMap, stdDataSetMap)) {
                     logger.error("上传档案失败");
                     return false;
                 }
@@ -255,7 +256,7 @@ public class CrawlerManager {
        // return false;
        // }
 
-        if(StringUtil.isEmpty(sysConfig.getVersionMap().get(patient.getOrgCode()))){
+        if (StringUtil.isEmpty(sysConfig.getVersionMap().get(patient.getOrgCode()))) {
             logger.error("版本获取失败");
             return false;
         }
@@ -263,9 +264,9 @@ public class CrawlerManager {
         Map<String, AdapterDataSet> dataSetMap = new HashMap<>();
         Map<String, JsonNode> dataMap = new HashMap<>();
 
-        if (adapterDataSetMap.containsKey(unstructured)){
+        if (adapterDataSetMap.containsKey(unstructured)) {
             //TODO 非结构化档处理
-        }else {
+        } else {
             //TODO 结构化档案处理
         }
         for (String key : adapterDataSetMap.keySet()) {
@@ -291,12 +292,12 @@ public class CrawlerManager {
                 }
             }
         }
-        logger.info("采集病人成功,patient_id:" + patient.getPatientId() + ", event_no:" + patient.getEventNo());
+        logger.info("上传-采集病人信息成功,patient_id:" + patient.getPatientId() + ", event_no:" + patient.getEventNo());
         //上传档案
         logger.info("上传病人档案");
         try {
             if (!CollectionUtil.isEmpty(dataMap.keySet())) {
-                if (!dispatch.upload(dataMap, patient, dataSetMap,stdDataSetMap)) {
+                if (!dispatch.upload(dataMap, patient, dataSetMap, stdDataSetMap)) {
                     logger.error("上传档案失败");
                     return false;
                 }
@@ -339,13 +340,13 @@ public class CrawlerManager {
             datasetIdList.add(dataSetId);
         }
         adapterDataSetModelList = adapterDatasetService.getAdapterDatasetByAdapterIdList(adapterVersion, datasetIdList);
-        if (adapterDataSetModelList!=null && !adapterDataSetModelList.isEmpty()){
+        if (adapterDataSetModelList != null && !adapterDataSetModelList.isEmpty()) {
             AdapterSchemeModel adapterScheme = adapterSchemeService.getEntity(AdapterSchemeModel.class, adapterDataSetModelList.get(0).getSchemeId());
             standardVersion = new StandardVersion(adapterScheme.getStdVersion());
             for (DictItem dictItem : datasetList) {
                 Integer dataSetId = Integer.parseInt(dictItem.getCode());
-                if (!stdMetaDataMap.containsKey(dataSetId)){
-                    stdMetaDataMap.put(dataSetId,new StdDataSet(dataSetId, standardVersion).getStdMetaDataModelList());
+                if (!stdMetaDataMap.containsKey(dataSetId)) {
+                    stdMetaDataMap.put(dataSetId, new StdDataSet(dataSetId, standardVersion).getStdMetaDataModelList());
                 }
             }
         }
@@ -372,7 +373,7 @@ public class CrawlerManager {
             stdFlg = true;
         }
 
-        if (adapterFlg && stdFlg){
+        if (adapterFlg && stdFlg) {
             return true;
         }
 
@@ -401,10 +402,10 @@ public class CrawlerManager {
         */
         for (AdapterDatasetModel adapterDatasetModel : adapterDataSetModelList) {
             adapterDataSetMap.put(adapterDatasetModel.getStdDatasetCode(), new AdapterDataSet(adapterDatasetModel, adapterVersion, adapterDictEntryModelMap));
-            stdDataSetMap.put(adapterDatasetModel.getStdDatasetCode(), new StdDataSet(adapterDatasetModel.getStdDatasetId(),standardVersion));
+            stdDataSetMap.put(adapterDatasetModel.getStdDatasetCode(), new StdDataSet(adapterDatasetModel.getStdDatasetId(), standardVersion));
         }
         adapterDataSetVersionMap.put(schemeVersion, adapterDataSetMap);
-        stdDataSetVersionMap.put(schemeVersion,stdDataSetMap);
+        stdDataSetVersionMap.put(schemeVersion, stdDataSetMap);
         adapterFlg = true;
         stdFlg = true;
         return true;