@@ -7,21 +7,25 @@ import com.yihu.hos.core.datatype.DateUtil;
 import com.yihu.hos.core.datatype.StringUtil;
 import com.yihu.hos.core.log.Logger;
 import com.yihu.hos.core.log.LoggerFactory;
 import com.yihu.hos.rest.common.dao.DatacollectDao;
 import com.yihu.hos.rest.common.format.AdapterBase;
+import com.yihu.hos.rest.common.format.StdBase;
 import com.yihu.hos.rest.models.crawler.adapter.AdapterDataSet;
+import com.yihu.hos.rest.models.crawler.adapter.StdDataSet;
 import com.yihu.hos.rest.models.crawler.config.SysConfig;
 import com.yihu.hos.rest.models.crawler.patient.Patient;
 import com.yihu.hos.rest.models.rs.RsJobConfig;
 import com.yihu.hos.rest.models.standard.adapter.AdapterDatasetModel;
 import com.yihu.hos.rest.models.standard.adapter.AdapterDictEntryModel;
+import com.yihu.hos.rest.models.standard.adapter.AdapterSchemeModel;
 import com.yihu.hos.rest.models.standard.adapter.AdapterSchemeVersionModel;
 import com.yihu.hos.rest.models.standard.bo.AdapterVersion;
 import com.yihu.hos.rest.models.standard.bo.StandardVersion;
+import com.yihu.hos.rest.models.standard.standard.StdMetaDataModel;
 import com.yihu.hos.rest.services.standard.adapter.*;
+import com.yihu.hos.rest.services.standard.standard.StdMetadataService;
 import com.yihu.hos.web.framework.model.DataGridResult;
 import com.yihu.hos.web.framework.model.DictItem;
 import com.yihu.hos.web.framework.model.Result;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.stereotype.Service;

 import javax.annotation.Resource;
@@ -42,12 +46,23 @@ public class CrawlerManager {
     private AdapterSchemeVersionService adapterSchemeVersionService;
     @Resource
     private DatacollectManager datacollect;
+    @Resource
+    private StdMetadataService StdMetadataService;
+    @Resource(name = AdapterSchemeService.BEAN_ID)
+    private AdapterSchemeService adapterSchemeService;

     private static DataCollectDispatcher dispatch = DataCollectDispatcher.getInstance();
     private static SysConfig sysConfig = SysConfig.getInstance();
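+    // Per-scheme-version caches shared by all job runs in this JVM: dataset
+    // definitions are loaded once per version and then reused; stdMetaDataMap
+    // keeps the standard metadata per dataset id.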
     private static Map<String, Map<String, AdapterDataSet>> adapterDataSetVersionMap = new HashMap<>();
+    private static Map<String, Map<String, StdDataSet>> stdDataSetVersionMap = new HashMap<>();
+
+    private static Map<Integer, List<StdMetaDataModel>> stdMetaDataMap = new HashMap<>();

     private Map<String, AdapterDataSet> adapterDataSetMap;
+    private Map<String, StdDataSet> stdDataSetMap;

     private Boolean adapterFlg = false;
+    private Boolean stdFlg = false;
     private String schemeVersion;
     private List<DictItem> datasetList;

@@ -62,21 +77,41 @@
         logger.info("档案采集上传开始,流水号:" + random + ",jobId:" + jobId);
         Date begin = rsJobConfig.getRepeatStartTime();
         Date end = rsJobConfig.getRepeatEndTime();
+
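+        // When no explicit end time is configured, default the window end to the
+        // current time minus the configured delay (in seconds).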
+        if (end == null) {
+            int totalOffset = rsJobConfig.getDelayTime();
+            Calendar instance = Calendar.getInstance();
+            instance.setTime(new Date(DateUtil.getSysDateTime().getTime()));
+            instance.add(Calendar.SECOND, -totalOffset); // adjust the cut-off: current time minus the delay offset
+            end = instance.getTime();
+        }
+
+        if ((end.getTime() - begin.getTime()) <= 0) {
+            return Result.error("错误:采集结束时间小于开始时间!"); // abort: the end time must be after the start time
+        }
+
+        end = DateUtil.fromatDateToTimestamp(end);
+
         if (!rsJobConfig.getJobType().equals("0")) {
             // adjust the cut-off: current time minus the delay offset
             end = DateUtil.addDate(-rsJobConfig.getDelayTime(), DateUtil.getSysDateTime());
             if ((end.getTime() - begin.getTime()) <= 0) {
-                return Result.success(""); // when the end time is earlier than the start time, do not collect
+                return Result.success("错误:采集结束时间小于开始时间!"); // skip quietly (the job is not marked failed) when the end time is earlier than the start time
             }
         }

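         // Cap each run to a one-day window: if begin..end spans more than a day,
         // pull end back to begin + 1 day.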
         if (DateUtil.getDifferenceOfDays(begin, end) > 1) {
             end = DateUtil.addDate(1, begin);
             end = DateUtil.fromatDateToTimestamp(end);
         }

+        DataGridResult dataGridResult = datacollect.getJobDatasetByJobId(jobId);
+        this.datasetList = dataGridResult.getDetailModelList();
         this.schemeVersion = rsJobConfig.getSchemeVersion();
         Result result = dataCrawler(begin, end);
         if (!rsJobConfig.getJobType().equals("0")) {
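             // Only the window start is advanced here; persisting the end time has
             // been disabled (see the commented-out line below).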
             rsJobConfig.setRepeatStartTime(end);
-            rsJobConfig.setRepeatEndTime(DateUtil.formatYMDToYMDHMS(DateUtil.getSysDateTime().toString()));
+            // rsJobConfig.setRepeatEndTime(DateUtil.formatYMDToYMDHMS(DateUtil.getSysDateTime().toString()));
         }
         datacollect.updateJob(rsJobConfig);
         logger.info("档案采集上传结束,流水号:" + random + ",jobId:" + jobId + ",message:" + result.getMessage());
@@ -123,10 +158,10 @@
         logger.trace("采集->注册->打包上传,patient_id:" + patient.getPatientId() + ", event_no:" + patient.getEventNo());
         try {
             //getToken
-            if (!dispatch.getToken()) {
-                logger.error("token获取失败");
-                return false;
-            }
+            // if (!dispatch.getToken()) {
+            //     logger.error("token获取失败");
+            //     return false;
+            // }
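+            // NOTE: the token check is now disabled, so the calls below run
+            // without acquiring a token first.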

             //getRemoteVersion
             if (!dispatch.getRemoteVersion(patient.getOrgCode())) {
@@ -169,7 +204,7 @@
         logger.info("上传病人档案");
         try {
             if (!CollectionUtil.isEmpty(dataMap.keySet())) {
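+                // the upload call now also carries the standard dataset
+                // definitions (stdDataSetMap)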
-                if (!dispatch.upload(dataMap, patient, dataSetMap)) {
+                if (!dispatch.upload(dataMap, patient, dataSetMap, stdDataSetMap)) {
                     logger.error("上传档案失败");
                     return false;
                 }
@@ -247,7 +282,7 @@
         logger.info("上传病人档案");
         try {
             if (!CollectionUtil.isEmpty(dataMap.keySet())) {
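+                // same change as above: pass the standard dataset definitions
+                // (stdDataSetMap) through to the dispatcher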
-                if (!dispatch.upload(dataMap, patient, dataSetMap)) {
+                if (!dispatch.upload(dataMap, patient, dataSetMap, stdDataSetMap)) {
                     logger.error("上传档案失败");
                     return false;
                 }
@@ -265,16 +300,19 @@


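     // Prepares per-version dataset definitions. Returns true immediately when both
     // the adapter and standard maps for schemeVersion are already cached; otherwise
     // loads them and fills the static caches.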
     public Boolean getDataForPrepare() {
-        if (adapterFlg) {
+        if (adapterFlg && stdFlg) {
             return true;
         }
         AdapterBase.setAdapterMetadataService(adapterMetadataService);
         AdapterBase.setAdapterDictService(adapterDictService);
         AdapterBase.setAdapterSchemeVersionService(adapterSchemeVersionService);
+        StdBase.setStdMetadataService(StdMetadataService);
         logger.info("适配基本相关数据准备");
         try {
             adapterDataSetMap = new HashMap<>();
+            stdDataSetMap = new HashMap<>();
             AdapterVersion adapterVersion;
+            StandardVersion standardVersion = null;
             List<AdapterDatasetModel> adapterDataSetModelList;
             if (!CollectionUtil.isEmpty(datasetList)) {
                 /**
@@ -283,9 +321,21 @@
                 adapterVersion = new AdapterVersion(schemeVersion);
                 List<Integer> datasetIdList = new ArrayList<>();
                 for (DictItem dictItem : datasetList) {
-                    datasetIdList.add(Integer.parseInt(dictItem.getCode()));
+                    Integer dataSetId = Integer.parseInt(dictItem.getCode());
+                    datasetIdList.add(dataSetId);
                 }
                 adapterDataSetModelList = adapterDatasetService.getAdapterDatasetByAdapterIdList(adapterVersion, datasetIdList);
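+                // Derive the standard version from the scheme of the first adapter
+                // dataset, then cache each dataset's standard metadata once.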
+                if (adapterDataSetModelList != null && !adapterDataSetModelList.isEmpty()) {
+                    AdapterSchemeModel adapterScheme = adapterSchemeService.getEntity(AdapterSchemeModel.class, adapterDataSetModelList.get(0).getSchemeId());
+                    standardVersion = new StandardVersion(adapterScheme.getStdVersion());
+                    for (DictItem dictItem : datasetList) {
+                        Integer dataSetId = Integer.parseInt(dictItem.getCode());
+                        if (!stdMetaDataMap.containsKey(dataSetId)) {
+                            stdMetaDataMap.put(dataSetId, new StdDataSet(dataSetId, standardVersion).getStdMetaDataModelList());
+                        }
+                    }
+                }
+
             } else {
                 /**
                  * Push-mode interface invocation; by default only the latest adapter version is used
@@ -301,8 +351,17 @@
             if (adapterDataSetVersionMap.get(schemeVersion) != null) {
                 adapterDataSetMap = adapterDataSetVersionMap.get(schemeVersion);
                 adapterFlg = true;
             }
+
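+            // Fast path: if both adapter and standard definitions for this scheme
+            // version are already cached, skip reloading entirely.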
+            if (stdDataSetVersionMap.get(schemeVersion) != null) {
+                stdDataSetMap = stdDataSetVersionMap.get(schemeVersion);
+                stdFlg = true;
+            }
+
+            if (adapterFlg && stdFlg) {
+                return true;
+            }
+
             Map<String, String> condition = new HashMap<>();
             condition.put("column", "adapter_dataset_code");
             ObjectMapper mapper = new ObjectMapper();
@@ -328,9 +387,12 @@
              */
             for (AdapterDatasetModel adapterDatasetModel : adapterDataSetModelList) {
                 adapterDataSetMap.put(adapterDatasetModel.getStdDatasetCode(), new AdapterDataSet(adapterDatasetModel, adapterVersion, adapterDictEntryModelMap));
+                stdDataSetMap.put(adapterDatasetModel.getStdDatasetCode(), new StdDataSet(adapterDatasetModel.getStdDatasetId(), standardVersion));
             }
             adapterDataSetVersionMap.put(schemeVersion, adapterDataSetMap);
+            stdDataSetVersionMap.put(schemeVersion, stdDataSetMap);
             adapterFlg = true;
+            stdFlg = true;
             return true;
         } catch (Exception e) {
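             // the exception is swallowed: callers only ever see "false"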
             return false;