CrawlerFlowManager.java

package com.yihu.hos.crawler.service;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.yihu.hos.core.datatype.CollectionUtil;
import com.yihu.hos.core.datatype.StringUtil;
import com.yihu.hos.core.log.Logger;
import com.yihu.hos.core.log.LoggerFactory;
import com.yihu.hos.crawler.dao.CrawlerDatasetDao;
import com.yihu.hos.crawler.dao.CrawlerFlowDao;
import com.yihu.hos.crawler.dao.CrawlerFlowHeadDao;
import com.yihu.hos.crawler.model.adapter.AdapterDataSet;
import com.yihu.hos.crawler.model.config.SysConfig;
import com.yihu.hos.crawler.model.flow.CrawlerDataSetModel;
import com.yihu.hos.crawler.model.flow.CrawlerFlowHeadModel;
import com.yihu.hos.crawler.model.flow.CrawlerFlowModel;
import com.yihu.hos.crawler.model.patient.Patient;
import com.yihu.hos.standard.model.adapter.AdapterDatasetModel;
import com.yihu.hos.standard.model.adapter.AdapterDictEntryModel;
import com.yihu.hos.standard.model.adapter.AdapterSchemeVersionModel;
import com.yihu.hos.standard.service.adapter.AdapterDatasetService;
import com.yihu.hos.standard.service.adapter.AdapterSchemeVersionService;
import com.yihu.hos.standard.service.bo.AdapterVersion;
import com.yihu.hos.web.framework.model.DictItem;
import com.yihu.hos.web.framework.util.springutil.SpringBeanUtil;

import java.util.*;
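/**
 * Drives one crawl run for an adapter scheme version: prepares the adapted
 * datasets and dictionaries, pulls the patient list through
 * {@link DataCollectDispatcher}, walks the configured crawler flow graph for
 * each patient, registers the patient when applicable, and uploads the
 * packaged record.
 *
 * <p>A minimal usage sketch. The condition keys below are illustrative
 * assumptions only; the keys actually honored are defined by
 * {@code DataCollectDispatcher.getPatientList}.</p>
 *
 * <pre>{@code
 * List<DictItem> datasets = new ArrayList<>();  // empty list => push mode, latest adapter version
 * CrawlerFlowManager manager = new CrawlerFlowManager(datasets, null);
 * Map<String, Object> condition = new HashMap<>();
 * condition.put("beginTime", "2016-01-01");     // hypothetical key
 * condition.put("endTime", "2016-01-31");       // hypothetical key
 * System.out.println(manager.dataCrawler(condition));
 * }</pre>
 */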
public class CrawlerFlowManager {

    private static Logger logger = LoggerFactory.getLogger(CrawlerFlowManager.class);
    private static DataCollectDispatcher dispatch = DataCollectDispatcher.getInstance();
    private static SysConfig sysConfig = SysConfig.getInstance();
    private Map<String, AdapterDataSet> adapterDataSetMap;
    private List<CrawlerFlowHeadModel> crawlerFlowHeadModelList;
    private Map<String, List<CrawlerFlowModel>> crawlerFlowDatasetMap;
    private Map<String, List<CrawlerFlowModel>> crawlerFlowMetadataMap;
    private Boolean adapterFlg = false;
    private List<DictItem> datasetList;
    private String schemeVersion;
    public CrawlerFlowManager(List<DictItem> datasetList, String schemeVersion) {
        this.datasetList = datasetList;
        this.schemeVersion = schemeVersion;
    }

    public CrawlerFlowManager() {
    }

    @Override
    public void finalize() throws Throwable {
    }
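    /**
     * Entry point for a crawl run: prepares the adapter data, fetches the
     * patient list matching the given condition, and collects each patient in
     * turn via {@link #collectProcess(Patient)}.
     *
     * @param condition query condition passed through to
     *                  DataCollectDispatcher.getPatientList
     * @return a human-readable summary of how many patients were collected
     */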
    public String dataCrawler(Map<String, Object> condition) {
        Integer count = 0;
        Integer totalCount = 0;
        String message;
        // Prepare the adapter base data
        if (!getDataForPrepare()) {
            message = "Adapter data is not ready yet";
            logger.error(message);
            return message;
        }
        List<Patient> patientList = dispatch.getPatientList(condition, adapterDataSetMap);
        if (!CollectionUtil.isEmpty(patientList)) {
            totalCount = patientList.size();
            for (Patient patient : patientList) {
                boolean result = collectProcess(patient);
                if (result) {
                    count++;
                }
            }
        }
        message = "Patients collected this run: " + totalCount + ", successfully collected: " + count;
        return message;
    }
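    /**
     * Collects, registers, and uploads the record of a single patient:
     * acquires a token, verifies the organization's adapter version, fetches
     * data for every flow-head dataset (recursing through the flow graph),
     * registers the patient when the register dataset carries an ID-card
     * number, and finally uploads the accumulated data map.
     *
     * @return true if the whole pipeline succeeded for this patient
     */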
    public boolean collectProcess(Patient patient) {
        if (!getDataForPrepare()) {
            logger.error("Adapter data is not ready yet");
            return false;
        }
        patient.setReUploadFlg(StringUtil.toString(false));
        logger.trace("Collect -> register -> package and upload, task ID:, patient_id:" + patient.getPatientId() + ", event_no:" + patient.getEventNo());
        try {
            // Acquire token
            if (!dispatch.getToken()) {
                logger.error("Failed to acquire token");
                return false;
            }
            // Fetch the remote version
            // if (!dispatch.getRemoteVersion(patient.getOrgCode())) {
            //     logger.error("Failed to fetch the remote version");
            //     return false;
            // }
            // Fetch the version
            if (StringUtil.isEmpty(sysConfig.getVersionMap().get(patient.getOrgCode()))) {
                logger.error("Failed to fetch the version");
                return false;
            }
            Map<String, JsonNode> dataMap = new HashMap<>();
            for (CrawlerFlowHeadModel crawlerFlowHeadModel : crawlerFlowHeadModelList) {
                // Collect the head dataset
                String datasetCode = crawlerFlowHeadModel.getDatasetCode();
                AdapterDataSet adapterDataSet = adapterDataSetMap.get(datasetCode);
                JsonNode data = dispatch.fecthData(patient, adapterDataSet);
                if (StringUtil.isEmpty(data)) {
                    continue;
                }
                dataMap.put(datasetCode, data);
                // Recurse through the configured crawler flow
                getDataByCrawlerFlow(datasetCode, patient, dataMap);
                if (sysConfig.getRegisterDataSet().equals(adapterDataSet.getAdapterDataSetT().getStdDatasetCode())) {
                    if (!StringUtil.isEmpty(data.get("data")) && !StringUtil.isEmpty(data.get("data").get(0))) {
                        if (!StringUtil.isEmpty(data.get("data").get(0).get(SysConfig.getInstance().getRegisterIdCardNo()))) {
                            logger.info("Registering patient");
                            dispatch.register(patient, data.toString());
                        }
                    }
                }
            }
            logger.info("Patient collected successfully, patient_id:" + patient.getPatientId() + ", event_no:" + patient.getEventNo());
            // Upload the record
            try {
                if (!CollectionUtil.isEmpty(dataMap.keySet())) {
                    if (!dispatch.upload(dataMap, patient, adapterDataSetMap)) {
                        logger.error("Failed to upload the record");
                        return false;
                    }
                }
            } catch (Exception e) {
                logger.error("Record upload failed, patient_id:" + patient.getPatientId() + ", event_no:" + patient.getEventNo(), e);
                return false;
            }
        } catch (Exception e) {
            logger.error("Patient collection failed, patient_id:" + patient.getPatientId() + ", event_no:" + patient.getEventNo(), e);
            return false;
        }
        return true;
    }
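    /**
     * Recursively fetches data for every dataset downstream of
     * {@code preDatasetCode} in the crawler flow graph, carrying the relation
     * metadata values extracted from the parent dataset's rows into each
     * child fetch.
     *
     * @param preDatasetCode code of the dataset already fetched
     * @param dataMap        accumulator mapping dataset code to fetched JSON
     * @return false if any fetch or parse step threw
     */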
    public Boolean getDataByCrawlerFlow(String preDatasetCode, Patient patient, Map<String, JsonNode> dataMap) {
        try {
            JsonNode preData = dataMap.get(preDatasetCode);
            Map<String, String> relationValueMap = new HashMap<>();
            List<CrawlerFlowModel> crawlerFlowDatasetList = crawlerFlowDatasetMap.get(preDatasetCode);
            if (CollectionUtil.isEmpty(crawlerFlowDatasetList)) {
                // Leaf dataset: nothing downstream in the flow graph
                return true;
            }
            for (CrawlerFlowModel crawlerFlowDataset : crawlerFlowDatasetList) {
                List<CrawlerFlowModel> crawlerFlowMetadataList = crawlerFlowMetadataMap.get(crawlerFlowDataset.getDatasetCode());
                for (CrawlerFlowModel crawlerFlowMetadata : crawlerFlowMetadataList) {
                    // Metadata codes are stored as "dataset-metadata"; keep the part after the dash
                    String metadataCode = crawlerFlowMetadata.getMetadataCode();
                    metadataCode = StringUtil.substring(metadataCode, metadataCode.indexOf("-") + 1, metadataCode.length());
                    String inputMetadataCode = crawlerFlowMetadata.getInputMetadataCode();
                    inputMetadataCode = StringUtil.substring(inputMetadataCode, inputMetadataCode.indexOf("-") + 1, inputMetadataCode.length());
                    // Note: when the parent returned multiple rows, later rows overwrite earlier relation values
                    Iterator<JsonNode> array = preData.get("data").iterator();
                    while (array.hasNext()) {
                        JsonNode dataNode = array.next();
                        relationValueMap.put(metadataCode, dataNode.get(inputMetadataCode).asText());
                    }
                }
                String datasetCode = crawlerFlowDataset.getDatasetCode();
                AdapterDataSet adapterDataSet = adapterDataSetMap.get(datasetCode);
                String data = dispatch.fecthData(patient, adapterDataSet, relationValueMap);
                if (StringUtil.isEmpty(data)) {
                    continue;
                }
                ObjectMapper objectMapper = new ObjectMapper();
                JsonNode jsonObject = objectMapper.readTree(data);
                dataMap.put(datasetCode, jsonObject);
                getDataByCrawlerFlow(datasetCode, patient, dataMap);
            }
            return true;
        } catch (Exception e) {
            logger.error("Failed to fetch data along the crawler flow, dataset:" + preDatasetCode, e);
            return false;
        }
    }
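    /**
     * Lazily prepares the adapter context: resolves the scheme version (the
     * latest EHR adapter version in push mode, the task-configured one in
     * pull mode), loads dictionary entries and adapted datasets, and builds
     * the crawler flow lookup maps. Subsequent calls are no-ops once
     * adapterFlg is set.
     *
     * @return true once the adapter data is ready
     */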
    public Boolean getDataForPrepare() {
        if (adapterFlg) {
            return true;
        }
        logger.info("Preparing adapter base data");
        try {
            adapterDataSetMap = new HashMap<>();
            AdapterVersion adapterVersion;
            List<Integer> datasetIdList = new ArrayList<>();
            // Push-mode invocation: default to the latest adapter version
            AdapterSchemeVersionService adapterSchemeVersionService = SpringBeanUtil.getService(AdapterSchemeVersionService.BEAN_ID);
            AdapterSchemeVersionModel adapterSchemeVersionModel;
            if (datasetList.isEmpty()) {
                adapterSchemeVersionModel = adapterSchemeVersionService.getEhrAdapterVersionLasted();
                if (adapterSchemeVersionModel == null) {
                    logger.error("Failed to fetch the latest EHR adapter version");
                    return false;
                }
                this.schemeVersion = adapterSchemeVersionModel.getVersion();
                adapterVersion = new AdapterVersion(schemeVersion);
                // Fetch the datasets of this version
                CrawlerDatasetDao crawlerDatasetDao = SpringBeanUtil.getService(CrawlerDatasetDao.BEAN_ID);
                List<CrawlerDataSetModel> crawlerDataSetModelList = crawlerDatasetDao.getCrawlerDatasetList(adapterSchemeVersionModel.getId());
                if (CollectionUtil.isEmpty(crawlerDataSetModelList)) {
                    return false;
                }
                for (CrawlerDataSetModel crawlerDataSetModel : crawlerDataSetModelList) {
                    datasetIdList.add(crawlerDataSetModel.getDatasetId());
                }
            } else {
                // Pull-mode invocation: the task configuration decides the adapter version
                adapterSchemeVersionModel = adapterSchemeVersionService.getByVersion(schemeVersion);
                if (adapterSchemeVersionModel == null) {
                    logger.error("Failed to fetch adapter version: " + schemeVersion);
                    return false;
                }
                adapterVersion = new AdapterVersion(schemeVersion);
                for (DictItem dictItem : datasetList) {
                    datasetIdList.add(Integer.valueOf(dictItem.getCode()));
                }
            }
            AdapterDatasetService adapterDatasetService = SpringBeanUtil.getService(AdapterDatasetService.BEAN_ID);
            // Initialize dictionary entries, grouped by standard dictionary id
            List<AdapterDictEntryModel> adapterDictEntryModelList = adapterDatasetService.getList(AdapterDictEntryModel.class, adapterVersion.getDictEntryTableName(), null, null, null, null);
            Map<Integer, List<AdapterDictEntryModel>> adapterDictEntryModelMap = new HashMap<>();
            for (AdapterDictEntryModel adapterDictEntryModel : adapterDictEntryModelList) {
                List<AdapterDictEntryModel> entryModelList = adapterDictEntryModelMap.get(adapterDictEntryModel.getStdDictId());
                if (CollectionUtil.isEmpty(entryModelList)) {
                    entryModelList = new ArrayList<>();
                }
                entryModelList.add(adapterDictEntryModel);
                adapterDictEntryModelMap.put(adapterDictEntryModel.getStdDictId(), entryModelList);
            }
            // Initialize datasets
            List<AdapterDatasetModel> adapterDataSetModelList = adapterDatasetService.getListByAdapterDatasetIdList(adapterVersion, datasetIdList);
            for (AdapterDatasetModel adapterDatasetModel : adapterDataSetModelList) {
                adapterDataSetMap.put(adapterDatasetModel.getStdDatasetCode(), new AdapterDataSet(adapterDatasetModel, adapterVersion, adapterDictEntryModelMap));
            }
            // Fetch the crawler flow heads
            CrawlerFlowHeadDao crawlerFlowHeadDao = SpringBeanUtil.getService(CrawlerFlowHeadDao.BEAN_ID);
            CrawlerFlowDao crawlerFlowDao = SpringBeanUtil.getService(CrawlerFlowDao.BEAN_ID);
            crawlerFlowHeadModelList = crawlerFlowHeadDao.getCrawlerFlowHeadList(adapterSchemeVersionModel.getId());
            List<CrawlerFlowModel> crawlerFlowModelList = crawlerFlowDao.getCrawlerFlowList(adapterSchemeVersionModel.getId());
            crawlerFlowDatasetMap = new HashMap<>();
            crawlerFlowMetadataMap = new HashMap<>();
            // Build the relation lookup maps
            for (CrawlerFlowModel crawlerFlowModel : crawlerFlowModelList) {
                List<CrawlerFlowModel> flowDatasetList = new ArrayList<>();
                List<CrawlerFlowModel> metadataList = new ArrayList<>();
                String inputDatasetCode = crawlerFlowModel.getInputDatasetCode();
                String datasetCode = crawlerFlowModel.getDatasetCode();
                if (StringUtil.isEmpty(inputDatasetCode)) {
                    continue;
                }
                if (crawlerFlowDatasetMap.containsKey(inputDatasetCode)) {
                    flowDatasetList = crawlerFlowDatasetMap.get(inputDatasetCode);
                }
                flowDatasetList.add(crawlerFlowModel);
                crawlerFlowDatasetMap.put(inputDatasetCode, flowDatasetList);
                if (crawlerFlowMetadataMap.containsKey(datasetCode)) {
                    metadataList = crawlerFlowMetadataMap.get(datasetCode);
                }
                metadataList.add(crawlerFlowModel);
                crawlerFlowMetadataMap.put(datasetCode, metadataList);
            }
            // SysConfig.getInstance().setVersionMap(new HashMap<>());
            adapterFlg = true;
            return true;
        } catch (Exception e) {
            logger.error("Failed to prepare adapter data", e);
            adapterFlg = false;
            return false;
        }
    }
    /**
     * Parses the patient index information.
     *
     * @param patientInfo patient index information as a JSON string
     * @return the parsed Patient, or null if the input cannot be parsed
     */
    public Patient parsePatient(String patientInfo) {
        try {
            ObjectMapper mapper = new ObjectMapper();
            return mapper.readValue(patientInfo, Patient.class);
        } catch (Exception e) {
            logger.error("Invalid patient parameter: " + patientInfo, e);
            return null;
        }
    }
    public void setAdapterFlg(Boolean adapterFlg) {
        this.adapterFlg = adapterFlg;
    }
}