
Task orchestration job

yingjie 9 years ago
parent
commit
06d706d031

+ 24 - 2
Hos-resource/src/main/java/com/yihu/ehr/crawler/controller/CrawlerController.java

@@ -7,9 +7,7 @@ import com.yihu.ehr.framework.model.ActionResult;
import com.yihu.ehr.framework.model.DetailModelResult;
import com.yihu.ehr.framework.model.Result;
import com.yihu.ehr.framework.util.operator.StringUtil;
import com.yihu.ehr.standard.model.adapter.AdapterSchemeVersionModel;
import com.yihu.ehr.standard.service.adapter.AdapterSchemeService;
import com.yihu.ehr.standard.service.adapter.AdapterSchemeVersionService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
@@ -226,6 +224,8 @@ public class CrawlerController {
        return crawlerService.getDataSetResult(rows, page);
    }
    @RequestMapping(value = "getSchemeList", method = RequestMethod.POST)
    @ApiOperation(value = "获取适配方案-方案版本下拉框", produces = "application/json", notes = "获取适配方案-方案版本下拉框")
    @ResponseBody
@@ -235,4 +235,26 @@ public class CrawlerController {
    }
+    /**
+     * Get the saved data sets for task orchestration
+     */
+    @RequestMapping(value = "savedJobData", method = RequestMethod.POST)
+    @ApiOperation(value = "获取保存的数据集",produces = "application/json", notes = "保存的数据集")
+    @ResponseBody
+    public DetailModelResult ListSavedJobData() {
+        return crawlerService.getDataSetSavedResult();
+    }
+    /**
+     * Get the saved adapter scheme and scheme version for task orchestration
+     */
+    @RequestMapping(value = "savedSchemeList", method = RequestMethod.POST)
+    @ApiOperation(value = "获取保存的适配方案",produces = "application/json", notes = "保存的适配方案")
+    @ResponseBody
+    public DetailModelResult SavedSchemeList() {
+        return crawlerService.getSchemeSavedResult();
+    }
}
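
Both new endpoints are parameterless POST mappings, so they are easy to smoke-test once the service is running. A minimal client sketch using Spring's RestTemplate; the host, port, and base path are assumptions, since the controller's class-level @RequestMapping prefix is not shown in this diff:

    import org.springframework.web.client.RestTemplate;

    public class SavedEndpointsSmokeTest {
        public static void main(String[] args) {
            RestTemplate rest = new RestTemplate();
            // Hypothetical base URL; replace with the real host/port and controller prefix.
            String base = "http://localhost:8080/crawler";
            // Returns the saved data sets as a DetailModelResult serialized to JSON.
            String savedJobData = rest.postForObject(base + "/savedJobData", null, String.class);
            // Returns the saved adapter scheme versions.
            String savedSchemeList = rest.postForObject(base + "/savedSchemeList", null, String.class);
            System.out.println(savedJobData);
            System.out.println(savedSchemeList);
        }
    }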

+ 34 - 32
Hos-resource/src/main/java/com/yihu/ehr/crawler/service/CrawlerFlowManager.java

@@ -11,7 +11,6 @@ import com.yihu.ehr.crawler.model.flow.CrawlerDataSetModel;
import com.yihu.ehr.crawler.model.flow.CrawlerFlowHeadModel;
import com.yihu.ehr.crawler.model.flow.CrawlerFlowModel;
import com.yihu.ehr.crawler.model.patient.Patient;
-import com.yihu.ehr.framework.constrant.Constants;
import com.yihu.ehr.framework.util.operator.CollectionUtil;
import com.yihu.ehr.framework.util.operator.StringUtil;
import com.yihu.ehr.framework.util.springutil.SpringBeanUtil;
@@ -31,7 +30,7 @@ public class CrawlerFlowManager {
    private static Logger logger = LogManager.getLogger(CrawlerFlowManager.class);
    private static volatile CrawlerFlowManager instance;
-    private static DataCollectDispatcher dispatch;
+    private static DataCollectDispatcher dispatch = DataCollectDispatcher.getInstance();
    private Map<String, AdapterDataSet> adapterDataSetMap;
    private List<CrawlerFlowHeadModel> crawlerFlowHeadModelList;
    private Map<String, List<CrawlerFlowModel>> crawlerFlowDatasetMap;
@@ -64,31 +63,33 @@
        return instance;
    }
-    public String dataCrawler() {
+    public String dataCrawler(Map<String, Object> condition) {
        Integer count = 0;
        Integer totalCount = 0;
-        Map<String, Object> condition = new HashMap<>();
+        String message;
+        /**
+         * Prepare the basic adapter data
+         */
+        if (!getDataForPrepare()) {
+            message = "适配数据尚未准备";
+            logger.error(message);
+            return message;
+        }
        patientList = dispatch.getPatientList(condition, adapterDataSetMap);
        if (!CollectionUtil.isEmpty(patientList)) {
            totalCount = patientList.size();
            for (Patient patient : patientList) {
-                collectProcess(patient);
+                boolean result = collectProcess(patient);
+                if (result) {
+                    count++;
+                }
            }
        }
-        String message = "本次采集病人共" + totalCount + "条,成功采集信息"+ count + "条";
-        logger.info(message);
+        message = "本次采集病人共" + totalCount + "条,成功采集信息"+ count + "条";
        return message;
    }
-    public String collectProcess(Patient patient) {
-        String message;
-        if (!getDataForPrepare()) {
-            message = "适配数据尚未准备";
-            logger.error(message);
-            logger.trace("trance");
-            logger.info("info");
-            return message;
-        }
+    public boolean collectProcess(Patient patient) {
        patient.setReUploadFlg(StringUtil.toString(false));
        logger.trace("采集->注册->打包上传,任务ID:,patient_id:" + patient.getPatientId() + ", event_no:" + patient.getEventNo());
        try {
@@ -96,20 +97,24 @@
            /**
             * Get the token
             */
            if (!dispatch.getToken()) {
-                message = "token获取失败";
-                logger.error(message);
-                return message;
+                logger.error("token获取失败");
+                return false;
            }
            /**
             * Get the remote version
             */
-            if (!dispatch.getRemoteVersion(patient.getOrgCode())) {
-                message = "远程版本获取失败";
-                logger.error(message);
-                return message;
+//            if (!dispatch.getRemoteVersion(patient.getOrgCode())) {
+//                logger.error("远程版本获取失败");
+//                return false;
+//            }
+            /**
+             * Get the version
+             */
+            if (StringUtil.isEmpty(SysConfig.getInstance().getVersionMap().get(patient.getOrgCode()))) {
+                logger.error("版本获取失败");
+                return false;
            }
            Map<String, JsonNode> dataMap = new HashMap<>();
            for (CrawlerFlowHeadModel crawlerFlowHeadModel : crawlerFlowHeadModelList) {
                /**
@@ -143,22 +148,19 @@
            try {
                if (!CollectionUtil.isEmpty(dataMap.keySet())) {
                    if (!dispatch.upload(dataMap, patient, adapterDataSetMap)) {
-                        message = "上传档案失败";
-                        logger.error(message);
-                        return message;
+                        logger.error("上传档案失败");
+                        return false;
                    }
                }
            } catch (Exception e) {
-                message = "上传档案失败";
                logger.error("档案上传失败,patient_id:" + patient.getPatientId() + ", event_no:" + patient.getEventNo());
-                return message;
+                return false;
            }
        } catch (Exception e) {
-            message = "采集病人失败";
            logger.error("采集病人失败,patient_id:" + patient.getPatientId() + ", event_no:" + patient.getEventNo(), e);
-            return message;
+            return false;
        }
-        return Constants.EMPTY;
+        return true;
    }
    public Boolean getDataByCrawlerFlow(String preDatasetCode, Patient patient, Map<String, JsonNode> dataMap) {
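
The contract change in this file is that collectProcess now reports success as a boolean instead of a message string, which lets dataCrawler count successful patients, and the query window now comes in through the condition map instead of being created empty inside the method. A minimal caller sketch, assuming the getInstance() accessor implied by the singleton field above and the beginDate/endDate keys that CrawlerFlowUploadJob below puts into the map:

    import java.util.Date;
    import java.util.HashMap;
    import java.util.Map;

    import com.yihu.ehr.crawler.service.CrawlerFlowManager;

    public class DataCrawlerCallSketch {
        public static void main(String[] args) {
            // The caller now owns the crawl window; the old dataCrawler() built an empty map itself.
            Map<String, Object> condition = new HashMap<>();
            condition.put("beginDate", new Date(System.currentTimeMillis() - 24L * 60 * 60 * 1000));
            condition.put("endDate", new Date());
            // The returned message summarizes total patients vs. successfully collected ones.
            String message = CrawlerFlowManager.getInstance().dataCrawler(condition);
            System.out.println(message);
        }
    }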

+ 105 - 54
Hos-resource/src/main/java/com/yihu/ehr/crawler/service/CrawlerService.java

@@ -16,8 +16,10 @@ import com.yihu.ehr.framework.util.operator.StringUtil;
import com.yihu.ehr.standard.model.adapter.AdapterDatasetModel;
import com.yihu.ehr.standard.model.adapter.AdapterMetadataModel;
import com.yihu.ehr.standard.model.adapter.AdapterSchemeVersionModel;
+import com.yihu.ehr.standard.model.adapter.resultModel.AdapterSchemeResultModel;
import com.yihu.ehr.standard.service.adapter.AdapterDatasetService;
import com.yihu.ehr.standard.service.adapter.AdapterMetadataService;
+import com.yihu.ehr.standard.service.adapter.AdapterSchemeService;
import com.yihu.ehr.standard.service.adapter.AdapterSchemeVersionService;
import com.yihu.ehr.standard.service.bo.AdapterVersion;
import net.sf.json.JSONArray;
@@ -48,11 +50,14 @@ public class CrawlerService {
    private CrawlerFlowHeadDao crawlerFlowHeadDao;
    @Resource(name = AdapterMetadataService.BEAN_ID)
    private AdapterMetadataService adapterMetadataService;
-    private static Map<Integer,List<FlowLines>> lineCache= new HashMap<>();
+    private static Map<Integer, List<FlowLines>> lineCache = new HashMap<>();
+    @Resource(name = AdapterSchemeService.BEAN_ID)
+    private AdapterSchemeService adapterSchemeService;
+    public static Map<Integer, List<FlowLines>> getLineCache() {
+        return lineCache;
+    }
    /**
     * Save the orchestration mapping relations
     *
@@ -110,14 +115,14 @@
     * @return
     * @throws Exception
     */
-    public DetailModelResult getSchemeDataset(Integer schemeVersionId,String datasetName) throws Exception {
+    public DetailModelResult getSchemeDataset(Integer schemeVersionId, String datasetName) throws Exception {
        AdapterSchemeVersionModel versionModel = (AdapterSchemeVersionModel) adapterSchemeVersionService.get(schemeVersionId);
        //Get the full set of adapter data sets
-        Map<String,Object> map = new HashMap<String,Object>();
-        String condition=null;
-        if (datasetName!=null && !"".equals(datasetName)){
+        Map<String, Object> map = new HashMap<String, Object>();
+        String condition = null;
+        if (datasetName != null && !"".equals(datasetName)) {
            map.put("name", datasetName);
-            condition=net.sf.json.JSONObject.fromObject(map).toString();
+            condition = net.sf.json.JSONObject.fromObject(map).toString();
        }
        List<AdapterDatasetModel> adapterDatasetModelList = adapterDatasetService.getDatasetList(AdapterDatasetModel.class, versionModel.getVersion(), condition, null, null, null);
        //Get the orchestrated data sets
@@ -125,28 +130,28 @@
        DetailModelResult re = new DetailModelResult();
        List<CrawlerDatasetResultDetailModel> list = new ArrayList<>();
        for (AdapterDatasetModel datasetModel : adapterDatasetModelList) {
-            if (!StringUtil.isStrEmpty(datasetModel.getAdapterDatasetCode())){
+            if (!StringUtil.isStrEmpty(datasetModel.getAdapterDatasetCode())) {
                List<AdapterMetadataModel> metadatas = adapterMetadataService.getAdapterMetadataByDataset(versionModel.getVersion(), datasetModel.getStdDatasetId());
-            if (metadatas != null && metadatas.size() > 0) {
-                CrawlerDatasetResultDetailModel obj = new CrawlerDatasetResultDetailModel();
-                obj.setSchemeVersionId(schemeVersionId);
-                obj.setDatasetId(datasetModel.getStdDatasetId());
-                obj.setDatasetCode(datasetModel.getStdDatasetCode());
-                obj.setDatasetName(datasetModel.getStdDatasetName());
-                obj.setSchemeId(datasetModel.getSchemeId());
-                if (crawlerDataset != null && crawlerDataset.size() > 0) {
-                    for (CrawlerDataSetModel cDataSet : crawlerDataset) {
-                        if (cDataSet.getDatasetId().equals(datasetModel.getStdDatasetId())) {
-                            obj.setSchemeVersionId(cDataSet.getSchemeVersionId());
-                            obj.setChecked("1");
-                            break;
+                if (metadatas != null && metadatas.size() > 0) {
+                    CrawlerDatasetResultDetailModel obj = new CrawlerDatasetResultDetailModel();
+                    obj.setSchemeVersionId(schemeVersionId);
+                    obj.setDatasetId(datasetModel.getStdDatasetId());
+                    obj.setDatasetCode(datasetModel.getStdDatasetCode());
+                    obj.setDatasetName(datasetModel.getStdDatasetName());
+                    obj.setSchemeId(datasetModel.getSchemeId());
+                    if (crawlerDataset != null && crawlerDataset.size() > 0) {
+                        for (CrawlerDataSetModel cDataSet : crawlerDataset) {
+                            if (cDataSet.getDatasetId().equals(datasetModel.getStdDatasetId())) {
+                                obj.setSchemeVersionId(cDataSet.getSchemeVersionId());
+                                obj.setChecked("1");
+                                break;
+                            }
                        }
                    }
+                    list.add(obj);
                }
-                list.add(obj);
-            }
            }
        }
        re.setDetailModelList(list);
        return re;
@@ -157,7 +162,7 @@ public class CrawlerService {
     *
     * @param json
     * @param rows
-     *@param page @return
+     * @param page @return
     * @throws Exception
     */
    public void saveJobData(String json, Integer rows, Integer page) throws Exception {
@@ -183,7 +188,7 @@
                        List<Integer> newDatasetIdList = new ArrayList<>();
                        String dataSetStr = obj.getString("dataSets");
                        if (StringUtils.isNotBlank(dataSetStr)) {
-                            String[] IdList =  dataSetStr.split(",");
+                            String[] IdList = dataSetStr.split(",");
                            for (String aIdList : IdList) {
                                if (!Objects.equals(aIdList, "")) {
                                    Integer DaSetId = Integer.valueOf(aIdList);
@@ -206,7 +211,7 @@
                    //If an orchestration mapping relation was passed in when saving, persist it
                    if (obj.containsKey("relation") && !Objects.equals(obj.getString("relation"), "")) {
-                        saveDataSetRelation(versionId,obj.getString("relation"));
+                        saveDataSetRelation(versionId, obj.getString("relation"));
                    }
                }
            }
@@ -241,13 +246,13 @@
    }
    /**
     * Delete orchestration data
     *
     * @param version
     */
    @Transactional
-    public String  deleteJobData(String version) {
+    public String deleteJobData(String version) {
        try {
            AdapterSchemeVersionModel versionModel = (AdapterSchemeVersionModel) adapterSchemeVersionService.get(Integer.valueOf(version));
@@ -268,11 +273,12 @@
    /**
     * Data set list
-     * @param limit rows
+     *
+     * @param limit  rows
     * @param offset page
     * @return
     */
-    public DetailModelResult getDataSetResult(Integer limit, Integer offset){
+    public DetailModelResult getDataSetResult(Integer limit, Integer offset) {
        try {
            StringBuffer stringBuffer = new StringBuffer();
            String sql = "SELECT 1 as status, a.scheme_id, a.scheme_version_id, GROUP_CONCAT(a.dataset_id SEPARATOR ',') AS datasetId, GROUP_CONCAT(a.dataset_name SEPARATOR ',') AS datasetName" +
@@ -288,7 +294,7 @@
            Integer total = crawlerDatasetDao.getTotalRows();
-            List<Map<String, Object>>list = crawlerDatasetDao.queryListBySql(stringBuffer.toString());
+            List<Map<String, Object>> list = crawlerDatasetDao.queryListBySql(stringBuffer.toString());
            DetailModelResult detailModelResult = DetailModelResult.success("获取数据集成功");
            detailModelResult.setDetailModelList(list);
            detailModelResult.setTotalCount(total);
@@ -300,7 +306,6 @@
    }
    /**
     * Get the adapter data sets already selected for orchestration
     *
@@ -332,18 +337,18 @@
        List<MappingDataset> list = new ArrayList<>();
        for (AdapterDatasetModel datasetModel : adapterDatasetModelList) {
//            if (crawlerDataset != null && crawlerDataset.size() > 0) {
-                MappingDataset obj = new MappingDataset();
+            MappingDataset obj = new MappingDataset();
//                for (CrawlerDataSetModel cDataSet : crawlerDataset) {
-                    if ( !StringUtil.isStrEmpty(datasetModel.getAdapterDatasetCode())) {
-                        List<MappingMetadata> metadatas = getMappingMetaDatasByDataset(versionModel.getVersion(), datasetModel.getStdDatasetId());
-                        obj.setId(datasetModel.getStdDatasetId());
-                        obj.setCode(datasetModel.getStdDatasetCode());
-                        obj.setName(datasetModel.getStdDatasetName());
-                        obj.setData(metadatas);
+            if (!StringUtil.isStrEmpty(datasetModel.getAdapterDatasetCode())) {
+                List<MappingMetadata> metadatas = getMappingMetaDatasByDataset(versionModel.getVersion(), datasetModel.getStdDatasetId());
+                obj.setId(datasetModel.getStdDatasetId());
+                obj.setCode(datasetModel.getStdDatasetCode());
+                obj.setName(datasetModel.getStdDatasetName());
+                obj.setData(metadatas);
//                        break;
-                    }
+            }
//                }
-                list.add(obj);
+            list.add(obj);
//            }
        }
        return list;
@@ -358,14 +363,14 @@
     */
    public List<MappingMetadata> getMappingMetaDatasByDataset(String adapterVersion, Integer dataSetId) {
        List<AdapterMetadataModel> adapterMetadataModels = adapterMetadataService.getAdapterMetadataByDataset(adapterVersion, dataSetId);
-        AdapterDatasetModel  adapterDatasetModel= adapterMetadataService.getAdapterDataset(adapterVersion, dataSetId);
+        AdapterDatasetModel adapterDatasetModel = adapterMetadataService.getAdapterDataset(adapterVersion, dataSetId);
        List<MappingMetadata> resultList = new ArrayList<>();
        if (adapterMetadataModels != null && adapterMetadataModels.size() > 0) {
            for (AdapterMetadataModel metadataModel : adapterMetadataModels) {
-                if(!StringUtil.isStrEmpty( metadataModel.getAdapterMetadataCode())){
+                if (!StringUtil.isStrEmpty(metadataModel.getAdapterMetadataCode())) {
                    MappingMetadata metadata = new MappingMetadata();
                    metadata.setId(metadataModel.getAdapterMetadataId());
-                    metadata.setCode(adapterDatasetModel.getAdapterDatasetCode()+"-"+metadataModel.getAdapterMetadataCode());
+                    metadata.setCode(adapterDatasetModel.getAdapterDatasetCode() + "-" + metadataModel.getAdapterMetadataCode());
                    metadata.setName(metadataModel.getAdapterMetadataName());
                    resultList.add(metadata);
                }
@@ -381,15 +386,15 @@
     * @param datasetIdStr
     * @return
     */
-    public String getRelations(Integer schemeVersionId, String datasetIdStr,String lineStr) {
+    public String getRelations(Integer schemeVersionId, String datasetIdStr, String lineStr) {
        JSONObject jsonObject = new JSONObject();
        try {
            List<MappingDataset> datasets = getSchemeDatasetByChecked(schemeVersionId, datasetIdStr);
            List<FlowLines> lines = getFlowLines(schemeVersionId);
-            if (StringUtil.isEmpty(lineStr)){
+            if (StringUtil.isEmpty(lineStr)) {
                lines = getFlowLines(schemeVersionId);
-            }else {
-                lines=lineCache.get(schemeVersionId);
+            } else {
+                lines = lineCache.get(schemeVersionId);
            }
            jsonObject.put("tables", datasets);
            jsonObject.put("rels", lines);
@@ -400,20 +405,66 @@
    }
-    public void setLinesCache(Integer schemeVersionId,String lines) throws IOException {
-        ObjectMapper objectMapper=new ObjectMapper();
-        ObjectNode rootNode=objectMapper.readValue(lines,ObjectNode.class);
-        String lineJson=rootNode.get("lines").toString();
-        List<FlowLines> line=objectMapper.readValue(lineJson,List.class);
-        lineCache.put(schemeVersionId,line);
+    public void setLinesCache(Integer schemeVersionId, String lines) throws IOException {
+        ObjectMapper objectMapper = new ObjectMapper();
+        ObjectNode rootNode = objectMapper.readValue(lines, ObjectNode.class);
+        String lineJson = rootNode.get("lines").toString();
+        List<FlowLines> line = objectMapper.readValue(lineJson, List.class);
+        lineCache.put(schemeVersionId, line);
    }
    public void deleteCurrentPage(Integer rows, Integer page) {
-        DetailModelResult currentResut = getDataSetResult(rows,page);
+        DetailModelResult currentResut = getDataSetResult(rows, page);
        List<Map<String, Object>> list = currentResut.getDetailModelList();
        for (Map<String, Object> map : list) {
            String version = String.valueOf(map.get("scheme_version_id"));
            deleteJobData(version);
        }
    }
+    public DetailModelResult getDataSetSavedResult() {
+        DetailModelResult checkedSchemeLs = getDataSetResult(null, null);
+        List<Map<String, Object>> list = checkedSchemeLs.getDetailModelList();
+        return null;
+    }
+    public DetailModelResult getSchemeSavedResult() {
+        DetailModelResult allScheme = adapterSchemeService.getAdapterSchemeResultModelList();
+        List<AdapterSchemeResultModel> allSchemeLs = allScheme.getDetailModelList();
+        if (allSchemeLs != null && allSchemeLs.size() > 0) {
+            DetailModelResult checkedSchemeLs = getDataSetResult(null, null);
+            List<Map<String, Object>> list = checkedSchemeLs.getDetailModelList();
+            Set<String> schemeIdSets = new HashSet<>();
+            Set<String> versionSets = new HashSet<>();
+            DetailModelResult result = new DetailModelResult();
+            List<AdapterSchemeVersionModel> versionModelList = new ArrayList<>();
+            if (list != null && list.size() > 0) {
+                for (Map<String, Object> objectMap : list) {
+                    if (objectMap.get("scheme_id") != null && objectMap.get("scheme_version_id") != null) {
+                        schemeIdSets.add(objectMap.get("scheme_id").toString());
+                        versionSets.add(objectMap.get("scheme_version_id").toString());
+                    }
+                }
+                if (schemeIdSets.size() > 0 && versionSets.size() > 0) {
+                    for (AdapterSchemeResultModel schemeL : allSchemeLs) {
+                        String schemeID = schemeL.getSchemeId().toString();
+                        if (schemeIdSets.contains(schemeID)) {
+                            List<AdapterSchemeVersionModel> versionModels = schemeL.getVersionList();
+                            for (AdapterSchemeVersionModel versionModel : versionModels) {
+                                String versionID = versionModel.getId().toString();
+                                if (versionSets.contains(versionID)) {
+                                    versionModelList.add(versionModel);
+                                }
+                            }
+                        }
+                    }
+                    result.setDetailModelList(versionModelList);
+                    return result;
+                }
+            }
+        }
+        return DetailModelResult.error("获取已编排任务适配方案失败!");
+    }
}
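
One caveat in the reformatted setLinesCache: objectMapper.readValue(lineJson, List.class) gives Jackson no element type, so the cached List<FlowLines> actually holds LinkedHashMap instances at runtime. A hedged alternative sketch that preserves the element type via Jackson's TypeReference; the generic helper below is illustrative, not part of this commit:

    import java.io.IOException;
    import java.util.List;

    import com.fasterxml.jackson.core.type.TypeReference;
    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.fasterxml.jackson.databind.node.ObjectNode;

    public class TypedLinesParser {
        private static final ObjectMapper MAPPER = new ObjectMapper();

        // Reads the "lines" array out of the request JSON with an explicit element type,
        // so the resulting list holds real beans instead of LinkedHashMaps.
        public static <T> List<T> parseLines(String json, TypeReference<List<T>> type) throws IOException {
            ObjectNode rootNode = MAPPER.readValue(json, ObjectNode.class);
            String lineJson = rootNode.get("lines").toString();
            return MAPPER.readValue(lineJson, type);
        }
    }

setLinesCache could then call parseLines(lines, new TypeReference<List<FlowLines>>() {}) and cache a correctly typed list.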

+ 54 - 0
Hos-resource/src/main/java/com/yihu/ehr/job/service/CrawlerFlowUploadJob.java

@@ -0,0 +1,54 @@
+package com.yihu.ehr.job.service;
+import com.yihu.ehr.common.Services;
+import com.yihu.ehr.crawler.service.CrawlerFlowManager;
+import com.yihu.ehr.datacollect.model.RsJobConfig;
+import com.yihu.ehr.datacollect.service.intf.IDatacollectManager;
+import com.yihu.ehr.framework.util.DateUtil;
+import com.yihu.ehr.framework.util.springutil.SpringBeanUtil;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.UUID;
+/**
+ * Executes the orchestrated crawl-and-upload job
+ *
+ * Created by chenyingjie on 16/7/14.
+ */
+public class CrawlerFlowUploadJob implements IBaseJob {
+    private static Logger logger = LogManager.getLogger(CrawlerFlowUploadJob.class);
+    @Override
+    public void execute(String jobId) throws Exception {
+        String random = UUID.randomUUID().toString();
+        logger.info("任务编排——档案采集上传开始,流水号:" + random + ",jobId:"+jobId);
+        IDatacollectManager datacollect = SpringBeanUtil.getService(Services.Datacollect);
+        RsJobConfig job = datacollect.getJobById(jobId);
+        Map<String, Object> condition = new HashMap<>();
+        Date begin = job.getRepeatStartTime();
+        Date end = job.getRepeatEndTime();
+        if (!job.getJobType().equals("0")) {
+            //Adjust the end time: current time minus the configured offset
+            end = DateUtil.addDate(-job.getDelayTime(), DateUtil.getSysDateTime());
+            if ((end.getTime() - begin.getTime()) <= 0) {
+                return; //Skip collection when the end time is not after the start time
+            }
+        }
+        condition.put("beginDate", begin);
+        condition.put("endDate", end);
+        CrawlerFlowManager crawlerFlowManager = new CrawlerFlowManager();
+        String message = crawlerFlowManager.dataCrawler(condition);
+        if (!job.getJobType().equals("0")) {
+            job.setRepeatStartTime(end);
+            job.setRepeatEndTime(DateUtil.formatYMDToYMDHMS(DateUtil.getSysDateTime().toString()));
+        }
+        datacollect.updateJob(job);
+        logger.info("任务编排——档案采集上传结束,流水号:" + random + ",jobId:" + jobId + ",message:" + message);
+        return;
+    }
+}
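
For repeating jobs (jobType other than "0") the job slides its window forward on every run: the end is clamped to "now minus the configured delay", the run is skipped if the window is empty, and after a successful crawl the start moves up to the old end. A standalone sketch of that window arithmetic; the day-based unit for the delay is an assumption, since DateUtil.addDate's unit is not shown in this diff:

    import java.util.Calendar;
    import java.util.Date;

    public class CrawlWindowSketch {
        // Mirrors end = DateUtil.addDate(-delay, now), assuming a day-based offset.
        static Date adjustedEnd(int delayDays) {
            Calendar cal = Calendar.getInstance();
            cal.add(Calendar.DAY_OF_MONTH, -delayDays);
            return cal.getTime();
        }

        public static void main(String[] args) {
            Date begin = new Date(System.currentTimeMillis() - 7L * 24 * 60 * 60 * 1000); // previous run's end
            Date end = adjustedEnd(1);
            if (end.getTime() - begin.getTime() <= 0) {
                System.out.println("empty window, skip this run");
                return;
            }
            System.out.println("crawl window: " + begin + " -> " + end);
            // On success the job persists repeatStartTime = end, so the next run resumes there.
        }
    }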