Explorar el Código

esb拉模式采集逻辑修改,任务编排推模式测试

lingfeng hace 9 años
padre
commit
15839eade6

+ 84 - 1
Hos-Framework/src/main/java/com/yihu/ehr/framework/util/DateUtil.java

@@ -1,5 +1,6 @@
package com.yihu.ehr.framework.util;
import java.sql.Timestamp;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.ParsePosition;
@@ -7,10 +8,13 @@ import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.TimeZone;
public class DateUtil {
    public static final String DEFAULT_DATE_YMD_FORMAT = "yyyy-MM-dd";
    public final static String DEFAULT_YMDHMSDATE_FORMAT = "yyyy-MM-dd HH:mm:ss";
    /**
     * 日期比较,判断endDate - fromDate 是否超过expiresIn,是则返回true,否则返回false.
     *
@@ -155,4 +159,83 @@ public class DateUtil {
        }
        return new java.sql.Date(date.getTime());
    }
}  
    /**
     * Returns the current system date/time as a {@link Timestamp}
     * (millisecond precision, default time zone's clock).
     */
    public static Timestamp getSysDateTime() {
        Date now = Calendar.getInstance().getTime();
        return new Timestamp(now.getTime());
    }
    /**
     * Parses a "yyyy-MM-dd" string into a {@link Timestamp} at midnight of
     * that day.
     *
     * @param str date string in "yyyy-MM-dd" form; null/blank yields null
     * @return the midnight Timestamp, or null when input is blank or unparseable
     */
    public static Timestamp formatYMDToYMDHMS(String str) {
        if (str == null || str.trim().length() == 0) {
            return null;
        }
        // Pad with a midnight time-of-day so the full HMS pattern can parse it.
        String dateTime = str + " 00:00:00";
        // NOTE(review): SimpleDateFormat is lenient here, so out-of-range
        // fields (e.g. month 13) roll over instead of failing — confirm intended.
        SimpleDateFormat parser = new SimpleDateFormat(DEFAULT_YMDHMSDATE_FORMAT);
        Date parsed = parser.parse(dateTime, new ParsePosition(0));
        if (parsed == null) {
            return null;
        }
        return fromatDateToTimestamp(new java.sql.Date(parsed.getTime()));
    }
    /**
     * Converts a {@link Date} to a {@link Timestamp} with second precision —
     * the round-trip through the "yyyy-MM-dd HH:mm:ss" text form drops the
     * millisecond component.
     * NOTE(review): method name keeps the original "fromat" typo because
     * existing callers reference it.
     *
     * @param date the date to convert; null leads to a null result
     * @return the truncated Timestamp, or null on any failure
     */
    public static Timestamp fromatDateToTimestamp(Date date) {
        try {
            String text = new SimpleDateFormat(DEFAULT_YMDHMSDATE_FORMAT).format(date);
            return Timestamp.valueOf(text);
        } catch (Exception e) {
            // Swallow and signal failure with null, matching the original contract.
            return null;
        }
    }
    /**
     * Current date/time formatted with the default "yyyy-MM-dd HH:mm:ss"
     * pattern in the JVM's default time zone.
     */
    public static String getCurrentString() {
        Date now = Calendar.getInstance(TimeZone.getDefault()).getTime();
        return new SimpleDateFormat(DEFAULT_YMDHMSDATE_FORMAT).format(now);
    }
    /**
     * Current date/time formatted with the given pattern in the JVM's
     * default time zone.
     *
     * @param pattern a {@link SimpleDateFormat} pattern string
     * @return the formatted current time
     */
    public static String getCurrentString(String pattern) {
        Date now = Calendar.getInstance(TimeZone.getDefault()).getTime();
        return new SimpleDateFormat(pattern).format(now);
    }
    /**
     * Adds {@code add} days (may be negative) to {@code d}. The result is
     * actually a {@link Timestamp} truncated to whole seconds, because the
     * value round-trips through formatCharToYMDHMS's text parsing.
     *
     * @param add number of days to add
     * @param d   the base date
     * @return the shifted date, second precision
     */
    public static Date addDate(int add, Date d) {
        Calendar calendar = Calendar.getInstance();
        calendar.setTime(d);
        calendar.add(Calendar.DATE, add);
        String text = new SimpleDateFormat(DEFAULT_YMDHMSDATE_FORMAT).format(calendar.getTime());
        return formatCharToYMDHMS(text, DEFAULT_YMDHMSDATE_FORMAT);
    }
    /**
     * Parses {@code str} with the supplied pattern and returns it as a
     * {@link Timestamp} truncated to whole seconds.
     *
     * @param str    the text to parse; null/blank yields null
     * @param format a {@link SimpleDateFormat} pattern string
     * @return the parsed Timestamp, or null when input is blank or unparseable
     */
    public static Timestamp formatCharToYMDHMS(String str, String format) {
        if (str == null || str.trim().length() == 0) {
            return null;
        }
        Date parsed = new SimpleDateFormat(format).parse(str, new ParsePosition(0));
        if (parsed == null) {
            return null;
        }
        return fromatDateToTimestamp(new Date(parsed.getTime()));
    }
}

+ 11 - 0
Hos-resource/src/main/java/com/yihu/ehr/crawler/controller/CrawlerController.java

@@ -1,6 +1,7 @@
package com.yihu.ehr.crawler.controller;
import com.yihu.ehr.crawler.model.patient.Patient;
import com.yihu.ehr.crawler.service.CrawlerFlowManager;
import com.yihu.ehr.crawler.service.CrawlerManager;
import com.yihu.ehr.crawler.service.CrawlerService;
import com.yihu.ehr.framework.model.ActionResult;
@@ -11,6 +12,8 @@ import com.yihu.ehr.standard.service.adapter.AdapterSchemeService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import net.sf.json.JSONObject;
import org.junit.Test;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.RequestMapping;
@@ -232,4 +235,12 @@ public class CrawlerController {
        return result;
    }
    /**
     * Ad-hoc smoke test that drives the crawler flow for one hard-coded
     * patient/event at org "FZFY".
     * NOTE(review): a JUnit {@code @Test} method inside a production Spring
     * {@code @Controller} (pulling org.junit onto the main classpath) is a
     * smell — this belongs in src/test/java, not in the controller.
     */
    @Test
    public void crawlerTest() {
        Patient patient = new Patient();
        patient.setPatientId("11509006");
        patient.setEventNo("1001000001");
        patient.setOrgCode("FZFY");
        CrawlerFlowManager.getInstance().collectProcess(patient);
    }
}

+ 16 - 9
Hos-resource/src/main/java/com/yihu/ehr/crawler/service/CrawlerFlowManager.java

@@ -22,10 +22,7 @@ import com.yihu.ehr.standard.service.adapter.AdapterDatasetService;
import com.yihu.ehr.standard.service.adapter.AdapterSchemeVersionService;
import com.yihu.ehr.standard.service.bo.AdapterVersion;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.*;
public class CrawlerFlowManager {
@@ -160,8 +157,14 @@ public class CrawlerFlowManager {
                List<CrawlerFlowModel> crawlerFlowMetadataList = crawlerFlowMetadataMap.get(crawlerFlowDataset.getDatasetCode());
                for (CrawlerFlowModel crawlerFlowMetadata : crawlerFlowMetadataList) {
                    String metadataCode = crawlerFlowMetadata.getMetadataCode();
                    metadataCode = StringUtil.substring(metadataCode, metadataCode.indexOf("-")+1, metadataCode.length());
                    String inputMetadataCode = crawlerFlowMetadata.getInputMetadataCode();
                    relationValueMap.put(metadataCode, preData.get(inputMetadataCode).asText());
                    inputMetadataCode = StringUtil.substring(inputMetadataCode, inputMetadataCode.indexOf("-")+1, inputMetadataCode.length());
                    Iterator<JsonNode> array = preData.get("data").iterator();
                    while (array.hasNext()) {
                        JsonNode dataNode = array.next();
                        relationValueMap.put(metadataCode, dataNode.get(inputMetadataCode).asText());
                    }
                }
                String datasetCode = crawlerFlowDataset.getDatasetCode();
                AdapterDataSet adapterDataSet = adapterDataSetMap.get(datasetCode);
@@ -222,6 +225,9 @@ public class CrawlerFlowManager {
                List<CrawlerFlowModel> crawlerFlowModelList = crawlerFlowDao.getCrawlerFlowList(adapterSchemeVersionModel.getId());
                crawlerFlowDatasetMap = new HashMap<>();
                crawlerFlowMetadataMap = new HashMap<>();
                /**
                 * 获取关联表
                 */
                for (CrawlerFlowModel crawlerFlowModel : crawlerFlowModelList) {
                    List<CrawlerFlowModel> datasetList = new ArrayList<>();
                    List<CrawlerFlowModel> metadataList = new ArrayList<>();
@@ -234,12 +240,13 @@ public class CrawlerFlowManager {
                        datasetList = crawlerFlowDatasetMap.get(inputDatasetCode);
                    }
                    datasetList.add(crawlerFlowModel);
                    crawlerFlowMetadataMap.put(datasetCode, datasetList);
                    if (crawlerFlowDatasetMap.containsKey(datasetCode)) {
                        metadataList = crawlerFlowDatasetMap.get(datasetCode);
                    crawlerFlowDatasetMap.put(inputDatasetCode, datasetList);
                    if (crawlerFlowMetadataMap.containsKey(datasetCode)) {
                        metadataList = crawlerFlowMetadataMap.get(datasetCode);
                    }
                    metadataList.add(crawlerFlowModel);
                    crawlerFlowDatasetMap.put(datasetCode, metadataList);
                    crawlerFlowMetadataMap.put(datasetCode, metadataList);
                }
            } catch (Exception e) {
                return false;

+ 14 - 1
Hos-resource/src/main/java/com/yihu/ehr/crawler/service/CrawlerManager.java

@@ -6,6 +6,7 @@ import com.yihu.ehr.crawler.model.adapter.AdapterDataSet;
import com.yihu.ehr.crawler.model.config.SysConfig;
import com.yihu.ehr.crawler.model.patient.Patient;
import com.yihu.ehr.framework.constrant.Constants;
import com.yihu.ehr.framework.model.DictItem;
import com.yihu.ehr.framework.util.log.LogService;
import com.yihu.ehr.framework.util.operator.CollectionUtil;
import com.yihu.ehr.framework.util.operator.StringUtil;
@@ -28,6 +29,7 @@ public class CrawlerManager {
    private static DataCollectDispatcher dispatch;
    private Map<String, AdapterDataSet> adapterDataSetMap;
    private Boolean adapterFlg = false;
    private List<DictItem> datasetList;
    public CrawlerManager() {
        dispatch = DataCollectDispatcher.getInstance();
    }
@@ -57,6 +59,7 @@ public class CrawlerManager {
        Integer count = 0;
        Integer totalCount = 0;
        String message;
        datasetList = (List<DictItem>) condition.get("datasetList");
        if (!getDataForPrepare()) {
            message = "适配数据尚未准备";
            LogService.getLogger().error(message);
@@ -157,9 +160,19 @@ public class CrawlerManager {
                condition.put("column", "adapter_dataset_code");
                JSONObject jsonpObject = JSONObject.fromObject(condition);
                List<AdapterDatasetModel> adapterDataSetModelList = adapterDatasetService.getAdapterDatasetNotNullList(adapterVersion, jsonpObject.toString());
                List<Integer> datasetIdList = new ArrayList<>();
                if (!CollectionUtil.isEmpty(datasetList)) {
                    for (DictItem dictItem : datasetList) {
                        datasetIdList.add(Integer.parseInt(dictItem.getCode()));
                    }
                }
                for (AdapterDatasetModel adapterDatasetModel : adapterDataSetModelList) {
                    adapterDataSetMap.put(adapterDatasetModel.getStdDatasetCode(), new AdapterDataSet(adapterDatasetModel, adapterVersion));
                    if (CollectionUtil.isEmpty(datasetIdList) || datasetIdList.contains(adapterDatasetModel.getAdapterDatasetId())) {
                        adapterDataSetMap.put(adapterDatasetModel.getStdDatasetCode(), new AdapterDataSet(adapterDatasetModel, adapterVersion));
                    }
                }
                adapterFlg = true;
            } catch (Exception e) {
                adapterFlg = false;

+ 1 - 2
Hos-resource/src/main/java/com/yihu/ehr/crawler/service/DataCollectDispatcher.java

@@ -298,12 +298,11 @@ public class DataCollectDispatcher {
    public JsonNode matchAdapterData(JsonNode data, AdapterDataSet adapterDataSet) {
        ObjectMapper mapper = new ObjectMapper();
        JsonNode dataNode = null;
        ArrayNode result = mapper.createArrayNode();
        List<AdapterMetaData> metaDatas = adapterDataSet.getAdapterMetaDataList();
        Iterator<JsonNode> array = data.iterator();
        while (array.hasNext()) {
            dataNode = array.next();
            JsonNode dataNode = array.next();
            ObjectNode jsonNode = mapper.createObjectNode();
            for (AdapterMetaData adapterMetaData : metaDatas) {
                AdapterMetadataModel adapterMetadataModel = adapterMetaData.getAdapterMetadataModel();

+ 11 - 0
Hos-resource/src/main/java/com/yihu/ehr/datacollect/service/DatacollectManager.java

@@ -384,6 +384,17 @@ public class DatacollectManager implements IDatacollectManager {
        return new ActionResult(true,"修改成功!");
    }
    /**
     * Updates an existing collection job by persisting the modified entity.
     * Unlike the three-argument overload, this does not touch the cron
     * schedule or the job's dataset bindings.
     *
     * @param obj the job configuration to save
     * @return a success {@link ActionResult}
     * @throws Exception propagated from the DAO layer
     */
    @Override
    @Transactional
    public ActionResult updateJob(RsJobConfig obj) throws Exception {
        datacollectDao.updateEntity(obj);
        return new ActionResult(true,"修改成功!");
    }
    /**
     * 修改任务状态
     */

+ 5 - 0
Hos-resource/src/main/java/com/yihu/ehr/datacollect/service/intf/IDatacollectManager.java

@@ -52,6 +52,11 @@ public interface IDatacollectManager {
     */
    public ActionResult updateJob(RsJobConfig obj, String cron, String jobDataset) throws Exception;
    /**
     * Update a collection job entity only, without changing its cron
     * schedule or dataset bindings (contrast with the three-argument
     * overload above).
     */
    public ActionResult updateJob(RsJobConfig obj) throws Exception;
    /**
     * 删除任务

+ 22 - 2
Hos-resource/src/main/java/com/yihu/ehr/job/service/ArchiveUploadJob.java

@@ -5,6 +5,8 @@ import com.yihu.ehr.crawler.service.CrawlerManager;
import com.yihu.ehr.datacollect.model.RsJobConfig;
import com.yihu.ehr.datacollect.service.intf.IDatacollectManager;
import com.yihu.ehr.framework.constrant.DateConvert;
import com.yihu.ehr.framework.model.DataGridResult;
import com.yihu.ehr.framework.util.DateUtil;
import com.yihu.ehr.framework.util.springutil.SpringBeanUtil;
import java.util.Date;
@@ -23,9 +25,27 @@ public class ArchiveUploadJob implements IBaseJob {
        IDatacollectManager datacollect = SpringBeanUtil.getService(Services.Datacollect);
        RsJobConfig job = datacollect.getJobById(jobId);
        Map<String, Object> condition = new HashMap<>();
        condition.put("beginDate", job.getRepeatStartTime());
        condition.put("endDate", job.getRepeatEndTime());
        Date begin = job.getRepeatStartTime();
        Date end = job.getRepeatEndTime();
        if (end == null) {
            //调整截止时间,当前时间-偏移量
            end = DateUtil.addDate(-job.getDelayTime(), DateUtil.getSysDateTime());
        }
        if ((end.getTime() - begin.getTime()) <= 0) {
            return; //结束时间小于开始时间时,不获取
        }
        condition.put("beginDate", begin);
        condition.put("endDate", end);
        DataGridResult result = datacollect.getJobDatasetByJobId(jobId);
        condition.put("datasetList", result.getDetailModelList());
        CrawlerManager.getInstance().dataCrawler(condition);
        job.setRepeatStartTime(end);
        job.setRepeatEndTime(DateUtil.formatYMDToYMDHMS(DateUtil.getSysDateTime().toString()));
        datacollect.updateJob(job);
        return;
    }
}