
Clean up unused esb-admin files and fix the error reported when saving the crawler job orchestration

Airhead 8 years ago
parent
current commit
78eca3d934

+ 6 - 30
src/main/java/com/yihu/hos/crawler/controller/CrawlerController.java

@@ -1,13 +1,11 @@
package com.yihu.hos.crawler.controller;
import com.yihu.hos.crawler.model.patient.Patient;
import com.yihu.hos.crawler.service.CrawlerManager;
import com.yihu.hos.core.datatype.StringUtil;
import com.yihu.hos.crawler.service.CrawlerService;
import com.yihu.hos.standard.service.adapter.AdapterSchemeService;
import com.yihu.hos.web.framework.model.ActionResult;
import com.yihu.hos.web.framework.model.DetailModelResult;
import com.yihu.hos.web.framework.model.Result;
import com.yihu.hos.core.datatype.StringUtil;
import com.yihu.hos.standard.service.adapter.AdapterSchemeService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
@@ -39,8 +37,8 @@ public class CrawlerController {
    private AdapterSchemeService adapterSchemeService;
    /**
    任务编排
    */
     * 任务编排
     */
    @RequestMapping("jobLayout")
    public String jobLayout(Model model) {
        model.addAttribute("contentPage", "/crawler/jobLayout");
@@ -73,27 +71,6 @@ public class CrawlerController {
        return "pageView";
    }
    @RequestMapping(value = "patient", method = RequestMethod.POST)
    @ApiOperation(value = "采集病人健康档案", produces = "application/json", notes = "采集病人健康档案")
    @ResponseBody
    public Result crawler(
            @ApiParam(name = "patient", value = "病人索引信息", required = true)
            @RequestParam(value = "patient") String patientInfo) {
        CrawlerManager crawlerManager = new CrawlerManager();
        Patient patient = crawlerManager.parsePatient(patientInfo);
        if (patient != null) {
            Boolean result = crawlerManager.collectProcess(patient);
            if (result) {
                return Result.success("采集上传成功");
            } else {
                return Result.error("采集上传失败");
            }
        } else {
            return Result.error("参数转换病人实体失败");
        }
    }
    /**
     * 保存任务编排数据
     */
@@ -225,7 +202,6 @@ public class CrawlerController {
    }
    @RequestMapping(value = "getSchemeList", method = RequestMethod.POST)
    @ApiOperation(value = "获取适配方案-方案版本下拉框", produces = "application/json", notes = "获取适配方案-方案版本下拉框")
    @ResponseBody
@@ -239,7 +215,7 @@ public class CrawlerController {
     * 获取任务编排保存数据集
     */
    @RequestMapping(value = "savedJobData", method = RequestMethod.POST)
    @ApiOperation(value = "获取保存的数据集",produces = "application/json", notes = "保存的数据集")
    @ApiOperation(value = "获取保存的数据集", produces = "application/json", notes = "保存的数据集")
    @ResponseBody
    public DetailModelResult ListSavedJobData(
            @ApiParam(name = "version", value = "版本号", required = true)
@@ -253,7 +229,7 @@ public class CrawlerController {
     * 获取任务编排保存适配方案-方案版本
     */
    @RequestMapping(value = "savedSchemeList", method = RequestMethod.POST)
    @ApiOperation(value = "获取保存的适配方案",produces = "application/json", notes = "保存的适配方案")
    @ApiOperation(value = "获取保存的适配方案", produces = "application/json", notes = "保存的适配方案")
    @ResponseBody
    public DetailModelResult SavedSchemeList() {
        return crawlerService.getSchemeSavedResult();

+ 0 - 159
src/main/java/com/yihu/hos/crawler/format/DataSetTransformer.java

@@ -1,159 +0,0 @@
package com.yihu.hos.crawler.format;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.yihu.ehr.dbhelper.jdbc.DBHelper;
import com.yihu.hos.web.framework.constrant.SqlConstants;
import com.yihu.hos.core.datatype.StringUtil;
import com.yihu.hos.crawler.model.adapter.AdapterDataSet;
import com.yihu.hos.crawler.model.adapter.AdapterDict;
import com.yihu.hos.crawler.model.adapter.AdapterMetaData;
import com.yihu.hos.crawler.model.patient.Patient;
import com.yihu.hos.crawler.model.transform.DictDataType;
import com.yihu.hos.crawler.model.transform.TransformType;
import java.util.Iterator;
import java.util.Map;
/**
 * Currently only the JSON format is handled.
 * <p>
 * JSON format:
 * {
 * "inner_version":"xxxxx",
 * "patient_id":"xxxx",
 * "event_no":"xxxx",
 * "code":"dataset_code",
 * "org_code":"xxxx",
 * "data":
 * [{"metadata_code1":"5","metadata_code2":"6"},
 *  {"metadata_code1":"1","metadata_code2":"2"}]
 * }
 * <p>
 * Created by Air on 2015/6/4.
 */
public class DataSetTransformer implements IDataTransformer{
    private JsonNode jsonObject;
    private Patient patient;
    private static DBHelper db;
    public JsonNode getJsonObject() {
        return jsonObject;
    }
    public DataSetTransformer() {
        if (db == null) {
            db = new DBHelper();
        }
    }
    public boolean transfer(Map<String, AdapterDataSet> dataSetMap) {
        if (jsonObject != null) {
            String dataSetCode = jsonObject.get("code").asText();
            ArrayNode jsonArray = (ArrayNode) jsonObject.get("data");
            AdapterDataSet adapterDataSet = dataSetMap.get(dataSetCode);
            boolean transfer = transferJson(jsonArray, adapterDataSet);
            return transfer;
        }
        return false;
    }
    public String getData() {
        //make sure the document carries its version information
        return jsonObject.toString();
    }
    public void setData(JsonNode data) {
        jsonObject = data;
        setPatient();
    }
    @Override
    public Patient getPatient() {
        return patient;
    }
    public TransformType getTransformType() {
        return TransformType.DATA_SET_JSON;
    }
    /**
     * JSON format:
     * {
     * "inner_version":"xxxxx",
     * "patient_id":"xxxx",
     * "event_no":"xxxx",
     * "code":"dataset_code",
     * "data":
     * [{"metadata_code1":"5","metadata_code2":"6"},
     *  {"metadata_code1":"1","metadata_code2":"2"}]
     * }
     *
     * @param jsonArray      dataset rows to transform
     * @param adapterDataSet adapter dataset describing the rows
     * @return true when every row was transformed
     */
    public boolean transferJson(ArrayNode jsonArray, AdapterDataSet adapterDataSet) {
        for (Object objectRow : jsonArray) {
            if (objectRow instanceof JsonNode) {
                transferJsonRow((ObjectNode) objectRow, adapterDataSet);
            }
        }
        return true;
    }
    public void transferJsonRow(ObjectNode jsonObject, AdapterDataSet adapterDataSet) {
        Iterator<Map.Entry<String, JsonNode>> fields = jsonObject.fields();
        while (fields.hasNext()) {
            Map.Entry<String, JsonNode> next = fields.next();
            String key = next.getKey();
            JsonNode jsonNode = next.getValue();
            String value = jsonNode.asText();
            String stdValue = transferElem(adapterDataSet, key, value);
            if (jsonNode.isValueNode()) {
                //write the standardized value back onto the row
                jsonObject.put(key, stdValue);
            }
        }
    }
    /**
     * @param adapterDataSet adapter dataset the element belongs to
     * @param code           metadata (element) code
     * @param esbData        raw source value
     * @return String standard value
     * @modify 2015.09.16 airhead added value-to-code conversion
     */
    public String transferElem(AdapterDataSet adapterDataSet, String code, String esbData) {
        Map<String, AdapterMetaData> adapterMetaDataMap = adapterDataSet.getAdapterMetaDataMap();
        AdapterMetaData adapterMetaData = adapterMetaDataMap.get(code);
        AdapterDict adapterDict = adapterMetaData.getAdapterDict();
        if (adapterDict == null) {
            return esbData;
        }
        String ehrData = null;
        DictDataType adapterDictDataType = adapterMetaData.getAdapterDictDataType();
        if (adapterDictDataType == DictDataType.VALUE) {
            ehrData = adapterDict.getAdapterValueToCodeMap().get(esbData);
        } else if (adapterDictDataType == DictDataType.CODE) {
            ehrData = adapterDict.getAdapterCodeToCodeMap().get(esbData);
        }
        if (StringUtil.isEmpty(ehrData)) {
            return SqlConstants.EMPTY;
        }
        return ehrData;
    }
    /**
     * Build the Patient from the dataset envelope fields
     */
    private void setPatient() {
        patient = new Patient();
        patient.setPatientId(jsonObject.get("patient_id").asText());
        patient.setEventNo(jsonObject.get("event_no").asText());
        patient.setOrgCode(jsonObject.get("org_code").asText());
    }
}
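
For reference, a minimal standalone sketch (not part of this commit) of building the dataset envelope described in the Javadoc above with Jackson; the dataset and metadata codes and all values here are hypothetical:

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;

public class DataSetEnvelopeSketch {
    public static void main(String[] args) {
        ObjectMapper mapper = new ObjectMapper();
        ObjectNode envelope = mapper.createObjectNode();
        envelope.put("inner_version", "5.0");          // hypothetical standard version
        envelope.put("patient_id", "P0001");
        envelope.put("event_no", "E0001");
        envelope.put("code", "dataset_code");          // dataset code looked up by transfer()
        envelope.put("org_code", "ORG001");
        ArrayNode data = envelope.putArray("data");    // one object per source row
        data.addObject().put("metadata_code1", "5").put("metadata_code2", "6");
        data.addObject().put("metadata_code1", "1").put("metadata_code2", "2");
        System.out.println(envelope);                  // the JSON shape consumed by DataSetTransformer.setData()
    }
}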

+ 0 - 54
src/main/java/com/yihu/hos/crawler/format/DocumentTransformer.java

@@ -1,54 +0,0 @@
package com.yihu.hos.crawler.format;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.yihu.hos.crawler.model.adapter.AdapterDataSet;
import com.yihu.hos.crawler.model.patient.Patient;
import com.yihu.hos.crawler.model.transform.TransformType;
import java.io.IOException;
import java.util.Map;
/**
 * Created by Administrator on 2015/10/15.
 */
public class DocumentTransformer implements IDataTransformer  {
    private JsonNode jsonObject;
    protected AdapterScheme adapterScheme;
    protected Patient patient;
    public DocumentTransformer(AdapterScheme adapterScheme) {
        this.adapterScheme = adapterScheme;
    }
    @Override
    public Patient getPatient() {
        return patient;
    }
    /**
     * Unstructured documents need no transformation.
     *
     * @return always true
     */
    public boolean transfer(Map<String, AdapterDataSet> dataSetMap) {
        return true;
    }
    public JsonNode getJsonObject() {
        return jsonObject;
    }
    public String getData() {
        return jsonObject.toString();
    }
    public void setData(JsonNode data) {
        jsonObject = data;
    }
    public TransformType getTransformType() {
        return TransformType.DOCUMENT;
    }
}

+ 0 - 27
src/main/java/com/yihu/hos/crawler/format/IDataTransformer.java

@@ -1,27 +0,0 @@
package com.yihu.hos.crawler.format;
import com.fasterxml.jackson.databind.JsonNode;
import com.yihu.hos.crawler.model.adapter.AdapterDataSet;
import com.yihu.hos.crawler.model.patient.Patient;
import com.yihu.hos.crawler.model.transform.TransformType;
import java.util.Map;
/**
 * @author Airhead
 * @version 1.0
 * @created 22-5月-2015 11:24:26
 */
public interface IDataTransformer {
    boolean transfer(Map<String, AdapterDataSet> dataSetMap);
    String getData();
    void setData(JsonNode data);
    Patient getPatient();
    TransformType getTransformType();
}
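
A hedged driver sketch for the contract above (the helper class and flow are illustrative, not part of the repository): set the raw payload, run the adaptation, then read the standardized result.

import com.fasterxml.jackson.databind.JsonNode;
import com.yihu.hos.crawler.format.IDataTransformer;
import com.yihu.hos.crawler.model.adapter.AdapterDataSet;

import java.util.Map;

public class TransformerDriverSketch {
    //runs one payload through a transformer and returns the standardized data, or null on failure
    public static String adapt(IDataTransformer transformer, JsonNode envelope,
                               Map<String, AdapterDataSet> dataSetMap) {
        transformer.setData(envelope);              // raw dataset envelope
        if (!transformer.transfer(dataSetMap)) {    // value/code adaptation against the scheme
            return null;
        }
        return transformer.getData();               // standardized payload ready for upload
    }
}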

+ 0 - 235
src/main/java/com/yihu/hos/crawler/origin/FileSystemOrigin.java

@@ -1,235 +0,0 @@
package com.yihu.hos.crawler.origin;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.yihu.hos.common.Services;
import com.yihu.hos.core.datatype.DateUtil;
import com.yihu.hos.core.datatype.StringUtil;
import com.yihu.hos.core.file.FtpFileUtil;
import com.yihu.hos.core.log.Logger;
import com.yihu.hos.core.log.LoggerFactory;
import com.yihu.hos.crawler.format.AdapterScheme;
import com.yihu.hos.crawler.model.adapter.AdapterDataSet;
import com.yihu.hos.crawler.model.patient.Patient;
import com.yihu.hos.crawler.model.transform.LogicValues;
import com.yihu.hos.crawler.service.EsbHttp;
import com.yihu.hos.system.model.SystemDatasource;
import com.yihu.hos.system.model.SystemOrganization;
import com.yihu.hos.system.service.OrganizationManager;
import com.yihu.hos.web.framework.util.springutil.SpringBeanUtil;
import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
/**
 * File system (FTP) data origin
 *
 * @author Airhead
 * @version 1.0
 * @created 22-5-2015 11:24:24
 */
public class FileSystemOrigin implements IDataOrigin {
    private static Logger logger = LoggerFactory.getLogger(FileSystemOrigin.class);
    public static String dirHear = "/home/test/patient/";        //root directory for patient data files
    public static String fileType = "/image/";                    //sub-folder of the collected file type
    protected AdapterScheme adapterScheme;
    public FileSystemOrigin(AdapterScheme adapterScheme) {
        this.adapterScheme=adapterScheme;
    }
    /**
     * Collect data over FTP.
     * In unstructured archives the key_words format is tentatively "dataset-element"; it is converted to
     * "dataset.element" when the file is generated for upload, mainly because MongoDB keys do not allow the "." character.
     *
     * @param patient         patient index
     * @param orgAgencyOrigin data source
     * @param adapterDataSet  adapter dataset
     * @return JSON envelope of the collected data, or null on failure
     */
    @Override
    public String fecthData(Patient patient, SystemDatasource orgAgencyOrigin, AdapterDataSet adapterDataSet) {
        try {
            String data = null;
            String innerVersion= EsbHttp.getRemoteVersion(patient.getOrgCode());
            List<String> datas = null;
            String agencyCode = patient.getOrgCode();
            String ftpConfig = orgAgencyOrigin.getConfig();
            FtpFileUtil ftp = genFtpUtil(ftpConfig);
            String filePath = "";//remote ftp file path
            ObjectNode jsonObject = new ObjectMapper().createObjectNode();
            boolean patientId = true;
            boolean eventNo = true;
            if (patient.getPatientId() != null && !"".equals(patient.getPatientId())) {
                if (patient.getEventNo() != null && !"".equals(patient.getEventNo())) {
                    //文件路径
                    filePath = dirHear + agencyCode + "/" + patient.getPatientId() + "/" + patient.getEventNo() +  fileType;
                } else {
                    eventNo = false;
                }
            } else {
                patientId = false;
            }
            if (!patientId || !eventNo) {
                throw new Exception("采集病人数据集必须要有病人ID,事件号,数据集名:" + adapterDataSet.getAdapterDataSetT().getStdDatasetCode());
            }
            datas = ftp.readFileData(filePath);
            if (datas != null && datas.size() > 0) {
                data = datas.get(0);
            }
            //TODO: build the "data" payload (key_words and content)
            //assemble the JSON envelope
            jsonObject.put("patient_id", patient.getPatientId());
            jsonObject.put("event_no", patient.getEventNo());
            jsonObject.put("org_code", agencyCode);
            jsonObject.put("inner_version", innerVersion);
            jsonObject.put("create_date", DateUtil.toString(new Date(), DateUtil.DEFAULT_YMDHMSDATE_FORMAT));
            jsonObject.put("event_time", patient.getReferenceTime());//DateUtil.toString(patient.getReferenceTime(), DateUtil.DEFAULT_YMDHMSDATE_FORMAT)
            if (StringUtil.isEmpty(patient.getReUploadFlg())) {
                jsonObject.put("reUploadFlg", LogicValues.LOGIC_FALSE);
            } else {
                jsonObject.put("reUploadFlg", patient.getReUploadFlg());
            }
            return jsonObject.toString();
        } catch (SQLException e) {
//            e.printStackTrace();
            logger.error("", e);
        } catch (Exception e) {
//            e.printStackTrace();
            logger.error("", e);
        }
        return null;
    }
    /**
     * Get the patient list.
     *
     * @param orgAgencyOrigin data source
     * @param adapterDataSet  adapter dataset
     * @param condition       query condition
     * @return list of patients
     */
    @Override
    public List<Patient> getPatientList(SystemDatasource orgAgencyOrigin, AdapterDataSet adapterDataSet, Map<String, Object> condition) {
        ArrayList<Patient> patientList = new ArrayList<>();
        try {
            String ftpConfig = orgAgencyOrigin.getConfig();
            FtpFileUtil ftp = genFtpUtil(ftpConfig);
//			StdDataSet stdDataSet = adapterDataSet.getStdDataSet();
            OrganizationManager organizationManager= SpringBeanUtil.getService(Services.Organization);
            SystemOrganization orgAgency =organizationManager.getOrgById(orgAgencyOrigin.getOrgId());
            String agencyCode =orgAgency.getCode();
            List<Map<String, String>> patientMaps = ftp.getPatientList(dirHear, agencyCode);
            if (patientMaps != null && patientMaps.size() > 0) {
                for (Map<String, String> patientMap : patientMaps) {
                    Patient patient = new Patient();
                    String patientId = patientMap.get("patient_id");
                    String eventNo = patientMap.get("event_no");
                    if (orgAgency == null) {
                        logger.error("获取病人列表错误,无法获取机构代码.");
                        continue;
                    }
                    patient.setPatientId(patientId);
                    patient.setEventNo(eventNo);
                    patient.setReferenceTime(DateUtil.toString(new Date(), DateUtil.DEFAULT_YMDHMSDATE_FORMAT));//暂设置为当前时间
                    patient.setOrgCode(orgAgency.getCode());
                    patientList.add(patient);
                }
            }
        } catch (Exception e) {
//            e.printStackTrace();
            logger.error("", e);
        }
        return patientList;
    }
    /**
     * Remove the collected ftp data.
     *
     * @param patient         patient index
     * @param orgAgencyOrigin data source
     * @param adapterDataSet  adapter dataset
     * @return true when the remote files were removed
     */
    @Override
    public boolean clearData(Patient patient, SystemDatasource orgAgencyOrigin, AdapterDataSet adapterDataSet) {
        try {
            boolean clear = false;
            String agencyCode = patient.getOrgCode();
            String ftpConfig = orgAgencyOrigin.getConfig();
            FtpFileUtil ftp = genFtpUtil(ftpConfig);
            String filePath = "";//远程ftp文件路径
            boolean patientId = true;
            boolean eventNo = true;
            if (patient.getPatientId() != null && !"".equals(patient.getPatientId())) {
                if (patient.getEventNo() != null && !"".equals(patient.getEventNo())) {
                    //文件路径
                    filePath = dirHear + agencyCode + "/" + patient.getPatientId() + "/" + patient.getEventNo()  + fileType;
                } else {
                    eventNo = false;
                }
            } else {
                patientId = false;
            }
            if (!patientId || !eventNo) {
                throw new Exception("清除病人数据集必须要有病人ID,事件号,数据集名:" + adapterDataSet.getAdapterDataSetT().getStdDatasetCode());
            }
            ftp.connect();
            clear = ftp.removeData(filePath);
            ftp.closeConnect();
            return clear;
        } catch (SQLException e) {
            e.printStackTrace();
            logger.error("", e);
        } catch (Exception e) {
            e.printStackTrace();
            logger.error("", e);
        }
        return false;
    }
    @Override
    public Date getServerDateTime(SystemDatasource orgAgencyOrigin) {
        return null;
    }
    public void finalize() throws Throwable {
    }
    public FtpFileUtil genFtpUtil(String ftpConfig) {
        ObjectMapper mapper = new ObjectMapper();
        FtpFileUtil ftpUtil = null;
        JsonNode rootNode = null;
        try {
            rootNode = mapper.readValue(ftpConfig, JsonNode.class);
            String username = rootNode.path("username").asText();
            String password = rootNode.path("password").asText();
            String host = rootNode.path("host").asText();
            int port = rootNode.path("port").asInt();
            ftpUtil = new FtpFileUtil(username, password, host, port);
        } catch (IOException e) {
            logger.error("获取Ftp服务器配置失败", e);
            e.printStackTrace();
        }
        return ftpUtil;
    }
}//end FileSystemOrigin
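
For reference, genFtpUtil above expects the datasource config JSON to carry username, password, host and port; a hedged usage sketch, with all values hypothetical:

import com.yihu.hos.core.file.FtpFileUtil;
import com.yihu.hos.crawler.origin.FileSystemOrigin;

public class FtpConfigSketch {
    public static void main(String[] args) {
        // Hypothetical FTP datasource config; the field names match what genFtpUtil reads above.
        String ftpConfig = "{\"username\":\"ftpuser\",\"password\":\"secret\",\"host\":\"192.168.1.10\",\"port\":21}";
        FtpFileUtil ftp = new FileSystemOrigin(null).genFtpUtil(ftpConfig);   // adapterScheme is not used by genFtpUtil
        System.out.println(ftp != null ? "parsed ftp config" : "invalid ftp config");
    }
}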

+ 0 - 47
src/main/java/com/yihu/hos/crawler/origin/IDataOrigin.java

@@ -1,47 +0,0 @@
package com.yihu.hos.crawler.origin;
import com.yihu.hos.crawler.model.adapter.AdapterDataSet;
import com.yihu.hos.crawler.model.patient.Patient;
import com.yihu.hos.system.model.SystemDatasource;
import java.util.Date;
import java.util.List;
import java.util.Map;
/**
 * @author Airhead
 * @version 1.0
 * @created 22-5-2015 11:24:26
 */
public interface IDataOrigin {
    String fecthData(Patient patient, SystemDatasource orgAgencyOrigin, AdapterDataSet adapterDataSet);
    List<Patient> getPatientList(SystemDatasource orgAgencyOrigin, AdapterDataSet adapterDataSet, Map<String, Object> condition);
    boolean clearData(Patient patient, SystemDatasource orgAgencyOrigin, AdapterDataSet adapterDataSet);
    Date getServerDateTime(SystemDatasource orgAgencyOrigin);
    enum OriginType {
        /**
         * Database
         */
        DB,
        /**
         * RESTful Web Service
         */
        REST,
        /**
         * FileSystem
         */
        FS,
        /**
         * SOAP Web Service
         */
        SOAP
    }
}

+ 0 - 323
src/main/java/com/yihu/hos/crawler/service/CrawlerFlowManager.java

@@ -1,323 +0,0 @@
package com.yihu.hos.crawler.service;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.yihu.hos.core.datatype.CollectionUtil;
import com.yihu.hos.core.datatype.StringUtil;
import com.yihu.hos.core.log.Logger;
import com.yihu.hos.core.log.LoggerFactory;
import com.yihu.hos.crawler.dao.CrawlerDatasetDao;
import com.yihu.hos.crawler.dao.CrawlerFlowDao;
import com.yihu.hos.crawler.dao.CrawlerFlowHeadDao;
import com.yihu.hos.crawler.model.adapter.AdapterDataSet;
import com.yihu.hos.crawler.model.config.SysConfig;
import com.yihu.hos.crawler.model.flow.CrawlerDataSetModel;
import com.yihu.hos.crawler.model.flow.CrawlerFlowHeadModel;
import com.yihu.hos.crawler.model.flow.CrawlerFlowModel;
import com.yihu.hos.crawler.model.patient.Patient;
import com.yihu.hos.standard.model.adapter.AdapterDatasetModel;
import com.yihu.hos.standard.model.adapter.AdapterDictEntryModel;
import com.yihu.hos.standard.model.adapter.AdapterSchemeVersionModel;
import com.yihu.hos.standard.service.adapter.AdapterDatasetService;
import com.yihu.hos.standard.service.adapter.AdapterSchemeVersionService;
import com.yihu.hos.standard.service.bo.AdapterVersion;
import com.yihu.hos.web.framework.model.DictItem;
import com.yihu.hos.web.framework.util.springutil.SpringBeanUtil;
import java.util.*;
public class CrawlerFlowManager {
    private static Logger logger = LoggerFactory.getLogger(CrawlerFlowManager.class);
    private static DataCollectDispatcher dispatch = DataCollectDispatcher.getInstance();
    private static SysConfig sysConfig = SysConfig.getInstance();
    private Map<String, AdapterDataSet> adapterDataSetMap;
    private List<CrawlerFlowHeadModel> crawlerFlowHeadModelList;
    private Map<String, List<CrawlerFlowModel>> crawlerFlowDatasetMap;
    private Map<String, List<CrawlerFlowModel>> crawlerFlowMetadataMap;
    private Boolean adapterFlg = false;
    private List<DictItem> datasetList;
    private String schemeVersion;
    public CrawlerFlowManager(List datasetList, String schemeVersion) {
        this.datasetList = datasetList;
        this.schemeVersion = schemeVersion;
    }
    public CrawlerFlowManager() {
    }
    public void finalize() throws Throwable {
    }
    public String dataCrawler(Map<String, Object> condition) {
        Integer count = 0;
        Integer totalCount = 0;
        String message;
        /**
         * 适配基本数据准备
         */
        if (!getDataForPrepare()) {
            message = "适配数据尚未准备";
            logger.error(message);
            return message;
        }
        List<Patient> patientList = dispatch.getPatientList(condition, adapterDataSetMap);
        if (!CollectionUtil.isEmpty(patientList)) {
            totalCount = patientList.size();
            for (Patient patient : patientList) {
                boolean result = collectProcess(patient);
                if (result) {
                    count++;
                }
            }
        }
        message = "本次采集病人共" + totalCount + "条,成功采集信息" + count + "条";
        return message;
    }
    public boolean collectProcess(Patient patient) {
        if (!getDataForPrepare()) {
            logger.error("适配数据尚未准备完毕");
            return false;
        }
        patient.setReUploadFlg(StringUtil.toString(false));
        logger.trace("采集->注册->打包上传,任务ID:,patient_id:" + patient.getPatientId() + ", event_no:" + patient.getEventNo());
        try {
            /**
             * 获取token
             */
            if (!dispatch.getToken()) {
                logger.error("token获取失败");
                return false;
            }
            /**
             * 获取远程版本
             */
//            if (!dispatch.getRemoteVersion(patient.getOrgCode())) {
//                logger.error("远程版本获取失败");
//                return false;
//            }
            /**
             * 获取版本
             */
            if (StringUtil.isEmpty(sysConfig.getVersionMap().get(patient.getOrgCode()))) {
                logger.error("版本获取失败");
                return false;
            }
            Map<String, JsonNode> dataMap = new HashMap<>();
            for (CrawlerFlowHeadModel crawlerFlowHeadModel : crawlerFlowHeadModelList) {
                /**
                 * 采集信息
                 */
                String datasetCode = crawlerFlowHeadModel.getDatasetCode();
                AdapterDataSet adapterDataSet = adapterDataSetMap.get(datasetCode);
                JsonNode data = dispatch.fecthData(patient, adapterDataSet);
                if (StringUtil.isEmpty(data)) {
                    continue;
                }
                dataMap.put(datasetCode, data);
                /**
                 * 根据采集流程递归查询
                 */
                getDataByCrawlerFlow(datasetCode, patient, dataMap);
                if (sysConfig.getRegisterDataSet().equals(adapterDataSet.getAdapterDataSetT().getStdDatasetCode())) {
                    if (!StringUtil.isEmpty(data.get("data")) && !StringUtil.isEmpty(data.get("data").get(0))) {
                        if (!StringUtil.isEmpty(data.get("data").get(0).get(SysConfig.getInstance().getRegisterIdCardNo()))) {
                            logger.info("注册病人");
                            dispatch.register(patient, data.toString());
                        }
                    }
                }
            }
            logger.info("采集病人成功,patient_id:" + patient.getPatientId() + ", event_no:" + patient.getEventNo());
            /**
             * 上传档案
             */
            try {
                if (!CollectionUtil.isEmpty(dataMap.keySet())) {
                    if (!dispatch.upload(dataMap, patient, adapterDataSetMap)) {
                        logger.error("上传档案失败");
                        return false;
                    }
                }
            } catch (Exception e) {
                logger.error("档案上传失败,patient_id:" + patient.getPatientId() + ", event_no:" + patient.getEventNo());
                return false;
            }
        } catch (Exception e) {
            logger.error("采集病人失败,patient_id:" + patient.getPatientId() + ", event_no:" + patient.getEventNo(), e);
            return false;
        }
        return true;
    }
    public Boolean getDataByCrawlerFlow(String preDatasetCode, Patient patient, Map<String, JsonNode> dataMap) {
        try {
            JsonNode preData = dataMap.get(preDatasetCode);
            Map<String, String> relationValueMap = new HashMap<>();
            List<CrawlerFlowModel> crawlerFlowDatasetList = crawlerFlowDatasetMap.get(preDatasetCode);
            for (CrawlerFlowModel crawlerFlowDataset : crawlerFlowDatasetList) {
                List<CrawlerFlowModel> crawlerFlowMetadataList = crawlerFlowMetadataMap.get(crawlerFlowDataset.getDatasetCode());
                for (CrawlerFlowModel crawlerFlowMetadata : crawlerFlowMetadataList) {
                    String metadataCode = crawlerFlowMetadata.getMetadataCode();
                    metadataCode = StringUtil.substring(metadataCode, metadataCode.indexOf("-") + 1, metadataCode.length());
                    String inputMetadataCode = crawlerFlowMetadata.getInputMetadataCode();
                    inputMetadataCode = StringUtil.substring(inputMetadataCode, inputMetadataCode.indexOf("-") + 1, inputMetadataCode.length());
                    Iterator<JsonNode> array = preData.get("data").iterator();
                    while (array.hasNext()) {
                        JsonNode dataNode = array.next();
                        relationValueMap.put(metadataCode, dataNode.get(inputMetadataCode).asText());
                    }
                }
                String datasetCode = crawlerFlowDataset.getDatasetCode();
                AdapterDataSet adapterDataSet = adapterDataSetMap.get(datasetCode);
                String data = dispatch.fecthData(patient, adapterDataSet, relationValueMap);
                if (StringUtil.isEmpty(data)) {
                    continue;
                } else {
                    ObjectMapper objectMapper = new ObjectMapper();
                    JsonNode jsonObject = objectMapper.readTree(data);
                    dataMap.put(datasetCode, jsonObject);
                }
                getDataByCrawlerFlow(datasetCode, patient, dataMap);
            }
            return true;
        } catch (Exception e) {
            return false;
        }
    }
    public Boolean getDataForPrepare() {
        if (adapterFlg) {
            return true;
        }
        logger.info("适配基本相关数据准备");
        try {
            adapterDataSetMap = new HashMap<>();
            AdapterVersion adapterVersion;
            List<Integer> datasetIdList = new ArrayList<>();
            /**
             * 推模式接口调用,默认只使用最新版本的适配
             */
            AdapterSchemeVersionService adapterSchemeVersionService = SpringBeanUtil.getService(AdapterSchemeVersionService.BEAN_ID);
            AdapterSchemeVersionModel adapterSchemeVersionModel;
            if (datasetList.isEmpty()) {
                adapterSchemeVersionModel = adapterSchemeVersionService.getEhrAdapterVersionLasted();
                if (adapterSchemeVersionModel == null) {
                    logger.error("获取最新ehr适配版本错误");
                    return false;
                } else {
                    this.schemeVersion = adapterSchemeVersionModel.getVersion();
                    adapterVersion = new AdapterVersion(schemeVersion);
                }
                /**
                 * 获取该版本下数据集
                 */
                CrawlerDatasetDao crawlerDatasetDao = SpringBeanUtil.getService(CrawlerDatasetDao.BEAN_ID);
                List<CrawlerDataSetModel> crawlerDataSetModelList = crawlerDatasetDao.getCrawlerDatasetList(adapterSchemeVersionModel.getId());
                if (CollectionUtil.isEmpty(crawlerDataSetModelList)) {
                    return false;
                }
                for (CrawlerDataSetModel crawlerDataSetModel : crawlerDataSetModelList) {
                    datasetIdList.add(crawlerDataSetModel.getDatasetId());
                }
            } else {
                /**
                 * 拉模式接口调用,由任务配置决定适配版本
                 */
                adapterSchemeVersionModel = adapterSchemeVersionService.getByVersion(schemeVersion);
                adapterVersion = new AdapterVersion(schemeVersion);
                for (DictItem dictItem : datasetList) {
                    datasetIdList.add(Integer.valueOf(dictItem.getCode()));
                }
            }
            AdapterDatasetService adapterDatasetService = SpringBeanUtil.getService(AdapterDatasetService.BEAN_ID);
            /**
             * 字典项初始化
             */
            List<AdapterDictEntryModel> adapterDictEntryModelList = adapterDatasetService.getList(AdapterDictEntryModel.class, adapterVersion.getDictEntryTableName(), null, null, null, null);
            Map<Integer, List<AdapterDictEntryModel>> adapterDictEntryModelMap = new HashMap<>();
            for (AdapterDictEntryModel adapterDictEntryModel : adapterDictEntryModelList) {
                List<AdapterDictEntryModel> entryModelList = adapterDictEntryModelMap.get(adapterDictEntryModel.getStdDictId());
                if (CollectionUtil.isEmpty(entryModelList)) {
                    entryModelList = new ArrayList<>();
                }
                entryModelList.add(adapterDictEntryModel);
                adapterDictEntryModelMap.put(adapterDictEntryModel.getStdDictId(), entryModelList);
            }
            /**
             * 数据集初始化
             */
            List<AdapterDatasetModel> adapterDataSetModelList = adapterDatasetService.getListByAdapterDatasetIdList(adapterVersion, datasetIdList);
            for (AdapterDatasetModel adapterDatasetModel : adapterDataSetModelList) {
                adapterDataSetMap.put(adapterDatasetModel.getStdDatasetCode(), new AdapterDataSet(adapterDatasetModel, adapterVersion, adapterDictEntryModelMap));
            }
            /**
             * 获取采集流程表头
             */
            CrawlerFlowHeadDao crawlerFlowHeadDao = SpringBeanUtil.getService(CrawlerFlowHeadDao.BEAN_ID);
            CrawlerFlowDao crawlerFlowDao = SpringBeanUtil.getService(CrawlerFlowDao.BEAN_ID);
            crawlerFlowHeadModelList = crawlerFlowHeadDao.getCrawlerFlowHeadList(adapterSchemeVersionModel.getId());
            List<CrawlerFlowModel> crawlerFlowModelList = crawlerFlowDao.getCrawlerFlowList(adapterSchemeVersionModel.getId());
            crawlerFlowDatasetMap = new HashMap<>();
            crawlerFlowMetadataMap = new HashMap<>();
            /**
             * 获取关联表
             */
            for (CrawlerFlowModel crawlerFlowModel : crawlerFlowModelList) {
                List<CrawlerFlowModel> datasetList = new ArrayList<>();
                List<CrawlerFlowModel> metadataList = new ArrayList<>();
                String inputDatasetCode = crawlerFlowModel.getInputDatasetCode();
                String datasetCode = crawlerFlowModel.getDatasetCode();
                if (StringUtil.isEmpty(inputDatasetCode)) {
                    continue;
                }
                if (crawlerFlowDatasetMap.containsKey(inputDatasetCode)) {
                    datasetList = crawlerFlowDatasetMap.get(inputDatasetCode);
                }
                datasetList.add(crawlerFlowModel);
                crawlerFlowDatasetMap.put(inputDatasetCode, datasetList);
                if (crawlerFlowMetadataMap.containsKey(datasetCode)) {
                    metadataList = crawlerFlowMetadataMap.get(datasetCode);
                }
                metadataList.add(crawlerFlowModel);
                crawlerFlowMetadataMap.put(datasetCode, metadataList);
            }
//            SysConfig.getInstance().setVersionMap(new HashMap<>());
            adapterFlg = true;
            return true;
        } catch (Exception e) {
            adapterFlg = false;
            return false;
        }
    }
    /**
     * Parse the patient index information.
     *
     * @param patientInfo patient index JSON
     * @return the parsed Patient, or null when the JSON is invalid
     */
    public Patient parsePatient(String patientInfo) {
        try {
            ObjectMapper mapper = new ObjectMapper();
            Patient patient = mapper.readValue(patientInfo, Patient.class);
            return patient;
        } catch (Exception e) {
            logger.error("patient参数错误:" + patientInfo, e);
            return null;
        }
    }
    public void setAdapterFlg(Boolean adapterFlg) {
        this.adapterFlg = adapterFlg;
    }
}
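
A hedged usage sketch of the pull-mode entry point above: dataCrawler(condition) reads "beginDate" and "endDate" as java.util.Date values (the key names and types come from DataCollectDispatcher.getPatientList shown later in this diff; everything else here is hypothetical):

import com.yihu.hos.crawler.service.CrawlerFlowManager;

import java.text.SimpleDateFormat;
import java.util.HashMap;
import java.util.Map;

public class CrawlerFlowSketch {
    public static void main(String[] args) throws Exception {
        SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        Map<String, Object> condition = new HashMap<>();
        condition.put("beginDate", df.parse("2016-01-01 00:00:00"));   // collection window start
        condition.put("endDate", df.parse("2016-01-02 00:00:00"));     // collection window end
        String message = new CrawlerFlowManager().dataCrawler(condition);
        System.out.println(message);                                    // summary of how many patients were collected
    }
}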

+ 0 - 243
src/main/java/com/yihu/hos/crawler/service/CrawlerManager.java

@@ -1,243 +0,0 @@
package com.yihu.hos.crawler.service;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.yihu.hos.core.datatype.CollectionUtil;
import com.yihu.hos.core.datatype.StringUtil;
import com.yihu.hos.core.log.Logger;
import com.yihu.hos.core.log.LoggerFactory;
import com.yihu.hos.crawler.model.adapter.AdapterDataSet;
import com.yihu.hos.crawler.model.config.SysConfig;
import com.yihu.hos.crawler.model.patient.Patient;
import com.yihu.hos.standard.model.adapter.AdapterDatasetModel;
import com.yihu.hos.standard.model.adapter.AdapterDictEntryModel;
import com.yihu.hos.standard.model.adapter.AdapterSchemeVersionModel;
import com.yihu.hos.standard.service.adapter.AdapterDatasetService;
import com.yihu.hos.standard.service.adapter.AdapterSchemeVersionService;
import com.yihu.hos.standard.service.bo.AdapterVersion;
import com.yihu.hos.web.framework.model.DictItem;
import com.yihu.hos.web.framework.util.springutil.SpringBeanUtil;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class CrawlerManager {
    private static Logger logger = LoggerFactory.getLogger(CrawlerManager.class);
    private static DataCollectDispatcher dispatch = DataCollectDispatcher.getInstance();
    private static SysConfig sysConfig=SysConfig.getInstance();
    private static Map<String, Map<String, AdapterDataSet>> adapterDataSetVersionMap = new HashMap<>();
    private Map<String, AdapterDataSet> adapterDataSetMap;
    private Boolean adapterFlg = false;
    private String schemeVersion;
    private List<DictItem> datasetList;
    public CrawlerManager(List datasetList, String schemeVersion) {
        this.datasetList = datasetList;
        this.schemeVersion = schemeVersion;
    }
    public CrawlerManager() {
    }
    public void finalize() throws Throwable {
    }
    public String dataCrawler(Map<String, Object> condition) {
        Integer count = 0;
        Integer totalCount = 0;
        String message;
        /**
         * 适配基本数据准备
         */
        if (!getDataForPrepare()) {
            message = "适配数据尚未准备";
            logger.error(message);
            return message;
        }
        List<Patient> patientList = dispatch.getPatientList(condition, adapterDataSetMap);
        if (!CollectionUtil.isEmpty(patientList)) {
            totalCount = patientList.size();
            for (Patient patient : patientList) {
                Boolean result = collectProcess(patient);
                if (result) {
                    count++;
                }
            }
        }
        message = "本次采集病人共" + totalCount + "条,成功采集信息"+ count + "条";
        return message;
    }
    //collect and upload a single patient
    public Boolean collectProcess(Patient patient) {
        if (!getDataForPrepare()) {
            logger.error("适配数据尚未准备");
            return false;
        }
        patient.setReUploadFlg(StringUtil.toString(false));
        logger.trace("采集->注册->打包上传,patient_id:" + patient.getPatientId() + ", event_no:" + patient.getEventNo());
        try {
            //getToken
            if (!dispatch.getToken()) {
                logger.error("token获取失败");
                return false;
            }
            //getRemoteVersion
//            if (!dispatch.getRemoteVersion(patient.getOrgCode())) {
//                logger.error("远程版本获取失败");
//                return false;
//            }
            if(StringUtil.isEmpty(sysConfig.getVersionMap().get(patient.getOrgCode()))){
                logger.error("版本获取失败");
                return false;
            }
            Map<String, AdapterDataSet> dataSetMap = new HashMap<>();
            Map<String, JsonNode> dataMap = new HashMap<>();
            for (String key : adapterDataSetMap.keySet()) {
                /**
                 * 获取数据
                 */
                AdapterDataSet adapterDataSet = adapterDataSetMap.get(key);
                JsonNode jsonObject = dispatch.fecthData(patient, adapterDataSet);
                if (StringUtil.isEmpty(jsonObject)) {
                    continue;
                }
                dataSetMap.put(adapterDataSet.getAdapterDataSetT().getStdDatasetCode(), adapterDataSet);
                dataMap.put(key, jsonObject);
                /**
                 * 注册病人
                 */
                if (SysConfig.getInstance().getRegisterDataSet().equals(adapterDataSet.getAdapterDataSetT().getStdDatasetCode())) {
                    if (!StringUtil.isEmpty(jsonObject.get("data")) && !StringUtil.isEmpty(jsonObject.get("data").get(0))) {
                        if (!StringUtil.isEmpty(jsonObject.get("data").get(0).get(SysConfig.getInstance().getRegisterIdCardNo()))) {
                            logger.info("注册病人");
                            dispatch.register(patient, jsonObject.toString());
                        }
                    }
                }
            }
            logger.info("采集病人成功,patient_id:" + patient.getPatientId() + ", event_no:" + patient.getEventNo());
            //上传档案
            logger.info("上传病人档案");
            try {
                if (!CollectionUtil.isEmpty(dataMap.keySet())) {
                    if (!dispatch.upload(dataMap, patient, dataSetMap)) {
                        logger.error("上传档案失败");
                        return false;
                    }
                }
            } catch (Exception e) {
                logger.error("档案上传失败,patient_id:" + patient.getPatientId() + ", event_no:" + patient.getEventNo());
                return false;
            }
        } catch (Exception e) {
            logger.error("采集病人失败,patient_id:" + patient.getPatientId() + ", event_no:" + patient.getEventNo(), e);
            return false;
        }
        return true;
    }
    public Boolean getDataForPrepare() {
        if (adapterFlg) {
            return true;
        }
        logger.info("适配基本相关数据准备");
        try {
            adapterDataSetMap = new HashMap<>();
            AdapterVersion adapterVersion;
            AdapterDatasetService adapterDatasetService = SpringBeanUtil.getService(AdapterDatasetService.BEAN_ID);
            List<AdapterDatasetModel> adapterDataSetModelList = new ArrayList<>();
            if (!CollectionUtil.isEmpty(datasetList)) {
                /**
                 * 拉模式接口调用,由任务配置决定适配版本
                 */
                adapterVersion = new AdapterVersion(schemeVersion);
                List<Integer> datasetIdList = new ArrayList<>();
                for (DictItem dictItem : datasetList) {
                    datasetIdList.add(Integer.parseInt(dictItem.getCode()));
                }
                adapterDataSetModelList = adapterDatasetService.getAdapterDatasetByAdapterIdList(adapterVersion, datasetIdList);
            } else {
                /**
                 * 推模式接口调用,默认只使用最新版本的适配
                 */
                AdapterSchemeVersionService adapterSchemeVersionService = SpringBeanUtil.getService(AdapterSchemeVersionService.BEAN_ID);
                AdapterSchemeVersionModel adapterSchemeVersionModel = adapterSchemeVersionService.getEhrAdapterVersionLasted();
                if (adapterSchemeVersionModel == null) {
                    logger.error("获取最新ehr适配版本错误");
                    return false;
                } else {
                    this.schemeVersion = adapterSchemeVersionModel.getVersion();
                    adapterVersion = new AdapterVersion(schemeVersion);
                }
                if (adapterDataSetVersionMap.get(schemeVersion) != null) {
                    adapterDataSetMap = adapterDataSetVersionMap.get(schemeVersion);
                    adapterFlg = true;
                    return true;
                }
                Map<String, String> condition = new HashMap<>();
                condition.put("column", "adapter_dataset_code");
                ObjectMapper mapper = new ObjectMapper();
                String conditionStr = mapper.writeValueAsString(condition);
                adapterDataSetModelList = adapterDatasetService.getAdapterDatasetNotNullList(adapterVersion, conditionStr);
            }
            /**
             * 字典项初始化
             */
            List<AdapterDictEntryModel> adapterDictEntryModelList = adapterDatasetService.getList(AdapterDictEntryModel.class, adapterVersion.getDictEntryTableName(), null, null, null, null);
            Map<Integer, List<AdapterDictEntryModel>> adapterDictEntryModelMap = new HashMap<>();
            for (AdapterDictEntryModel adapterDictEntryModel : adapterDictEntryModelList) {
                List<AdapterDictEntryModel> entryModelList = adapterDictEntryModelMap.get(adapterDictEntryModel.getStdDictId());
                if (CollectionUtil.isEmpty(entryModelList)) {
                    entryModelList = new ArrayList<>();
                }
                entryModelList.add(adapterDictEntryModel);
                adapterDictEntryModelMap.put(adapterDictEntryModel.getStdDictId(), entryModelList);
            }
            /**
             * 数据集初始化
             */
            for (AdapterDatasetModel adapterDatasetModel : adapterDataSetModelList) {
                adapterDataSetMap.put(adapterDatasetModel.getStdDatasetCode(), new AdapterDataSet(adapterDatasetModel, adapterVersion, adapterDictEntryModelMap));
            }
            adapterDataSetVersionMap.put(schemeVersion, adapterDataSetMap);
            adapterFlg = true;
            return true;
        } catch (Exception e) {
            return false;
        }
    }
    /**
     * Parse the patient index information.
     *
     * @param patientInfo patient index JSON
     * @return the parsed Patient, or null when the JSON is invalid
     */
    public Patient parsePatient(String patientInfo) {
        try {
            ObjectMapper mapper = new ObjectMapper();
            Patient patient = mapper.readValue(patientInfo, Patient.class);
            return patient;
        } catch (Exception e) {
            logger.error("patient参数错误:" + patientInfo, e);
            return null;
        }
    }
    public void setSchemeVersion(String schemeVersion) {
        this.schemeVersion = schemeVersion;
    }
    public void setDatasetList(List<DictItem> datasetList) {
        this.datasetList = datasetList;
    }
}
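
The /crawler/patient endpoint removed from CrawlerController above drove this class; a hedged standalone sketch of the same call sequence (the patient JSON property names depend on Patient's Jackson mapping and are assumptions here):

import com.yihu.hos.crawler.model.patient.Patient;
import com.yihu.hos.crawler.service.CrawlerManager;

public class CrawlerManagerSketch {
    public static void main(String[] args) {
        CrawlerManager crawlerManager = new CrawlerManager();
        // Assumed property names; the real ones are defined by the Patient class.
        String patientInfo = "{\"patientId\":\"P0001\",\"eventNo\":\"E0001\",\"orgCode\":\"ORG001\"}";
        Patient patient = crawlerManager.parsePatient(patientInfo);
        if (patient != null) {
            Boolean ok = crawlerManager.collectProcess(patient);   // collect, register and upload
            System.out.println(ok ? "collected" : "collect failed");
        }
    }
}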

+ 5 - 5
src/main/java/com/yihu/hos/crawler/service/CrawlerService.java

@@ -185,8 +185,8 @@ public class CrawlerService {
        for (JsonNode obj : jsonList) {
            if (obj.has("schemeId") && obj.has("versionId")) {
                String schemeId = obj.get("schemeId").toString();
                String versionId = obj.get("versionId").toString();
                String schemeId = obj.get("schemeId").asText();
                String versionId = obj.get("versionId").asText();
                AdapterSchemeVersionModel versionModel = (AdapterSchemeVersionModel) adapterSchemeVersionService.get(Integer.valueOf(versionId));
                if (versionModel != null) {
                    AdapterVersion adapterVersion = new AdapterVersion(versionModel.getVersion());
@@ -196,7 +196,7 @@ public class CrawlerService {
                    //根据id字符串获取编排数据集
                    if (obj.has("dataSets")) {
                        List<Integer> newDatasetIdList = new ArrayList<>();
                        String dataSetStr = obj.get("dataSets").toString();
                        String dataSetStr = obj.get("dataSets").asText();
                        if (StringUtils.isNotBlank(dataSetStr)) {
                            String[] IdList = dataSetStr.split(",");
                            for (String aIdList : IdList) {
@@ -220,8 +220,8 @@ public class CrawlerService {
                    }
                    //如果保存传入编排映射关系,进行保存操作
                    if (obj.has("relation") && !Objects.equals(obj.get("relation").toString(), "")) {
                        saveDataSetRelation(versionId, obj.get("relation").toString());
                    if (obj.has("relation") && !Objects.equals(obj.get("relation").asText(), "")) {
                        saveDataSetRelation(versionId, obj.get("relation").asText());
                    }
                }
            }
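
For context on the toString() to asText() changes above, which are likely related to the save error mentioned in the commit message: on a textual JsonNode, toString() returns the JSON literal including its quotes, while asText() returns the raw string. A small sketch:

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class JsonNodeTextSketch {
    public static void main(String[] args) throws Exception {
        JsonNode node = new ObjectMapper().readTree("{\"versionId\":\"12\"}");
        System.out.println(node.get("versionId").toString());    // prints the quoted literal: "12"
        System.out.println(node.get("versionId").asText());      // prints the raw text: 12
        Integer.valueOf(node.get("versionId").asText());          // parses cleanly; the quoted form would throw
    }
}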

+ 0 - 381
src/main/java/com/yihu/hos/crawler/service/DataCollectDispatcher.java

@@ -1,381 +0,0 @@
package com.yihu.hos.crawler.service;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.yihu.hos.core.datatype.DateUtil;
import com.yihu.hos.core.datatype.StringUtil;
import com.yihu.hos.core.file.FileUtil;
import com.yihu.hos.core.log.Logger;
import com.yihu.hos.core.log.LoggerFactory;
import com.yihu.hos.crawler.format.DataSetTransformer;
import com.yihu.hos.crawler.model.adapter.AdapterDataSet;
import com.yihu.hos.crawler.model.adapter.AdapterMetaData;
import com.yihu.hos.crawler.model.config.SysConfig;
import com.yihu.hos.crawler.model.patient.Patient;
import com.yihu.hos.crawler.model.patient.PatientIdentity;
import com.yihu.hos.crawler.model.transform.EhrCondition;
import com.yihu.hos.crawler.model.transform.LogicValues;
import com.yihu.hos.standard.model.adapter.AdapterMetadataModel;
import com.yihu.hos.web.framework.constrant.SqlConstants;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.*;
public class DataCollectDispatcher {
    private static DataCollectDispatcher ourInstance = new DataCollectDispatcher();
    private static Logger logger = LoggerFactory.getLogger(DataCollectDispatcher.class);
    private String token;
    private DataCollectDispatcher() {
    }
    public static DataCollectDispatcher getInstance() {
        return ourInstance;
    }
    public void finalize() throws Throwable {
    }
    public Boolean getToken() {
        try {
            token = EsbHttp.getToken();
            if (StringUtil.isEmpty(token)) {
                return false;
            }
            return true;
        } catch (Exception e) {
            logger.error("本次任务执行失败,获取token失败!");
            return false;
        }
    }
    public Boolean getRemoteVersion(String orgCode) {
        try {
            if (StringUtil.isEmpty(SysConfig.getInstance().getVersionMap().get(orgCode))) {
                String stdVersion = EsbHttp.getRemoteVersion(orgCode);
                if (StringUtil.isEmpty(stdVersion)) {
                    return false;
                }
                SysConfig.getInstance().getVersionMap().put(orgCode, stdVersion);
            }
            return true;
        } catch (Exception e) {
            logger.error("本次任务执行失败,获取token失败!");
            return false;
        }
    }
    /**
     * 获取病人列表
     *
     * @param condition
     * @return List<PatientT>
     */
    public List<Patient> getPatientList(Map<String, Object> condition, Map<String, AdapterDataSet> adapterDataSetMap) {
        ArrayList<Patient> patientList = new ArrayList<>();
        SimpleDateFormat df = new SimpleDateFormat(DateUtil.DEFAULT_YMDHMSDATE_FORMAT);
        for (String key : adapterDataSetMap.keySet()) {
            PatientIdentity patientIdentity = SysConfig.getInstance().getPatientIdentity(key);
            if (patientIdentity == null) {
                continue;
            }
            try {
                //获取病人列表字段检验
                Map<String, String> propertyMap = getItemList(adapterDataSetMap.get(key));
                if (propertyMap == null) {
                    return patientList;
                }
                //请求参数
                Date beginDate = (Date) condition.get("beginDate");
                String beginTime = df.format(beginDate);
                Date endDate = (Date) condition.get("endDate");
                String endTime = df.format(endDate);
                List<EhrCondition> queryParams = new ArrayList<>();
                queryParams.add(new EhrCondition(" > ", patientIdentity.getRefTimeCode(), beginTime));
                queryParams.add(new EhrCondition(" < ", patientIdentity.getRefTimeCode(), endTime));
                //Rest 接口请求
                String rootStr = EsbHttp.getPatientList(adapterDataSetMap.get(key), queryParams);
                if (StringUtil.isEmpty(rootStr)) {
                    return null;
                }
                ObjectMapper mapper = new ObjectMapper();
                JsonNode resultNode = mapper.readValue(rootStr, JsonNode.class);
                JsonNode patientNode = resultNode.path("detailModelList");
                boolean isArr = patientNode.isArray();
                if (isArr) {
                    Iterator<JsonNode> array = patientNode.iterator();
                    while (array.hasNext()) {
                        JsonNode node = array.next();
                        Patient patient = new Patient();
                        String patientId = node.path(propertyMap.get(SqlConstants.PATIENT_ID)).asText();
                        String eventNo = node.path(propertyMap.get(SqlConstants.EVENT_NO)).asText();
                        String refTime = node.path(propertyMap.get(SqlConstants.EVENT_TIME)).asText();
                        String orgCode = node.path(SqlConstants.ORG_CODE.toUpperCase()).asText();
                        patient.setPatientId(patientId);
                        patient.setEventNo(eventNo);
                        patient.setReferenceTime(refTime);
                        patient.setOrgCode(orgCode);
                        patientList.add(patient);
                    }
                }
            } catch (Exception e) {
                logger.error("采集病人失败", e);
            }
        }
        return patientList;
    }
    public Map<String, String> getItemList(AdapterDataSet adapterDataSet) throws Exception {
        Map<String, String> propertyMap = new HashMap<>();
        PatientIdentity patientIdentity = SysConfig.getInstance().getPatientIdentity(adapterDataSet.getAdapterDataSetT().getStdDatasetCode());
        if (adapterDataSet.isHavePatientID()) {
            AdapterMetaData adapterMetaData = adapterDataSet.getAdapterMetaDataMap().get(PatientIdentity.getPatientIDCode());
            propertyMap.put(SqlConstants.PATIENT_ID, adapterMetaData.getAdapterMetadataModel().getStdMetadataCode().toUpperCase());
        } else {
            logger.error("", new Exception("采集病人列表数据集必须有patient_id."));
            return null;
        }
        if (adapterDataSet.isHaveEventNo()) {
            AdapterMetaData adapterMetaData = adapterDataSet.getAdapterMetaDataMap().get(patientIdentity.getEventNoCode());
            propertyMap.put(SqlConstants.EVENT_NO, adapterMetaData.getAdapterMetadataModel().getStdMetadataCode().toUpperCase());
        } else {
            logger.error("采集病人列表数据集必须有event_no.");
            return null;
        }
        AdapterMetaData adapterRefMetaData = adapterDataSet.getAdapterMetaDataMap().get(patientIdentity.getRefTimeCode());
        if (adapterRefMetaData == null) {
            logger.error("采集病人列表数据集必须有采集时间.");
            return null;
        }
        propertyMap.put(SqlConstants.EVENT_TIME, adapterRefMetaData.getAdapterMetadataModel().getStdMetadataCode().toUpperCase());
        return propertyMap;
    }
    public String fecthData(Patient patient, AdapterDataSet adapterDataSet, List<EhrCondition> queryParams) {
        try {
            ObjectMapper mapper = new ObjectMapper();
            ObjectNode paramsNode = mapper.createObjectNode();
            paramsNode.put("tableCode", adapterDataSet.getAdapterDataSetT().getStdDatasetCode());
            paramsNode.put("condition", mapper.writeValueAsString(queryParams));
            Map<String, String> formParams = new HashMap<>();
//            formParams.add(new BasicNameValuePair("secret", secret));
            formParams.put("api", "collectionData");
            formParams.put("param", mapper.writeValueAsString(paramsNode));
            //调用资源服务网关
            String rootStr = EsbHttp.getFecthData(formParams);
            JsonNode resultNode = mapper.readValue(rootStr, JsonNode.class);
            JsonNode result = resultNode.path("detailModelList");
            JsonNode data = matchAdapterData(result, adapterDataSet);
            ObjectNode jsonObject = mapper.createObjectNode();
            if (data != null && data.size() > 0) {
                jsonObject.set("data", data);
                jsonObject.put("code", adapterDataSet.getAdapterDataSetT().getStdDatasetCode());
                jsonObject.put("patient_id", patient.getPatientId());
                jsonObject.put("event_no", patient.getEventNo());
                String agencyCode = patient.getOrgCode();
                jsonObject.put("org_code", agencyCode);
                jsonObject.put("inner_version", SysConfig.getInstance().getVersionMap().get(patient.getOrgCode()));
                jsonObject.put("create_date", DateUtil.toString(new Date(), DateUtil.DEFAULT_YMDHMSDATE_FORMAT));
                jsonObject.put("event_time", patient.getReferenceTime());//DateUtil.toString(patient.getReferenceTime(), DateUtil.DEFAULT_YMDHMSDATE_FORMAT)
                if (StringUtil.isEmpty(patient.getReUploadFlg())) {
                    jsonObject.put("reUploadFlg", LogicValues.LOGIC_FALSE);
                } else {
                    jsonObject.put("reUploadFlg", patient.getReUploadFlg());
                }
                return jsonObject.toString();
            } else {
                return SqlConstants.EMPTY;
            }
        } catch (Exception e) {
            return SqlConstants.EMPTY;
        }
    }
    public String fecthData(Patient patient, AdapterDataSet adapterDataSet, Map<String, String> relationValueMap) {
        List<EhrCondition> queryParams = new ArrayList<>();
        for (String key : relationValueMap.keySet()) {
            queryParams.add(new EhrCondition(" = ", key, relationValueMap.get(key)));
        }
        return fecthData(patient, adapterDataSet, queryParams);
    }
    /**
     * 根据编排任务进行采集
     *
     * @param patient
     * @param adapterDataSet
     * @return
     */
    public JsonNode fecthData(Patient patient, AdapterDataSet adapterDataSet) {
        try {
            ObjectMapper mapper = new ObjectMapper();
            List<EhrCondition> queryParams = new ArrayList<>();
            boolean patientId = true;
            if (adapterDataSet.isHavePatientID()) {
                AdapterMetaData adapterMetaData = adapterDataSet.getAdapterMetaDataMap().get(PatientIdentity.getPatientIDCode());
                queryParams.add(new EhrCondition(" = ", adapterMetaData.getAdapterMetadataModel().getStdMetadataCode(), patient.getPatientId()));
            } else {
                patientId = false;
            }
            boolean eventNo = true;
            if (adapterDataSet.isHaveEventNo()) {
                AdapterMetaData adapterMetaData = adapterDataSet.getAdapterMetaDataMap().get(adapterDataSet.getEventNoCode());
                queryParams.add(new EhrCondition(" = ", adapterMetaData.getAdapterMetadataModel().getStdMetadataCode(), patient.getEventNo()));
            } else {
                eventNo = false;
            }
            if (!patientId && !eventNo) {
                logger.error("采集病人数据集至少需要一项病人标识.数据集名:" + adapterDataSet.getAdapterDataSetT().getStdDatasetCode());
                return null;
            }
            ObjectNode paramsNode = mapper.createObjectNode();
            paramsNode.put("tableCode", adapterDataSet.getAdapterDataSetT().getStdDatasetCode());
            paramsNode.put("condition", mapper.writeValueAsString(queryParams));
            Map<String, String> formParams = new HashMap<>();
//            formParams.add(new BasicNameValuePair("secret", secret));
            formParams.put("api", "collectionData");
            formParams.put("param", mapper.writeValueAsString(paramsNode));
            //调用资源服务网关
            String rootStr = EsbHttp.getFecthData(formParams);
            JsonNode resultNode = mapper.readValue(rootStr, JsonNode.class);
            JsonNode result = resultNode.path("detailModelList");
            JsonNode data = matchAdapterData(result, adapterDataSet);
            ObjectNode jsonObject = mapper.createObjectNode();
            if (data != null && data.size() > 0) {
                jsonObject.set("data", data);
                jsonObject.put("code", adapterDataSet.getAdapterDataSetT().getStdDatasetCode());
                jsonObject.put("patient_id", patient.getPatientId());
                jsonObject.put("event_no", patient.getEventNo());
                String agencyCode = patient.getOrgCode();
                jsonObject.put("org_code", agencyCode);
                jsonObject.put("inner_version", SysConfig.getInstance().getVersionMap().get(patient.getOrgCode()));
                jsonObject.put("create_date", DateUtil.toString(new Date(), DateUtil.DEFAULT_YMDHMSDATE_FORMAT));
                jsonObject.put("event_time", patient.getReferenceTime());//DateUtil.toString(patient.getReferenceTime(), DateUtil.DEFAULT_YMDHMSDATE_FORMAT)
                if (StringUtil.isEmpty(patient.getReUploadFlg())) {
                    jsonObject.put("reUploadFlg", LogicValues.LOGIC_FALSE);
                } else {
                    jsonObject.put("reUploadFlg", patient.getReUploadFlg());
                }
                return jsonObject;
            } else {
                return null;
            }
        } catch (Exception e) {
            logger.error("", e);
        }
        return null;
    }
    public JsonNode matchAdapterData(JsonNode data, AdapterDataSet adapterDataSet) {
        ObjectMapper mapper = new ObjectMapper();
        ArrayNode result = mapper.createArrayNode();
        Iterator<JsonNode> array = data.iterator();
        while (array.hasNext()) {
            JsonNode dataNode = array.next();
            ObjectNode jsonNode = mapper.createObjectNode();
            for (AdapterMetaData adapterMetaData : adapterDataSet.getAdapterMetaDataList()) {
                AdapterMetadataModel adapterMetadataModel = adapterMetaData.getAdapterMetadataModel();
                String orgMetaDataCode = adapterMetadataModel.getAdapterMetadataCode();
                String stdMetaDataCode = adapterMetadataModel.getStdMetadataCode();
                if (!StringUtil.isEmpty(orgMetaDataCode)) {
                    jsonNode.put(orgMetaDataCode, dataNode.path(stdMetaDataCode).asText());
                }
            }
            result.add(jsonNode);
        }
        return result;
    }
    public Boolean register(Patient patient, String data) {
        return EsbHttp.register(patient, data, token);
    }
    public Boolean upload(Map<String, JsonNode> dataMap, Patient patient, Map<String, AdapterDataSet> dataSetMap) {
        Boolean result = true;
        try {
            DataSetTransformer dataTransformer = new DataSetTransformer();
            for (String key : dataMap.keySet()) {
                dataTransformer.setData(dataMap.get(key));
                if (!toFile(dataTransformer, patient, "origin")) {
                    logger.info("存储原始文件失败:patient_id=" + patient.getPatientId()
                            + "event_no=" + patient.getEventNo());
                    result = false;
                    break;
                }
                dataTransformer.transfer(dataSetMap);
                if (!toFile(dataTransformer, patient, "standard")) {
                    logger.info("存储标准文件失败:patient_id=" + patient.getPatientId()
                            + "event_no=" + patient.getEventNo());
                    result = false;
                    break;
                }
            }
            PatientCDAUpload patientCDAUpload = new PatientCDAUpload();
            if (!patientCDAUpload.upload(patient, token)) {
                result = false;
            }
        } catch (Exception e) {
            logger.error("", e);
            result = false;
        }
        return result;
    }
    public boolean toFile(DataSetTransformer dataTransformer, Patient patient, String fileName) {
        JsonNode jsonObject = dataTransformer.getJsonObject();
        PatientCDAIndex patientCDAIndex = new PatientCDAIndex(patient);
        String filePath = patientCDAIndex.createDataIndex(fileName, PatientCDAIndex.FileType.JSON);
        boolean writeFile = false;
        try {
            writeFile = FileUtil.writeFile(filePath, jsonObject.toString(), "UTF-8");
        } catch (IOException e) {
            logger.info("存储临时文件失败.");
            logger.error("", e);
        }
        return writeFile;
    }
    /**
     * 解析token内容
     *
     * @param responToken
     * @return
     */
    public Map<String, Object> parseToken(String responToken) {
        ObjectMapper mapper = new ObjectMapper();
        Map<String, Object> tokenMap = null;
        try {
            Map<String, Object> map = mapper.readValue(responToken, Map.class);
            String code = (String) map.get("code");
            if (SqlConstants.OK.equals(code)) {
                tokenMap = (Map<String, Object>) map.get("result");
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
        return tokenMap;
    }
}//end DataCollectDispatcher
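
For reference, a minimal sketch of the form request that fecthData assembles for the resource gateway. The dataset code and condition values below are made up, and plain JSON nodes stand in for the project's EhrCondition objects so the snippet stays self-contained; the real dispatcher serialises a List<EhrCondition> instead.

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import java.util.HashMap;
import java.util.Map;

public class CollectionDataRequestSketch {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        // Each entry mirrors one EhrCondition(" = ", column, value); the field names used here are illustrative.
        ArrayNode condition = mapper.createArrayNode();
        condition.add(mapper.createObjectNode().put("logic", " = ").put("field", "PATIENT_ID").put("value", "P001"));
        condition.add(mapper.createObjectNode().put("logic", " = ").put("field", "EVENT_NO").put("value", "E001"));

        ObjectNode paramsNode = mapper.createObjectNode();
        paramsNode.put("tableCode", "HDSC01_02");                         // standard dataset code
        paramsNode.put("condition", mapper.writeValueAsString(condition));

        Map<String, String> formParams = new HashMap<>();
        formParams.put("api", "collectionData");
        formParams.put("param", mapper.writeValueAsString(paramsNode));
        // formParams is what EsbHttp.getFecthData(formParams) posts; the gateway's
        // "detailModelList" reply is then mapped back through matchAdapterData.
        System.out.println(formParams);
    }
}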

+ 0 - 148
src/main/java/com/yihu/hos/crawler/service/OldPatientCDAUpload.java

@ -1,148 +0,0 @@
package com.yihu.hos.crawler.service;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.yihu.hos.core.compress.Zipper;
import com.yihu.hos.core.encrypt.MD5;
import com.yihu.hos.core.encrypt.RSA;
import com.yihu.hos.core.file.FileUtil;
import com.yihu.hos.core.http.HTTPResponse;
import com.yihu.hos.core.http.HttpClientKit;
import com.yihu.hos.core.log.Logger;
import com.yihu.hos.core.log.LoggerFactory;
import com.yihu.hos.crawler.model.config.SysConfig;
import com.yihu.hos.crawler.model.patient.Patient;
import java.io.File;
import java.security.Key;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
/**
 * 档案上传
 *
 * @author Air
 * @version 1.0
 * @created 2015.07.06 15:58
 */
public class OldPatientCDAUpload {
    public static String uploadMethod;
    private static Logger logger = LoggerFactory.getLogger(OldPatientCDAUpload.class);
    /**
     * @param patient
     * @return
     * @modify 2015.09.15 airhead 修订删除目录
     * @modify 2015.09.19 airhead 修复无文档问题及错误信息
     */
    public boolean upload(Patient patient, String token) {
        ZipFile zipFile = zip(patient);
        try {
            if (zipFile == null || zipFile.file == null) {
                logger.info("压缩病人档案失败,病人文档未生成,patient_id:" + patient.getPatientId() + ",event_no:" + patient.getEventNo());
                return false;
            }
            boolean result = upload(patient, zipFile, token);
            if (!result) {
                logger.info("上传病人档案失败,patient_id:" + patient.getPatientId() + ",event_no:" + patient.getEventNo());
                FileUtil.deleteDirectory(new File(zipFile.directory));
                return false;
            }
            result = FileUtil.deleteDirectory(new File(zipFile.directory));
            if (!result) {
                logger.info("删除临时文件失败,patient_id:" + patient.getPatientId() + ",event_no:" + patient.getEventNo());
            }
        } catch (Exception e) {
            FileUtil.deleteDirectory(new File(zipFile.directory));
            return false;
        }
        return true;
    }
    /**
     * @param patient
     * @return
     * @modify 从data目录生成zip数据
     */
    public ZipFile zip(Patient patient) {
        try {
            PatientCDAIndex patientCDAIndex = new PatientCDAIndex(patient);
            String dataDirectory = patientCDAIndex.getDataDirectory();
            String filePath = patientCDAIndex.createIndex(PatientCDAIndex.IndexType.ZIP, PatientCDAIndex.FileType.ZIP);
            UUID uuidPwd = UUID.randomUUID();
            String pwd = uuidPwd.toString();
            Key key = RSA.genPublicKey(SysConfig.getInstance().getPublicKeyMap().get(patient.getOrgCode()));
            if (key == null) {
                logger.info("压缩文件错误,无公钥信息.");
                FileUtil.deleteDirectory(new File(patientCDAIndex.getDirectory()));
                return null;
            }
            ZipFile zipFile = new ZipFile();
            zipFile.encryptPwd = RSA.encrypt(pwd, key);
            Zipper zipper = new Zipper();
            zipFile.file = zipper.zipFile(new File(dataDirectory), filePath, pwd);
            zipFile.dataDirectory = dataDirectory;
            zipFile.directory = patientCDAIndex.getDirectory();
            return zipFile;
        } catch (Exception e) {
            e.printStackTrace();
            logger.info("从data目录生成zip数据时,压缩文件异常", e);
        }
        return null;
    }
    private boolean upload(Patient patient, ZipFile zipFile, String token) {
        try {
            String uploadMethod = EsbHttp.defaultHttpUrl + "/packages";
            String fileMd5 = MD5.getMd5ByFile(zipFile.file);
            Map<String, String> formParams = new HashMap<>();
            formParams.put("md5", fileMd5);
            formParams.put("package_crypto", zipFile.encryptPwd);
            formParams.put("org_code", patient.getOrgCode());
            formParams.put("token", token);
            Map<String, String> header = new HashMap<>();
            header.put("Authorization", "Basic " + EsbHttp.clientKey);
            HTTPResponse response = HttpClientKit.postFile(uploadMethod, zipFile.file.getAbsolutePath(), formParams, header);
            if (response == null) {
                logger.info("上传病人档案请求失败,patient_id:" + patient.getPatientId() + ",event_no:" + patient.getEventNo());
                return false;
            }
            if (response.getStatusCode() != 200) {
                logger.info("上传病人档案请求失败,错误代码:" + response.getStatusCode() + ",patient_id:" + patient.getPatientId() + ",event_no:" + patient.getEventNo());
                return false;
            }
            ObjectMapper mapper = new ObjectMapper();
            JsonNode rootNode = mapper.readValue(response.getBody(), JsonNode.class);
            JsonNode codeNode = rootNode.get("code");
            String result = codeNode.asText();
            if (!result.equals("0")) {
                logger.info("上传病人档案失败,错误代码:" + result + ",patient_id:" + patient.getPatientId() + ",event_no:" + patient.getEventNo());
                return false;
            } else {
                logger.info("上传病人档案成功,patient_id:" + patient.getPatientId() + ",event_no:" + patient.getEventNo());
                return true;
            }
        } catch (Exception e) {
            e.printStackTrace();
            logger.info("上传病人档案异常,patient_id:" + patient.getPatientId() + ",event_no:" + patient.getEventNo());
            return false;
        }
    }
    private class ZipFile {
        public File file;
        public String encryptPwd;
        public String directory;
        public String dataDirectory;
    }
}
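
A hedged fragment showing the contract this legacy class uses against the packages endpoint; archiveZip, encryptPwd and token are placeholders for values produced earlier in the flow, and the helpers referenced are the ones already imported above.

String uploadUrl = EsbHttp.defaultHttpUrl + "/packages";
Map<String, String> formParams = new HashMap<>();
formParams.put("md5", MD5.getMd5ByFile(archiveZip));          // integrity check of the encrypted zip
formParams.put("package_crypto", encryptPwd);                 // zip password, RSA-encrypted with the org's public key
formParams.put("org_code", "ORG001");                         // placeholder org code
formParams.put("token", token);
Map<String, String> header = new HashMap<>();
header.put("Authorization", "Basic " + EsbHttp.clientKey);
HTTPResponse response = HttpClientKit.postFile(uploadUrl, archiveZip.getAbsolutePath(), formParams, header);
// a JSON body whose "code" equals "0" means the package was accepted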

+ 0 - 83
src/main/java/com/yihu/hos/crawler/service/PatientCDAIndex.java

@ -1,83 +0,0 @@
package com.yihu.hos.crawler.service;
import com.yihu.hos.crawler.model.config.SysConfig;
import com.yihu.hos.crawler.model.patient.Patient;
import java.util.UUID;
/**
 * 病人文件索引类,用于生成文件路径,不确保文件路径存在
 *
 * @author Air
 * @version 1.0
 * @created 2015.07.01 18:06
 */
public class PatientCDAIndex {
    private Patient patient;
    public PatientCDAIndex(Patient patient) {
        this.patient = patient;
    }
    public String getDirectory() {
        String dir = SysConfig.getInstance().getTempFile();
        return dir + "/" + patient.getOrgCode() + "/" + patient.getOrgCode() + "-" + patient.getPatientId() + "-" + patient.getEventNo();
    }
    /**
     * 生成病人档案目录
     * orgCode-patientId-eventNo/data
     *
     * @return
     */
    public String getDataDirectory() {
        return getDirectory() + "/" + IndexType.DATA;
    }
    public String createIndex(String indexType, String fileType) {
        UUID uuid = UUID.randomUUID();
        String index = uuid.toString();
        String dir = getDirectory() + "/" + indexType;
        return dir + "/" + index + fileType;
    }
    public String createDataSetIndex(String indexType, String fileType) {
        String index = "dataset_index";
        String dir = getDirectory() + "/" + IndexType.DATA + "/" + indexType;
        return dir + "/" + index + fileType;
    }
    /**
     * 生成最终病人档案目录
     * data/cda
     * data/origin
     * data/standard
     *
     * @param indexType
     * @param fileType
     * @return
     */
    public String createDataIndex(String indexType, String fileType) {
        return createIndex(IndexType.DATA + "/" + indexType, fileType);
    }
    public class FileType {
        public final static String XML = ".xml";
        public final static String JSON = ".json";
        public final static String ZIP = ".zip";
    }
    public class IndexType {
        public final static String DATA = "data";   //病人档案数据目录
        public final static String CDA = "cda";     //病人cda档案目录
        public final static String STANDARD = "standard";   //病人标准档案目录
        public final static String ORIGIN = "origin";   //病人原始档案目录
        public final static String ZIP = "zip";         //病人压缩包目录
        public final static String DOCUMENT = "document";
    }
}
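
To make the path convention concrete, this is the layout the class yields for one visit, assuming the configured temp directory is /data/temp, org code ORG001, patient P001 and event E001 (each <uuid> segment is a fresh random UUID):

/data/temp/ORG001/ORG001-P001-E001                            getDirectory()
/data/temp/ORG001/ORG001-P001-E001/data                       getDataDirectory()
/data/temp/ORG001/ORG001-P001-E001/data/origin/<uuid>.json    createDataIndex(IndexType.ORIGIN, FileType.JSON)
/data/temp/ORG001/ORG001-P001-E001/data/standard/<uuid>.json  createDataIndex(IndexType.STANDARD, FileType.JSON)
/data/temp/ORG001/ORG001-P001-E001/zip/<uuid>.zip             createIndex(IndexType.ZIP, FileType.ZIP)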

+ 0 - 113
src/main/java/com/yihu/hos/crawler/service/PatientCDAUpload.java

@ -1,113 +0,0 @@
package com.yihu.hos.crawler.service;
import com.yihu.hos.core.compress.Zipper;
import com.yihu.hos.core.encrypt.RSA;
import com.yihu.hos.core.file.FileUtil;
import com.yihu.hos.core.log.Logger;
import com.yihu.hos.core.log.LoggerFactory;
import com.yihu.hos.crawler.model.config.SysConfig;
import com.yihu.hos.crawler.model.patient.Patient;
import java.io.File;
import java.security.Key;
import java.util.UUID;
/**
 * 档案上传
 *
 * @author Air
 * @version 1.0
 * @created 2015.07.06 15:58
 */
public class PatientCDAUpload {
    private static Logger logger = LoggerFactory.getLogger(PatientCDAUpload.class);
    public static String uploadMethod;
    /**
     * @param patient
     * @return
     * @modify 2015.09.15 airhead 修订删除目录
     * @modify 2015.09.19 airhead 修复无文档问题及错误信息
     */
    public Boolean upload(Patient patient, String token) {
        ZipFile zipFile = zip(patient);
        try {
            if (zipFile == null || zipFile.file == null) {
                logger.info("压缩病人档案失败,病人文档未生成,patient_id:" + patient.getPatientId() + ",event_no:" + patient.getEventNo());
                return false;
            }
            boolean result = upload(patient, zipFile, token);
            if (!result) {
                logger.info("上传病人档案失败,patient_id:" + patient.getPatientId() + ",event_no:" + patient.getEventNo());
                return result;
            }
            logger.trace(zipFile.directory);
            result = FileUtil.deleteDirectory(new File(zipFile.directory));
            if (!result) {
                logger.info("删除临时文件失败,patient_id:" + patient.getPatientId() + ",event_no:" + patient.getEventNo());
                return result;
            }
        } catch (Exception e) {
            FileUtil.deleteDirectory(new File(zipFile.directory));
            return false;
        }
        return true;
    }
    /**
     * @param patient
     * @return
     * @modify 从data目录生成zip数据
     */
    public ZipFile zip(Patient patient) {
        try {
            PatientCDAIndex patientCDAIndex = new PatientCDAIndex(patient);
            String dataDirectory = patientCDAIndex.getDataDirectory();
            String filePath = patientCDAIndex.createIndex(PatientCDAIndex.IndexType.ZIP, PatientCDAIndex.FileType.ZIP);
            UUID uuidPwd = UUID.randomUUID();
            String pwd = uuidPwd.toString();
            String publicKey = SysConfig.getInstance().getPublicKeyMap().get(patient.getOrgCode());
            if (publicKey == null || publicKey.length() == 0) {
                publicKey = EsbHttp.getPublicKey(patient.getOrgCode());
                SysConfig.getInstance().getPublicKeyMap().put(patient.getOrgCode(), publicKey);
            }
            Key key = RSA.genPublicKey(publicKey);
            if (key == null) {
                logger.info("压缩文件错误,获取公钥错误.");
                return null;
            }
            ZipFile zipFile = new ZipFile();
            zipFile.encryptPwd = RSA.encrypt(pwd, key);
            Zipper zipper = new Zipper();
            zipFile.file = zipper.zipFileForAll(new File(dataDirectory), filePath, pwd);
            zipFile.dataDirectory = dataDirectory;
            zipFile.directory = patientCDAIndex.getDirectory();
            return zipFile;
        } catch (Exception e) {
            logger.error("从data目录生成zip数据时,压缩文件异常");
            logger.error(e.getCause().toString());
        }
        return null;
    }
    private boolean upload(Patient patient, ZipFile zipFile, String token) {
        return EsbHttp.upload(patient, zipFile.file, zipFile.encryptPwd, token);
    }
    private class ZipFile {
        public File file;
        public String encryptPwd;
        public String directory;
        public String dataDirectory;
    }
}
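
The packaging above protects the archive in two layers: the data directory is zipped under a random one-time password, and that password is RSA-encrypted with the organisation's public key (fetched on demand via EsbHttp.getPublicKey), so only the holder of the matching private key can open the package. A minimal sketch of that step, with placeholder paths and org code:

String pwd = UUID.randomUUID().toString();                                   // one-time zip password
Key key = RSA.genPublicKey(SysConfig.getInstance().getPublicKeyMap().get("ORG001"));
String encryptPwd = RSA.encrypt(pwd, key);                                   // sent as "package_crypto"
File zip = new Zipper().zipFileForAll(
        new File("/data/temp/ORG001/ORG001-P001-E001/data"),                 // data directory to package
        "/data/temp/ORG001/ORG001-P001-E001/zip/archive.zip", pwd);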

+ 0 - 37
src/main/java/com/yihu/hos/crawler/storage/DataSetStorage.java

@ -1,37 +0,0 @@
package com.yihu.hos.crawler.storage;
import com.yihu.hos.crawler.format.IDataTransformer;
import com.yihu.hos.crawler.model.patient.Patient;
import com.yihu.hos.crawler.format.AdapterScheme;
/**
 * Created by Administrator on 2015/10/15.
 */
public class DataSetStorage extends MongodbStorage {
    public static final String KEY_CODE = "code";
    public DataSetStorage(AdapterScheme adapterScheme, String dbName) {
        super(adapterScheme, dbName);
    }
    @Override
    public boolean save(IDataTransformer dataTransformer) {
        return true;
    }
    /**
     * @param patient
     * @return
     * @modify 将档案生成到data目录
     */
    @Override
    public boolean toFile(Patient patient) {
        return true;
    }
    @Override
    public String getKey(){
        return KEY_CODE;
    }
}

+ 0 - 231
src/main/java/com/yihu/hos/crawler/storage/DocumentStorage.java

@ -1,231 +0,0 @@
package com.yihu.hos.crawler.storage;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.mongodb.client.FindIterable;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoCursor;
import com.mongodb.client.model.UpdateOptions;
import com.yihu.hos.common.mongo.MongoDB;
import com.yihu.hos.core.datatype.DateUtil;
import com.yihu.hos.core.encode.Base64;
import com.yihu.hos.core.file.FileUtil;
import com.yihu.hos.crawler.format.AdapterScheme;
import com.yihu.hos.crawler.format.DocumentTransformer;
import com.yihu.hos.crawler.format.IDataTransformer;
import com.yihu.hos.crawler.model.patient.Patient;
import com.yihu.hos.crawler.service.EsbHttp;
import com.yihu.hos.crawler.service.PatientCDAIndex;
import org.bson.Document;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Iterator;
import static com.mongodb.client.model.Filters.and;
import static com.mongodb.client.model.Filters.eq;
import static com.mongodb.client.model.Projections.excludeId;
//import com.yihu.common.util.log.DebugLogger;
/**
 * Created by Administrator on 2015/10/15.
 */
public class DocumentStorage extends MongodbStorage {
    public static final String KEY_CODE = "catalog";
    public DocumentStorage(AdapterScheme adapterScheme, String dbName) {
        super(adapterScheme, dbName);
    }
    @Override
    public boolean save(IDataTransformer dataTransformer) {
        DocumentTransformer documentTransformer = (DocumentTransformer) dataTransformer;
        JsonNode jsonObject = documentTransformer.getJsonObject();
        if (jsonObject != null) {
            String dataSetCode = jsonObject.get(getKey()).asText();
            String patientId = jsonObject.get(PATIENT_ID).asText();
            String eventNo = jsonObject.get(EVENT_NO).asText();
            try {
                MongoCollection<Document> collection = MongoDB.db(dbName).getCollection(dataSetCode);
                createIndex(collection);    //创建索引
                Document filter = new Document();
                filter.append(PATIENT_ID, patientId);
                filter.append(EVENT_NO, eventNo);
                collection.deleteMany(filter);
                UpdateOptions updateOptions = new UpdateOptions();
                updateOptions.upsert(true);
                collection.replaceOne(filter, Document.parse(jsonObject.toString()), updateOptions);
                String url = createUrl(dataSetCode, patientId, eventNo);
                Date expiryDate = DateUtil.setDateTime(new Date(), getExpireDays().intValue());
                SimpleDateFormat sdf = new SimpleDateFormat(DateUtil.DEFAULT_YMDHMSDATE_FORMAT);
                String date = sdf.format(expiryDate);
                Document updateDoc = new Document(CREATE_AT, new Date());
                updateDoc.put("resource.url", url);
                updateDoc.put("resource.expiry_date", date);
                collection.updateMany(filter, new Document("$set", updateDoc));
            } catch (Exception e) {
                //DebugLogger.fatal("保存病人档案信息至MongoDB异常:", e);
                return false;
            }
            return true;
        }
        return false;
    }
    /**
     * @param patient
     * @return
     * @modify 将档案生成到data目录
     */
    @Override
    public boolean toFile(Patient patient) {
        boolean result = true;
        PatientCDAIndex patientCDAIndex = new PatientCDAIndex(patient);
        ObjectMapper mapper = new ObjectMapper();
        ArrayNode arrayNode = mapper.createArrayNode();
        ObjectNode resultNode = mapper.createObjectNode();
        try {
            for (String name : MongoDB.db(dbName).listCollectionNames()) {
                MongoCollection<Document> collection = MongoDB.db(dbName).getCollection(name);
                FindIterable<Document> documents = collection.find(and(eq("patient_id", patient.getPatientId()), eq(EVENT_NO, patient.getEventNo()))).projection(excludeId());
                try (MongoCursor<Document> cursor = documents.iterator()) {
                    while (cursor.hasNext()) {
                        try {
                            String document = cursor.next().toJson();
                            ObjectNode rootNode = mapper.readValue(document, ObjectNode.class);
                            JsonNode jsonNode = rootNode.get("data");
                            boolean array = jsonNode.isArray();
                            if (!array) {
                                continue;
                            }
                            arrayNode = genunStructureData(jsonNode, patientCDAIndex);
                        } catch (IOException e) {
                            e.printStackTrace();
                            //DebugLogger.fatal("存储临时文件失败.");
                            result = false;
                        }
                    }
                } catch (Exception e) {
                    //DebugLogger.fatal("", e);
                    result = false;
                }
            }
            String innerVersion = EsbHttp.getRemoteVersion(patient.getOrgCode());
            for (int i = 0; i != arrayNode.size(); ++i) {
                JsonNode keyWordsNode = arrayNode.get(i).path("key_words");
                ObjectNode newNode = mapper.createObjectNode();
                JsonNode jsonNode = transformKeyWords(keyWordsNode, newNode);
                ((ObjectNode) arrayNode.get(i)).set("key_words", jsonNode);
            }
            resultNode.set("data", arrayNode);
            resultNode.put("patient_id", patient.getPatientId());
            resultNode.put("event_no", patient.getEventNo());
            resultNode.put("org_code", patient.getOrgCode());
            resultNode.put("event_time", patient.getReferenceTime());
            resultNode.put("inner_version", innerVersion);
            String indexPath = patientCDAIndex.getDataDirectory() + "/meta.json";
            boolean writeFile = FileUtil.writeFile(indexPath, mapper.writeValueAsString(resultNode), "UTF-8");
            if (!writeFile) {
                result = false;
            }
        } catch (Exception e) {
            //DebugLogger.fatal("", e);
            result = false;
        }
        return result;
    }
    @Override
    public String getDataSet(Patient patient, String dataSetCode) {
        return null;
    }
    @Override
    public String getKey(){
        return KEY_CODE;
    }
    /**
     * 生成非结构化 meta.json文件数据
     * @param jsonNode
     * @param patientCDAIndex
     * @return
     * @throws IOException
     */
    public ArrayNode genunStructureData(JsonNode jsonNode, PatientCDAIndex patientCDAIndex) throws IOException {
        ObjectMapper mapper = new ObjectMapper();
        ArrayNode arrayNode = mapper.createArrayNode();
        for (int i = 0; i != jsonNode.size(); ++i) {
            JsonNode documentNode = jsonNode.get(i);
            JsonNode contentNode = documentNode.path("content");
            if (contentNode.isArray()) {
                for (int j = 0; j < contentNode.size(); j++) {
                    JsonNode fileArr = contentNode.get(j);
//                    String mimeType = fileArr.path("mime_type").asText();//文件类型
                    String names = fileArr.path("name").asText();
                    String fileType = names.substring(names.lastIndexOf("."));//文件后缀
                    JsonNode file = fileArr.path("file_content");//文件内容
                    Iterator<String> fileNames = file.fieldNames();
                    StringBuilder stringBuilder = new StringBuilder();
                    while (fileNames.hasNext()) {
                        String key = fileNames.next();
                        String content = file.path(key).asText();
                        String filePath = patientCDAIndex.createDataIndex(dbName, fileType);
                        String fileName = filePath.substring(filePath.lastIndexOf("/") + 1);
                        byte[] fileContent = Base64.decode(content);
                        boolean writeFile = FileUtil.writeFile(filePath, fileContent, "UTF-8");
                        if (!writeFile) {
                            //DebugLogger.fatal("存储临时文件失败.");
                        } else {
                            stringBuilder.append(fileName).append(",");
                        }
                    }
                    if (file.isObject()) {
                        ((ObjectNode) fileArr).put("name", stringBuilder.toString());
                        ((ObjectNode) fileArr).remove("file_content");
                    }
                }
            }
            arrayNode.add(documentNode);
        }
        return arrayNode;
    }
    /**
     * 将key_words中key中包含的“-”转换成“."
     * @param keyWordsNode
     * @param newObjectNode
     * @return
     */
    public ObjectNode transformKeyWords(JsonNode keyWordsNode, ObjectNode newObjectNode) {
        Iterator<String> iterator = keyWordsNode.fieldNames();
        while (iterator.hasNext()) {
            String key = iterator.next();
            String value = keyWordsNode.path(key).asText();
            String newKey = key.replaceAll("-", ".");
            newObjectNode.put(newKey, value);
        }
        return newObjectNode;
    }
}
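
A small usage sketch of transformKeyWords, which copies every key_words entry while rewriting '-' in the key to '.'; the sample key and the scheme argument are placeholders.

ObjectMapper mapper = new ObjectMapper();
ObjectNode keyWords = mapper.createObjectNode();
keyWords.put("JDSC01-02-003", "2016-10-09");                                 // sample entry
DocumentStorage storage = new DocumentStorage(adapterScheme, "document");    // adapterScheme: whatever scheme is in scope
ObjectNode fixed = storage.transformKeyWords(keyWords, mapper.createObjectNode());
// fixed now holds {"JDSC01.02.003": "2016-10-09"}; values are left untouched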

+ 0 - 36
src/main/java/com/yihu/hos/crawler/storage/IDataStorage.java

@ -1,36 +0,0 @@
package com.yihu.hos.crawler.storage;
import com.yihu.hos.crawler.format.IDataTransformer;
import com.yihu.hos.crawler.model.patient.Patient;
import java.util.Map;
/**
 * 存储接口,应该只关心存取
 * 目前定义接口存在如下问题需修订:
 * 1.无取接口
 * 2.toFile超出职责范围
 *
 * @author Airhead
 * @version 1.0
 * @created 22-5-2015 11:24:26
 */
public interface IDataStorage {
    boolean save(IDataTransformer dataTransformer);
    boolean toFile(Patient patient);
    String getDataSet(Patient patient, String dataSetCode);
    String getArchive(String dataSetCode, Map<String, Object> params);
    Boolean isStored(String orgCode, String patientID, String eventNo);
    enum StorageType {
        MYSQL_DB,
        MONGODB,
        FILE_SYSTEM
    }
}

+ 0 - 390
src/main/java/com/yihu/hos/crawler/storage/MongodbStorage.java

@ -1,390 +0,0 @@
package com.yihu.hos.crawler.storage;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.mongodb.BasicDBObject;
import com.mongodb.client.FindIterable;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoCursor;
import com.mongodb.client.model.IndexOptions;
import com.mongodb.client.model.UpdateOptions;
//import com.yihu.common.util.log.DebugLogger;
import com.yihu.hos.common.mongo.MongoDB;
import com.yihu.hos.crawler.format.DataSetTransformer;
import com.yihu.hos.crawler.format.IDataTransformer;
import com.yihu.hos.crawler.model.config.SysConfig;
import com.yihu.hos.crawler.model.patient.Patient;
import com.yihu.hos.crawler.model.patient.PatientIdentity;
import com.yihu.hos.crawler.model.patient.PatientIndex;
import com.yihu.hos.crawler.format.AdapterScheme;
import com.yihu.hos.crawler.service.PatientCDAIndex;
import com.yihu.hos.web.framework.util.file.ConfigureUtil;
import com.yihu.hos.core.file.FileUtil;
import com.yihu.hos.core.datatype.DateUtil;
import com.yihu.hos.core.datatype.NumberUtil;
import org.bson.Document;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.concurrent.TimeUnit;
import static com.mongodb.client.model.Filters.and;
import static com.mongodb.client.model.Filters.eq;
import static com.mongodb.client.model.Projections.excludeId;
/**
 * 档案数据只使用Mongo进行存储
 * 目前阶段只会有两种数据类型
 * 1.结构化,数据集
 * 2.非结构化,文档(Pictures,Word,PDF,Video etc.)
 *
 * @author Air
 * @version 1.0
 * @created 2015.07.06 10:38
 */
public class MongodbStorage implements IDataStorage {
    public static final String KEY = "code";
    public static final String PATIENT_ID = "patient_id";
    public static final String EVENT_NO = "event_no";
    public static final String CREATE_AT = "create_at";
    public static final String CREATE_TIME = "create_time";
    public static final String ORG_CODE = "org_code";
    public static final String TTL_INDEX = "ceate_at_1";   //TTL index name, 过期时间索引
    public static final String TTL_INDEX_EXPIRED = "create_time_1"; //旧的TTL index name,已经作废,用于删除索引时使用。
    public static final String INNER_VERSION = "inner_version";
    public static final String EVENT_TIME = "event_time";
    protected String dbName;
    protected AdapterScheme adapterScheme;
    public MongodbStorage(AdapterScheme adapterScheme, String dbName) {
        this.adapterScheme = adapterScheme;
        this.dbName = dbName;
    }
    @Override
    public boolean save(IDataTransformer dataTransformer) {
        DataSetTransformer dataSetTransformer = (DataSetTransformer) dataTransformer;
        ObjectNode jsonObject = (ObjectNode) dataSetTransformer.getJsonObject();
        if (jsonObject != null) {
            String dataSetCode = jsonObject.get(getKey()).asText();
            String patientId = jsonObject.get(PATIENT_ID).asText();
            String eventNo = jsonObject.get(EVENT_NO).asText();
            try {
                MongoCollection<Document> collection = MongoDB.db(dbName).getCollection(dataSetCode);
                createIndex(collection);    //创建索引
                Document filter = new Document();
                filter.append(PATIENT_ID, patientId);
                filter.append(EVENT_NO, eventNo);
                collection.deleteMany(filter);
                UpdateOptions updateOptions = new UpdateOptions();
                updateOptions.upsert(true);
                collection.replaceOne(filter, Document.parse(jsonObject.toString()), updateOptions);
                String url = createUrl(dataSetCode, patientId, eventNo);
                Date expiryDate = DateUtil.setDateTime(new Date(), getExpireDays().intValue());
                SimpleDateFormat sdf = new SimpleDateFormat(DateUtil.DEFAULT_YMDHMSDATE_FORMAT);
                String date = sdf.format(expiryDate);
                Document updateDoc = new Document(CREATE_AT, new Date());
                updateDoc.put("resource.url", url);
                updateDoc.put("resource.expiry_date", date);
                collection.updateMany(filter, new Document("$set", updateDoc));
            } catch (Exception e) {
                //DebugLogger.fatal("保存病人档案信息至MongoDB异常:", e);
                return false;
            }
            return true;
        }
        return false;
    }
    /**
     * @param patient
     * @return
     * @modify 将档案生成到data目录
     */
    @Override
    public boolean toFile(Patient patient) {
        boolean result = true;
        PatientCDAIndex patientCDAIndex = new PatientCDAIndex(patient);
        Document datasetDoc = new Document();
        Document resultDoc = new Document();
        try {
            // 生成文件,轻量模式需清空data中数据
            for (String name : MongoDB.db(dbName).listCollectionNames()) {
                MongoCollection<Document> collection = MongoDB.db(dbName).getCollection(name);
                FindIterable<Document> documents = collection.find(and(eq(PATIENT_ID, patient.getPatientId()), eq(EVENT_NO, patient.getEventNo()))).projection(excludeId());
                try (MongoCursor<Document> cursor = documents.iterator()) {
                    while (cursor.hasNext()) {
                        String filePath = patientCDAIndex.createDataIndex(dbName, PatientCDAIndex.FileType.JSON);
                        try {
                            Document doc = cursor.next();
                            if ("HDSC01_02".equals(name) || "HDSC02_09".equals(name)) {
                                resultDoc.put(PATIENT_ID, doc.get(PATIENT_ID));
                                resultDoc.put(EVENT_NO, doc.get(EVENT_NO));
                                resultDoc.put(ORG_CODE, doc.get(ORG_CODE));
                                resultDoc.put(INNER_VERSION, doc.get(INNER_VERSION));
                                resultDoc.put(EVENT_TIME, doc.get(EVENT_TIME));
                                if ("HDSC01_02".equals(name)) {
                                    resultDoc.put("visit_type", "1");
                                } else {
                                    resultDoc.put("visit_type", "2");//临时约定,后续从字典中获取
                                }
                            }
                            Map<String, String> resource = (Map<String, String>) doc.get("resource");
                            datasetDoc.put(name, "");
                            resultDoc.put("expiry_date", resource.get("expiry_date"));
                            boolean writeFile = FileUtil.writeFile(filePath, doc.toJson(), "UTF-8");
                            if (!writeFile) {
                                //DebugLogger.fatal("存储临时文件失败:" + cursor.next().toJson());
                                result = false;
                            }
                        } catch (IOException e) {
                            //DebugLogger.fatal("存储临时文件失败.", e);
                            result = false;
                        }
                    }
                } catch (Exception e) {
                    //DebugLogger.fatal("", e);
                }
            }
            //摘要信息生成
//            Document indexData = genPatientIndexData(patient);
//            if (indexData != null) {
//                resultDoc.put("dataset", datasetDoc);
//                resultDoc.put("sumary", indexData);
//                String indexPath = patientCDAIndex.createDataSetIndex("index", PatientCDAIndex.FileType.JSON);
//                boolean writeFile = FileUtil.writeFile(indexPath, resultDoc.toJson(), "UTF-8");
//                if (!writeFile) {
//                    //DebugLogger.fatal("存储索引临时文件失败:" + resultDoc.toJson());
//                    result = false;
//                }
//            }
        } catch (Exception e) {
            //DebugLogger.fatal("", e);
            result = false;
        }
        return result;
    }
    @Override
    public String getDataSet(Patient patient, String dataSetCode) {
        try {
            MongoCollection<Document> collection = MongoDB.db(dbName).getCollection(dataSetCode);
            FindIterable<Document> documents = collection.find(and(eq(PATIENT_ID, patient.getPatientId()), eq(EVENT_NO, patient.getEventNo()), eq(ORG_CODE, patient.getOrgCode()))).projection(excludeId());
            Document document = documents.first();
            if (document != null) {
                return document.toJson();
            }
        } catch (Exception e) {
            //DebugLogger.fatal("", e);
        }
        return null;
    }
    /**
     * 根据条件 获取数据集信息
     *
     * @param dataSetCode 数据集编码
     * @param params      map参数集合
     * @return
     */
    @Override
    public String getArchive(String dataSetCode, Map<String, Object> params) {
        String data = null;
        boolean result = true;
        try {
            MongoCollection<Document> collection = MongoDB.db(dbName).getCollection(dataSetCode);
            BasicDBObject basicDBObject = new BasicDBObject();
            for (Map.Entry<String, Object> entry : params.entrySet()) {
                basicDBObject.put(entry.getKey(), entry.getValue());
            }
            FindIterable<Document> documents = collection.find(basicDBObject);
            try (MongoCursor<Document> cursor = documents.iterator()) {
                while (cursor.hasNext()) {
                    data = cursor.next().toJson();
                    //DebugLogger.fatal("存储临时文 :" + cursor.next().toJson());
                }
            } catch (Exception e) {
                //DebugLogger.fatal("", e);
            }
        } catch (Exception e) {
            //DebugLogger.fatal("", e);
        }
        return data;
    }
    @Override
    public Boolean isStored(String orgCode, String patientID, String eventNo) {
        HashMap<String, PatientIdentity> patientIdentityHashMap = SysConfig.getInstance().getPatientIdentityHashMap();
        Set<Map.Entry<String, PatientIdentity>> entries = patientIdentityHashMap.entrySet();
        Iterator<Map.Entry<String, PatientIdentity>> iterator = entries.iterator();
        try {
            while (iterator.hasNext()) {
                Map.Entry<String, PatientIdentity> next = iterator.next();
                String datasetCode = next.getKey();
                MongoCollection<Document> collection = MongoDB.db(dbName).getCollection(datasetCode);
                Document document = new Document();
                document.append(ORG_CODE, orgCode);
                document.append(PATIENT_ID, patientID);
                document.append(EVENT_NO, eventNo);
                Document findDoc = collection.find(document).first();
                if (findDoc != null) {
                    return true;
                }
            }
        } catch (Exception e) {
            //DebugLogger.fatal("", e);
        }
        return false;
    }
    protected void createIndex(MongoCollection<Document> collection) {
        for (final Document index : collection.listIndexes()) {
            if (index.get("name").equals(TTL_INDEX_EXPIRED)) {
                collection.dropIndex(TTL_INDEX_EXPIRED);  //删除旧的TTL Index
            } else if (index.get("name").equals(TTL_INDEX)) {
                return;
            }
        }
        Document createTimeIndex = new Document(CREATE_AT, 1);
        IndexOptions indexOptions = new IndexOptions();
        indexOptions.expireAfter(getExpireDays(), TimeUnit.DAYS);
        indexOptions.name(TTL_INDEX);
        collection.createIndex(createTimeIndex, indexOptions);
        Document patientIndex = new Document();
        patientIndex.append(PATIENT_ID, 1);
        patientIndex.append(EVENT_NO, 1);
        collection.createIndex(patientIndex);
    }
    /**
     * url生成
     *
     * @param patientId 病人ID
     * @param eventNo   事件号
     * @return
     */
    protected String createUrl(String dataSetCode, String patientId, String eventNo) {
        String requestPath = ConfigureUtil.getProValue("archive.properties", "hos.archives.request.url");
        return requestPath + dataSetCode + "/" + patientId + "/" + eventNo;
    }
    protected String getKey() {
        return KEY;
    }
    protected Long getExpireDays() {
        final Long expireDay = 30L;
        String value = ConfigureUtil.getProValue("archive.properties","hos.archives.expiry.days");
        Long days = NumberUtil.toLong(value);
        return days == null ? expireDay : days;
    }
    /**
     * 病人摘要信息生成
     * 从sys.config文件中的配置读取所需的摘要信息
     *
     * @param patient
     * @return
     */
    protected Document genPatientIndexData(Patient patient) {
        Map<String, PatientIndex> patientIndexMap = SysConfig.getInstance().getPatientIndexMap();
        PatientIndex patientIndex = null;
        List<Document> arrayNode = null;
        Document objectNode = null;
        Document result = new Document();
        MongoCursor<Document> cursor = null;
        MongoCursor<Document> diagCursor = null;
        try {
            for (Map.Entry<String, PatientIndex> entry : patientIndexMap.entrySet()) {
                String dataSetCode = entry.getKey();
                patientIndex = entry.getValue();
                arrayNode = new ArrayList<>();
                MongoCollection<Document> collection = MongoDB.db(dbName).getCollection(dataSetCode);
                FindIterable<Document> documents = collection.find(and(eq(KEY, dataSetCode), eq(PATIENT_ID, patient.getPatientId()), eq(EVENT_NO, patient.getEventNo()))).projection(excludeId());
                cursor = documents.iterator();
                if (cursor.hasNext()) {
                    while (cursor.hasNext()) {
                        Document document = cursor.next();
                        List<Document> list = document.get("data", List.class);
                        for (Document doc : list) {
                            objectNode = new Document();
                            objectNode.put(patientIndex.getPatientId(), patient.getPatientId());
                            objectNode.put(patientIndex.getEventNoCode(), patient.getEventNo());
                            objectNode.put(patientIndex.getRefTimeCode(), doc.get(patientIndex.getRefTimeCode()) == null ? null : (String) doc.get(patientIndex.getRefTimeCode()));
                            objectNode.put("orgCode", patient.getOrgCode());
                            objectNode.put(patientIndex.getOfficeCode(), doc.get(patientIndex.getOfficeCode()) == null ? null : (String) doc.get(patientIndex.getOfficeCode()));
                            objectNode.put(patientIndex.getOfficeName(), doc.get(patientIndex.getOfficeName()) == null ? null : (String) doc.get(patientIndex.getOfficeName()));
                            if ("HDSC02_09".equals(dataSetCode)) {
                                objectNode.put(patientIndex.getLeaveTime(), doc.get(patientIndex.getLeaveTime()) == null ? null : (String) doc.get(patientIndex.getLeaveTime()));
                            }
                            arrayNode.add(objectNode);
                        }
                    }
                    if (arrayNode != null && arrayNode.size() > 0) {
                        result.put(dataSetCode, arrayNode);
                    } else {
                        continue;
                    }
                    String diagDataSet = patientIndex.getDiagDataSet();
                    MongoCollection<Document> diagCollection = MongoDB.db(dbName).getCollection(diagDataSet);
                    FindIterable<Document> diags = diagCollection.find(and(eq(KEY, diagDataSet), eq(PATIENT_ID, patient.getPatientId()), eq(EVENT_NO, patient.getEventNo()))).projection(excludeId());
                    diagCursor = diags.iterator();
                    arrayNode = new ArrayList<>();
                    while (diagCursor.hasNext()) {
                        Document document = diagCursor.next();
                        List<Document> list = document.get("data", List.class);
                        for (Document doc : list) {
                            objectNode = new Document();
                            objectNode.put(patientIndex.getDiagType(), doc.get(patientIndex.getDiagType()) == null ? null : (String) doc.get(patientIndex.getDiagType()));
                            objectNode.put(patientIndex.getDiagCode(), doc.get(patientIndex.getDiagCode()) == null ? null : (String) doc.get(patientIndex.getDiagCode()));
                            objectNode.put(patientIndex.getDiagName(), doc.get(patientIndex.getDiagName()) == null ? null : (String) doc.get(patientIndex.getDiagName()));
                            arrayNode.add(objectNode);
                        }
                    }
                    if (arrayNode != null && arrayNode.size() > 0) {
                        result.put(diagDataSet, arrayNode);
                    }
                } else {
                    continue;
                }
            }
            return result;
        } catch (Exception e) {
            //DebugLogger.fatal("", e);
        } finally {
            if (cursor != null) {
                cursor.close();
            }
            if (diagCursor != null) {
                diagCursor.close();
            }
        }
        return null;
    }
}
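
As a worked example of what save() leaves behind: each dataset document keeps the adapted rows under data, is keyed by patient_id/event_no, and is stamped with create_at (driving the TTL index) plus a resource block whose url follows hos.archives.request.url + dataSetCode/patientId/eventNo and whose expiry_date is now plus hos.archives.expiry.days (30 by default). Roughly, with illustrative values and the date format assumed from DateUtil.DEFAULT_YMDHMSDATE_FORMAT:

{
  "code": "HDSC01_02",
  "patient_id": "P001",
  "event_no": "E001",
  "org_code": "ORG001",
  "inner_version": "5.0",
  "data": [ ...adapted rows... ],
  "create_at": ISODate("2016-10-09T10:00:00Z"),
  "resource": {
    "url": "<hos.archives.request.url>HDSC01_02/P001/E001",
    "expiry_date": "2016-11-08 10:00:00"
  }
}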

+ 0 - 55
src/main/java/com/yihu/hos/services/ArchiveUploadJob.java

@ -1,55 +0,0 @@
package com.yihu.hos.services;
import com.yihu.hos.common.Services;
import com.yihu.hos.core.datatype.DateUtil;
import com.yihu.hos.core.log.Logger;
import com.yihu.hos.core.log.LoggerFactory;
import com.yihu.hos.crawler.service.CrawlerManager;
import com.yihu.hos.datacollect.model.RsJobConfig;
import com.yihu.hos.datacollect.service.intf.IDatacollectManager;
import com.yihu.hos.web.framework.model.DataGridResult;
import com.yihu.hos.web.framework.util.springutil.SpringBeanUtil;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
/**
 * Created by hzp on 2016/5/11.
 */
public class ArchiveUploadJob implements IBaseJob {
    private static Logger logger = LoggerFactory.getLogger(ArchiveUploadJob.class);
    @Override
    public void execute(String jobId) throws Exception{
        String random = UUID.randomUUID().toString();
        logger.info("档案采集上传开始,流水号:" + random + ",jobId:"+jobId);
        IDatacollectManager datacollect = SpringBeanUtil.getService(Services.Datacollect);
        RsJobConfig job = datacollect.getJobById(jobId);
        Map<String, Object> condition = new HashMap<>();
        Date begin = job.getRepeatStartTime();
        Date end = job.getRepeatEndTime();
        if (!job.getJobType().equals("0")) {
            //调整截止时间,当前时间-偏移量
            end = DateUtil.addDate(-job.getDelayTime(), DateUtil.getSysDateTime());
            if ((end.getTime() - begin.getTime()) <= 0) {
                return; //结束时间小于开始时间时,不获取
            }
        }
        condition.put("beginDate", begin);
        condition.put("endDate", end);
        DataGridResult result = datacollect.getJobDatasetByJobId(jobId);
        CrawlerManager crawlerManager = new CrawlerManager(result.getDetailModelList(), job.getSchemeVersion());
        String message = crawlerManager.dataCrawler(condition);
        if (!job.getJobType().equals("0")) {
            job.setRepeatStartTime(end);
            job.setRepeatEndTime(DateUtil.formatYMDToYMDHMS(DateUtil.getSysDateTime().toString()));
        }
        datacollect.updateJob(job);
        logger.info("档案采集上传结束,流水号:" + random + ",jobId:" + jobId + ",message:" + message);
        return;
    }
}
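
The window handling above, condensed with explanatory comments (a sketch only; the unit DateUtil.addDate applies to delayTime is assumed from the surrounding code, not confirmed here):

Date begin = job.getRepeatStartTime();
Date end = job.getRepeatEndTime();
if (!job.getJobType().equals("0")) {                   // jobType "0" keeps the stored fixed window unchanged
    end = DateUtil.addDate(-job.getDelayTime(), DateUtil.getSysDateTime());   // now minus the configured offset
    if (end.getTime() - begin.getTime() <= 0) {
        return;                                        // window not open yet: skip this run
    }
}
condition.put("beginDate", begin);                     // passed to CrawlerManager.dataCrawler(condition)
condition.put("endDate", end);
// after the crawl, repeatStartTime is advanced to end so the next run continues from there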

+ 0 - 56
src/main/java/com/yihu/hos/services/CrawlerFlowUploadJob.java

@ -1,56 +0,0 @@
package com.yihu.hos.services;
import com.yihu.hos.common.Services;
import com.yihu.hos.core.datatype.DateUtil;
import com.yihu.hos.core.log.Logger;
import com.yihu.hos.core.log.LoggerFactory;
import com.yihu.hos.crawler.service.CrawlerFlowManager;
import com.yihu.hos.datacollect.model.RsJobConfig;
import com.yihu.hos.datacollect.service.intf.IDatacollectManager;
import com.yihu.hos.web.framework.model.DataGridResult;
import com.yihu.hos.web.framework.util.springutil.SpringBeanUtil;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
/**
 * 任务编排任务执行
 *
 * Created by chenyingjie on 16/7/14.
 */
public class CrawlerFlowUploadJob implements IBaseJob {
    private static Logger logger = LoggerFactory.getLogger(CrawlerFlowUploadJob.class);
    @Override
    public void execute(String jobId) throws Exception {
        String random = UUID.randomUUID().toString();
        logger.info("任务编排——档案采集上传开始,流水号:" + random + ",jobId:"+jobId);
        IDatacollectManager datacollect = SpringBeanUtil.getService(Services.Datacollect);
        RsJobConfig job = datacollect.getJobById(jobId);
        Map<String, Object> condition = new HashMap<>();
        Date begin = job.getRepeatStartTime();
        Date end = job.getRepeatEndTime();
        if (!job.getJobType().equals("0")) {
            //调整截止时间,当前时间-偏移量
            end = DateUtil.addDate(-job.getDelayTime(), DateUtil.getSysDateTime());
            if ((end.getTime() - begin.getTime()) <= 0) {
                return; //结束时间小于开始时间时,不获取
            }
        }
        condition.put("beginDate", begin);
        condition.put("endDate", end);
        DataGridResult result = datacollect.getJobDatasetByJobId(jobId);
        CrawlerFlowManager crawlerFlowManager = new CrawlerFlowManager(result.getDetailModelList(), job.getSchemeVersion());
        String message = crawlerFlowManager.dataCrawler(condition);
        if (!job.getJobType().equals("0")) {
            job.setRepeatStartTime(end);
            job.setRepeatEndTime(DateUtil.formatYMDToYMDHMS(DateUtil.getSysDateTime().toString()));
        }
        datacollect.updateJob(job);
        logger.info("任务编排——档案采集上传结束,流水号:" + random + ",jobId:" + jobId + ",message:" + message);
        return;
    }
}