zdm hace 6 años
padre
commit
bd928c7f3e

+ 5 - 3
src/main/java/com/yihu/ehr/resolve/FilePackageResolver.java

@ -59,6 +59,7 @@ public class FilePackageResolver extends PackageResolver {
        String eventDate = root.get("event_time") == null ? "" : root.get("event_time").asText();
        String createDate = root.get("create_date") == null ? "" : root.get("create_date").asText();
        String cdaVersion = root.get("inner_version") == null ? "" : root.get("inner_version").asText();
        Boolean reUploadFlg = root.get("reUploadFlg") == null ? false : root.get("reUploadFlg").asBoolean();
        // Validate the completeness of the package's basic data; if any required field is empty, report the missing package information immediately.
        StringBuilder errorMsg = new StringBuilder();
@ -80,6 +81,7 @@ public class FilePackageResolver extends PackageResolver {
        if (!StringUtils.isEmpty(errorMsg.toString())){
            throw new IllegalJsonDataException(errorMsg.toString());
        }
        filePackage.setReUploadFlg(reUploadFlg);
        filePackage.setPatientId(patientId);
        filePackage.setEventNo(eventNo);
        if (eventType != -1) {
@ -90,8 +92,9 @@ public class FilePackageResolver extends PackageResolver {
        filePackage.setCreateDate(DateUtil.strToDate(createDate));
        filePackage.setEventTime(DateUtil.strToDate(eventDate));
        filePackage.setDemographicId(demographicId);
        parseDataSets(filePackage, (ObjectNode) root.get("data_sets"));
        if(root.get("data_sets") != null){
            parseDataSets(filePackage, (ObjectNode) root.get("data_sets"));
        }
        parseFiles(filePackage, (ArrayNode) root.get("files"), documents.getParent() + File.separator + "documents");
    }
@ -118,7 +121,6 @@ public class FilePackageResolver extends PackageResolver {
                Iterator<Map.Entry<String, JsonNode>> filedIterator = jsonRecord.fields();
                while (filedIterator.hasNext()) {
                    Map.Entry<String, JsonNode> field = filedIterator.next();
                    //String metaData = translateMetaDataCode(profile.getCdaVersion(), dataSetCode, field.getKey());
                    String value = field.getValue().asText().equals("null") ? "" : field.getValue().asText();
                    if (field.getKey() != null) {
                        record.putMetaData(field.getKey(), value);

+ 2 - 1
src/main/java/com/yihu/ehr/resolve/LinkPackageResolver.java

@ -74,6 +74,7 @@ public class LinkPackageResolver extends PackageResolver {
        String visitType = jsonNode.get("visit_type") == null? "" : jsonNode.get("visit_type").asText();
        String eventDate = jsonNode.get("event_time") == null ? "" : jsonNode.get("event_time").asText();
        String expireDate = jsonNode.get("expire_date") == null? "" : jsonNode.get("expire_date").asText();
        Boolean reUploadFlg = jsonNode.get("reUploadFlg") == null ? false : jsonNode.get("reUploadFlg").asBoolean();
        // Validate the completeness of the package's basic data; if any required field is empty, report the missing package information immediately.
        StringBuilder errorMsg = new StringBuilder();
@ -106,6 +107,7 @@ public class LinkPackageResolver extends PackageResolver {
        linkPackage.setEventTime(DateUtil.strToDate(eventDate));
        linkPackage.setVisitType(visitType);
        linkPackage.setExpireDate(DateUtil.strToDate(expireDate));
        linkPackage.setReUploadFlg(reUploadFlg);
        // "dataset" node: stores the data-set URLs
        JsonNode dataSetNode = jsonNode.get("dataset");
        Iterator<String> fieldNames = dataSetNode.fieldNames();
@ -205,7 +207,6 @@ public class LinkPackageResolver extends PackageResolver {
                    _fileNames.add(fileName);
                    // FTP files are deleted only after the data has been persisted to storage
                    needDeleteFiles.put(path,_fileNames);
                    ftpUtils.deleteFile(path, fileName);
                }
            } finally {
                if (ftpUtils != null){

+ 62 - 0
src/main/java/com/yihu/ehr/resolve/dao/MasterResourceDao.java

@ -1,10 +1,17 @@
package com.yihu.ehr.resolve.dao;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import com.yihu.ehr.profile.EventType;
import com.yihu.ehr.hbase.HBaseDao;
import com.yihu.ehr.hbase.TableBundle;
import com.yihu.ehr.profile.core.ResourceCore;
import com.yihu.ehr.profile.exception.IllegalJsonFileException;
import com.yihu.ehr.profile.family.ResourceCells;
import com.yihu.ehr.resolve.model.stage1.FilePackage;
import com.yihu.ehr.resolve.model.stage1.LinkPackage;
import com.yihu.ehr.resolve.model.stage1.OriginalPackage;
import com.yihu.ehr.resolve.model.stage2.MasterRecord;
import com.yihu.ehr.resolve.model.stage2.ResourceBucket;
@ -28,6 +35,15 @@ public class MasterResourceDao {
    private HBaseDao hbaseDao;
    public void saveOrUpdate(ResourceBucket resourceBucket, OriginalPackage originalPackage) throws Exception {
        // For unstructured (file) packages or imaging (link) packages, use the rowkey to check whether the structured package data already exists
        if(originalPackage instanceof FilePackage || originalPackage instanceof LinkPackage){
            String profileId = originalPackage.getProfileId().toString();
            String rowkey = profileId.substring(2,profileId.length());
            Map<String, String> originResult = hbaseDao.get(ResourceCore.MasterTable, rowkey, resourceBucket.getdFamily());
            /*if (!originResult.isEmpty()) {
                throw new IllegalJsonFileException("Please upload the struct package(" + rowkey + ") first !");
            }*/
        }
        String rowKey = resourceBucket.getId();
        TableBundle bundle = new TableBundle();
        if (originalPackage.isReUploadFlg()) { //补传处理
@ -39,6 +55,7 @@ public class MasterResourceDao {
                bundle.addValues(rowKey, resourceBucket.getdFamily(), originResult);
                hbaseDao.save(resourceBucket.getMaster(), bundle);
                Map<String, String> basicResult = hbaseDao.get(resourceBucket.getMaster(), rowKey, resourceBucket.getBasicFamily());
                updateFile(resourceBucket,originalPackage,basicResult);
                if (StringUtils.isNotEmpty(basicResult.get(ResourceCells.EVENT_TYPE))) {
                    EventType eventType = EventType.create(basicResult.get(ResourceCells.EVENT_TYPE));
                    originalPackage.setEventType(eventType);
@ -69,4 +86,49 @@ public class MasterResourceDao {
            hbaseDao.save(resourceBucket.getMaster(), bundle);
        }
    }
    /**
     * Merges the newly uploaded file list into the stored one when an
     * unstructured ({@code FilePackage}) or link ({@code LinkPackage}) package
     * is re-uploaded, then persists the merged basic-family record back to HBase.
     *
     * <p>Merge strategy: for a {@code FilePackage}, stored entries whose
     * {@code cda_document_id} matches (case-insensitively) a newly uploaded entry
     * are replaced by the new entry; for a {@code LinkPackage}, new entries are
     * simply appended. Any other package type is left untouched.
     *
     * @param resourceBucket  bucket holding the newly parsed records; supplies the
     *                        new file list, rowkey, column family and target table
     * @param originalPackage the incoming package; its concrete type selects the
     *                        merge strategy
     * @param basicResult     mutable basic-family row previously read from HBase;
     *                        its "file_list" cell is updated in place before saving
     */
    private void updateFile(ResourceBucket resourceBucket, OriginalPackage originalPackage, Map<String, String> basicResult) {
        final String fileListKey = "file_list";
        // Treat a missing/blank stored or uploaded list as empty instead of letting
        // JsonParser throw a NullPointerException (original behavior was an NPE).
        JsonArray oldFileArray = parseFileList(basicResult.get(fileListKey));
        JsonArray waitAddFileArray = parseFileList(resourceBucket.getBasicRecord(fileListKey));
        JsonArray merged;
        if (originalPackage instanceof FilePackage) {
            // Replace any stored entry that shares a cda_document_id with a re-uploaded one.
            merged = new JsonArray();
            merged.addAll(oldFileArray);
            for (JsonElement waitAdd : waitAddFileArray) {
                String cdaId = ((JsonObject) waitAdd).get("cda_document_id").getAsString();
                for (JsonElement oldFile : oldFileArray) {
                    String oldCdaId = ((JsonObject) oldFile).get("cda_document_id").getAsString();
                    if (cdaId.equalsIgnoreCase(oldCdaId)) {
                        merged.remove(oldFile);
                    }
                }
            }
            merged.addAll(waitAddFileArray);
        } else if (originalPackage instanceof LinkPackage) {
            // Link packages never overwrite existing entries: just append the new ones.
            merged = oldFileArray;
            merged.addAll(waitAddFileArray);
        } else {
            // Structured packages carry no file list — nothing to merge or save.
            return;
        }
        basicResult.put(fileListKey, merged.toString());
        TableBundle bundle = new TableBundle();
        bundle.addValues(resourceBucket.getId(), resourceBucket.getBasicFamily(), basicResult);
        hbaseDao.save(resourceBucket.getMaster(), bundle);
    }

    /**
     * Parses {@code raw} as a JSON array, returning an empty array for
     * {@code null} or blank input so callers never hit an NPE from the parser.
     */
    private static JsonArray parseFileList(String raw) {
        if (raw == null || raw.trim().isEmpty()) {
            return new JsonArray();
        }
        return new JsonParser().parse(raw).getAsJsonArray();
    }
}

+ 1 - 0
src/main/resources/application.yml

@ -69,6 +69,7 @@ ehr:
        - HDSC02_09: Resident #住院-入院记录 v1.0
        - HDSD00_13: Resident #住院-入院记录 v1.3
        - HDSD00_16: Resident #住院-出院小结 v1.3
        - HDSD00_11: Resident #住院-病案首页 v1.5
        - HDSB05_03: MedicalExam #体检-登记信息 v1.3
    # Diagnosis information
    diagnosis: