Browse Source

Merge branch 'master' of http://192.168.1.220:10080/esb/esb

Airhead 8 years ago
parent
commit
ba0ae7114f

+ 3 - 0
hos-broker/src/main/java/com/yihu/hos/common/constants/MonitorConstant.java

@ -7,6 +7,9 @@ package com.yihu.hos.common.constants;
 */
public class MonitorConstant {
    public static String MONITOR = "monitor";
    public static String SERVER = "server";
    public static String HOST = "host";

+ 65 - 17
hos-broker/src/main/java/com/yihu/hos/common/scheduler/MonitorScheduler.java

@ -1,5 +1,6 @@
package com.yihu.hos.common.scheduler;
import com.mongodb.*;
import com.yihu.hos.common.constants.MonitorConstant;
import com.yihu.hos.common.dao.BrokerDao;
import com.yihu.hos.common.util.MongodbUtil;
@ -14,16 +15,21 @@ import net.sf.json.JSONArray;
import net.sf.json.JSONObject;
import org.bson.Document;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
import javax.annotation.Resource;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
/**
 *  服务器性能数据采集定时器
 * 服务器性能数据采集定时器
 *
 * @author HZY
 * @version 1.0
 * Created at 2016/10/11.
@ -31,17 +37,24 @@ import java.util.List;
@Component
public class MonitorScheduler {
    @Autowired
    private MongoOperations mongoOperations;
    @Autowired
    private Mongo mongo;
    static private final Logger logger = LoggerFactory.getLogger(MonitorScheduler.class);
    private static String host = SigarUtil.getHost();
    @Resource(name = ServiceMonitorService.BEAN_ID)
    private ServiceMonitorService serviceMonitorService;
    @Autowired
    private BrokerDao brokerDao;
    @Scheduled(cron="0 0/1 * * * ?") //每分钟执行一次
    @Scheduled(cron = "0 0/1 * * * ?") //每分钟执行一次
    public void statusCheck() {
        System.out.println("每分钟执行一次。开始============================================");
        //TODO 采集服务器健康监控指标数据 statusTask.healthCheck();
//        collectEnvHealth();
        collectEnvHealth();
        try {
            collectServiceHealth();
        } catch (Exception e) {
@ -50,37 +63,42 @@ public class MonitorScheduler {
        System.out.println("每分钟执行一次。结束。");
    }
    public String collectEnvHealth(){
    /**
     * 服务器健康指标采集
     * @return
     */
    public String collectEnvHealth() {
        try {
            MongodbUtil monoEnv = new MongodbUtil("envHealth");
            MongodbUtil monoEnv = new MongodbUtil(MonitorConstant.MONITOR);
            Document result = null;
            result = new Document();
            result.put("create_date", DateUtil.getCurrentString(DateUtil.DEFAULT_YMDHMSDATE_FORMAT));
            result.put("create_time", new Date());
            result.put("host",host);
            //cpu
            JSONObject cpu = JSONObject.fromObject( SigarUtil.cpu());
            result.put("data",cpu);
            JSONObject cpu = JSONObject.fromObject(SigarUtil.cpu());
            result.put("data", cpu);
            result.put("type", MonitorConstant.CPU);
            monoEnv.insert(host,result);
            monoEnv.insert(MonitorConstant.SERVER, result);
            //内存
            JSONObject memory = JSONObject.fromObject( SigarUtil.memory());
            result.put("data",memory);
            JSONObject memory = JSONObject.fromObject(SigarUtil.memory());
            result.put("data", memory);
            result.put("type", MonitorConstant.MEMORY);
            result.remove("_id");
            monoEnv.insert(host,result);
            monoEnv.insert(MonitorConstant.SERVER, result);
            //硬盘
            List<JSONObject> files = JSONArray.fromObject( SigarUtil.file());
            result.put("data",files);
            List<JSONObject> files = JSONArray.fromObject(SigarUtil.file());
            result.put("data", files);
            result.put("type", MonitorConstant.FILES);
            result.remove("_id");
            monoEnv.insert(host, result);
            monoEnv.insert(MonitorConstant.SERVER, result);
            //网络
            JSONObject net = JSONObject.fromObject( SigarUtil.net());
            result.put("data",net);
            JSONObject net = JSONObject.fromObject(SigarUtil.net());
            result.put("data", net);
            result.put("type", MonitorConstant.NET);
            result.remove("_id");
            monoEnv.insert(host,result);
            monoEnv.insert(MonitorConstant.SERVER, result);
        } catch (Exception e) {
            e.printStackTrace();
@ -117,4 +135,34 @@ public class MonitorScheduler {
//        logger.info("每10秒执行一次。结束。");
//    }
    /**
     * Registers this server in the monitor host list if no document with the
     * current host address exists yet.
     * <p>
     * NOTE(review): despite the original trailing comment, {@code fixedDelay =
     * 3600*24*1000, initialDelay = 3000} runs once every 24 hours starting 3s
     * after startup — it does NOT fire at noon. Restore the commented cron
     * expression if a fixed time of day is required.
     */
//    @Scheduled(cron = "0 0 12 * * ?") //每天中午12点触发
    @Scheduled(fixedDelay = 3600*24*1000,initialDelay=3000) //runs every 24h after a 3s startup delay
    public void checkHost() {
        MongodbUtil monoEnv = new MongodbUtil(MonitorConstant.MONITOR);
        mongoOperations = new MongoTemplate(mongo, MonitorConstant.MONITOR);
        DBCollection envCollection = mongoOperations.getCollection(MonitorConstant.HOST);
        BasicDBObject queryObject = new BasicDBObject().append(QueryOperators.AND,
                new BasicDBObject[]{
                        new BasicDBObject().append("host", host)});
        DBCursor cursor = envCollection.find(queryObject);
        try {
            if (cursor.size() < 1) {
                Document result = new Document();
                InetAddress addr = InetAddress.getLocalHost();
                result.put("name", addr.getHostName());
                result.put("host", addr.getHostAddress());
                monoEnv.insert(MonitorConstant.HOST, result);
            }
        } catch (UnknownHostException e) {
            // was e.printStackTrace(); route through the class logger instead
            logger.error("Failed to resolve local host for monitor host registration", e);
        } finally {
            cursor.close(); // DBCursor holds a server-side resource; previously leaked
        }
    }
}

+ 2 - 3
hos-broker/src/main/java/com/yihu/hos/common/util/SigarUtil.java

@ -24,9 +24,8 @@ public class SigarUtil {
    private static Sigar initSigar() {
        try {
            //此处只为得到依赖库文件的目录,可根据实际项目自定义
            URL claaPath = SigarUtil.class.getResource("/");
            String sigarLibPath = SigarUtil.class.getProtectionDomain().getCodeSource().getLocation().getPath() ;
            String path = System.getProperty("java.library.path");
            String sigarLibPath = claaPath.getPath();
            //为防止java.library.path重复加,此处判断了一下
            if (!path.contains(sigarLibPath)) {
                if (isOSWin()) {
@ -315,7 +314,7 @@ public class SigarUtil {
                InetAddress inet = InetAddress.getLocalHost();
                // 没有出现异常而正常当取到的IP时,如果取到的不是网卡循回地址时就返回
                // 否则再通过Sigar工具包中的方法来获取
                if (!NetFlags.LOOPBACK_ADDRESS.equals(address)) {
                if (NetFlags.LOOPBACK_ADDRESS.equals(address)) {
                    address = netConfig.getAddress();
                }
            } catch (UnknownHostException e) {

+ 27 - 0
hos-core/pom.xml

@ -153,6 +153,33 @@
            <artifactId>slf4j-api</artifactId>
            <version>${slf4j.version}</version>
        </dependency>
        <!--=============httpclient start=============-->
        <dependency>
            <groupId>org.apache.httpcomponents</groupId>
            <artifactId>httpclient</artifactId>
            <version>4.5.1</version>
        </dependency>
        <dependency>
            <groupId>org.apache.httpcomponents</groupId>
            <artifactId>httpmime</artifactId>
            <version>4.5.1</version>
        </dependency>
        <dependency>
            <groupId>org.apache.httpcomponents</groupId>
            <artifactId>httpcore</artifactId>
            <version>4.4.3</version>
        </dependency>
        <!--=============httpclient end=============-->
        <dependency>
            <groupId>org.springframework</groupId>
            <artifactId>spring-core</artifactId>
            <version>4.3.3.RELEASE</version>
            <scope>compile</scope>
        </dependency>
    </dependencies>
    <build>

+ 1 - 2
hos-rest/src/main/java/com/yihu/hos/rest/common/dao/DatacollectDao.java

@ -1,7 +1,6 @@
package com.yihu.hos.rest.common.dao;
import com.yihu.hos.core.datatype.StringUtil;
import com.yihu.hos.rest.models.rs.DtoJobDataset;
import com.yihu.hos.rest.models.rs.RsJobDataset;
import com.yihu.hos.web.framework.constrant.DateConvert;
import com.yihu.hos.web.framework.dao.SQLGeneralDAO;
@ -61,7 +60,7 @@ public class DatacollectDao extends SQLGeneralDAO {
    {
        StringBuilder sb = new StringBuilder();
        sb.append("from RsJobConfig t where 1=1 ");
        sb.append("from RsJobConfig t where 1=1 and valid=1 ");
        if (!StringUtil.isEmpty(conditionMap.get("jobName")))
        {
            sb.append(" and t.jobName like '%" + conditionMap.get("jobName") + "%'");

File diff suppressed because it is too large
+ 205 - 0
hos-rest/src/main/java/com/yihu/hos/rest/services/crawler/CollectHelper.java


+ 80 - 0
hos-rest/src/main/java/com/yihu/hos/rest/services/crawler/CrawlerManager.java

@ -25,6 +25,7 @@ import java.util.*;
@Service("CrawlerManager")
public class CrawlerManager {
    public static final String BEAN_ID = "CrawlerManager";
    private static String unstructured = "unstructured";
    private static Logger logger = LoggerFactory.getLogger(CrawlerManager.class);
    @Autowired
    private AdapterDatasetService adapterDatasetService;
@ -150,6 +151,85 @@ public class CrawlerManager {
        return true;
    }
    //TOADD single-patient collect & upload (including blob data)
    /**
     * Collects every adapted dataset for the given patient, registers the patient
     * when the register dataset's first row carries the configured ID-card-number
     * field, then packages and uploads the archive through {@code dispatch}.
     *
     * @param patient patient to collect; its reUploadFlg is reset to "false" first
     * @return true when collection and upload both succeed, false on any failure
     */
    public Boolean collectProcess2(Patient patient) {
        if (!getDataForPrepare()) {
            logger.error("适配数据尚未准备");
            return false;
        }
        patient.setReUploadFlg(StringUtil.toString(false));
        logger.trace("采集->注册->打包上传,patient_id:" + patient.getPatientId() + ", event_no:" + patient.getEventNo());
        try {
            // Acquire a gateway access token before any remote call.
            if (!dispatch.getToken()) {
                logger.error("token获取失败");
                return false;
            }
            //getRemoteVersion
//            if (!dispatch.getRemoteVersion(patient.getOrgCode())) {
//                logger.error("远程版本获取失败");
//                return false;
//            }
            // The adapter version for this org must already be cached in sysConfig.
            if(StringUtil.isEmpty(sysConfig.getVersionMap().get(patient.getOrgCode()))){
                logger.error("版本获取失败");
                return false;
            }
            Map<String, AdapterDataSet> dataSetMap = new HashMap<>();
            Map<String, JsonNode> dataMap = new HashMap<>();
            if (adapterDataSetMap.containsKey(unstructured)){
                //TODO unstructured-archive handling (not implemented yet)
            }else {
                //TODO structured-archive handling (not implemented yet)
            }
            for (String key : adapterDataSetMap.keySet()) {
                /**
                 * Fetch this dataset's rows; datasets with no data are skipped.
                 */
                AdapterDataSet adapterDataSet = adapterDataSetMap.get(key);
                JsonNode jsonObject = dispatch.fecthData(patient, adapterDataSet);
                if (StringUtil.isEmpty(jsonObject)) {
                    continue;
                }
                dataSetMap.put(adapterDataSet.getAdapterDataSetT().getStdDatasetCode(), adapterDataSet);
                dataMap.put(key, jsonObject);
                /**
                 * Register the patient when this is the configured register dataset
                 * and the first row contains the configured ID-card-number field.
                 */
                if (SysConfig.getInstance().getRegisterDataSet().equals(adapterDataSet.getAdapterDataSetT().getStdDatasetCode())) {
                    if (!StringUtil.isEmpty(jsonObject.get("data")) && !StringUtil.isEmpty(jsonObject.get("data").get(0))) {
                        if (!StringUtil.isEmpty(jsonObject.get("data").get(0).get(SysConfig.getInstance().getRegisterIdCardNo()))) {
                            logger.info("注册病人");
                            dispatch.register(patient, jsonObject.toString());
                        }
                    }
                }
            }
            logger.info("采集病人成功,patient_id:" + patient.getPatientId() + ", event_no:" + patient.getEventNo());
            // Upload the collected archive (only when at least one dataset had data).
            logger.info("上传病人档案");
            try {
                if (!CollectionUtil.isEmpty(dataMap.keySet())) {
                    if (!dispatch.upload(dataMap, patient, dataSetMap)) {
                        logger.error("上传档案失败");
                        return false;
                    }
                }
            } catch (Exception e) {
                logger.error("档案上传失败,patient_id:" + patient.getPatientId() + ", event_no:" + patient.getEventNo());
                return false;
            }
        } catch (Exception e) {
            logger.error("采集病人失败,patient_id:" + patient.getPatientId() + ", event_no:" + patient.getEventNo(), e);
            return false;
        }
        return true;
    }
    public Boolean getDataForPrepare() {
        if (adapterFlg) {
            return true;

+ 148 - 1
hos-rest/src/main/java/com/yihu/hos/rest/services/crawler/CrawlerService.java

@ -4,15 +4,20 @@ import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.yihu.ehr.dbhelper.common.DBList;
import com.yihu.ehr.dbhelper.common.MongodbQuery;
import com.yihu.ehr.dbhelper.common.QueryCondition;
import com.yihu.ehr.dbhelper.common.QueryEntity;
import com.yihu.ehr.dbhelper.jdbc.DBHelper;
import com.yihu.hos.core.datatype.StringUtil;
import com.yihu.hos.rest.common.dao.CrawlerDatasetDao;
import com.yihu.hos.rest.common.dao.CrawlerFlowDao;
import com.yihu.hos.rest.common.dao.CrawlerFlowHeadDao;
import com.yihu.hos.rest.models.rs.DtoJobDataset;
import com.yihu.hos.rest.models.crawler.flow.CrawlerDataSetModel;
import com.yihu.hos.rest.models.crawler.flow.CrawlerFlowHeadModel;
import com.yihu.hos.rest.models.crawler.flow.CrawlerFlowModel;
import com.yihu.hos.rest.models.crawler.flow.resultModel.*;
import com.yihu.hos.rest.models.rs.DtoJobDataset;
import com.yihu.hos.rest.models.standard.adapter.AdapterDatasetModel;
import com.yihu.hos.rest.models.standard.adapter.AdapterMetadataModel;
import com.yihu.hos.rest.models.standard.adapter.AdapterSchemeVersionModel;
@ -26,12 +31,20 @@ import com.yihu.hos.rest.services.standard.adapter.AdapterSchemeVersionService;
import com.yihu.hos.web.framework.constrant.SqlConstants;
import com.yihu.hos.web.framework.model.ActionResult;
import com.yihu.hos.web.framework.model.DetailModelResult;
import com.yihu.hos.web.framework.util.GridFSUtil;
import net.sf.json.JSONArray;
import net.sf.json.JSONObject;
import org.apache.commons.lang3.StringUtils;
import org.bson.types.ObjectId;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import javax.annotation.Resource;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.sql.Blob;
import java.util.*;
@Transactional
@ -514,4 +527,138 @@ public class CrawlerService {
        }
        return DetailModelResult.error("获取已编排任务适配方案失败!");
    }
    /**
     * Pages through a MongoDB collection, optionally filtered by a JSON array of
     * conditions (objects of the form {andOr, field, condition, value}).
     *
     * NOTE(review): the database name "mydb" is hard-coded — confirm this is
     * intentional and not leftover test configuration.
     *
     * @param page      1-based page number (parsed with Integer.valueOf)
     * @param rows      page size
     * @param tableCode collection name to query
     * @param paramJson JSON array of conditions; empty or "{}" means no filter
     * @return the paged query result
     * @throws Exception on parse or query failure
     */
    public static DBList getMongoDBData(String page, String rows, String tableCode, String paramJson) throws Exception {
        MongodbQuery mdb = new MongodbQuery("mydb");
        QueryEntity qe = new QueryEntity(tableCode, Integer.valueOf(page), Integer.valueOf(rows));
        // apply the optional query conditions
        if (!org.springframework.util.StringUtils.isEmpty(paramJson) && !"{}".equals(paramJson)) {
            JSONArray ar = JSONArray.fromObject(paramJson);
            for (int i = 0; i < ar.size(); i++) {
                JSONObject jo = (JSONObject) ar.get(i);
                String andOr = String.valueOf(jo.get("andOr"));
                String field = String.valueOf(jo.get("field"));
                String cond = String.valueOf(jo.get("condition"));
                String value = String.valueOf(jo.get("value"));
                qe.addCondition(new QueryCondition(andOr, cond, field, value));
            }
        }
        DBList list = mdb.query(qe);
        return list;
    }
    /**
     * Ad-hoc manual test entry point; currently runs {@link #testFetch()}.
     * NOTE(review): test scaffolding inside a service class — remove before release.
     */
    public static void main(String[] args) {
//        try {
//        List<EhrCondition> queryParams = new ArrayList<>();
//        queryParams.add(new EhrCondition(" = ","_id","579091357a2b8f19b0000001" ));
//        ObjectMapper mapper = new ObjectMapper();
//        ObjectNode paramsNode = mapper.createObjectNode();
//        paramsNode.put("tableCode", "mydb");
//            paramsNode.put("condition", mapper.writeValueAsString(queryParams));
//
//            DBList document= getMongoDBData("1","10","fs.chunks","");
//            List<org.json.JSONObject> list=document.getList();
//            for (org.json.JSONObject jsonObject:list){
//                String name = UUID.randomUUID().toString()+".jpg";
//                Binary binary = (Binary) jsonObject.get("data");
//                byte type=binary.getType();
//                System.out.println(type);
//                byte[] fileByte= binary.getData();
//                FileUtil.writeFile("E:/test/mongoFile/"+name,fileByte,"utf-8");
//                System.out.println(fileByte);
//            }
//
//            System.out.println(document);
//        } catch (Exception e) {
//            e.printStackTrace();
//        }
//        testGetBlob();
        testFetch();
    }
    /**
     * TODO collection test — delete after verification.
     * Reads three report tables from a fixed Oracle instance and, for REPORT_THIN
     * rows, moves the CONTENT blob into GridFS, replacing it with the ObjectId.
     *
     * NOTE(review): JDBC URL embeds credentials (newhos/newhos) and a hard-coded
     * host — must not ship; move to configuration or delete with this method.
     *
     * @return table name -> rows (REPORT_THIN rows have CONTENT rewritten to an ObjectId)
     */
    public static Map<String,List<org.json.JSONObject>> testFetch(){
        Map<String,List<org.json.JSONObject>> map=new HashMap<>();
        String tables[] ={"REPORT_MAIN","REPORT_THIN","REPORT_RELATION"};
        for (String table:tables){
            String sql ="select * from "+table;
            String config = "jdbc:oracle:thin:newhos/newhos@//172.19.103.71:1521/neworcl";
            DBHelper db = new DBHelper("blob",config);
//        db.dbType= DBType.Oracle;
            List<org.json.JSONObject> list = db.query(sql);
//        DBQuery  = new DBQuery("blob", config);
//        DBList dv = dbq.queryBySql(sql,1,10);
//        List<org.json.JSONObject> list2=dv.getList();
            for (org.json.JSONObject jsonObject:list){
                if ("REPORT_THIN".equals(table)){
                    Blob blob=(Blob)jsonObject.get("CONTENT");
                    String type= (String) jsonObject.get("REPORT_TYPE");
                    try {
                        // push blob into the "files" GridFS bucket, keep its id in the row
                        ObjectId objectId= GridFSUtil.uploadFile("files", blob, type, null);
                        jsonObject.put("CONTENT",objectId);
//                        InputStream in=blob.getBinaryStream();
//                        String path="e:/test/blob/"+UUID.randomUUID().toString()+"."+type;
//                        writeFile(path,in);
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                    System.out.println(jsonObject.toString());
                }
            }
            map.put(table,list);
        }
        return map;
    }
    /**
     * TODO file-collection test — delete after verification.
     * Reads HOS_BLOB rows from a fixed Oracle instance and writes each FILE_DATA
     * blob to a local file named by a random UUID plus the row's FILE_TYPE.
     *
     * NOTE(review): hard-coded credentials and a Windows-only output path
     * ("e:/test/blob/") — test scaffolding only.
     */
    public static void testGetBlob(){
        String sql ="select * from HOS_BLOB";
        String config = "jdbc:oracle:thin:newhos/newhos@//172.19.103.71:1521/neworcl";
        DBHelper db = new DBHelper("blob",config);
//        db.dbType= DBType.Oracle;
        List<org.json.JSONObject> list = db.query(sql);
//        DBQuery  = new DBQuery("blob", config);
//        DBList dv = dbq.queryBySql(sql,1,10);
//        List<org.json.JSONObject> list2=dv.getList();
        for (org.json.JSONObject jsonObject:list){
            Blob blob=(Blob)jsonObject.get("FILE_DATA");
            String type= (String) jsonObject.get("FILE_TYPE");
            System.out.println(blob);
            try {
                InputStream in=blob.getBinaryStream();
                String path="e:/test/blob/"+UUID.randomUUID().toString()+"."+type;
                writeFile(path,in);
            } catch (Exception e) {
                e.printStackTrace();
            }
            System.out.println(jsonObject.toString());
        }
    }
    /**
     * Streams the remaining bytes of {@code stream} to {@code filePath}, creating
     * parent directories as needed. The input stream is NOT closed — the caller
     * owns it (matches the original contract).
     *
     * @param filePath destination file path (overwritten if it already exists)
     * @param stream   source of the bytes to write
     * @return true on success, false when the parent directory cannot be created
     * @throws IOException if reading or writing fails
     */
    public static boolean writeFile(String filePath, InputStream stream) throws IOException {
        File file = new File(filePath);
        File parent = file.getParentFile();
        // a null parent means a bare file name in the working directory — nothing to create
        if (parent != null && !parent.exists() && !parent.mkdirs()) {
            return false;
        }
        // try-with-resources: the previous version leaked the FileOutputStream
        // whenever read()/write() threw before close() was reached
        try (FileOutputStream fileOutputStream = new FileOutputStream(file)) {
            byte[] bbuf = new byte[1024];
            int hasRead;
            while ((hasRead = stream.read(bbuf)) != -1) {
                fileOutputStream.write(bbuf, 0, hasRead);
            }
        }
        return true;
    }
}

+ 108 - 0
hos-rest/src/main/java/com/yihu/hos/rest/services/crawler/DataCollectDispatcher.java

@ -27,6 +27,7 @@ import net.sf.json.JSONArray;
import net.sf.json.JSONObject;
import org.apache.http.NameValuePair;
import org.apache.http.message.BasicNameValuePair;
import org.bson.types.ObjectId;
import org.springframework.util.StringUtils;
import java.io.IOException;
@ -39,6 +40,9 @@ public class DataCollectDispatcher {
    private static DataCollectDispatcher ourInstance = new DataCollectDispatcher();
    private static Logger logger = LoggerFactory.getLogger(DataCollectDispatcher.class);
    private String token;
    private static String unstructured_main = "unstructured_main";
    private static String unstructured_thin = "unstructured_thin";
    private static String gridFsDB = "files";
    private DataCollectDispatcher() {
    }
@ -316,6 +320,9 @@ public class DataCollectDispatcher {
    public Boolean upload(Map<String, JsonNode> dataMap, Patient patient, Map<String, AdapterDataSet> dataSetMap) {
        Boolean result = true;
        Map<String, JsonNode> resultMap = new HashMap<>();
        Map<String, JsonNode> mainMap = new HashMap<>();
        Map<String, JsonNode> thinMap = new HashMap<>();
        try {
            DataSetTransformer dataTransformer = new DataSetTransformer();
            for (String key : dataMap.keySet()) {
@ -410,4 +417,105 @@ public class DataCollectDispatcher {
            return "";
        }
    }
    /* **********************************   add in 2016/11/15 ********************************************* */
    //TOADD data collection (including blob)
    /**
     * Fetches one adapted dataset for a patient through the resource gateway and
     * wraps the matched rows in an upload-ready JSON node (code, patient/event ids,
     * org code, inner version, timestamps, re-upload flag).
     *
     * NOTE(review): name keeps the existing "fecthData" typo for caller
     * compatibility; same for the companion EsbHttp.getFecthData.
     *
     * @param patient        patient whose identifiers drive the query conditions
     * @param adapterDataSet adapted dataset definition to fetch
     * @return the packaged data node, or null when no rows matched, neither
     *         patient-id nor event-no was available, or an error occurred
     */
    public JsonNode fecthData2(Patient patient, AdapterDataSet adapterDataSet) {
        try {
            ObjectMapper mapper = new ObjectMapper();
            List<EhrCondition> queryParams = new ArrayList<>();
            boolean patientId = true;
            if (adapterDataSet.isHavePatientID()) {
                AdapterMetaData adapterMetaData = adapterDataSet.getAdapterMetaDataMap().get(PatientIdentity.getPatientIDCode());
                queryParams.add(new EhrCondition(" = ", adapterMetaData.getAdapterMetadataModel().getStdMetadataCode(), patient.getPatientId()));
            } else {
                patientId = false;
            }
            boolean eventNo = true;
            if (adapterDataSet.isHaveEventNo()) {
                AdapterMetaData adapterMetaData = adapterDataSet.getAdapterMetaDataMap().get(adapterDataSet.getEventNoCode());
                queryParams.add(new EhrCondition(" = ", adapterMetaData.getAdapterMetadataModel().getStdMetadataCode(), patient.getEventNo()));
            } else {
                eventNo = false;
            }
            // at least one patient identifier is required to scope the query
            if (!patientId && !eventNo) {
                logger.error("采集病人数据集至少需要一项病人标识.数据集名:" + adapterDataSet.getAdapterDataSetT().getStdDatasetCode());
                return null;
            }
            ObjectNode paramsNode = mapper.createObjectNode();
            paramsNode.put("tableCode", adapterDataSet.getAdapterDataSetT().getStdDatasetCode());
            paramsNode.put("condition", mapper.writeValueAsString(queryParams));
            Map<String,String> formParams = new HashMap<>();
//            formParams.add(new BasicNameValuePair("secret", secret));
            formParams.put("api", "collectionData");
            formParams.put("param", mapper.writeValueAsString(paramsNode));
            // call the resource-service gateway
            String rootStr = EsbHttp.getFecthData(formParams);
            JsonNode resultNode = mapper.readValue(rootStr, JsonNode.class);
            JsonNode result = resultNode.path("detailModelList");
            JsonNode data = matchAdapterData(result, adapterDataSet);
            ObjectNode jsonObject = mapper.createObjectNode();
            if (data != null && data.size() > 0) {
                jsonObject.set("data", data);
                jsonObject.put("code", adapterDataSet.getAdapterDataSetT().getStdDatasetCode());
                jsonObject.put("patient_id", patient.getPatientId());
                jsonObject.put("event_no", patient.getEventNo());
                String agencyCode = patient.getOrgCode();
                jsonObject.put("org_code", agencyCode);
                jsonObject.put("inner_version", SysConfig.getInstance().getVersionMap().get(patient.getOrgCode()));
                jsonObject.put("create_date", DateUtil.toString(new Date(), DateUtil.DEFAULT_YMDHMSDATE_FORMAT));
                jsonObject.put("event_time", patient.getReferenceTime());//DateUtil.toString(patient.getReferenceTime(), DateUtil.DEFAULT_YMDHMSDATE_FORMAT)
                if (StringUtil.isEmpty(patient.getReUploadFlg())) {
                    jsonObject.put("reUploadFlg", LogicValues.LOGIC_FALSE);
                } else {
                    jsonObject.put("reUploadFlg", patient.getReUploadFlg());
                }
                return jsonObject;
            } else {
                return null;
            }
        } catch (Exception e) {
            logger.error("", e);
        }
        return null;
    }
    /**
     * Writes the transformer's JSON payload to the patient's unstructured-archive
     * index file (document.json) under the patient CDA index directory.
     *
     * NOTE(review): method name keeps the existing "Doucment" typo for caller
     * compatibility.
     *
     * @param dataTransformer provides the JSON payload to persist
     * @param patient         owner of the CDA index directory
     * @param fileName        index file name (serialized as JSON)
     * @return true when the file was written, false on I/O failure
     */
    public boolean toDoucmentFile(DataSetTransformer dataTransformer, Patient patient, String fileName) {
        JsonNode jsonObject = dataTransformer.getJsonObject();
        PatientCDAIndex patientCDAIndex = new PatientCDAIndex(patient);
        String filePath = patientCDAIndex.createDataIndex(fileName, PatientCDAIndex.FileType.JSON);
        boolean writeFile = false;
        try {
            writeFile = FileUtil.writeFile(filePath, jsonObject.toString(), "UTF-8");
        } catch (IOException e) {
            logger.info("存储临时文件失败.");
            logger.error("", e);
        }
        return writeFile;
    }
    /**
     * Extracts the CONTENT field of every element of a JSON array as a GridFS
     * {@link ObjectId}. Elements without a CONTENT value are skipped.
     *
     * @param jsonNode array node whose elements carry a textual CONTENT field
     * @return the collected ObjectIds (empty when input is null or not an array)
     */
    public List<ObjectId> getObjectIds(JsonNode jsonNode) {
        List<ObjectId> objectIds = new ArrayList<>();
        if (jsonNode != null && jsonNode.isArray()) {
            for (JsonNode data : jsonNode) {
                JsonNode content = data.get("CONTENT");
                if (content != null && !content.isNull()) {
                    // asText() yields the raw hex string; toString() on a text node
                    // includes the JSON quotes and makes new ObjectId(...) throw
                    objectIds.add(new ObjectId(content.asText()));
                }
            }
        }
        return objectIds;
    }
}//end DataCollectDispatcher

+ 80 - 0
hos-rest/src/main/java/com/yihu/hos/rest/services/crawler/EsbHttp.java

@ -1,19 +1,27 @@
package com.yihu.hos.rest.services.crawler;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.yihu.hos.core.datatype.StringUtil;
import com.yihu.hos.core.encrypt.MD5;
import com.yihu.hos.core.http.HTTPResponse;
import com.yihu.hos.core.http.HttpClientKit;
import com.yihu.hos.core.log.Logger;
import com.yihu.hos.core.log.LoggerFactory;
import com.yihu.hos.rest.common.http.EHRHttpHelper;
import com.yihu.hos.rest.models.crawler.adapter.AdapterDataSet;
import com.yihu.hos.rest.models.crawler.config.SysConfig;
import com.yihu.hos.rest.models.crawler.patient.Patient;
import com.yihu.hos.rest.models.crawler.transform.EhrCondition;
import com.yihu.hos.web.framework.constrant.SqlConstants;
import org.apache.http.HttpStatus;
import org.json.JSONObject;
import sun.misc.BASE64Encoder;
import java.io.File;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
@ -23,6 +31,7 @@ import java.util.UUID;
public class EsbHttp {
    private static Logger logger = LoggerFactory.getLogger(EsbHttp.class);
    public static String httpGateway;
    /***************************** 用户接口 *********************************************/
    /**
@ -284,4 +293,75 @@ public class EsbHttp {
        }
    }
    /**
     * Fetches the patient list for one adapted dataset from the integration
     * gateway and returns the raw responseResult payload, or "" on any failure
     * (HTTP error, bad gateway response code, or empty payload).
     */
    public static String getPatientList(AdapterDataSet adapterDataSet, List<EhrCondition> queryParams) {
        try {
            ObjectMapper jsonMapper = new ObjectMapper();
            // describe the gateway query: {tableCode, condition}
            ObjectNode query = jsonMapper.createObjectNode();
            query.put("tableCode", adapterDataSet.getAdapterDataSetT().getStdDatasetCode());
            query.put("condition", jsonMapper.writeValueAsString(queryParams));
            Map<String, String> form = new HashMap<>();
            form.put("api", "collectionData");
            form.put("param", jsonMapper.writeValueAsString(query));
            HTTPResponse gatewayResponse = HttpClientKit.post(httpGateway, form);
            boolean httpOk = gatewayResponse != null && gatewayResponse.getStatusCode() == 200;
            if (!httpOk) {
                logger.error("获取病人列表错误,请求HTTP错误,请检查配置或HTTP是否可用.");
                return "";
            }
            JsonNode root = jsonMapper.readValue(gatewayResponse.getBody(), JsonNode.class);
            String responseCode = root.path("responseCode").asText();
            if (StringUtil.isEmpty(responseCode) || !responseCode.equals("10000")) {
                logger.error("获取病人列表错误,请求HTTP错误,请检查集成平台网关是否可用.");
                return "";
            }
            String payload = root.path("responseResult").asText();
            if ("".equals(payload)) {
                logger.error("获取病人列表错误,集成平台获取病人列表失败.");
                return "";
            }
            return payload;
        } catch (Exception e) {
            logger.error("获取病人列表失败!", e);
            return "";
        }
    }
    /**
     * Posts the given form parameters to the integration-platform gateway and
     * unwraps the {@code responseResult} payload.
     *
     * NOTE(review): name keeps the existing "Fecth" typo for caller compatibility.
     *
     * @param formParams gateway form fields (api name, serialized params, ...)
     * @return the raw responseResult string, or {@code SqlConstants.EMPTY} when
     *         the HTTP call fails, the gateway code is not 10000, or the payload is empty
     */
    public static String getFecthData(Map<String, String> formParams) {
        try {
            HTTPResponse response = HttpClientKit.post(httpGateway, formParams);
            if (response == null || response.getStatusCode() != 200) {
                // was logger.info — these are failures; log at error level like getPatientList
                logger.error("获取病人数据错误,请求HTTP错误,请检查配置或HTTP是否可用.");
                return SqlConstants.EMPTY;
            }
            ObjectMapper mapper = new ObjectMapper();
            JsonNode responseNode = mapper.readValue(response.getBody(), JsonNode.class);
            String code = responseNode.path("responseCode").asText();
            if (StringUtil.isEmpty(code) || !code.equals("10000")) {
                logger.error("获取病人数据错误,请求HTTP错误,请检查集成平台网关是否可用.");
                return SqlConstants.EMPTY;
            }
            String rootStr = responseNode.path("responseResult").asText();
            if (SqlConstants.EMPTY.equals(rootStr)) {
                logger.error("获取病人数据错误,集成平台获取病人数据失败.");
                return SqlConstants.EMPTY;
            }
            return rootStr;
        } catch (Exception e) {
            logger.error("获取病人数据失败.", e);
            return SqlConstants.EMPTY;
        }
    }
}

+ 4 - 0
hos-rest/src/main/java/com/yihu/hos/rest/services/crawler/PatientCDAIndex.java

@ -66,6 +66,10 @@ public class PatientCDAIndex {
        return createIndex(IndexType.DATA + "/" + indexType, fileType);
    }
    /**
     * Returns the directory holding the unstructured "document" index files,
     * built as {@code getDirectory()/IndexType.DATA/IndexType.DOCUMENT}.
     * NOTE(review): exact path values depend on the IndexType constants — not
     * visible here; confirm against IndexType before relying on the layout.
     */
    public String getDocumentDirectory() {
        return getDirectory() + "/" + IndexType.DATA + "/" + IndexType.DOCUMENT;
    }
    public class FileType {
        public final static String XML = ".xml";
        public final static String JSON = ".json";

+ 12 - 0
hos-web-framework/pom.xml

@ -14,6 +14,18 @@
        <relativePath>../hos-web-framework-dependencies</relativePath>
    </parent>
    <dependencies>
        <dependency>
            <groupId>eu.medsea.mimeutil</groupId>
            <artifactId>mime-util</artifactId>
            <version>2.1.3</version>
            <exclusions>
                <exclusion>
                    <artifactId>slf4j-log4j12</artifactId>
                    <groupId>org.slf4j</groupId>
                </exclusion>
            </exclusions>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-data-jpa</artifactId>

+ 453 - 0
hos-web-framework/src/main/java/com/yihu/hos/web/framework/util/GridFSUtil.java

@ -0,0 +1,453 @@
package com.yihu.hos.web.framework.util;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.mongodb.BasicDBObject;
import com.mongodb.client.FindIterable;
import com.mongodb.client.MongoCursor;
import com.mongodb.client.MongoDatabase;
import com.mongodb.client.gridfs.GridFSBucket;
import com.mongodb.client.gridfs.GridFSBuckets;
import com.mongodb.client.gridfs.GridFSFindIterable;
import com.mongodb.client.gridfs.GridFSUploadStream;
import com.mongodb.client.gridfs.model.GridFSFile;
import com.mongodb.client.gridfs.model.GridFSUploadOptions;
import com.mongodb.client.model.Filters;
import com.yihu.ehr.dbhelper.mongodb.MongodbFactory;
import com.yihu.ehr.dbhelper.mongodb.MongodbHelper;
import com.yihu.hos.core.file.FileUtil;
import eu.medsea.mimeutil.MimeUtil;
import org.bson.Document;
import org.bson.conversions.Bson;
import org.bson.types.ObjectId;
import org.json.JSONObject;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.sql.Blob;
import java.util.*;
/**
 * MongoDB GridFS helper: upload, download, query and delete files stored in a
 * GridFS bucket of a given database.
 *
 * <p>All methods are static and open their own {@code MongoDatabase} via
 * {@link MongodbFactory}. Metadata passed in the {@code params} maps is stored
 * in the GridFS file's {@code metadata} document and can later be matched with
 * {@link #findFiles(String, Map)}.
 *
 * @author HZY
 * @version 1.0
 * Created at 2016/7/21.
 */
public class GridFSUtil {

    /** Chunk size (4 MB) used for every GridFS upload. */
    public static final int defaultChunkSize = 1024 * 1024 * 4;

    /**
     * Uploads the content of a SQL {@link Blob} into GridFS under a random
     * UUID-based file name.
     *
     * @param dbName   database holding the GridFS bucket
     * @param blob     source data; its binary stream is fully read into memory
     * @param fileType file extension without the leading dot, e.g. "jpg"
     *                 — TODO confirm callers never pass a leading dot
     * @param params   optional custom metadata fields; may be null
     * @return the ObjectId of the stored file, or null on failure
     */
    public static ObjectId uploadFile(String dbName, Blob blob, String fileType, Map<String, Object> params) {
        MongoDatabase db = MongodbFactory.getDB(dbName);
        GridFSBucket gridFS = GridFSBuckets.create(db);
        String fileName = UUID.randomUUID().toString() + "." + fileType;
        GridFSUploadStream uploadStream = gridFS.openUploadStream(fileName, buildUploadOptions(params));
        try {
            byte[] data = FileUtil.toByteArray(blob.getBinaryStream());
            uploadStream.write(data);
            return uploadStream.getFileId();
        } catch (Exception e) {
            e.printStackTrace();
            return null;
        } finally {
            // close() finalizes the upload (or aborts it after an exception)
            uploadStream.close();
        }
    }

    /**
     * Uploads a file from the local filesystem into GridFS, keeping its
     * original file name.
     *
     * @param dbName   database holding the GridFS bucket
     * @param filePath path of the file to upload
     * @param params   optional custom metadata fields; may be null
     * @return true when the upload produced a file id, false on failure
     */
    public static boolean uploadFile(String dbName, String filePath, Map<String, Object> params) {
        MongoDatabase db = MongodbFactory.getDB(dbName);
        GridFSBucket gridFS = GridFSBuckets.create(db);
        File readFile = new File(filePath);
        GridFSUploadStream uploadStream = gridFS.openUploadStream(readFile.getName(), buildUploadOptions(params));
        try {
            byte[] data = FileUtil.toByteArray(filePath);
            uploadStream.write(data);
            return uploadStream.getFileId() != null;
        } catch (IOException e) {
            e.printStackTrace();
            return false;
        } finally {
            uploadStream.close();
        }
    }

    /** Builds upload options with the default chunk size and optional custom metadata. */
    private static GridFSUploadOptions buildUploadOptions(Map<String, Object> params) {
        Document metaDocument = new Document();
        if (params != null) {
            for (Map.Entry<String, Object> entry : params.entrySet()) {
                metaDocument.append(entry.getKey(), entry.getValue());
            }
        }
        return new GridFSUploadOptions()
                .chunkSizeBytes(defaultChunkSize)
                .metadata(metaDocument);
    }

    /**
     * Downloads one GridFS file to the given path.
     *
     * @param dbName   database holding the GridFS bucket
     * @param savePath destination file path
     * @param objId    ObjectId of the GridFS file
     * @return savePath on success, null on failure
     */
    public static String downFile(String dbName, String savePath, ObjectId objId) {
        MongoDatabase db = MongodbFactory.getDB(dbName);
        GridFSBucket gridFS = GridFSBuckets.create(db);
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        try {
            gridFS.downloadToStream(objId, out);
            // NOTE(review): FileUtil.writeFile takes an encoding although the
            // payload may be binary — confirm it writes raw bytes, not text.
            boolean succ = FileUtil.writeFile(savePath, out.toByteArray(), "utf-8");
            return succ ? savePath : null;
        } catch (IOException e) {
            e.printStackTrace();
            return null;
        } finally {
            try {
                out.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    /**
     * Batch-downloads the given fs.files entries into {@code savePath}, each
     * under a fresh UUID name.
     *
     * @param dbName   database holding the GridFS bucket
     * @param savePath directory the files are written into
     * @param fsFiles  GridFS file descriptors to download
     * @return map of detected MIME type -> comma-joined generated file names,
     *         or null when the input is empty or an I/O error occurred
     */
    public static Map<String, StringBuffer> downFileList(String dbName, String savePath, List<GridFSFile> fsFiles) {
        if (fsFiles == null || fsFiles.isEmpty()) {
            return null;
        }
        MongoDatabase db = MongodbFactory.getDB(dbName);
        GridFSBucket gridFS = GridFSBuckets.create(db);
        Map<String, String> fileNames = saveFilesToDisk(gridFS, savePath, fsFiles);
        return fileNames == null ? null : groupDataMap(fileNames);
    }

    /**
     * Batch-downloads the GridFS files identified by {@code ids} into
     * {@code savePath}; see {@link #downFileList(String, String, List)}.
     */
    public static Map<String, StringBuffer> downFilesByObjectIds(String dbName, String savePath, List<ObjectId> ids) {
        List<GridFSFile> fsFiles = findFsFiles(dbName, ids);
        return downFileList(dbName, savePath, fsFiles);
    }

    /**
     * Downloads each file and writes it under savePath with a fresh UUID name.
     *
     * <p>Fixes two defects of the previous version: the output buffer is now
     * created per file (it used to be shared, so every later file also
     * contained the bytes of all earlier ones and was written after close),
     * and the generated name no longer contains a double dot ("uuid..jpg").
     *
     * @return map of generated file name -> detected MIME type; a write
     *         failure stops the loop (partial map, as before), an I/O
     *         exception returns null
     */
    private static Map<String, String> saveFilesToDisk(GridFSBucket gridFS, String savePath, List<GridFSFile> fsFiles) {
        Map<String, String> fileNames = new HashMap<>();
        for (GridFSFile fsFile : fsFiles) {
            ObjectId objId = fsFile.getObjectId();
            // keep only the extension, without the dot
            String fileType = fsFile.getFilename().substring(fsFile.getFilename().lastIndexOf(".") + 1);
            String newName = UUID.randomUUID().toString() + "." + fileType;
            ByteArrayOutputStream out = new ByteArrayOutputStream();
            gridFS.downloadToStream(objId, out);
            try {
                boolean succ = FileUtil.writeFile(savePath + "/" + newName, out.toByteArray(), "utf-8");
                if (!succ) {
                    break; // stop on the first write failure, as before
                }
                fileNames.put(newName, getMimeType(out.toByteArray()));
            } catch (IOException e) {
                e.printStackTrace();
                return null;
            } finally {
                try {
                    out.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
        return fileNames;
    }

    /**
     * Deletes one GridFS file (both its fs.files entry and its chunks).
     *
     * @param dbName database holding the GridFS bucket
     * @param objId  ObjectId of the file to delete
     */
    public static void deleteFile(String dbName, ObjectId objId) {
        MongoDatabase db = MongodbFactory.getDB(dbName);
        GridFSBucket gridFS = GridFSBuckets.create(db);
        gridFS.delete(objId);
    }

    /**
     * Queries fs.files entries whose fields equal all given filter values.
     *
     * @param dbName  database holding the GridFS bucket
     * @param filters field -> expected value; null or empty matches everything
     * @return matching fs.files descriptors
     */
    public static List<GridFSFile> findFiles(String dbName, Map<String, Object> filters) {
        MongoDatabase db = MongodbFactory.getDB(dbName);
        GridFSBucket gridFSBucket = GridFSBuckets.create(db);
        List<Bson> querys = new ArrayList<>();
        if (filters != null) {
            for (Map.Entry<String, Object> entry : filters.entrySet()) {
                querys.add(Filters.eq(entry.getKey(), entry.getValue()));
            }
        }
        // Filters.and() rejects an empty list, so fall back to an unfiltered find
        GridFSFindIterable gsIterable = querys.isEmpty()
                ? gridFSBucket.find()
                : gridFSBucket.find(Filters.and(querys));
        return toList(gsIterable);
    }

    /**
     * Looks up fs.files entries by their ObjectIds.
     *
     * @param dbName database holding the GridFS bucket
     * @param ids    ids to look up; null or empty returns ALL files (kept from
     *               the original behavior)
     * @return matching fs.files descriptors
     */
    public static List<GridFSFile> findFsFiles(String dbName, List<ObjectId> ids) {
        MongoDatabase db = MongodbFactory.getDB(dbName);
        GridFSBucket gridFSBucket = GridFSBuckets.create(db);
        GridFSFindIterable gsIterable = (ids == null || ids.isEmpty())
                ? gridFSBucket.find()
                : gridFSBucket.find(Filters.in("_id", ids));
        return toList(gsIterable);
    }

    /** Drains a GridFS find result into a list, closing the cursor. */
    private static List<GridFSFile> toList(GridFSFindIterable gsIterable) {
        List<GridFSFile> list = new ArrayList<>();
        MongoCursor<GridFSFile> it = gsIterable.iterator();
        try {
            while (it.hasNext()) {
                list.add(it.next());
            }
        } finally {
            it.close();
        }
        return list;
    }

    /**
     * Converts a JSONObject into a HashMap. Blob values are kept as-is, every
     * other value is stored as its string form.
     *
     * <p>Fix: the previous version cast {@code names()} (a JSONArray) to
     * Iterator, which throws ClassCastException; {@code keys()} is the
     * iterator API.
     */
    public static Map parseJsonToMap(JSONObject condition) {
        HashMap map = new HashMap();
        try {
            Iterator fileNames = condition.keys();
            while (fileNames.hasNext()) {
                String fieldName = (String) fileNames.next();
                Object valueNode = condition.get(fieldName);
                if (valueNode instanceof Blob) {
                    map.put(fieldName, valueNode);
                } else {
                    map.put(fieldName, valueNode.toString());
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        return map;
    }

    //TODO for manual testing only — remove before release
    public static void tTestFind(String tableName, String collection, Map<String, Object> params) {
        MongodbHelper mongodbHelper = new MongodbHelper(tableName);
        BasicDBObject basicDBObject = new BasicDBObject();
        for (Map.Entry<String, Object> entry : params.entrySet()) {
            basicDBObject.append(entry.getKey(), entry.getValue());
        }
        FindIterable<Document> documents = mongodbHelper.query(collection, basicDBObject, null, null);
        MongoCursor<Document> it = documents.iterator();
        while (it.hasNext()) {
            Document document = it.next();
            List<Document> fileList = (List<Document>) document.get("files");
            Document jsonNode = fileList.get(0);
            List files = (List) jsonNode.get("files_id");
            List<GridFSFile> fs = findFsFiles("mydb", files);
            System.out.println(files.toString());
            String filePath = "e:/test/";
            Map<String, StringBuffer> path = downFileList("mydb", filePath, fs);
        }
    }

    /** Manual smoke test; exercises the query/download path against a local DB. */
    public static void main(String[] args) {
        try {
            Map<String, Object> params = new HashMap<>();
            params.put("patient_id", "1001");
            params.put("event_no", "1001");
            tTestFind("document", "CDA_TEST", params);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /*************************************** MIME type helpers *********************************/

    /**
     * Detects the MIME type(s) of a file by magic-number sniffing.
     *
     * @param file file to inspect
     * @return string form of the detected MIME type collection, e.g. "[image/jpeg]"
     */
    public static String getMimeType(File file) {
        MimeUtil.registerMimeDetector("eu.medsea.mimeutil.detector.MagicMimeMimeDetector");
        Collection<?> collection = MimeUtil.getMimeTypes(file);
        return collection.toString();
    }

    /**
     * Detects the MIME type(s) of in-memory content by magic-number sniffing.
     *
     * @param bytes content to inspect
     * @return string form of the detected MIME type collection
     */
    public static String getMimeType(byte[] bytes) {
        MimeUtil.registerMimeDetector("eu.medsea.mimeutil.detector.MagicMimeMimeDetector");
        Collection<?> collection = MimeUtil.getMimeTypes(bytes);
        return collection.toString();
    }

    /**
     * Groups file names by their type for the unstructured-archive format:
     * inverts a (file name -> type) map into (type -> comma-joined file names).
     *
     * @param map file name -> type
     * @return type -> comma-joined file names
     */
    public static Map<String, StringBuffer> groupDataMap(Map<String, String> map) {
        Map<String, StringBuffer> result = new HashMap<String, StringBuffer>();
        for (Map.Entry<String, String> entry : map.entrySet()) {
            String key = entry.getKey();
            String value = entry.getValue();
            if (result.containsKey(value)) {
                result.get(value).append(",").append(key);
            } else {
                result.put(value, new StringBuffer(key));
            }
        }
        return result;
    }
}

File diff suppressed because it is too large
+ 205 - 0
src/main/java/com/yihu/hos/common/CollectHelper.java


+ 273 - 0
src/main/java/com/yihu/hos/datacollect/service/DatacollectService.java

@ -24,6 +24,8 @@ import com.yihu.hos.resource.service.IStdService;
import com.yihu.hos.web.framework.constrant.DateConvert;
import com.yihu.hos.web.framework.constrant.SqlConstants;
import com.yihu.hos.web.framework.model.ActionResult;
import com.yihu.hos.web.framework.util.GridFSUtil;
import org.bson.types.ObjectId;
import org.dom4j.Document;
import org.dom4j.Element;
import org.dom4j.io.SAXReader;
@ -35,6 +37,7 @@ import org.springframework.transaction.annotation.Transactional;
import javax.annotation.Resource;
import java.io.ByteArrayInputStream;
import java.sql.Blob;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.*;
@ -933,4 +936,274 @@ public class DatacollectService implements IDatacollectService {
        logger.info(message);
        return message;
    }
    /**
     * Stores one collected page into MongoDB, handling Blob columns.
     *
     * <p>For the "unstructured" dataset each row's CONTENT Blob is uploaded to
     * GridFS first and the CONTENT field is replaced by the resulting
     * ObjectId. The page is then inserted into the origin store (via
     * translateDictCN) and, after dictionary translation, into the standard
     * store.
     *
     * @param list           collected rows of this page
     * @param schemeVersion  adapter scheme version used for dictionary translation
     * @param stdDatasetCode standard dataset (collection) name
     * @param colList        column mapping used by the dictionary translation
     * @return empty string on success, otherwise an error message
     */
    private String intoMongodb2(List<JSONObject> list,String schemeVersion,String stdDatasetCode,JSONArray colList)
    {
        String patientIdCode = SqlConstants.PATIENT_ID.toUpperCase();
        String eventNoCode = SqlConstants.EVENT_NO.toUpperCase();
        // dataset-specific patient identity columns override the defaults
        PatientIdentity patientIdentity = SysConfig.getInstance().getPatientIdentity(stdDatasetCode);
        if (patientIdentity != null) {
            patientIdCode = patientIdentity.getPatientIDCode();
            eventNoCode = patientIdentity.getEventNoCode();
        }
        try{
            if(!mongo.createIndex(stdDatasetCode, "patientIndex", patientIdCode, eventNoCode)) {
                return "Mongodb索引创建失败!(表:"+stdDatasetCode+")";
            }
            if(list!=null && list.size()>0)
            {
                //TODO TOSET decide "is unstructured dataset" by configuration instead of this hard-coded name
                if ("unstructured".equals(stdDatasetCode)){
                    for (JSONObject jsonObject:list) {
                        // upload the file content to GridFS; the detail-table column keeps only the file ObjectId
                        // NOTE(review): CONTENT/FILE_TYPE are read with upper-case keys while
                        // patient_id/event_no are lower-case — confirm against the source row shape.
                        Blob blob = (Blob) jsonObject.get("CONTENT");
                        String type = (String) jsonObject.get("FILE_TYPE");
                        String patientId=  (String) jsonObject.get("patient_id");
                        String eventNo=  (String) jsonObject.get("event_no");
                        Map<String,Object> params = new HashMap<>();
                        params.put("patient_id",patientId);
                        params.put("event_no",eventNo);
                        try {
                            ObjectId objectId = GridFSUtil.uploadFile("files", blob, type, params);
                            jsonObject.put("CONTENT", objectId);
                        } catch (Exception e) {
                            // NOTE(review): a failed upload is only printed; the row is still
                            // inserted below with its original Blob left in CONTENT.
                            e.printStackTrace();
                        }
                    }
                }
                // insert into the origin store before dictionary translation
                boolean b = mongoOrigin.insert(stdDatasetCode,translateDictCN(list, colList,schemeVersion));
                // dictionary translation
                list = translateDict(list, colList,schemeVersion);
                // insert the translated page into the standard store
                b = mongo.insert(stdDatasetCode,list);
                if(!b)
                {
                    if(mongo.errorMessage!=null && mongo.errorMessage.length()>0)
                    {
                        System.out.print(mongo.errorMessage);
                        return mongo.errorMessage;
                    }
                    else {
                        return "Mongodb保存失败!(表:"+stdDatasetCode+")";
                    }
                }
            }
        }
        catch (Exception e)
        {
            // NOTE(review): e.getMessage() may be null, which callers treat as success-like text
            return e.getMessage();
        }
        return "";
    }
    /**
     * Database collection for datasets that contain Blob columns.
     *
     * <p>Builds an adapted SELECT from the scheme's column mapping, applies
     * the configured range and incremental-key filters, pages through the
     * result set and stores each page via intoMongodb2 (which uploads Blob
     * content to GridFS). A per-page RsJobLogDetail row is written for
     * auditing.
     *
     * @param ds            job dataset definition (source, mapping, incremental key)
     * @param schemeVersion adapter scheme version used to resolve the mapping
     * @param logId         parent job-log id the per-page detail rows attach to
     * @return human-readable summary message of the collection run
     * @throws Exception when the mapping is missing or the count query fails
     */
    private String collectBlobTable(DtoJobDataset ds,String schemeVersion,String logId) throws Exception
    {
        String message = "";
        String datasetId = ds.getJobDatasetId();
        String jobDatasetName = ds.getJobDatasetName();
        String condition=ds.getJobDatasetCondition();
        String key=ds.getJobDatasetKey();
        String keytype=ds.getJobDatasetKeytype();
        String keyvalue=ds.getJobDatasetKeyvalue();
        String orgCode = ds.getOrgCode();
        String datasourceId = ds.getDatasourceId();
        String config = ds.getConfig(); // source database connection config
        DBHelper db = new DBHelper(datasourceId,config);
        DBType dbType = db.dbType;
        // fetch the dataset mapping for this scheme version
        List datasetString = stdService.getDatasetByScheme(schemeVersion, datasetId);
        JSONArray datasetList = new JSONArray(datasetString);
        if(datasetList!=null &&datasetList.length()>0)
        {
            String stdTableName = datasetList.getJSONObject(0).optString("stdDatasetCode");
            String adapterTableName = datasetList.getJSONObject(0).optString("adapterDatasetCode");
            // fetch the column-level mapping of the dataset
            List colString = stdService.getDatacolByScheme(schemeVersion,datasetId);
            JSONArray colList = new JSONArray(colString);
            if(colList!=null && colList.length()>0)
            {
                // build the SELECT list: adapter column AS standard column
                // NOTE(review): the SQL below is assembled by string concatenation;
                // condition/keyvalue come from job configuration rather than end
                // users, but parameterizing would still be safer.
                String strSql = "Select '" + orgCode +"' as RSCOM_ORG_CODE";
                for(int i=0; i< colList.length();i++)
                {
                    JSONObject col = colList.getJSONObject(i);
                    String adapterMetadataCode = col.optString("adapterMetadataCode");
                    if(adapterMetadataCode.length()>0)
                    {
                        strSql+= ","+adapterMetadataCode +" as " + col.optString("stdMetadataCode") ;
                    }
                }
                strSql += " from " +adapterTableName;
                String strWhere = " where 1=1";
                // collection range condition
                if(condition!=null && condition.length()>0)
                {
                    strWhere += getConditionSql(dbType,condition);
                }
                // incremental collection by key column
                String maxKey = "0";
                if(key!=null && key.length()>0)
                {
                    maxKey = key;
                    if(keytype.toUpperCase().equals("DATE")) // date-typed key
                    {
                        if(keyvalue!=null && keyvalue.length()>0) {
                            Date keyDate = new Date();
                            // parse the stored key value back into a Date
                            keyDate = DateConvert.toDate(keyvalue);
                            // render the date literal for the source DB dialect
                            strWhere += " and "+ maxKey + ">'"+getDateSqlByDBType(dbType,keyDate)+"'";
                        }
                    }
                    else if(keytype.toUpperCase().equals("VARCHAR")) // string-typed key, compared numerically
                    {
                        maxKey = getToNumberSqlByDBType(dbType,key);
                        if(keyvalue!=null && keyvalue.length()>0) {
                            strWhere += " and "+ maxKey + ">'" + keyvalue + "'";
                        }
                    }
                    else{
                        if(keyvalue!=null && keyvalue.length()>0) {
                            strWhere += " and "+ maxKey + ">'" + keyvalue + "'";
                        }
                    }
                    strWhere += " order by " + maxKey;
                }
                strSql += strWhere;
                // total row count and max key value of this run
                String sqlCount = "select count(1) as COUNT from (" + strSql+")";
                String sqlMax = "select max(" + maxKey + ") as MAX_KEYVALUE from " + adapterTableName + strWhere;
                JSONObject objCount = db.load(sqlCount);
                if(objCount==null)
                {
                    if(db.errorMessage.length()>0)
                    {
                        throw new Exception(db.errorMessage);
                    }
                    else{
                        throw new Exception("查询异常:"+sqlCount);
                    }
                }
                else{
                    int count = objCount.getInt("COUNT");
                    if(count==0) // nothing to collect
                    {
                        message = "0条记录,无需采集。";
                    }
                    else
                    {
                        // fetch the max key value and persist it so the next run starts after it
                        JSONObject objMax = db.load(sqlMax);
                        int successCount = 0;
                        String maxKeyvalue = objMax.optString("MAX_KEYVALUE");
                        if(maxKeyvalue!=null&& maxKeyvalue.length()>0)
                        {
                            datacollectLogDao.updateJobDatasetKeyvalue(ds.getId(),maxKeyvalue);
                            logger.info("修改任务数据集最大值为"+maxKeyvalue+"。"); // text log
                        }
                        int countPage = 1;
                        if(count > maxNum) // paged collection
                        {
                            // NOTE(review): when count is an exact multiple of maxNum this
                            // yields one extra page with rows == 0 — harmless but wasteful.
                            countPage = count/maxNum+1;
                        }
                        for(int i=0;i<countPage;i++)
                        {
                            int rows = maxNum;
                            if(i+1==countPage){
                                rows = count-i*maxNum;
                            }
                            String sql = getPageSqlByDBType(dbType,strSql,i*maxNum,rows); // dialect-specific paging SQL
                            // audit row for this page
                            RsJobLogDetail detail = new RsJobLogDetail();
                            detail.setStartTime(new Date());
                            detail.setJobLogId(logId);
                            detail.setDatasourceId(datasourceId);
                            detail.setConfig(config);
                            detail.setStdDatasetCode(stdTableName);
                            detail.setJobDatasetId(datasetId);
                            detail.setJobDatasetName(ds.getJobDatasetName());
                            detail.setJobId(ds.getJobId());
                            detail.setJobSql(sql);
                            detail.setJobNum(i+1);
                            detail.setJobDatasetRows(rows);
                            detail.setSchemeVersion(schemeVersion);
                            List<JSONObject> list = db.query(sql);
                            String msg = "";
                            if(list!=null)
                            {
                                msg = intoMongodb2(list,schemeVersion,stdTableName,colList); // empty string means success
                            }
                            else{
                                if(db.errorMessage.length()>0)
                                {
                                    msg = db.errorMessage;
                                }
                                else{
                                    msg = "查询数据为空!";
                                }
                            }
                            if(msg.length()>0)
                            {
                                // record the page as failed with the error message
                                detail.setJobStatus("0");
                                detail.setJobContent(msg);
                            }
                            else{
                                detail.setJobStatus("1");
                                detail.setJobContent("采集成功!");
                                successCount += rows;
                            }
                            detail.setEndTime(new Date());
                            datacollectLogDao.saveEntity(detail);
                        }
                        message = jobDatasetName + "采集成功"+successCount+"条数据,总条数"+count+"条。";
                    }
                }
            }
            else
            {
                throw new Exception(jobDatasetName + "数据集字段映射为空!");
            }
        }
        else{
            throw new Exception(jobDatasetName + "数据集映射为空!");
        }
        return message;
    }
    
}

+ 32 - 18
src/main/java/com/yihu/hos/monitor/service/ServerMonitorService.java

@ -21,7 +21,9 @@ import java.util.*;
@Service("ServerMonitorService")
public class ServerMonitorService {
    public static final String BEAN_ID = "ServerMonitorService";
    public static final String envHealth = "envHealth";
    public static final String MONITOR = "monitor";
    public static final String SERVER = "server";
    public static final String HOST = "host";
    @Autowired
    private MongoOperations mongoOperations;
@ -29,11 +31,12 @@ public class ServerMonitorService {
    @Autowired
    private Mongo mongo;
    public Result getMonitorList(String table, String type, String beginTime, String endTime) {
        mongoOperations = new MongoTemplate(mongo, envHealth);
        DBCollection envCollection = mongoOperations.getCollection(table);
    public Result getMonitorList(String host, String type, String beginTime, String endTime) {
        mongoOperations = new MongoTemplate(mongo, MONITOR);
        DBCollection envCollection = mongoOperations.getCollection(SERVER);
        BasicDBObject queryObject = new BasicDBObject().append(QueryOperators.AND,
                new BasicDBObject[]{
                        new BasicDBObject().append("host",host),
                        new BasicDBObject().append("type",type),
                        new BasicDBObject().append("create_time",
                                new BasicDBObject().append(QueryOperators.GTE, DateUtil.toTimestamp(beginTime))),
@ -54,10 +57,11 @@ public class ServerMonitorService {
    }
    public Result getMonitorDetail(String host, String type, String date) {
        mongoOperations = new MongoTemplate(mongo, envHealth);
        DBCollection envCollection = mongoOperations.getCollection(host);
        mongoOperations = new MongoTemplate(mongo, MONITOR);
        DBCollection envCollection = mongoOperations.getCollection(SERVER);
        BasicDBObject queryObject = new BasicDBObject().append(QueryOperators.AND,
                new BasicDBObject[]{
                        new BasicDBObject().append("host", host),
                        new BasicDBObject().append("type", type),
                        new BasicDBObject().append("create_time",new BasicDBObject().append(QueryOperators.LTE, DateUtil.toTimestamp(date)))});
@ -86,34 +90,44 @@ public class ServerMonitorService {
    }
    public Result getHosts() {
        mongoOperations = new MongoTemplate(mongo, envHealth);
        Set<String> collections = mongoOperations.getCollectionNames();
        List<Map<String,Object>> result = new ArrayList<>();
        Iterator<String> iterator = collections.iterator();
        while (iterator.hasNext()){
            String host = iterator.next();
        mongoOperations = new MongoTemplate(mongo, MONITOR);
        DBCollection envCollection = mongoOperations.getCollection(HOST);
        DBCursor cursor = envCollection.find();
        while(cursor.hasNext()) {
            DBObject dbObject = cursor.next();
            Map<String,Object> hostMap = new HashMap<>();
            hostMap.put("host",host);
            hostMap.put("host",dbObject.get("host"));
            hostMap.put("name",dbObject.get("name"));
            result.add(hostMap);
            hostMap=null;
        }
        ActionResult actionResult = new ActionResult();
        actionResult.setData(result);
        return actionResult;
    }
    /**
     * 获取服务器列表（树形结构）
     * @return
     * @throws Exception
     */
    public Result getServerTreeList() throws Exception {
        List<TreeView> treeList = new ArrayList<>();
        mongoOperations = new MongoTemplate(mongo, envHealth);
        Set<String> collections = mongoOperations.getCollectionNames();
        for (String host : collections) {
        mongoOperations = new MongoTemplate(mongo, MONITOR);
        DBCollection envCollection = mongoOperations.getCollection(HOST);
        DBCursor cursor = envCollection.find();
        while(cursor.hasNext()) {
            DBObject dbObject = cursor.next();
            TreeView rootTree = new TreeView();
            rootTree.setId("host" + host);
            rootTree.setId(dbObject.get("host").toString());
            rootTree.setPid("-1");
            rootTree.setText(host);
            rootTree.setText(dbObject.get("name").toString()+"/"+dbObject.get("host").toString());
            treeList.add(rootTree);
        }
        JSONArray jsonArray = new JSONArray(treeList);
        JSONArray jsonArray = new JSONArray(treeList);
        return Result.success(jsonArray.toString());
    }

+ 3 - 3
src/main/webapp/WEB-INF/ehr/jsp/monitor/server/sEnvManage.jsp

@ -16,7 +16,7 @@
    .div-item{width:40%;float: left;}
    .d-item{font-size: 12px;display: inline-block;width: 100px;}
    .c-content{font-size: 16px;display: inline-block;width: 100px;font-weight: bold;}
    .div-right-item{font-size: 12px;width: 150px;}
    .div-right-item{font-size: 12px;width: 200px;}
</style>
<!-- ####### 页面部分 ####### -->
<div id="div_wrapper">
@ -24,7 +24,7 @@
    <div position="left"  style="margin-left:10px;margin-top:10px;">
        <input type="hidden" id="nowHost">
        <ul    style="margin-left:30px;margin-top:10px;font-size: 17px;">服务器列表</ul>
        <ul id="div_wrapper_left_ul_resourcetree" class="m-snav"  style="margin-left:20px;margin-top:40px;"></ul>
        <ul id="div_wrapper_left_ul_resourcetree" class="m-snav"  style="margin-top:40px;"></ul>
    </div>
    <div position="center" style="margin-left:10px;margin-top:10px;margin-right:10px;">
        <!-- ####### 查询条件部分 ####### -->
@ -55,7 +55,7 @@
            <div class="c-item" data-item="net">网络</div>
        </div>
            <%-- echarts 数据--%>
        <div id="main" style="width: 80%;height:400px;"></div>
        <div id="main" style="width: 60%;height:400px;"></div>
            <%-- 详细数据 --%>
            <div id="detail" class="ml50 mt20">

+ 5 - 5
src/main/webapp/WEB-INF/ehr/jsp/monitor/server/sEnvManageJs.jsp

@ -87,7 +87,7 @@
            async: false,
            success: function (msg) {
                var data = JSON.parse(msg.message);
                host = data[0].text;
                host = data[0].id;
                if (msg.successFlg) {
                    // 初始化树形菜单
                    serviceTree = serviceTree.ligerTree({
@ -98,7 +98,7 @@
                        isExpand: false,
                        onClick: function (obj) {
                            //初始化图表
                            host = obj.data.text;
                            host = obj.data.id;
                            var beginTime = $("#repeatStartTime").ligerDateEditor("getValue");
                            var endTime = $("#repeatEndTime").ligerDateEditor("getValue");
                            if(beginTime=="" && $endTime=="")
@ -122,7 +122,7 @@
                        },
                        onSuccess: function (data) {
                            alert(2222);
                            host = data[0].text;
                            host = data[0].id;
                        }
                    });
                }
@ -656,13 +656,13 @@
                "                        <div class=\"d-item\">发送</div>" +
                "                    </div>" +
                "                    <div class=\"mb20\">\n" +
                "                        <div class=\"c-content\">"+envData.txbps+"kbps</div>" +
                "                        <div class=\"c-content\">"+envData.txbps+" kbps</div>" +
                "                    </div>\n" +
                "                    <div>\n" +
                "                        <div class=\"d-item\">接收</div>\n" +
                "                    </div>\n" +
                "                    <div class=\"mb20\">\n" +
                "                        <div class=\"c-content\">"+envData.rxbps+"kbps</div>\n" +
                "                        <div class=\"c-content\">"+envData.rxbps+" kbps</div>\n" +
                "                    </div>\n" +
                "                </div>\n" +
                "                <div class=\"div-item\">\n" +