瀏覽代碼

Merge branch 'master' of chenyj/esb into master

esb 9 年之前
父節點
當前提交
1275cec369

+ 23 - 1
Hos-resource/src/main/java/com/yihu/ehr/crawler/controller/CrawlerController.java

@@ -2,17 +2,19 @@ package com.yihu.ehr.crawler.controller;
import com.yihu.ehr.crawler.model.patient.Patient;
import com.yihu.ehr.crawler.service.CrawlerManager;
import com.yihu.ehr.crawler.service.JobArrangeManager;
import com.yihu.ehr.framework.model.Result;
import com.yihu.ehr.framework.util.operator.StringUtil;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import net.sf.json.JSONObject;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import javax.annotation.Resource;
/**
 * 目前版本只需要采集,上传和注册病人档案就可了。
@@ -24,6 +26,9 @@ import org.springframework.web.bind.annotation.RestController;
@Api(protocols = "http", value = "CrawlerController", description = "档案采集接口", tags = {"采集"})
public class CrawlerController {
    @Resource
    JobArrangeManager jobArrangeManager;
    @RequestMapping(value = "patient", method = RequestMethod.POST)
    @ApiOperation(value = "采集病人健康档案", produces = "application/json", notes = "采集病人健康档案")
    public Result crawler(
@@ -42,4 +47,21 @@ public class CrawlerController {
            return Result.error("参数转换病人实体失败");
        }
    }
    /**
     * Persists job-arrangement (task orchestration) data posted by the client.
     * Delegates the actual parsing/saving to {@link JobArrangeManager#saveJobData}.
     */
    @RequestMapping(value = "job", method = RequestMethod.POST)
    @ApiOperation(value = "保存任务编排", produces = "application/json", notes = "保存任务编排")
    public Result saveJobData(
            @ApiParam(name = "job", value = "任务编排信息", required = true)
            @RequestParam(value = "job", required = true) String jobInfo) {
        try {
            jobArrangeManager.saveJobData(jobInfo);
            return Result.success("保存成功");
        } catch (Exception e) {
            // NOTE(review): no logger is visible in this class; stack trace goes to stderr.
            e.printStackTrace();
            return Result.error("保存失败");
        }
    }
}

+ 14 - 0
Hos-resource/src/main/java/com/yihu/ehr/crawler/dao/CrawlerDao.java

@@ -3,6 +3,7 @@ package com.yihu.ehr.crawler.dao;
import com.yihu.ehr.crawler.dao.intf.ICrawlerDao;
import com.yihu.ehr.crawler.model.flow.CrawlerDataSetModel;
import com.yihu.ehr.framework.common.dao.SQLGeneralDAO;
import com.yihu.ehr.standard.model.adapter.AdapterSchemeVersionModel;
import org.springframework.stereotype.Repository;
import java.util.List;
@@ -30,4 +31,17 @@ public class CrawlerDao extends SQLGeneralDAO implements ICrawlerDao {
        String sql = "delete from crawler_flow_head where scheme_version_id='"+versionId+"'";
        super.execute(sql);
    }
    /**
     * Looks up the adapter-scheme-version record matching the given scheme id and version.
     *
     * SECURITY(review): the SQL is still built by concatenation because the visible
     * SQLGeneralDAO API ({@code queryObjBySql}) takes a raw SQL string; single quotes in
     * the inputs are escaped below as a stopgap. Prefer a bound-parameter query API if
     * SQLGeneralDAO exposes one — TODO confirm.
     *
     * @param schemeId adapter scheme id (matched against {@code scheme_id})
     * @param version  scheme version string (matched against {@code version})
     * @return the matching model, or whatever {@code queryObjBySql} yields when no row matches
     *         (presumably {@code null} — verify against SQLGeneralDAO)
     * @throws Exception propagated from the underlying DAO query
     */
    public AdapterSchemeVersionModel getIdBySchemeAndVersion(String schemeId, String version) throws Exception {
        // Null-guard plus quote-escaping: prevents both an NPE on .replace and a broken /
        // injectable statement when the caller passes values containing single quotes.
        String safeSchemeId = schemeId == null ? "" : schemeId.replace("'", "''");
        String safeVersion = version == null ? "" : version.replace("'", "''");
        String sql = " select * from adapter_scheme_version o where 1=1 and o.scheme_id = '"
                + safeSchemeId + "' and o.version = '" + safeVersion + "'";
        return super.queryObjBySql(sql, AdapterSchemeVersionModel.class);
    }
}

+ 64 - 0
Hos-resource/src/main/java/com/yihu/ehr/crawler/service/JobArrangeManager.java

@@ -0,0 +1,64 @@
package com.yihu.ehr.crawler.service;
import com.yihu.ehr.crawler.dao.CrawlerDao;
import com.yihu.ehr.crawler.model.flow.CrawlerDataSetModel;
import com.yihu.ehr.standard.model.adapter.AdapterSchemeVersionModel;
import net.sf.json.JSONArray;
import net.sf.json.JSONObject;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import javax.annotation.Resource;
/**
 * Persists crawler job-arrangement (task orchestration) configuration submitted as JSON.
 *
 * @author Yingjie Chen
 * @version $$Revision$$
 * @date 2016/4/27
 */
@Service
public class JobArrangeManager {

    @Resource
    CrawlerDao crawlerDao;

    /**
     * Saves job-arrangement data.
     *
     * Expected payload: a JSON array of objects, each carrying "schemeId", "version",
     * and a "dataSets" array whose elements have "datasetId", "datasetCode" and
     * "datasetName"; "lines"/"entrances" (flow relations) are accepted but not yet
     * persisted (see TODO below).
     *
     * @param json JSON array string describing the arrangement
     * @throws Exception if the JSON is malformed, a numeric field does not parse,
     *                   or the DAO fails
     * @throws IllegalStateException when no adapter scheme version matches a
     *                   schemeId/version pair (unchecked so @Transactional rolls back)
     */
    @Transactional
    public void saveJobData(String json) throws Exception {
        JSONArray jsonList = JSONArray.fromObject(json);
        for (Object item : jsonList) {
            JSONObject obj = JSONObject.fromObject(item);
            String schemeId = obj.getString("schemeId");
            String version = obj.getString("version");
            AdapterSchemeVersionModel versionModel = crawlerDao.getIdBySchemeAndVersion(schemeId, version);
            // Fail fast with a clear message instead of an NPE on versionModel.getId().
            // Unchecked exception so the default @Transactional rollback rule applies
            // (checked Exception would NOT roll the transaction back by default).
            if (versionModel == null || versionModel.getId() == null) {
                throw new IllegalStateException(
                        "No adapter scheme version found for schemeId=" + schemeId + ", version=" + version);
            }
            // Persist each data set belonging to this scheme version.
            JSONArray dataSets = obj.getJSONArray("dataSets");
            for (Object o : dataSets) {
                JSONObject dataSet = JSONObject.fromObject(o);
                String datasetId = dataSet.getString("datasetId");
                String datasetCode = dataSet.getString("datasetCode");
                String datasetName = dataSet.getString("datasetName");
                CrawlerDataSetModel dataSetModel = new CrawlerDataSetModel();
                dataSetModel.setSchemeVersionId(versionModel.getId());
                dataSetModel.setSchemeId(Integer.valueOf(schemeId));
                dataSetModel.setDatasetId(Integer.valueOf(datasetId));
                dataSetModel.setDatasetCode(datasetCode);
                dataSetModel.setDatasetName(datasetName);
                crawlerDao.saveOrUpdateEntity(dataSetModel);
            }
            if (obj.containsKey("lines") && obj.containsKey("entrances")) {
                // TODO: 2016/4/27  保存编排关系
//                 saveDataSetRelation(schemeId, version, json);
            }
        }
    }
}

+ 2 - 1
Hos-resource/src/main/resources/hbm/resource/CrawlerDataSet.hbm.xml

@@ -5,7 +5,8 @@
    <class name="com.yihu.ehr.crawler.model.flow.CrawlerDataSetModel" table="crawler_dataset">
        <composite-id>
            <key-property name="schemeVersionId" column="scheme_version_id"/>
            <key-property name="datasetCode" column="dataset_id"/>
            <key-property name="datasetId" column="dataset_id"/>
            <key-property name="schemeId" column="scheme_id"/>
        </composite-id>
        <property name="datasetCode">