
Merge branch 'master' of http://192.168.1.220:10080/esb/esb

llh, 9 years ago
commit 0bc82722fa

+ 66 - 4
Hos-resource/src/main/java/com/yihu/ehr/crawler/controller/CrawlerController.java

@@ -7,11 +7,13 @@ import com.yihu.ehr.framework.model.ActionResult;
import com.yihu.ehr.framework.model.DetailModelResult;
import com.yihu.ehr.framework.model.Result;
import com.yihu.ehr.framework.util.operator.StringUtil;
import com.yihu.ehr.standard.service.adapter.AdapterSchemeService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.*;
import org.springframework.ui.Model;
import javax.annotation.Resource;
@@ -20,14 +22,51 @@ import javax.annotation.Resource;
 * <p>
 * Created by Airhead on 2015/12/16.
 */
@RestController
@RequestMapping("/crawler")
@Controller("crawlerController")
@Api(protocols = "http", value = "CrawlerController", description = "Health record collection API", tags = {"Collection"})
public class CrawlerController {
    @Resource
    CrawlerService crawlerService;
    @Resource(name = AdapterSchemeService.BEAN_ID)
    private AdapterSchemeService adapterSchemeService;
    /*
     * Job orchestration
     */
    @RequestMapping("jobLayout")
    public String jobLayout(Model model) {
        model.addAttribute("contentPage", "/crawler/jobLayout");
        return "partView";
    }
    /*
     * Job orchestration data mapping
     */
    @RequestMapping("jobDataMapping")
    public String jobDataMapping(Model model, String jobId) {
        try {
            model.addAttribute("contentPage", "/crawler/dataMapping");
            return "pageView";
        } catch (Exception ex) {
            model.addAttribute("contentPage", "/crawler/dataMapping");
            return "pageView";
        }
    }
    @RequestMapping("datasetDetail")
    public String datasetDetail(Model model,Integer schemeId,String schemeNm,String version,String checkedRowsIndex) {
        model.addAttribute("contentPage", "/crawler/datasetDetail");
        model.addAttribute("schemeId",schemeId);
        model.addAttribute("version",version);
        model.addAttribute("schemeNm",schemeNm);
        model.addAttribute("checkedRowsIndex",checkedRowsIndex);
        return "pageView";
    }
    @RequestMapping(value = "patient", method = RequestMethod.POST)
    @ApiOperation(value = "Collect a patient's health records", produces = "application/json", notes = "Collect a patient's health records")
    public Result crawler(
@@ -52,6 +91,7 @@ public class CrawlerController {
     */
    @RequestMapping(value = "saveDateSet", method = RequestMethod.POST)
    @ApiOperation(value = "Save job orchestration", produces = "application/json", notes = "Save job orchestration")
    @ResponseBody
    public Result saveJobData(
            @ApiParam(name = "job", value = "Job orchestration data", required = true)
            @RequestParam(value = "job") String jobInfo) {
@@ -79,7 +119,7 @@ public class CrawlerController {
            @RequestParam(value = "name", required = false) String name
    ) {
        try {
            return crawlerService.getSchemeDataset(versionId);
            return crawlerService.getSchemeDataset(versionId,name);
        } catch (Exception e) {
            e.printStackTrace();
            return new DetailModelResult();
@@ -107,7 +147,19 @@ public class CrawlerController {
        }
    }
    @RequestMapping("getRelations")
    @ResponseBody
    public String  getRelations(
            @RequestParam(value = "versionId", required = true) Integer versionId,
            @RequestParam(value = "datasetIdStr", required = false) String datasetIdStr
    ) {
        try {
            return crawlerService.getRelations(versionId,datasetIdStr);
        } catch (Exception e) {
            e.printStackTrace();
            return null;
        }
    }
    /**
@@ -115,6 +167,7 @@ public class CrawlerController {
     */
    @RequestMapping(value = "deleteDateSet", method = RequestMethod.POST)
    @ApiOperation(value = "Delete job orchestration", produces = "application/json", notes = "Delete job orchestration")
    @ResponseBody
    public Result deleteJobData(
            @ApiParam(name = "version", value = "Version number", required = true)
            @RequestParam(value = "version") String version){
@@ -135,6 +188,7 @@ public class CrawlerController {
     */
    @RequestMapping(value = "list",method = RequestMethod.POST)
    @ApiOperation(value = "List job orchestrations with paging", produces = "application/json", notes = "List job orchestrations with paging")
    @ResponseBody
    public DetailModelResult listJobData(
            @ApiParam(name = "rows", value = "Limit the size of result set. Must be an integer")
            @RequestParam(value = "rows", required = false) Integer rows,
@@ -144,4 +198,12 @@ public class CrawlerController {
            return crawlerService.getDataSetResult(rows, page);
    }
    @RequestMapping(value = "getSchemeList",method = RequestMethod.POST)
    @ApiOperation(value = "Get adapter scheme / scheme version dropdown options", produces = "application/json", notes = "Get adapter scheme / scheme version dropdown options")
    @ResponseBody
    public DetailModelResult getSchemeList(){
        DetailModelResult result=adapterSchemeService.getAdapterSchemeResultModelList();
        return result;
    }
}
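Note on the handler methods above: under a plain @Controller, a String return value is treated as a view name (jobLayout, jobDataMapping and datasetDetail return "partView"/"pageView" for the view resolver), so the JSON endpoints (saveDateSet, deleteDateSet, list, getRelations, getSchemeList) carry an explicit @ResponseBody to have their return value written to the response body instead. A minimal sketch of that split; the controller, paths and view names below are illustrative, not part of this commit:

import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;

// Illustrative only: shows why view-returning and JSON-returning methods are annotated differently.
@Controller("demoController")
public class DemoController {

    // The returned String is treated as a view name and rendered by the view resolver.
    @RequestMapping("demoPage")
    public String demoPage(Model model) {
        model.addAttribute("contentPage", "/demo/page");
        return "pageView";
    }

    // With @ResponseBody the return value is written straight to the HTTP response body.
    @RequestMapping("demoData")
    @ResponseBody
    public String demoData() {
        return "{\"ok\":true}";
    }
}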

+ 5 - 2
Hos-resource/src/main/java/com/yihu/ehr/crawler/dao/CrawlerDatasetDao.java

@@ -2,6 +2,7 @@ package com.yihu.ehr.crawler.dao;
import com.yihu.ehr.crawler.model.flow.CrawlerDataSetModel;
import com.yihu.ehr.framework.common.dao.SQLGeneralDAO;
import org.hibernate.Query;
import org.springframework.stereotype.Repository;
import java.util.List;
@@ -20,7 +21,9 @@ public class CrawlerDatasetDao extends SQLGeneralDAO {
    }
    public void deleteCrawlerDatasetList(Integer versionId) throws Exception {
        String sql = "delete from crawler_dataset where scheme_version_id='" + versionId + "'";
        super.execute(sql);
        String sql = "delete from crawler_dataset where scheme_version_id= :scheme_version_id";
        Query query = getCurrentSession().createSQLQuery(sql);
        query.setInteger("scheme_version_id", versionId);
        query.executeUpdate();
    }
}

+ 4 - 2
Hos-resource/src/main/java/com/yihu/ehr/crawler/dao/CrawlerFlowHeadDao.java

@@ -16,8 +16,10 @@ public class CrawlerFlowHeadDao extends SQLGeneralDAO {
    public static final String BEAN_ID = "CrawlerFlowHeadDao";
    public void deleteCrawlerFlowHeadList(Integer versionId) throws Exception {
        String sql = "delete from crawler_flow_head where scheme_version_id='" + versionId + "'";
        super.execute(sql);
        String sql = "delete from crawler_flow_head where scheme_version_id= :scheme_version_id";
        Query query = getCurrentSession().createSQLQuery(sql);
        query.setInteger("scheme_version_id", versionId);
        query.executeUpdate();
    }
    public List<CrawlerFlowHeadModel> getCrawlerFlowHeadList(Integer versionId) {
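Both DAO changes above apply the same fix: the delete statement that concatenated versionId into the SQL string is replaced with a native query using a named parameter, so the value is bound by Hibernate rather than spliced into the statement, avoiding quoting and SQL-injection pitfalls. A standalone sketch of the pattern, assuming a Hibernate Session is at hand; the helper class below is illustrative, not part of the commit:

import org.hibernate.Query;
import org.hibernate.Session;

// Illustrative helper mirroring deleteCrawlerDatasetList / deleteCrawlerFlowHeadList.
public class ParameterizedDeleteSketch {

    public static int deleteCrawlerDatasetByVersion(Session session, Integer versionId) {
        // The id is bound as a named parameter instead of being concatenated into the SQL.
        String sql = "delete from crawler_dataset where scheme_version_id = :scheme_version_id";
        Query query = session.createSQLQuery(sql);
        query.setInteger("scheme_version_id", versionId);
        return query.executeUpdate(); // number of rows deleted
    }
}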

+ 47 - 0
Hos-resource/src/main/java/com/yihu/ehr/crawler/model/flow/resultModel/MappingDataset.java

@@ -0,0 +1,47 @@
package com.yihu.ehr.crawler.model.flow.resultModel;
import java.util.List;
/**
 * @author HZY
 * @version 1.0
 * Created at 2016/4/29.
 */
public class MappingDataset {
    private Integer id;
    private String code;
    private String name;
    private List<MappingMetadata> data;
    public List<MappingMetadata> getData() {
        return data;
    }
    public void setData(List<MappingMetadata> data) {
        this.data = data;
    }
    public Integer getId() {
        return id;
    }
    public void setId(Integer id) {
        this.id = id;
    }
    public String getCode() {
        return code;
    }
    public void setCode(String code) {
        this.code = code;
    }
    public String getName() {
        return name;
    }
    public void setName(String name) {
        this.name = name;
    }
}

+ 36 - 0
Hos-resource/src/main/java/com/yihu/ehr/crawler/model/flow/resultModel/MappingMetadata.java

@@ -0,0 +1,36 @@
package com.yihu.ehr.crawler.model.flow.resultModel;
/**
 * @author HZY
 * @version 1.0
 * Created at 2016/4/29.
 */
public class MappingMetadata {
    private Integer id;
    private String code;
    private String name;
    public Integer getId() {
        return id;
    }
    public void setId(Integer id) {
        this.id = id;
    }
    public String getCode() {
        return code;
    }
    public void setCode(String code) {
        this.code = code;
    }
    public String getName() {
        return name;
    }
    public void setName(String name) {
        this.name = name;
    }
}

+ 101 - 25
Hos-resource/src/main/java/com/yihu/ehr/crawler/service/CrawlerService.java

@@ -6,26 +6,25 @@ import com.yihu.ehr.crawler.dao.CrawlerFlowHeadDao;
import com.yihu.ehr.crawler.model.flow.CrawlerDataSetModel;
import com.yihu.ehr.crawler.model.flow.CrawlerFlowHeadModel;
import com.yihu.ehr.crawler.model.flow.CrawlerFlowModel;
import com.yihu.ehr.crawler.model.flow.resultModel.CrawlerDatasetResultDetailModel;
import com.yihu.ehr.crawler.model.flow.resultModel.FlowEntrance;
import com.yihu.ehr.crawler.model.flow.resultModel.FlowLines;
import com.yihu.ehr.crawler.model.flow.resultModel.FlowMapping;
import com.yihu.ehr.crawler.model.flow.resultModel.*;
import com.yihu.ehr.framework.constrant.Constants;
import com.yihu.ehr.framework.model.ActionResult;
import com.yihu.ehr.framework.model.DetailModelResult;
import com.yihu.ehr.framework.util.operator.StringUtil;
import com.yihu.ehr.standard.model.adapter.AdapterDatasetModel;
import com.yihu.ehr.standard.model.adapter.AdapterMetadataModel;
import com.yihu.ehr.standard.model.adapter.AdapterSchemeVersionModel;
import com.yihu.ehr.standard.service.adapter.AdapterDatasetService;
import com.yihu.ehr.standard.service.adapter.AdapterMetadataService;
import com.yihu.ehr.standard.service.adapter.AdapterSchemeVersionService;
import com.yihu.ehr.standard.service.bo.AdapterVersion;
import net.sf.json.JSONArray;
import net.sf.json.JSONObject;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import javax.annotation.Resource;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.*;
@Transactional
@Service("CrawlerService")
@@ -43,6 +42,8 @@ public class CrawlerService {
    private CrawlerFlowDao crawlerFlowDao;
    @Resource(name = CrawlerFlowHeadDao.BEAN_ID)
    private CrawlerFlowHeadDao crawlerFlowHeadDao;
    @Resource(name = AdapterMetadataService.BEAN_ID)
    private AdapterMetadataService adapterMetadataService;
    /**
     * Save orchestration mapping relations
@@ -101,10 +102,12 @@
     * @return
     * @throws Exception
     */
    public DetailModelResult getSchemeDataset(Integer schemeVersionId) throws Exception {
    public DetailModelResult getSchemeDataset(Integer schemeVersionId,String datasetName) throws Exception {
        AdapterSchemeVersionModel versionModel = (AdapterSchemeVersionModel) adapterSchemeVersionService.get(schemeVersionId);
        // Get the full list of adapter datasets
        List<AdapterDatasetModel> adapterDatasetModelList = adapterDatasetService.getDatasetList(AdapterDatasetModel.class, versionModel.getVersion(), null, null, null, null);
        Map<String,Object> map = new HashMap<String,Object>();
        map.put("name", datasetName);
        List<AdapterDatasetModel> adapterDatasetModelList = adapterDatasetService.getDatasetList(AdapterDatasetModel.class, versionModel.getVersion(), net.sf.json.JSONObject.fromObject(map).toString(), null, null, null);
        // Get the orchestrated datasets
        List<CrawlerDataSetModel> crawlerDataset = crawlerDatasetDao.getCrawlerDatasetList(versionModel.getId());
        DetailModelResult re = new DetailModelResult();
@@ -172,22 +175,6 @@
        }
    }
    /**
     * Get adapter scheme mapping data
     *
     * @param schemeVersionId
     * @return
     */
    public FlowMapping getFlowMappingDataset(Integer schemeVersionId) {
        FlowMapping flowMapping = new FlowMapping();
        List<FlowEntrance> entrances = getFlowEntrances(schemeVersionId);
        List<FlowLines> lines = getFlowLines(schemeVersionId);
        flowMapping.setSchemeVersionId(schemeVersionId);
        flowMapping.setEntrances(entrances);
        flowMapping.setLines(lines);
        return flowMapping;
    }
    public List<FlowEntrance> getFlowEntrances(Integer schemeVersionId) {
        List<FlowEntrance> entrances = new ArrayList<>();
        List<CrawlerFlowHeadModel> modelList = crawlerFlowHeadDao.getCrawlerFlowHeadList(schemeVersionId);
@@ -266,7 +253,96 @@
            e.printStackTrace();
            return DetailModelResult.error("Failed to get datasets");
        }
    }
    /**
     * Get the adapter datasets selected in the orchestration
     *
     * @param schemeVersionId
     * @param datasetIdStr
     * @return
     * @throws Exception
     */
    public List<MappingDataset> getSchemeDatasetByChecked(Integer schemeVersionId, String datasetIdStr) throws Exception {
        AdapterSchemeVersionModel versionModel = (AdapterSchemeVersionModel) adapterSchemeVersionService.get(schemeVersionId);
        // Get the full list of adapter datasets
        AdapterVersion adapterVersion = new AdapterVersion(versionModel.getVersion());
        List<AdapterDatasetModel> adapterDatasetModelList = new ArrayList<>();
        if (datasetIdStr != null && !"".equals(datasetIdStr)) {
            String[] datasetIdList = datasetIdStr.split(",");
            List<Integer> newDatasetIdList = new ArrayList<>();
            for (String datasetId : datasetIdList) {
                if (!StringUtil.isStrEmpty(datasetId)) {
                    Integer newDatasetId = Integer.parseInt(datasetId);
                    newDatasetIdList.add(newDatasetId);
                }
            }
            adapterDatasetModelList = adapterDatasetService.getListByAdapterDatasetIdList(adapterVersion, newDatasetIdList);
        } else {
            adapterDatasetModelList = adapterDatasetService.getDatasetList(AdapterDatasetModel.class, versionModel.getVersion(), null, null, null, null);
        }
        // Get the orchestrated datasets
        List<CrawlerDataSetModel> crawlerDataset = crawlerDatasetDao.getCrawlerDatasetList(versionModel.getId());
        List<MappingDataset> list = new ArrayList<>();
        for (AdapterDatasetModel datasetModel : adapterDatasetModelList) {
            MappingDataset obj = new MappingDataset();
            if (crawlerDataset != null && crawlerDataset.size() > 0) {
                for (CrawlerDataSetModel cDataSet : crawlerDataset) {
                    if (cDataSet.getDatasetId().equals(datasetModel.getAdapterDatasetId())) {
                        List<MappingMetadata> metadatas = getMappingMetaDatasByDataset(versionModel.getVersion(), datasetModel.getAdapterDatasetId());
                        obj.setId(datasetModel.getStdDatasetId());
                        obj.setCode(datasetModel.getStdDatasetCode());
                        obj.setName(datasetModel.getStdDatasetName());
                        obj.setData(metadatas);
                        break;
                    }
                }
            }
            list.add(obj);
        }
        return list;
    }
    /**
     * Return mapping metadata for the front end
     *
     * @param adapterVersion adapter version number
     * @param dataSetId      adapter dataset ID
     * @return
     */
    public List<MappingMetadata> getMappingMetaDatasByDataset(String adapterVersion, Integer dataSetId) {
        List<AdapterMetadataModel> adapterMetadataModels = adapterMetadataService.getAdapterMetadataByDataset(adapterVersion, dataSetId);
        List<MappingMetadata> resultList = new ArrayList<>();
        if (adapterMetadataModels != null && adapterMetadataModels.size() > 0) {
            for (AdapterMetadataModel metadataModel : adapterMetadataModels) {
                MappingMetadata metadata = new MappingMetadata();
                metadata.setId(metadataModel.getAdapterMetadataId());
                metadata.setCode(metadataModel.getAdapterMetadataCode());
                metadata.setName(metadataModel.getAdapterMetadataName());
                resultList.add(metadata);
            }
        }
        return resultList;
    }
    /**
     * Get adapter scheme mapping data
     *
     * @param schemeVersionId
     * @param datasetIdStr
     * @return
     */
    public String getRelations(Integer schemeVersionId, String datasetIdStr) {
        JSONObject jsonObject = new JSONObject();
        try {
            List<MappingDataset> datasets = getSchemeDatasetByChecked(schemeVersionId, datasetIdStr);
            List<FlowLines> lines = getFlowLines(schemeVersionId);
            jsonObject.put("tables", datasets);
            jsonObject.put("rels", lines);
        } catch (Exception e) {
            e.printStackTrace();
        }
        return jsonObject.toString();
    }
}
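For orientation, getRelations above returns a plain JSON string with two top-level keys: "tables" (the selected datasets from getSchemeDatasetByChecked, each carrying its mapped metadata) and "rels" (the flow lines). A self-contained sketch of how that payload is shaped, using made-up ids, codes and names; net.sf.json serializes the MappingDataset/MappingMetadata bean properties, exactly as the service code does:

import java.util.ArrayList;
import java.util.List;

import net.sf.json.JSONObject;

import com.yihu.ehr.crawler.model.flow.resultModel.MappingDataset;
import com.yihu.ehr.crawler.model.flow.resultModel.MappingMetadata;

// Illustrative payload for the "tables"/"rels" structure built in getRelations().
public class RelationsPayloadSketch {
    public static void main(String[] args) {
        MappingMetadata metadata = new MappingMetadata();
        metadata.setId(101);                 // made-up adapter metadata id
        metadata.setCode("demo_metadata");   // made-up code
        metadata.setName("Demo metadata");

        List<MappingMetadata> metadataList = new ArrayList<MappingMetadata>();
        metadataList.add(metadata);

        MappingDataset dataset = new MappingDataset();
        dataset.setId(1);                    // made-up standard dataset id
        dataset.setCode("demo_dataset");
        dataset.setName("Demo dataset");
        dataset.setData(metadataList);

        List<MappingDataset> tables = new ArrayList<MappingDataset>();
        tables.add(dataset);

        JSONObject payload = new JSONObject();
        payload.put("tables", tables);                // datasets with their metadata
        payload.put("rels", new ArrayList<Object>()); // flow lines; empty here for brevity
        // Prints something like:
        // {"tables":[{"id":1,"code":"demo_dataset","name":"Demo dataset",
        //             "data":[{"id":101,"code":"demo_metadata","name":"Demo metadata"}]}],
        //  "rels":[]}
        System.out.println(payload.toString());
    }
}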

+ 9 - 0
Hos-resource/src/main/java/com/yihu/ehr/standard/service/adapter/AdapterMetadataService.java

@@ -381,4 +381,13 @@ public class AdapterMetadataService extends SQLGeneralDAO {
            return Result.error("Failed to delete adapter metadata");
        }
    }
    public List getAdapterMetadataByDataset(String adapterVersion,Integer datasetId){
        Map<String,Object> map = new HashMap<String,Object>();
        map.put("adapterDatasetId", datasetId);
        String tableName="adapter_dataset_"+adapterVersion;
        List<AdapterMetadataModel> metadataModelList = getList(AdapterMetadataModel.class,tableName,  net.sf.json.JSONObject.fromObject(map).toString(), null, null, null);
        return metadataModelList;
    }
}