浏览代码

修改任务编排保存问题

llh 9 年之前
父节点
当前提交
ab79e106cc

+ 6 - 2
Hos-resource/src/main/java/com/yihu/ehr/crawler/controller/CrawlerController.java

@ -100,9 +100,13 @@ public class CrawlerController {
    @ResponseBody
    public Result saveJobData(
            @ApiParam(name = "job", value = "任务编排信息", required = true)
            @RequestParam(value = "job") String jobInfo) {
            @RequestParam(value = "job") String jobInfo,
            @ApiParam(name = "rows",value = "当前行数", required = true)
            @RequestParam(value = "rows") Integer rows,
            @ApiParam(name = "page", value = "当前页数", required = true)
            @RequestParam(value = "page") Integer page) {
        try {
            crawlerService.saveJobData(jobInfo);
            crawlerService.saveJobData(jobInfo, rows, page);
        } catch (Exception e) {
            e.printStackTrace();
            return Result.error("保存失败");

+ 10 - 0
Hos-resource/src/main/java/com/yihu/ehr/crawler/model/flow/CrawlerFlowModel.java

@ -11,6 +11,8 @@ import java.io.Serializable;
 */
public class CrawlerFlowModel extends Result implements Serializable {
    private Integer id;
    private Integer schemeVersionId;
    private String datasetCode;
@ -25,6 +27,14 @@ public class CrawlerFlowModel extends Result implements Serializable {
    private String inputDefaultValue;
    /** Returns the surrogate primary key of this crawler-flow row (mapped to the generated `id` column). */
    public Integer getId() {
        return id;
    }
    /** Sets the surrogate primary key; normally assigned by Hibernate's increment generator, not by callers. */
    public void setId(Integer id) {
        this.id = id;
    }
    /** Returns the id of the adapter scheme version this flow row belongs to. */
    public Integer getSchemeVersionId() {
        return schemeVersionId;
    }

+ 57 - 41
Hos-resource/src/main/java/com/yihu/ehr/crawler/service/CrawlerService.java

@ -156,52 +156,60 @@ public class CrawlerService {
     * 保存任务编排数据
     *
     * @param json
     * @return
     * @param rows
     * @param page
     * @throws Exception
     */
    public void saveJobData(String json) throws Exception {
    public void saveJobData(String json, Integer rows, Integer page) throws Exception {
        JSONArray jsonList = JSONArray.fromObject(json);
        //清空当页数据
        deleteCurrentPage(rows, page);
        for (Object item : jsonList) {
            JSONObject obj = JSONObject.fromObject(item);
            String schemeId = obj.getString("schemeId");
            String versionId = obj.getString("versionId");
            AdapterSchemeVersionModel versionModel = (AdapterSchemeVersionModel) adapterSchemeVersionService.get(Integer.valueOf(versionId));
            AdapterVersion adapterVersion = new AdapterVersion(versionModel.getVersion());
            //删除已存在的数据集
            crawlerDatasetDao.deleteCrawlerDatasetList(versionModel.getId());
            List<AdapterDatasetModel> adapterDatasetModelList = new ArrayList<>();
            //根据id字符串获取编排数据集
            if (obj.containsKey("dataSets")) {
                List<Integer> newDatasetIdList = new ArrayList<>();
                String dataSetStr = obj.getString("dataSets");
                if (StringUtils.isNotBlank(dataSetStr)) {
                    String[] IdList =  dataSetStr.split(",");
                    for (String aIdList : IdList) {
                        if (!Objects.equals(aIdList, "")) {
                            Integer DaSetId = Integer.valueOf(aIdList);
                            newDatasetIdList.add(DaSetId);
            if (obj.containsKey("schemeId") && obj.containsKey("versionId")) {
                String schemeId = obj.getString("schemeId");
                String versionId = obj.getString("versionId");
                AdapterSchemeVersionModel versionModel = (AdapterSchemeVersionModel) adapterSchemeVersionService.get(Integer.valueOf(versionId));
                if (versionModel != null) {
                    AdapterVersion adapterVersion = new AdapterVersion(versionModel.getVersion());
                    //删除已存在的数据集
                    crawlerDatasetDao.deleteCrawlerDatasetList(versionModel.getId());
                    List<AdapterDatasetModel> adapterDatasetModelList = new ArrayList<>();
                    //根据id字符串获取编排数据集
                    if (obj.containsKey("dataSets")) {
                        List<Integer> newDatasetIdList = new ArrayList<>();
                        String dataSetStr = obj.getString("dataSets");
                        if (StringUtils.isNotBlank(dataSetStr)) {
                            String[] IdList =  dataSetStr.split(",");
                            for (String aIdList : IdList) {
                                if (!Objects.equals(aIdList, "")) {
                                    Integer DaSetId = Integer.valueOf(aIdList);
                                    newDatasetIdList.add(DaSetId);
                                }
                            }
                        }
                        adapterDatasetModelList = adapterDatasetService.getListByAdapterDatasetIdList(adapterVersion, newDatasetIdList);
                        for (AdapterDatasetModel model : adapterDatasetModelList) {
                            CrawlerDataSetModel dataSetModel = new CrawlerDataSetModel();
                            dataSetModel.setSchemeId(Integer.valueOf(schemeId));
                            dataSetModel.setSchemeVersionId(versionModel.getId());
                            dataSetModel.setDatasetId(model.getId());
                            dataSetModel.setDatasetCode(model.getStdDatasetCode());
                            dataSetModel.setDatasetName(model.getStdDatasetName());
                            crawlerDatasetDao.saveEntity(dataSetModel);
                        }
                    }
                }
                adapterDatasetModelList = adapterDatasetService.getListByAdapterDatasetIdList(adapterVersion, newDatasetIdList);
                for (AdapterDatasetModel model : adapterDatasetModelList) {
                    CrawlerDataSetModel dataSetModel = new CrawlerDataSetModel();
                    dataSetModel.setSchemeId(Integer.valueOf(schemeId));
                    dataSetModel.setSchemeVersionId(versionModel.getId());
                    dataSetModel.setDatasetId(model.getId());
                    dataSetModel.setDatasetCode(model.getStdDatasetCode());
                    dataSetModel.setDatasetName(model.getStdDatasetName());
                    crawlerDatasetDao.saveEntity(dataSetModel);
                    //如果保存传入编排映射关系,进行保存操作
                    if (obj.containsKey("relation") && !Objects.equals(obj.getString("relation"), "")) {
                        saveDataSetRelation(versionId,obj.getString("relation"));
                    }
                }
            }
            //如果保存传入编排映射关系,进行保存操作
            if (obj.containsKey("relation") && !Objects.equals(obj.getString("relation"), "")) {
                saveDataSetRelation(versionId,obj.getString("relation"));
            }
        }
    }
@ -240,13 +248,14 @@ public class CrawlerService {
     */
    @Transactional
    public String  deleteJobData(String version) {
        AdapterSchemeVersionModel versionModel = (AdapterSchemeVersionModel) adapterSchemeVersionService.get(Integer.valueOf(version));
        if (versionModel == null || versionModel.getId() == null) {
            return "删除失败";
        }
        //删除对应表记录
        try {
            AdapterSchemeVersionModel versionModel = (AdapterSchemeVersionModel) adapterSchemeVersionService.get(Integer.valueOf(version));
            if (versionModel == null || versionModel.getId() == null) {
                return "删除失败";
            }
            //删除对应表记录
            crawlerDatasetDao.deleteCrawlerDatasetList(versionModel.getId());
            crawlerFlowHeadDao.deleteCrawlerFlowHeadList(versionModel.getId());
            crawlerFlowDao.deleteCrawlerFlowList(versionModel.getId());
@ -279,7 +288,7 @@ public class CrawlerService {
            Integer total = crawlerDatasetDao.getTotalRows();
            List list = crawlerDatasetDao.queryListBySql(stringBuffer.toString());
            List<Map<String, Object>>list = crawlerDatasetDao.queryListBySql(stringBuffer.toString());
            DetailModelResult detailModelResult = DetailModelResult.success("获取数据集成功");
            detailModelResult.setDetailModelList(list);
            detailModelResult.setTotalCount(total);
@ -399,5 +408,12 @@ public class CrawlerService {
        lineCache.put(schemeVersionId,line);
    }
    /**
     * Deletes every job record shown on the given grid page so the page can be
     * re-saved from scratch by {@code saveJobData}.
     *
     * @param rows page size of the grid
     * @param page current page index, as interpreted by {@code getDataSetResult}
     */
    public void deleteCurrentPage(Integer rows, Integer page) {
        DetailModelResult currentResult = getDataSetResult(rows, page);
        List<Map<String, Object>> list = currentResult.getDetailModelList();
        if (list == null) {
            return; // nothing displayed on this page — nothing to delete
        }
        for (Map<String, Object> map : list) {
            // Each grid row carries the scheme version id that keys the stored job data.
            String version = String.valueOf(map.get("scheme_version_id"));
            deleteJobData(version);
        }
    }
}

+ 12 - 5
Hos-resource/src/main/resources/hbm/resource/CrawlerFlow.hbm.xml

@ -3,11 +3,18 @@
"http://hibernate.sourceforge.net/hibernate-mapping-3.0.dtd">
<hibernate-mapping>
    <class name="com.yihu.ehr.crawler.model.flow.CrawlerFlowModel" table="crawler_flow">
        <composite-id>
            <key-property name="schemeVersionId" column="scheme_version_id"/>
            <key-property name="datasetCode" column="dataset_code"/>
            <key-property name="metadataCode" column="metadata_code"/>
        </composite-id>
        <id name="id" column="id">
            <generator class="increment"/>
        </id>
        <property name="schemeVersionId">
            <column name="scheme_version_id"/>
        </property>
        <property name="datasetCode">
            <column name="dataset_code"/>
        </property>
        <property name="metadataCode">
            <column name="metadata_code"/>
        </property>
        <property name="inputDatasetCode">
            <column name="input_dataset_code"/>
        </property>

+ 4 - 3
Hos-resource/src/main/webapp/WEB-INF/ehr/jsp/crawler/jobLayoutJs.jsp

@ -193,7 +193,6 @@
        bindEvents: function () {
            var me = this;
            $('#btnAdd').click(function () {
                me.gridScrollTop();//1、将滚动条滚动到底部
                var dataRow = {
                    status:'',
                    scheme_id:'',
@ -212,6 +211,7 @@
                $(".sel-scheme-name#schemeNm"+rowData.length).trigger("change");
                me.cacheDatasetIdList.push("");
                me.cacheDatasetCodeList.push("");
                me.gridScrollTop();//1、将滚动条滚动到底部
            });
            $('#btnSave').click(function(){
                
@ -219,12 +219,13 @@
                    return false;
                }
                var resultArr =  me.getGridData();//获取表格数据
                var option = me.grid.options;
                $.ajax({
                    url: "${contextRoot}/crawler/saveDateSet",
                    type: "post",
                    dataType: "json",
                    data:{job: JSON.stringify(resultArr)},
                    data:{job: JSON.stringify(resultArr),rows:option.pageSize,page:option.page},
                    success: function (data) {
                        
                        if(data.successFlg){
@ -271,7 +272,7 @@
            return temp;
        },
        gridScrollTop:function(){
            var div = $('#div_job_grid')[0],
            var div = $('#div_job_grid .l-grid-body2')[0],
                    scrollHeight = div.scrollHeight;
    
            if( div.scrollTop + div.clientHeight >= div.scrollHeight ){//滚动条已滑动到底部