Procházet zdrojové kódy

任务编排 删除接口 分页显示接口

Ezreal před 9 roky
rodič
revize
deece9c276

+ 21 - 11
Hos-resource/src/main/java/com/yihu/ehr/crawler/controller/CrawlerController.java

@@ -3,15 +3,15 @@ package com.yihu.ehr.crawler.controller;
import com.yihu.ehr.crawler.model.patient.Patient;
import com.yihu.ehr.crawler.service.CrawlerManager;
import com.yihu.ehr.crawler.service.CrawlerService;
import com.yihu.ehr.framework.model.ActionResult;
import com.yihu.ehr.framework.model.DataGridResult;
import com.yihu.ehr.framework.model.DetailModelResult;
import com.yihu.ehr.framework.model.Result;
import com.yihu.ehr.framework.util.operator.StringUtil;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.bind.annotation.*;
import javax.annotation.Resource;
@@ -51,7 +51,7 @@ public class CrawlerController {
    /**
     *保存任务编排数据
     */
    @RequestMapping(value = "job", method = RequestMethod.POST)
    @RequestMapping(value = "saveDateSet", method = RequestMethod.POST)
    @ApiOperation(value = "保存任务编排", produces = "application/json", notes = "保存任务编排")
    public Result saveJobData(
            @ApiParam(name = "job", value = "任务编排信息", required = true)
@@ -114,15 +114,13 @@ public class CrawlerController {
    /**
     * 删除任务编排数据
     */
    @RequestMapping(value = "delete", method = RequestMethod.POST)
    @RequestMapping(value = "deleteDateSet", method = RequestMethod.POST)
    @ApiOperation(value = "删除任务编排", produces = "application/json", notes = "删除任务编排")
    public Result deleteJobData(
            @ApiParam(name = "version", value = "版本号", required = true)
            @RequestParam(value = "version") Integer version,
            @ApiParam(name = "schemeId",value = "适配方案名", required = true)
            @RequestParam(value = "schemeId") Integer schemeId) {
        if (version != null && schemeId !=null) {
            String message =jobArrangeManager.deleteJobData(version, schemeId);
            @RequestParam(value = "version") String version){
        if (version != null) {
            String message =crawlerService.deleteJobData(version);
            if (StringUtil.isEmpty(message)){
                return Result.success("删除成功");
            } else {
@@ -133,5 +131,17 @@ public class CrawlerController {
        }
    }
    /**
     * Lists job-arrangement (task orchestration) data page by page.
     * <p>
     * NOTE(review): this endpoint is an unimplemented stub — it always returns
     * {@code null}, which Spring serializes as an empty response body. Implement
     * the paged query (or return an empty DataGridResult) before release; TODO
     * confirm the intended paging contract with the caller.
     *
     * @param rows maximum number of records per page (optional)
     * @param page start position / page index of the result set (optional)
     * @return currently always {@code null} (stub)
     */
    @RequestMapping(value = "list",method = RequestMethod.POST)
    @ApiOperation(value = "分页显示任务编排", produces = "application/json", notes = "分页显示任务编排")
    public DataGridResult listJobData(
            @ApiParam(name = "rows", value = "Limit the size of result set. Must be an integer")
            @RequestParam(value = "rows", required = false) Integer rows,
            @ApiParam(name = "page", value = "Start position of result set. Must be an integer")
            @RequestParam(value = "page", required = false) Integer page) {
        return null;
    }
}

+ 5 - 3
Hos-resource/src/main/java/com/yihu/ehr/crawler/dao/CrawlerFlowDao.java

@@ -1,10 +1,7 @@
package com.yihu.ehr.crawler.dao;
import com.yihu.ehr.crawler.model.flow.CrawlerDataSetModel;
import com.yihu.ehr.crawler.model.flow.CrawlerFlowHeadModel;
import com.yihu.ehr.crawler.model.flow.CrawlerFlowModel;
import com.yihu.ehr.framework.common.dao.SQLGeneralDAO;
import com.yihu.ehr.standard.model.adapter.AdapterSchemeVersionModel;
import org.hibernate.Query;
import org.springframework.stereotype.Repository;
@@ -25,4 +22,9 @@ public class CrawlerFlowDao extends SQLGeneralDAO {
        return modelList;
    }
    /**
     * Deletes every crawler_flow row bound to the given adapter scheme version.
     *
     * @param versionId scheme version id whose flow records are removed
     * @throws Exception propagated from the underlying DAO execution
     */
    public void deleteCrawlerFlowList(Integer versionId)throws Exception  {
        // NOTE(review): SQL is built via string concatenation. Injection risk is
        // limited here because versionId is an Integer, but this should use a
        // parameterized query if SQLGeneralDAO supports bind variables — TODO
        // confirm its API. Also, quoting a numeric column value ('...') relies on
        // implicit type conversion in the database.
        String sql = "delete from crawler_flow where scheme_version_id='"+versionId+"'";
        super.execute(sql);
    }
}

+ 25 - 1
Hos-resource/src/main/java/com/yihu/ehr/crawler/service/CrawlerService.java

@@ -10,6 +10,7 @@ import com.yihu.ehr.crawler.model.flow.resultModel.CrawlerDatasetResultDetailMod
import com.yihu.ehr.crawler.model.flow.resultModel.FlowEntrance;
import com.yihu.ehr.crawler.model.flow.resultModel.FlowLines;
import com.yihu.ehr.crawler.model.flow.resultModel.FlowMapping;
import com.yihu.ehr.framework.constrant.Constants;
import com.yihu.ehr.framework.model.ActionResult;
import com.yihu.ehr.framework.model.DetailModelResult;
import com.yihu.ehr.standard.model.adapter.AdapterDatasetModel;
@@ -169,7 +170,6 @@ public class CrawlerService {
        }
    }
    /**
     * 获取适配方案映射数据
     *
@@ -213,4 +213,28 @@ public class CrawlerService {
    }
    /**
     * Deletes the job-arrangement (task orchestration) records attached to the
     * given adapter scheme version: its dataset list, flow heads, and flow edges.
     *
     * @param version adapter scheme version identifier
     * @return {@code Constants.EMPTY} on success, a failure message otherwise
     */
    @Transactional
    public String deleteJobData(String version) {
        AdapterSchemeVersionModel schemeVersion = adapterSchemeVersionService.getByVersion(version);
        // Unknown version: nothing to delete, report failure to the caller.
        if (schemeVersion == null || schemeVersion.getId() == null) {
            return "删除失败";
        }
        try {
            // Purge all three tables that reference this scheme version.
            Integer versionId = schemeVersion.getId();
            crawlerDatasetDao.deleteCrawlerDatasetList(versionId);
            crawlerFlowHeadDao.deleteCrawlerFlowHeadList(versionId);
            crawlerFlowDao.deleteCrawlerFlowList(versionId);
        } catch (Exception e) {
            e.printStackTrace();
            return "删除失败";
        }
        return Constants.EMPTY;
    }
}

+ 0 - 85
Hos-resource/src/main/java/com/yihu/ehr/crawler/service/JobArrangeManager.java

@@ -1,85 +0,0 @@
package com.yihu.ehr.crawler.service;
import com.yihu.ehr.crawler.dao.CrawlerDao;
import com.yihu.ehr.crawler.model.flow.CrawlerDataSetModel;
import com.yihu.ehr.framework.constrant.Constants;
import com.yihu.ehr.standard.model.adapter.AdapterSchemeVersionModel;
import com.yihu.ehr.standard.service.adapter.AdapterSchemeService;
import net.sf.json.JSONArray;
import net.sf.json.JSONObject;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import javax.annotation.Resource;
/**
 * @author Yingjie Chen
 * @version $$Revision$$
 * @date 2016/4/27
 */
@Service
public class JobArrangeManager {

    @Resource
    CrawlerDao crawlerDao;
    @Resource
    AdapterSchemeService adapterSchemeService;

    /**
     * Persists job-arrangement data posted as a JSON array.
     * <p>
     * Each element carries a schemeId/version pair, a "dataSets" array whose
     * entries are stored as {@link CrawlerDataSetModel} rows, and optionally a
     * "relation" payload describing dataset wiring.
     *
     * @param json JSON array of job-arrangement objects
     * @throws Exception propagated from DAO/service persistence failures
     */
    @Transactional
    public void saveJobData(String json) throws Exception {
        for (Object element : JSONArray.fromObject(json)) {
            JSONObject job = JSONObject.fromObject(element);
            String schemeId = job.getString("schemeId");
            String version = job.getString("version");
            // Resolve the scheme-version record the datasets attach to.
            AdapterSchemeVersionModel schemeVersion = crawlerDao.getIdBySchemeAndVersion(schemeId, version);
            for (Object entry : job.getJSONArray("dataSets")) {
                JSONObject dataSet = JSONObject.fromObject(entry);
                CrawlerDataSetModel model = new CrawlerDataSetModel();
                model.setSchemeVersionId(schemeVersion.getId());
                model.setSchemeId(Integer.valueOf(schemeId));
                model.setDatasetId(Integer.valueOf(dataSet.getString("datasetId")));
                model.setDatasetCode(dataSet.getString("datasetCode"));
                model.setDatasetName(dataSet.getString("datasetName"));
                crawlerDao.saveOrUpdateEntity(model);
            }
            // Dataset wiring is optional; store it only when present.
            if (job.containsKey("relation")) {
                adapterSchemeService.saveDataSetRelation(Integer.valueOf(schemeId), version, job.getString("relation"));
            }
        }
    }

    /**
     * Deletes job-arrangement data for a scheme/version pair.
     * <p>
     * NOTE(review): as written this only looks up the version record and prints
     * its id — no delete is issued; presumably unfinished. TODO confirm.
     *
     * @param version  version number
     * @param schemeId adapter scheme id
     * @return {@code Constants.EMPTY} on success, a failure message otherwise
     */
    @Transactional
    public String deleteJobData(Integer version, Integer schemeId) {
        try {
            AdapterSchemeVersionModel schemeVersion =
                    crawlerDao.getIdBySchemeAndVersion(String.valueOf(schemeId), String.valueOf(version));
            System.out.println("versionModel = " + schemeVersion.getId());
        } catch (Exception e) {
            e.printStackTrace();
            return "删除失败!";
        }
        return Constants.EMPTY;
    }
}