
Merge branch 'master' of chenyj/esb into master

esb committed 9 years ago
commit c167fdacf0

+ 502 - 576
.idea/workspace.xml
File diff suppressed because it is too large


+ 6 - 6
Hos-resource/src/main/java/com/yihu/ehr/crawler/controller/CrawlerController.java

@@ -4,7 +4,6 @@ import com.yihu.ehr.crawler.model.patient.Patient;
 import com.yihu.ehr.crawler.service.CrawlerManager;
 import com.yihu.ehr.crawler.service.CrawlerService;
 import com.yihu.ehr.framework.model.ActionResult;
-import com.yihu.ehr.framework.model.DataGridResult;
 import com.yihu.ehr.framework.model.DetailModelResult;
 import com.yihu.ehr.framework.model.Result;
 import com.yihu.ehr.framework.util.operator.StringUtil;
@@ -33,7 +32,7 @@ public class CrawlerController {
     @ApiOperation(value = "采集病人健康档案", produces = "application/json", notes = "采集病人健康档案")
     public Result crawler(
             @ApiParam(name = "patient", value = "病人索引信息", required = true)
-            @RequestParam(value = "patient", required = true) String patientInfo) {
+            @RequestParam(value = "patient") String patientInfo) {
         Patient patient = CrawlerManager.getInstance().parsePatient(patientInfo);
         if (patient != null) {
@@ -55,7 +54,7 @@ public class CrawlerController {
     @ApiOperation(value = "保存任务编排", produces = "application/json", notes = "保存任务编排")
     public Result saveJobData(
             @ApiParam(name = "job", value = "任务编排信息", required = true)
-            @RequestParam(value = "job", required = true) String jobInfo) {
+            @RequestParam(value = "job") String jobInfo) {
         try {
             crawlerService.saveJobData(jobInfo);
         } catch (Exception e) {
@@ -136,12 +135,13 @@ public class CrawlerController {
      */
     @RequestMapping(value = "list", method = RequestMethod.POST)
     @ApiOperation(value = "分页显示任务编排", produces = "application/json", notes = "分页显示任务编排")
-    public DataGridResult listJobData(
+    public DetailModelResult listJobData(
             @ApiParam(name = "rows", value = "Limit the size of result set. Must be an integer")
             @RequestParam(value = "rows", required = false) Integer rows,
             @ApiParam(name = "page", value = "Start position of result set. Must be an integer")
-            @RequestParam(value = "page", required = false) Integer page) {
-        return null;
+            @RequestParam(value = "page", required = false) Integer page) throws Exception {
+        return crawlerService.getDataSetResult(rows, page);
     }
 }
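
Two details in this file are easy to miss: @RequestParam's required attribute defaults to true in Spring MVC, so removing the explicit "required = true" from the patient and job parameters does not make them optional; and listJobData now delegates to crawlerService.getDataSetResult(rows, page) instead of returning null. Below is a minimal client sketch against the re-typed list endpoint; the host and context path are assumptions, not taken from the repository.

import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
import org.springframework.web.client.RestTemplate;

// Sketch: POSTs the rows/page form parameters to the "list" endpoint,
// which now returns a DetailModelResult serialized as JSON.
public class ListJobDataClient {
    public static void main(String[] args) {
        RestTemplate rest = new RestTemplate();

        HttpHeaders headers = new HttpHeaders();
        headers.setContentType(MediaType.APPLICATION_FORM_URLENCODED);

        MultiValueMap<String, String> form = new LinkedMultiValueMap<String, String>();
        form.add("rows", "10"); // maps to the "limit" argument (page size)
        form.add("page", "1");  // maps to the "offset" argument (1-based page number)

        String json = rest.postForObject(
                "http://localhost:8080/crawler/list", // assumed host and context path
                new HttpEntity<MultiValueMap<String, String>>(form, headers),
                String.class);
        System.out.println(json);
    }
}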

+ 39 - 7
Hos-resource/src/main/java/com/yihu/ehr/crawler/service/CrawlerService.java

@@ -25,6 +25,7 @@ import org.springframework.transaction.annotation.Transactional;
 import javax.annotation.Resource;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Objects;

 @Transactional
 @Service("CrawlerService")
@@ -147,8 +148,10 @@ public class CrawlerService {
             String schemeId = obj.getString("schemeId");
             String version = obj.getString("version");
             AdapterSchemeVersionModel versionModel = adapterSchemeVersionService.getByVersion(version);
+            // Delete the data sets already stored for this scheme version
+            crawlerDatasetDao.deleteCrawlerDatasetList(versionModel.getId());
             JSONArray dataSets = obj.getJSONArray("dataSets");
-            // Save the data sets
+            // Re-insert the data sets
             for (Object o : dataSets) {
                 JSONObject dataSet = JSONObject.fromObject(o);
                 String datasetId = dataSet.getString("datasetId");
@@ -160,12 +163,11 @@ public class CrawlerService {
                 dataSetModel.setDatasetId(Integer.valueOf(datasetId));
                 dataSetModel.setDatasetCode(datasetCode);
                 dataSetModel.setDatasetName(datasetName);
-                crawlerDatasetDao.saveOrUpdateEntity(dataSetModel);
+                crawlerDatasetDao.saveEntity(dataSetModel);
             }
-            if (obj.containsKey("lines") && obj.containsKey("entrances")) {
-                // TODO: 2016/4/27  save the orchestration relation
-//                saveDataSetRelation(schemeId, version, json);
+            // If an orchestration mapping relation exists, save it
+            if (obj.containsKey("relation") && !Objects.equals(obj.getString("relation"), "")) {
+                saveDataSetRelation(version, json);
             }
         }
     }
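
The new delete-then-reinsert flow means a save fully replaces the data sets recorded for a scheme version, which is why the plain saveEntity now suffices where saveOrUpdateEntity was used before. A sketch of the job JSON this method iterates over, with field names taken from the diff and all concrete values invented for illustration:

import net.sf.json.JSONArray;
import net.sf.json.JSONObject;

// Builds one scheme entry of the saveJobData payload. An empty "relation"
// string skips saveDataSetRelation(); a non-empty one triggers it.
public class JobPayloadSketch {
    public static void main(String[] args) {
        JSONObject dataSet = new JSONObject();
        dataSet.put("datasetId", "1");            // hypothetical values
        dataSet.put("datasetCode", "HDSA00_01");
        dataSet.put("datasetName", "demo data set");

        JSONArray dataSets = new JSONArray();
        dataSets.add(dataSet);

        JSONObject scheme = new JSONObject();
        scheme.put("schemeId", "10");
        scheme.put("version", "1.0");
        scheme.put("dataSets", dataSets);
        scheme.put("relation", "");               // empty: relation saving is skipped

        System.out.println(scheme.toString());
    }
}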
@@ -233,8 +235,38 @@ public class CrawlerService {
             e.printStackTrace();
             return "删除失败";
         }
         return Constants.EMPTY;
     }

+    /**
+     * Paged list of the data sets configured for each adapter scheme version.
+     *
+     * @param limit  page size (number of rows per page)
+     * @param offset 1-based page number
+     * @return the grouped data sets wrapped in a DetailModelResult
+     */
+    public DetailModelResult getDataSetResult(Integer limit, Integer offset) {
+        try {
+            StringBuffer stringBuffer = new StringBuffer();
+            String sql = "SELECT a.scheme_id, a.scheme_version_id, GROUP_CONCAT(a.dataset_id SEPARATOR ';') AS datasetId, GROUP_CONCAT(a.dataset_name SEPARATOR ';') AS datasetName" +
+                    " FROM ( SELECT scheme_id, scheme_version_id, CONCAT(dataset_id) AS dataset_id, CONCAT( dataset_code, ',', dataset_name ) AS dataset_name FROM crawler_dataset ) a" +
+                    " GROUP BY a.scheme_id, a.scheme_version_id";
+            stringBuffer.append(sql);
+            // Both paging parameters must be present and positive; otherwise all rows are returned
+            if (limit != null && offset != null && limit > 0 && offset > 0) {
+                stringBuffer.append(" LIMIT " + (offset - 1) * limit + "," + limit);
+            }
+            stringBuffer.append(";");
+            List list = crawlerDatasetDao.queryListBySql(stringBuffer.toString());
+            DetailModelResult detailModelResult = DetailModelResult.success("获取数据集成功");
+            detailModelResult.setDetailModelList(list);
+            return detailModelResult;
+        } catch (Exception e) {
+            e.printStackTrace();
+            return DetailModelResult.error("获取数据集失败");
+        }
+    }
 }
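
For reference, getDataSetResult treats "offset" as a 1-based page number rather than a row offset, so the MySQL LIMIT start index is (page - 1) * rows. A small self-contained sketch of that arithmetic (class and method names are mine; in production the values would better be bound as query parameters than concatenated into the SQL):

// Demonstrates the paging arithmetic used by getDataSetResult().
public class LimitClauseDemo {

    // Mirrors the service: no LIMIT clause unless both values are positive.
    static String limitClause(Integer limit, Integer offset) {
        if (limit == null || offset == null || limit <= 0 || offset <= 0) {
            return "";
        }
        int start = (offset - 1) * limit; // first row of the requested page
        return " LIMIT " + start + "," + limit;
    }

    public static void main(String[] args) {
        System.out.println(limitClause(10, 1));   // " LIMIT 0,10"  -> rows 1-10
        System.out.println(limitClause(10, 3));   // " LIMIT 20,10" -> rows 21-30
        System.out.println(limitClause(null, 1)); // ""             -> unpaged
    }
}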