Browse Source

采集上传逻辑接口修改

zhenglingfeng 8 years ago
parent
commit
2f79209e8b
26 changed files with 1700 additions and 63 deletions
  1. 18 18
      hos-camel/hos-camel.iml
  2. 15 0
      hos-camel/src/main/java/collect/processor/CollectProcessor0.java
  3. 15 0
      hos-camel/src/main/java/collect/route/CollectQuartzRoute.java
  4. 15 0
      hos-camel/src/main/java/crawler/processor/CrawlerProcessor0.java
  5. 3 3
      hos-camel/src/main/java/crawler/processor/Processor0.java
  6. 15 0
      hos-camel/src/main/java/crawler/route/CrawlerQuartzRoute.java
  7. 25 0
      hos-rest/src/main/java/com/yihu/hos/rest/common/dao/DatacollectDao.java
  8. 6 7
      hos-rest/src/main/java/com/yihu/hos/rest/common/dao/DatacollectLogDao.java
  9. 1 0
      hos-rest/src/main/java/com/yihu/hos/rest/common/filter/HibernateFilter.java
  10. 14 17
      hos-rest/src/main/java/com/yihu/hos/rest/controllers/CrawlerController.java
  11. 45 0
      hos-rest/src/main/java/com/yihu/hos/rest/controllers/DataCollectController.java
  12. 1 1
      hos-rest/src/main/java/com/yihu/hos/rest/controllers/DataPushController.java
  13. 18 0
      hos-rest/src/main/java/com/yihu/hos/rest/models/rs/RsJobConfig.java
  14. 43 7
      hos-rest/src/main/java/com/yihu/hos/rest/services/crawler/CrawlerManager.java
  15. 1 1
      hos-rest/src/main/java/com/yihu/hos/rest/services/crawler/DatacollectManager.java
  16. 1166 0
      hos-rest/src/main/java/com/yihu/hos/rest/services/crawler/DatacollectService.java
  17. 1 1
      hos-rest/src/main/java/com/yihu/hos/rest/services/standard/StdService.java
  18. 0 1
      hos-rest/src/main/java/com/yihu/hos/rest/services/standard/adapter/AdapterDatasetService.java
  19. 85 0
      hos-rest/src/main/resources/hibernate/RsJobConfig.hbm.xml
  20. 56 0
      hos-rest/src/main/resources/hibernate/RsJobDataset.hbm.xml
  21. 44 0
      hos-rest/src/main/resources/hibernate/RsJobLog.hbm.xml
  22. 104 0
      hos-rest/src/main/resources/hibernate/RsJobLogDetail.hbm.xml
  23. 3 3
      hos-rest/src/main/resources/spring/applicationContext.xml
  24. 4 2
      hos-web-framework/src/main/java/com/yihu/hos/web/framework/dao/SQLGeneralDAO.java
  25. 1 1
      src/main/webapp/WEB-INF/ehr/jsp/common/indexJs.jsp
  26. 1 1
      src/main/webapp/WEB-INF/ehr/jsp/system/datasource/configSourcesJs.jsp

+ 18 - 18
hos-camel/hos-camel.iml

@ -24,38 +24,31 @@
    <orderEntry type="library" name="Maven: com.sun.xml.bind:jaxb-impl:2.2.11" level="project" />
    <orderEntry type="library" name="Maven: org.apache.camel:camel-spring-boot:2.17.1" level="project" />
    <orderEntry type="library" name="Maven: org.apache.camel:camel-spring:2.17.1" level="project" />
    <orderEntry type="library" name="Maven: org.springframework:spring-core:4.3.3.RELEASE" level="project" />
    <orderEntry type="library" name="Maven: org.springframework:spring-aop:4.3.3.RELEASE" level="project" />
    <orderEntry type="library" name="Maven: org.springframework:spring-beans:4.3.3.RELEASE" level="project" />
    <orderEntry type="library" name="Maven: org.springframework:spring-context:4.3.3.RELEASE" level="project" />
    <orderEntry type="library" name="Maven: org.springframework:spring-expression:4.3.3.RELEASE" level="project" />
    <orderEntry type="library" name="Maven: org.springframework:spring-tx:4.3.3.RELEASE" level="project" />
    <orderEntry type="library" name="Maven: org.apache.camel:camel-http4:2.17.1" level="project" />
    <orderEntry type="library" name="Maven: org.apache.camel:camel-http-common:2.17.1" level="project" />
    <orderEntry type="library" name="Maven: javax.servlet:javax.servlet-api:3.1.0" level="project" />
    <orderEntry type="library" name="Maven: org.apache.httpcomponents:httpclient:4.5.2" level="project" />
    <orderEntry type="library" name="Maven: org.apache.httpcomponents:httpcore:4.4.5" level="project" />
    <orderEntry type="library" name="Maven: commons-codec:commons-codec:1.10" level="project" />
    <orderEntry type="library" name="Maven: org.apache.camel:camel-jms:2.17.1" level="project" />
    <orderEntry type="library" name="Maven: org.springframework:spring-jms:4.3.3.RELEASE" level="project" />
    <orderEntry type="library" name="Maven: org.springframework:spring-messaging:4.3.3.RELEASE" level="project" />
    <orderEntry type="library" name="Maven: org.springframework:spring-context:4.3.3.RELEASE" level="project" />
    <orderEntry type="library" name="Maven: org.springframework:spring-tx:4.3.3.RELEASE" level="project" />
    <orderEntry type="library" name="Maven: org.springframework:spring-beans:4.3.3.RELEASE" level="project" />
    <orderEntry type="library" name="Maven: org.apache.camel:camel-cxf:2.17.1" level="project" />
    <orderEntry type="library" name="Maven: org.apache.camel:camel-cxf-transport:2.17.1" level="project" />
    <orderEntry type="library" name="Maven: org.apache.cxf:cxf-rt-bindings-soap:3.1.5" level="project" />
    <orderEntry type="library" name="Maven: org.apache.cxf:cxf-rt-frontend-jaxrs:3.1.5" level="project" />
    <orderEntry type="library" name="Maven: org.apache.cxf:cxf-core:3.1.5" level="project" />
    <orderEntry type="library" name="Maven: org.codehaus.woodstox:woodstox-core-asl:4.4.1" level="project" />
    <orderEntry type="library" name="Maven: org.codehaus.woodstox:stax2-api:3.1.4" level="project" />
    <orderEntry type="library" name="Maven: org.apache.ws.xmlschema:xmlschema-core:2.2.1" level="project" />
    <orderEntry type="library" name="Maven: org.apache.cxf:cxf-rt-wsdl:3.1.5" level="project" />
    <orderEntry type="library" name="Maven: wsdl4j:wsdl4j:1.6.3" level="project" />
    <orderEntry type="library" name="Maven: org.ow2.asm:asm:5.0.4" level="project" />
    <orderEntry type="library" name="Maven: org.apache.cxf:cxf-rt-databinding-jaxb:3.1.5" level="project" />
    <orderEntry type="library" name="Maven: org.apache.cxf:cxf-rt-frontend-jaxrs:3.1.5" level="project" />
    <orderEntry type="library" name="Maven: javax.ws.rs:javax.ws.rs-api:2.0.1" level="project" />
    <orderEntry type="library" name="Maven: javax.annotation:javax.annotation-api:1.2" level="project" />
    <orderEntry type="library" name="Maven: org.apache.cxf:cxf-rt-transports-http:3.1.5" level="project" />
    <orderEntry type="library" name="Maven: org.apache.cxf:cxf-rt-frontend-jaxws:3.1.5" level="project" />
    <orderEntry type="library" name="Maven: xml-resolver:xml-resolver:1.2" level="project" />
    <orderEntry type="library" name="Maven: org.ow2.asm:asm:5.0.4" level="project" />
    <orderEntry type="library" name="Maven: org.apache.cxf:cxf-rt-bindings-xml:3.1.5" level="project" />
    <orderEntry type="library" name="Maven: org.apache.cxf:cxf-rt-frontend-simple:3.1.5" level="project" />
    <orderEntry type="library" name="Maven: org.apache.cxf:cxf-rt-ws-addr:3.1.5" level="project" />
@ -65,6 +58,11 @@
    <orderEntry type="library" name="Maven: org.apache.cxf:cxf-rt-rs-client:3.1.5" level="project" />
    <orderEntry type="library" name="Maven: net.oauth.core:oauth-provider:20100527" level="project" />
    <orderEntry type="library" name="Maven: net.oauth.core:oauth:20100527" level="project" />
    <orderEntry type="library" name="Maven: org.springframework:spring-core:4.3.3.RELEASE" level="project" />
    <orderEntry type="library" name="Maven: org.apache.cxf:cxf-rt-bindings-soap:3.1.5" level="project" />
    <orderEntry type="library" name="Maven: org.apache.cxf:cxf-rt-wsdl:3.1.5" level="project" />
    <orderEntry type="library" name="Maven: wsdl4j:wsdl4j:1.6.3" level="project" />
    <orderEntry type="library" name="Maven: org.apache.cxf:cxf-rt-databinding-jaxb:3.1.5" level="project" />
    <orderEntry type="library" name="Maven: org.apache.camel:camel-quartz:2.17.1" level="project" />
    <orderEntry type="library" name="Maven: org.quartz-scheduler:quartz:1.8.6" level="project" />
    <orderEntry type="library" name="Maven: org.apache.camel:camel-quartz2:2.17.1" level="project" />
@ -77,12 +75,12 @@
    <orderEntry type="library" name="Maven: org.apache.camel:camel-jetty9:2.17.1" level="project" />
    <orderEntry type="library" name="Maven: org.eclipse.jetty:jetty-server:9.3.11.v20160721" level="project" />
    <orderEntry type="library" name="Maven: org.eclipse.jetty:jetty-http:9.3.11.v20160721" level="project" />
    <orderEntry type="library" name="Maven: org.eclipse.jetty:jetty-util:9.3.11.v20160721" level="project" />
    <orderEntry type="library" name="Maven: org.eclipse.jetty:jetty-io:9.3.11.v20160721" level="project" />
    <orderEntry type="library" name="Maven: org.eclipse.jetty:jetty-servlet:9.3.11.v20160721" level="project" />
    <orderEntry type="library" name="Maven: org.eclipse.jetty:jetty-security:9.3.11.v20160721" level="project" />
    <orderEntry type="library" name="Maven: org.eclipse.jetty:jetty-servlets:9.3.11.v20160721" level="project" />
    <orderEntry type="library" name="Maven: org.eclipse.jetty:jetty-continuation:9.3.11.v20160721" level="project" />
    <orderEntry type="library" name="Maven: org.eclipse.jetty:jetty-util:9.3.11.v20160721" level="project" />
    <orderEntry type="library" name="Maven: org.eclipse.jetty:jetty-client:9.3.11.v20160721" level="project" />
    <orderEntry type="library" name="Maven: org.eclipse.jetty:jetty-jmx:9.3.11.v20160721" level="project" />
    <orderEntry type="library" name="Maven: org.apache.camel:camel-jetty-common:2.17.1" level="project" />
@ -104,7 +102,6 @@
    <orderEntry type="library" name="Maven: com.fasterxml:classmate:1.3.1" level="project" />
    <orderEntry type="library" name="Maven: com.fasterxml.jackson.core:jackson-databind:2.8.3" level="project" />
    <orderEntry type="library" name="Maven: com.fasterxml.jackson.core:jackson-annotations:2.8.3" level="project" />
    <orderEntry type="library" name="Maven: com.fasterxml.jackson.core:jackson-core:2.8.3" level="project" />
    <orderEntry type="library" name="Maven: org.springframework:spring-web:4.3.3.RELEASE" level="project" />
    <orderEntry type="library" name="Maven: org.springframework:spring-webmvc:4.3.3.RELEASE" level="project" />
    <orderEntry type="library" name="Maven: org.springframework.boot:spring-boot-starter-data-mongodb:1.4.1.RELEASE" level="project" />
@ -123,24 +120,27 @@
    <orderEntry type="library" name="Maven: commons-io:commons-io:2.2" level="project" />
    <orderEntry type="library" name="Maven: org.apache.commons:commons-lang3:3.3.2" level="project" />
    <orderEntry type="library" name="Maven: commons-net:commons-net:3.3" level="project" />
    <orderEntry type="library" name="Maven: commons-codec:commons-codec:1.10" level="project" />
    <orderEntry type="library" name="Maven: com.squareup.okhttp3:okhttp:3.4.1" level="project" />
    <orderEntry type="library" name="Maven: com.squareup.okio:okio:1.9.0" level="project" />
    <orderEntry type="library" name="Maven: com.belerweb:pinyin4j:2.5.0" level="project" />
    <orderEntry type="library" name="Maven: com.fasterxml.jackson.core:jackson-core:2.8.3" level="project" />
    <orderEntry type="library" name="Maven: log4j:log4j:1.2.17" level="project" />
    <orderEntry type="library" name="Maven: org.apache.logging.log4j:log4j-api:2.6.2" level="project" />
    <orderEntry type="library" name="Maven: org.apache.logging.log4j:log4j-core:2.6.2" level="project" />
    <orderEntry type="library" name="Maven: org.apache.httpcomponents:httpmime:4.5.2" level="project" />
    <orderEntry type="library" name="Maven: org.apache.httpcomponents:httpcore:4.4.5" level="project" />
    <orderEntry type="library" name="Maven: io.springfox:springfox-swagger2:2.4.0" level="project" />
    <orderEntry type="library" name="Maven: io.swagger:swagger-annotations:1.5.6" level="project" />
    <orderEntry type="library" name="Maven: io.swagger:swagger-models:1.5.6" level="project" />
    <orderEntry type="library" name="Maven: io.springfox:springfox-spi:2.4.0" level="project" />
    <orderEntry type="library" name="Maven: io.springfox:springfox-core:2.4.0" level="project" />
    <orderEntry type="library" name="Maven: com.google.guava:guava:18.0" level="project" />
    <orderEntry type="library" name="Maven: org.springframework.plugin:spring-plugin-core:1.2.0.RELEASE" level="project" />
    <orderEntry type="library" name="Maven: org.springframework.plugin:spring-plugin-metadata:1.2.0.RELEASE" level="project" />
    <orderEntry type="library" name="Maven: io.springfox:springfox-schema:2.4.0" level="project" />
    <orderEntry type="library" name="Maven: io.springfox:springfox-swagger-common:2.4.0" level="project" />
    <orderEntry type="library" name="Maven: io.springfox:springfox-spring-web:2.4.0" level="project" />
    <orderEntry type="library" name="Maven: com.google.guava:guava:18.0" level="project" />
    <orderEntry type="library" name="Maven: org.springframework.plugin:spring-plugin-core:1.2.0.RELEASE" level="project" />
    <orderEntry type="library" name="Maven: org.springframework.plugin:spring-plugin-metadata:1.2.0.RELEASE" level="project" />
    <orderEntry type="library" name="Maven: io.springfox:springfox-swagger-ui:2.4.0" level="project" />
    <orderEntry type="library" scope="TEST" name="Maven: org.springframework.boot:spring-boot-starter-test:1.4.1.RELEASE" level="project" />
    <orderEntry type="library" scope="TEST" name="Maven: org.springframework.boot:spring-boot-test:1.4.1.RELEASE" level="project" />
@ -149,10 +149,10 @@
    <orderEntry type="library" scope="TEST" name="Maven: net.minidev:json-smart:2.2.1" level="project" />
    <orderEntry type="library" scope="TEST" name="Maven: net.minidev:accessors-smart:1.1" level="project" />
    <orderEntry type="library" scope="TEST" name="Maven: junit:junit:4.12" level="project" />
    <orderEntry type="library" scope="TEST" name="Maven: org.hamcrest:hamcrest-core:1.3" level="project" />
    <orderEntry type="library" scope="TEST" name="Maven: org.assertj:assertj-core:2.5.0" level="project" />
    <orderEntry type="library" scope="TEST" name="Maven: org.mockito:mockito-core:1.10.19" level="project" />
    <orderEntry type="library" scope="TEST" name="Maven: org.objenesis:objenesis:2.1" level="project" />
    <orderEntry type="library" scope="TEST" name="Maven: org.hamcrest:hamcrest-core:1.3" level="project" />
    <orderEntry type="library" scope="TEST" name="Maven: org.hamcrest:hamcrest-library:1.3" level="project" />
    <orderEntry type="library" scope="TEST" name="Maven: org.skyscreamer:jsonassert:1.3.0" level="project" />
    <orderEntry type="library" scope="TEST" name="Maven: org.json:json:20140107" level="project" />

+ 15 - 0
hos-camel/src/main/java/collect/processor/CollectProcessor0.java

@ -0,0 +1,15 @@
package collect.processor;
import org.apache.camel.Exchange;
import org.apache.camel.Message;
import org.apache.camel.Processor;
/**
 * Camel processor that injects the collect job id as the HTTP query string,
 * so the downstream http4 endpoint receives "?jobId=...".
 * Created by Zdm on 2016/7/13.
 */
public class CollectProcessor0 implements Processor {
    /** Hard-coded job id for the scheduled collect run. TODO: make configurable. */
    private static final String JOB_ID = "5ad5c11655d443c30155d477a6b10000";

    public void process(Exchange exchange) throws Exception {
        // NOTE(review): exchange.getOut() creates a fresh out message, so the
        // incoming body/headers are not propagated — confirm that is intended.
        Message outMessage = exchange.getOut();
        outMessage.setHeader(Exchange.HTTP_QUERY, "jobId=" + JOB_ID);
    }
}

+ 15 - 0
hos-camel/src/main/java/collect/route/CollectQuartzRoute.java

@ -0,0 +1,15 @@
package collect.route;
import collect.processor.CollectProcessor0;
import org.apache.camel.Exchange;
import org.apache.camel.builder.RouteBuilder;
/**
 * Quartz-triggered Camel route: on a cron schedule it sets the jobId query
 * string (via CollectProcessor0), then POSTs to the local /crawler/collect
 * REST endpoint and echoes the response to stdout.
 *
 * @author HZY
 * @version 1.0
 * Created at 2016/11/17.
 */
public class CollectQuartzRoute extends RouteBuilder {
    public void configure() throws Exception {
        // cron "0/3 * * * * ?" fires every 3 seconds.
        // NOTE(review): CrawlerQuartzRoute in this same commit uses the identical
        // quartz endpoint (myGroup/myTimerName) and the identical routeId
        // ("routeId"); both must be unique if the two routes ever share a
        // CamelContext — confirm they are never deployed together.
        from("quartz://myGroup/myTimerName?cron=0/3 * * * * ?").routeId("routeId").process(new CollectProcessor0())
                .setHeader(Exchange.HTTP_METHOD, constant("POST")).to("http4://localhost:8088/crawler/collect").to("stream:out");
    }
}

+ 15 - 0
hos-camel/src/main/java/crawler/processor/CrawlerProcessor0.java

@ -0,0 +1,15 @@
package crawler.processor;
import org.apache.camel.Exchange;
import org.apache.camel.Message;
import org.apache.camel.Processor;
/**
 * Camel processor that injects the crawler job id as the HTTP query string,
 * so the downstream http4 endpoint receives "?jobId=...".
 * Created by Zdm on 2016/7/13.
 */
public class CrawlerProcessor0 implements Processor {
    /** Hard-coded job id for the scheduled crawler run. TODO: make configurable. */
    private static final String JOB_ID = "5ad5c11655d443c30155d477a6b10000";

    public void process(Exchange exchange) throws Exception {
        // NOTE(review): exchange.getOut() creates a fresh out message, so the
        // incoming body/headers are not propagated — confirm that is intended.
        Message outMessage = exchange.getOut();
        outMessage.setHeader(Exchange.HTTP_QUERY, "jobId=" + JOB_ID);
    }
}

+ 3 - 3
hos-camel/src/main/java/crawler/processor/Processor0.java

@ -5,14 +5,14 @@ import org.apache.camel.Message;
import org.apache.camel.Processor;
import org.apache.camel.http.common.HttpMessage;
import java.util.Map;
/**
 * Camel processor that rewrites the outgoing HTTP query string.
 * Created by Zdm on 2016/7/13.
 */
public class Processor0 implements Processor {
    public void process(Exchange exchange) throws Exception {
        Message outMessage = exchange.getOut();
        HttpMessage inMessage = (HttpMessage) exchange.getIn();
        // NOTE(review): the next two lines both assign HTTP_QUERY — the second
        // (hard-coded jobId) immediately overwrites the pass-through of the
        // caller's query string, making the first assignment dead code.
        // This looks like a diff artifact of the commit; confirm which of the
        // two assignments should survive.
        outMessage.setHeader(Exchange.HTTP_QUERY, inMessage.getRequest().getQueryString());
        outMessage.setHeader(Exchange.HTTP_QUERY, "jobId=5ad5c11655d443c30155d477a6b10000");
    }
}

+ 15 - 0
hos-camel/src/main/java/crawler/route/CrawlerQuartzRoute.java

@ -0,0 +1,15 @@
package crawler.route;
import collect.processor.CollectProcessor0;
import org.apache.camel.Exchange;
import org.apache.camel.builder.RouteBuilder;
/**
 * @author HZY * @vsrsion 1.0 * Created at 2016/11/17.
 */
public class CrawlerQuartzRoute extends RouteBuilder {
    public void configure() throws Exception {
        from("quartz://myGroup/myTimerName?cron=0/3 * * * * ?").routeId("routeId").process(new CollectProcessor0())
                .setHeader(Exchange.HTTP_METHOD, constant("POST")).to("http4://localhost:8088/crawler/patientList").to("stream:out");
    }
}

+ 25 - 0
hos-rest/src/main/java/com/yihu/hos/rest/common/dao/DatacollectDao.java

@ -1,6 +1,7 @@
package com.yihu.hos.rest.common.dao;
import com.yihu.hos.core.datatype.StringUtil;
import com.yihu.hos.rest.models.rs.DtoJobDataset;
import com.yihu.hos.rest.models.rs.RsJobDataset;
import com.yihu.hos.web.framework.constrant.DateConvert;
import com.yihu.hos.web.framework.dao.SQLGeneralDAO;
@ -222,4 +223,28 @@ public class DatacollectDao extends SQLGeneralDAO {
        return super.queryListBySql(sql,SimpleChartItem.class);
    }
    /**
     * 通过任务ID获取相关数据集
     * Resolves the adapter standard version configured for the job, then
     * returns the job's datasets joined (LEFT JOIN — datasource columns may be
     * null) with their datasource and organization configuration, ordered by
     * dataset sort order.
     *
     * @param jobId primary key of rs_job_config
     * @return datasets for the job with datasource/org info attached
     */
    public List<DtoJobDataset> getDatacollectDataset(String jobId) throws Exception
    {
        // Values are concatenated into raw SQL below, so escape single quotes
        // to prevent SQL injection. TODO: switch to parameterized queries if
        // SQLGeneralDAO exposes them.
        String safeJobId = jobId == null ? jobId : jobId.replace("'", "''");
        //通过jobId获取机构版本号 (resolve the org standard version for this job)
        String sqlVersion ="select a.adapter_std_version\n" +
                "from rs_job_config c,(SELECT s.adapter_std_version,v.version from adapter_scheme s,adapter_scheme_version v where s.id=v.scheme_id) a\n" +
                "where a.version = c.scheme_version\n" +
                "and c.id='"+safeJobId+"'";
        String version = super.scalarBySql(sqlVersion,String.class);
        // version originates from the database, but escape defensively anyway.
        String safeVersion = version == null ? version : version.replace("'", "''");
        String sql = "select d.*,\n" +
                "t.datasource_id,t.datasource_name,t.config,t.type,t.org_code\n" +
                "from rs_job_dataset d\n" +
                "LEFT JOIN (select s.dataset_id,s.datasource_id,ss.name as datasource_name,ss.config,ss.type,ss.org_code \n" +
                "\tfrom rs_datasource_dataset s,(select ds.id,ds.name,ds.config,ds.type,o.code as org_code from system_datasource ds,system_organization o where ds.org_id=o.id) ss where s.datasource_id=ss.id and s.Std_version='"+safeVersion+"') t \n" +
                "on d.job_dataset_id = t.dataset_id \n" +
                "where d.job_id='"+safeJobId+"'\n" +
                "order by d.sort";
        return super.queryListBySql(sql,DtoJobDataset.class);
    }
}

+ 6 - 7
hos-rest/src/main/java/com/yihu/hos/rest/common/dao/DatacollectLogDao.java

@ -1,16 +1,11 @@
package com.yihu.hos.rest.common.dao;
import com.yihu.hos.core.datatype.StringUtil;
import com.yihu.hos.rest.models.rs.RsDatapushLog;
import com.yihu.hos.web.framework.constrant.DateConvert;
import com.yihu.hos.web.framework.dao.SQLGeneralDAO;
import com.yihu.hos.web.framework.model.DataGridResult;
import org.springframework.stereotype.Repository;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.Map;
/**
 * Created by hzp on 2016/1/11.
@ -23,8 +18,7 @@ public class DatacollectLogDao extends SQLGeneralDAO {
    /**
     * 推数据新增日志
     */
    public void addDataPushLog(String type,String success,String content) throws Exception
    {
    public void addDataPushLog(String type,String success,String content) throws Exception {
        RsDatapushLog log = new RsDatapushLog();
        log.setType(type);
        log.setSuccess(success);
@ -32,4 +26,9 @@ public class DatacollectLogDao extends SQLGeneralDAO {
        log.setDatetime(new Date());
        this.saveEntity(log);
    }
    /**
     * Persists the last extracted key value (job_dataset_keyvalue) for a job
     * dataset row, so the next incremental run can resume from it.
     *
     * @param id                 rs_job_dataset primary key
     * @param jobDatasetKeyvalue new incremental key value to store
     */
    public void updateJobDatasetKeyvalue(String id,String jobDatasetKeyvalue) throws Exception {
        // Escape single quotes — both values are concatenated into raw SQL.
        // TODO: use a parameterized update if SQLGeneralDAO supports it.
        String safeId = id == null ? id : id.replace("'", "''");
        String safeValue = jobDatasetKeyvalue == null ? jobDatasetKeyvalue : jobDatasetKeyvalue.replace("'", "''");
        String sql = "update rs_job_dataset set job_dataset_keyvalue='"+safeValue+"' where id='"+safeId+"'";
        super.execute(sql);
    }
}

+ 1 - 0
hos-rest/src/main/java/com/yihu/hos/rest/common/filter/HibernateFilter.java

@ -11,4 +11,5 @@ import javax.servlet.annotation.WebFilter;
 */
@WebFilter(filterName="SpringOpenSessionInViewFilter",urlPatterns="/*")
public class HibernateFilter extends OpenSessionInViewFilter {
    // Intentionally empty: all behavior is inherited from Spring's
    // OpenSessionInViewFilter, which binds a Hibernate Session to the request
    // thread for its full duration. The @WebFilter annotation registers it
    // for every URL ("/*") under the name "SpringOpenSessionInViewFilter".
}

+ 14 - 17
hos-rest/src/main/java/com/yihu/hos/rest/controllers/CrawlerController.java

@ -87,18 +87,17 @@ public class CrawlerController {
            @ApiParam(name = "patient", value = "病人索引信息", required = true)
            @RequestParam(value = "patient") String patientInfo) {
//        Patient patient = crawlerManager.parsePatient(patientInfo);
//        if (patient != null) {
//            Boolean result = crawlerManager.collectProcess(patient);
//            if (result) {
//                return Result.success("采集上传成功");
//            } else {
//                return Result.error("采集上传失败");
//            }
//        } else {
//            return Result.error("参数转换病人实体失败");
//        }
        return Result.success("采集上传成功");
        Patient patient = crawlerManager.parsePatient(patientInfo);
        if (patient != null) {
            Boolean result = crawlerManager.collectProcess(patient);
            if (result) {
                return Result.success("采集上传成功");
            } else {
                return Result.error("采集上传失败");
            }
        } else {
            return Result.error("参数转换病人实体失败");
        }
    }
    @RequestMapping(value = "patient/flow", method = RequestMethod.POST)
@ -125,12 +124,10 @@ public class CrawlerController {
    @ApiOperation(value = "采集病人健康档案", produces = "application/json", notes = "采集病人健康档案")
    @ResponseBody
    public Result crawlerPatientList(
            @ApiParam(name = "beginDate", value = "开始时间", required = true)
            @RequestParam(value = "beginDate") Date beginDate,
            @ApiParam(name = "endDate", value = "开始时间", required = true)
            @RequestParam(value = "endDate") Date endDate) {
            @ApiParam(name = "jobId", value = "任务jobId", required = true)
            @RequestParam(value = "jobId") String jobId) {
        try {
            return crawlerManager.dataCrawler(beginDate, endDate);
            return crawlerManager.dataCrawlerByJobId(jobId);
        } catch (Exception e) {
            return Result.error("采集上传失败");
        }

+ 45 - 0
hos-rest/src/main/java/com/yihu/hos/rest/controllers/DataCollectController.java

@ -0,0 +1,45 @@
package com.yihu.hos.rest.controllers;
import com.yihu.hos.rest.services.crawler.DatacollectService;
import com.yihu.hos.web.framework.model.Result;
import com.yihu.hos.web.framework.util.controller.BaseController;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import javax.annotation.Resource;
/**
 * 数据采集配置页面 — REST entry point that triggers a data-collect run for a
 * configured job. (Removed the unused Autowired/Resources imports from the
 * original.)
 * Created by hzp on 2015/8/12.
 */
@RequestMapping("/crawler")
@Controller("dataCollectController")
public class DataCollectController extends BaseController {
    @Resource(name = "DatacollectService")
    DatacollectService datacollectService;

    /**
     * POST /crawler/collect — runs the collect job identified by jobId.
     *
     * @param jobId rs_job_config primary key of the job to execute
     * @return success Result when the service reports true, error otherwise
     */
    @RequestMapping(value = "collect", method = RequestMethod.POST)
    @ApiOperation(value = "采集医院数据", produces = "application/json", notes = "采集医院数据")
    @ResponseBody
    public Result collect(
            @ApiParam(name = "jobId", value = "任务jobId", required = true)
            @RequestParam(value = "jobId") String jobId) {
        try {
            if (datacollectService.collectByJobId(jobId)) {
                return Result.success("采集上传成功");
            } else {
                return Result.error("采集上传失败");
            }
        } catch (Exception e) {
            // NOTE(review): the exception is swallowed — the failure cause is
            // lost to both caller and logs; consider logging e here.
            return Result.error("采集上传失败");
        }
    }
}

+ 1 - 1
hos-rest/src/main/java/com/yihu/hos/rest/controllers/DataPushController.java

@ -24,7 +24,7 @@ public class DataPushController extends BaseController {
    /*
    全流程推数据
     */
    @RequestMapping("datapush")
    @RequestMapping("push")
    @ResponseBody
    public Result datapush(String dataset,String data,String orgCode) {
        try {

+ 18 - 0
hos-rest/src/main/java/com/yihu/hos/rest/models/rs/RsJobConfig.java

@ -23,6 +23,8 @@ public class RsJobConfig implements java.io.Serializable {
	private Date repeatStartTime;
	private Date repeatEndTime;
	private Integer delayTime;
	private Integer flowId;
	private Integer flowTempId;
	// Constructors
@ -30,6 +32,22 @@ public class RsJobConfig implements java.io.Serializable {
	public RsJobConfig() {
	}
	// Accessors for the flow fields added in this change. Presumably flowId
	// references a concrete flow instance and flowTempId a flow template —
	// TODO confirm against the RsJobConfig.hbm.xml mapping added in this commit.
	public Integer getFlowTempId() {
		return flowTempId;
	}
	public void setFlowTempId(Integer flowTempId) {
		this.flowTempId = flowTempId;
	}
	public Integer getFlowId() {
		return flowId;
	}
	public void setFlowId(Integer flowId) {
		this.flowId = flowId;
	}
	public String getJobContent() {
		return jobContent;
	}

+ 43 - 7
hos-rest/src/main/java/com/yihu/hos/rest/services/crawler/CrawlerManager.java

@ -3,23 +3,28 @@ package com.yihu.hos.rest.services.crawler;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.yihu.hos.core.datatype.CollectionUtil;
import com.yihu.hos.core.datatype.DateUtil;
import com.yihu.hos.core.datatype.StringUtil;
import com.yihu.hos.core.log.Logger;
import com.yihu.hos.core.log.LoggerFactory;
import com.yihu.hos.rest.common.dao.DatacollectDao;
import com.yihu.hos.rest.models.crawler.adapter.AdapterDataSet;
import com.yihu.hos.rest.models.crawler.config.SysConfig;
import com.yihu.hos.rest.models.crawler.patient.Patient;
import com.yihu.hos.rest.models.rs.RsJobConfig;
import com.yihu.hos.rest.models.standard.adapter.AdapterDatasetModel;
import com.yihu.hos.rest.models.standard.adapter.AdapterDictEntryModel;
import com.yihu.hos.rest.models.standard.adapter.AdapterSchemeVersionModel;
import com.yihu.hos.rest.models.standard.bo.AdapterVersion;
import com.yihu.hos.rest.services.standard.adapter.AdapterDatasetService;
import com.yihu.hos.rest.services.standard.adapter.AdapterSchemeVersionService;
import com.yihu.hos.web.framework.model.DataGridResult;
import com.yihu.hos.web.framework.model.DictItem;
import com.yihu.hos.web.framework.model.Result;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import javax.annotation.Resource;
import java.util.*;
@Service("CrawlerManager")
@ -27,10 +32,13 @@ public class CrawlerManager {
    public static final String BEAN_ID = "CrawlerManager";
    private static String unstructured = "unstructured";
    private static Logger logger = LoggerFactory.getLogger(CrawlerManager.class);
    @Autowired
    @Resource
    private AdapterDatasetService adapterDatasetService;
    @Autowired
    AdapterSchemeVersionService adapterSchemeVersionService;
    @Resource
    private AdapterSchemeVersionService adapterSchemeVersionService;
    @Resource
    private DatacollectManager datacollect;
    private static DataCollectDispatcher dispatch = DataCollectDispatcher.getInstance();
    private static SysConfig sysConfig=SysConfig.getInstance();
    private static Map<String, Map<String, AdapterDataSet>> adapterDataSetVersionMap = new HashMap<>();
@ -49,6 +57,34 @@ public class CrawlerManager {
    }
    /**
     * Runs one crawl for the job identified by jobId: loads the job config,
     * derives the [begin, end] time window, loads the job's dataset list and
     * scheme version into this manager's state, delegates to dataCrawler, and
     * finally advances the repeat window and persists the job config.
     *
     * @param jobId rs_job_config primary key
     * @return the Result produced by dataCrawler (or an empty success when the
     *         window is empty for a repeating job)
     * @throws Exception propagated from the DAO/manager calls
     */
    public Result dataCrawlerByJobId(String jobId) throws Exception {
        RsJobConfig rsJobConfig = datacollect.getJobById(jobId);
        // Correlation id so start/end log lines of one run can be matched.
        String random = UUID.randomUUID().toString();
        logger.info("档案采集上传开始,流水号:" + random + ",jobId:"+jobId);
        Date begin = rsJobConfig.getRepeatStartTime();
        Date end = rsJobConfig.getRepeatEndTime();
        // Job type "0" appears to be a one-off job; all other types are
        // treated as repeating — TODO confirm the type-code semantics.
        if (!rsJobConfig.getJobType().equals("0")) {
            // Adjust the cut-off: current time minus the configured delay offset.
            end = DateUtil.addDate(-rsJobConfig.getDelayTime(), DateUtil.getSysDateTime());
            if ((end.getTime() - begin.getTime()) <= 0) {
                return Result.success(""); // Window empty (end <= begin): skip this run.
            }
        }
        // Mutates shared manager state consumed by dataCrawler below —
        // NOTE(review): not safe if two jobs run concurrently on one instance.
        DataGridResult dataGridResult = datacollect.getJobDatasetByJobId(jobId);
        this.datasetList = dataGridResult.getDetailModelList();
        this.schemeVersion = rsJobConfig.getSchemeVersion();
        Result result = dataCrawler(begin, end);
        // Slide the window forward for the next repeat run.
        if (!rsJobConfig.getJobType().equals("0")) {
            rsJobConfig.setRepeatStartTime(end);
            // NOTE(review): formatting getSysDateTime().toString() through
            // formatYMDToYMDHMS looks fragile (depends on Date.toString()
            // format) — confirm this produces the intended end time.
            rsJobConfig.setRepeatEndTime(DateUtil.formatYMDToYMDHMS(DateUtil.getSysDateTime().toString()));
        }
        datacollect.updateJob(rsJobConfig);
        logger.info("档案采集上传结束,流水号:" + random + ",jobId:" + jobId + ",message:" + result.getMessage());
        return result;
    }
    public Result dataCrawler(Date beginDate, Date endDate) {
        Map<String, Object> condition = new HashMap<>();
        condition.put("beginDate", beginDate);
@ -95,10 +131,10 @@ public class CrawlerManager {
            }
            //getRemoteVersion
//            if (!dispatch.getRemoteVersion(patient.getOrgCode())) {
//                logger.error("远程版本获取失败");
//                return false;
//            }
            if (!dispatch.getRemoteVersion(patient.getOrgCode())) {
                logger.error("远程版本获取失败");
                return false;
            }
            if(StringUtil.isEmpty(sysConfig.getVersionMap().get(patient.getOrgCode()))){
                logger.error("版本获取失败");

+ 1 - 1
hos-rest/src/main/java/com/yihu/hos/rest/services/crawler/DatacollectManager.java

@ -20,7 +20,7 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Map;
@Service
@Service("DatacollectManager")
public class DatacollectManager {
    @Resource(name = "DatacollectDao")

+ 1166 - 0
hos-rest/src/main/java/com/yihu/hos/rest/services/crawler/DatacollectService.java

@ -0,0 +1,1166 @@
package com.yihu.hos.rest.services.crawler;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.yihu.ehr.dbhelper.common.QueryCondition;
import com.yihu.ehr.dbhelper.common.enums.DBType;
import com.yihu.ehr.dbhelper.common.sqlparser.ParserMysql;
import com.yihu.ehr.dbhelper.common.sqlparser.ParserOracle;
import com.yihu.ehr.dbhelper.common.sqlparser.ParserSql;
import com.yihu.ehr.dbhelper.common.sqlparser.ParserSqlserver;
import com.yihu.ehr.dbhelper.jdbc.DBHelper;
import com.yihu.ehr.dbhelper.mongodb.MongodbHelper;
import com.yihu.hos.core.log.Logger;
import com.yihu.hos.core.log.LoggerFactory;
import com.yihu.hos.rest.common.dao.DatacollectDao;
import com.yihu.hos.rest.common.dao.DatacollectLogDao;
import com.yihu.hos.rest.models.crawler.config.SysConfig;
import com.yihu.hos.rest.models.crawler.patient.PatientIdentity;
import com.yihu.hos.rest.models.rs.*;
import com.yihu.hos.rest.services.standard.StdService;
import com.yihu.hos.web.framework.constrant.DateConvert;
import com.yihu.hos.web.framework.constrant.SqlConstants;
import com.yihu.hos.web.framework.model.ActionResult;
import com.yihu.hos.web.framework.util.GridFSUtil;
import org.bson.types.ObjectId;
import org.dom4j.Document;
import org.dom4j.Element;
import org.dom4j.io.SAXReader;
import org.json.JSONArray;
import org.json.JSONObject;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import javax.annotation.Resource;
import java.io.ByteArrayInputStream;
import java.sql.Blob;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.*;
/**
 * 数据采集执行服务
 */
@Service("DatacollectService")
public class DatacollectService {
    static private final Logger logger = LoggerFactory.getLogger(DatacollectService.class);
    MongodbHelper mongoOrigin = new MongodbHelper("origin");
    MongodbHelper mongo = new MongodbHelper();
    String dateFormat = "yyyy-MM-dd HH:mm:ss"; //默认时间字符串格式
    int maxNum = 1000; //查询条数限制
    @Resource(name = "DatacollectManager")
    private DatacollectManager datacollect;
    @Resource(name = "StdService")
    private StdService stdService;
    @Resource(name = "DatacollectDao")
    private DatacollectDao datacollectDao;
    @Resource(name = "DatacollectLogDao")
    private DatacollectLogDao datacollectLogDao;
    @Autowired
    private ObjectMapper objectMapper;
    /**
     * 根据连接字符串获取数据库类型
     */
    /**
     * Infer the database type from a JDBC connection-string prefix.
     * Unknown prefixes fall back to MySQL, matching the original behavior.
     *
     * @param uri JDBC connection string, e.g. "jdbc:mysql://host/db"
     * @return the matching {@link DBType}, defaulting to {@code DBType.Mysql}
     */
    private static DBType getDbType(String uri) {
        if (uri.startsWith("jdbc:oracle")) {
            return DBType.Oracle;
        } else if (uri.startsWith("jdbc:hive2")) {
            return DBType.Hive;
        } else if (uri.startsWith("jdbc:microsoft:sqlserver") || uri.startsWith("jdbc:sqlserver")) {
            // "jdbc:microsoft:sqlserver" is the legacy SQL Server 2000 driver prefix;
            // current Microsoft JDBC drivers use "jdbc:sqlserver:" — accept both.
            return DBType.Sqlserver;
        } else {
            // covers "jdbc:mysql" and any unrecognized prefix (original default)
            return DBType.Mysql;
        }
    }
    /**
     * 执行任务
     */
    /**
     * Execute a collection job: open a job log, collect every dataset bound to
     * the job (web service / file system / database table), and record totals.
     *
     * @param jobId id of the job configuration to run
     * @return true when the job ran to completion (individual datasets may still
     *         fail — their errors are appended to the job log), false when the
     *         job itself aborted with an exception
     * @throws Exception if loading the job configuration or writing the log fails
     */
    public Boolean collectByJobId(String jobId) throws Exception {
        // Load the job configuration and create the job-level log entry up front
        // so partial progress is always recorded.
        RsJobConfig job = datacollect.getJobById(jobId);
        RsJobLog log = new RsJobLog();
        log.setJobId(jobId);
        log.setJobStartTime(new Date());
        datacollectLogDao.saveEntity(log);
        String logId = log.getId();
        logger.info("任务" + jobId + "开始采集,新增日志" + logId + "。");
        StringBuilder logStr = new StringBuilder();
        int count = 0;
        int success = 0;
        Boolean flag = true;
        try {
            String schemeVersion = job.getSchemeVersion();
            // Datasets attached to this job.
            List<DtoJobDataset> list = datacollectDao.getDatacollectDataset(jobId);
            // BUGFIX: check for null BEFORE dereferencing — the original logged
            // list.size() ahead of its null check, risking an NPE.
            if (list != null && !list.isEmpty()) {
                logger.info("获取任务相关数据集,数量" + list.size() + "。");
                count = list.size();
                logStr.append("/*********** 开始采集 *******************/\n");
                // Collect each dataset; one dataset's failure must not stop the rest.
                for (DtoJobDataset ds : list) {
                    try {
                        String type = ds.getType();
                        String message = "";
                        logStr.append(DateConvert.toString(new Date(), dateFormat) + " " + ds.getJobDatasetName());
                        if (type != null) {
                            if (type.equals("1")) // web service source
                            {
                                message = collectWebservice(ds, schemeVersion, logId) + "\n";
                            } else if (type.equals("2")) // file-system source
                            {
                                message = "文件系统采集。\n";
                            } else { // database table source
                                message = collectTable(ds, schemeVersion, logId) + "\n";
                            }
                        } else {
                            message = ds.getJobDatasetName() + "未关联数据源!\n";
                        }
                        logger.info(message); // text log
                        logStr.append(message);
                        success++;
                    } catch (Exception ex) {
                        logger.info("异常:" + ex.getMessage());
                        logStr.append(ex.getMessage() + "\n");
                    }
                }
                logStr.append("/*********** 结束采集 *******************/\n");
            }
        } catch (Exception ex) {
            flag = false;
            ex.printStackTrace();
            logger.info("异常:" + ex.getMessage());
            logStr.append(ex.getMessage() + "\n");
            logStr.append("/*********** 出现异常,中断采集 *******************/\n");
        }
        // Finalize the job log: escape quotes and truncate to the column limit.
        String jobContent = logStr.toString().replace("\"", "\\\"");
        if (jobContent.length() > 4000) {
            jobContent = jobContent.substring(0, 4000);
        }
        log.setJobContent(jobContent);
        log.setJobEndTime(new Date());
        log.setJobDatasetCount(count);
        log.setJobDatasetSuccess(success);
        logger.info("任务结束," + count + "个数据集成功采集" + success + "个。");
        datacollectLogDao.updateEntity(log);
        return flag;
    }
    /**
     * 根据日志详细补采数据
     */
    @Transactional
    public ActionResult repeatJob(String id) throws Exception {
        RsJobLogDetail log = datacollectLogDao.getEntity(RsJobLogDetail.class, id);
        if (log.getJobStatus().equals("2")) {
            return new ActionResult(false, "数据补采中!");
        }
        if (!log.getJobStatus().equals("0")) {
            return new ActionResult(false, "数据无需补采!");
        }
        try {
            log.setRepeatStartTime(new Date());
            log.setJobStatus("2"); //设置采集中状态
            datacollectLogDao.updateEntity(log);
        } catch (Exception e) {
            return new ActionResult(false, "补采失败!");
        }
        log.setJobStatus("0");
        datacollectLogDao.updateEntity(log);
        String stdDatasetCode = log.getStdDatasetCode();
        String sql = log.getJobSql();
        //数据库连接
        String datasourceId = log.getDatasourceId();
        String config = log.getConfig();
        DBHelper db = new DBHelper(datasourceId, config);
        //获取数据集字段映射结构
        String schemeVersion = log.getSchemeVersion();
        String datasetId = log.getJobDatasetId();
        List colString = stdService.getDatacolByScheme(schemeVersion, datasetId);
        JSONArray colList = new JSONArray(colString);
        List<JSONObject> list = db.query(sql);
        String message = intoMongodb(list, schemeVersion, stdDatasetCode, colList);
        if (message.length() > 0 || db.errorMessage.length() > 0) {
            log.setJobStatus("0");
            log.setRepeatEndTime(new Date());
            if (message.length() > 0) {
                log.setRepeatJobContent(message);
            } else {
                db.errorMessage.length();
            }
            datacollectLogDao.updateEntity(log);
            return new ActionResult(false, "补采失败!");
        } else {
            log.setJobStatus("3");
            log.setRepeatEndTime(new Date());
            log.setRepeatJobContent("补采成功!");
            datacollectLogDao.updateEntity(log);
            return new ActionResult(true, "补采成功!");
        }
    }
    /**
     * 根据数据库类型获取时间sql
     *
     * @return
     */
    /**
     * Build a database-specific datetime literal for the given date, for use in
     * generated WHERE clauses.
     *
     * @param dbType target database dialect
     * @param date   value to embed
     * @return a dialect-specific datetime expression, or the plain formatted
     *         string for dialects without special handling
     * @throws Exception if date formatting fails
     */
    private String getDateSqlByDBType(DBType dbType, Date date) throws Exception {
        String val = DateConvert.toString(date, dateFormat);
        if (dbType.equals(DBType.Mysql)) {
            // BUGFIX: MySQL DATE_FORMAT takes '%'-style specifiers; the original
            // passed the Java pattern "yyyy-MM-dd HH:mm:ss", which MySQL would
            // echo almost verbatim instead of formatting.
            return "date_format('" + val + "','%Y-%m-%d %H:%i:%s')";
        } else if (dbType.equals(DBType.Oracle)) {
            // BUGFIX: in Oracle's format model 'mm' means month and 'HH' is the
            // 12-hour clock, so the Java pattern produced a wrong/invalid TO_DATE.
            return "to_date('" + val + "','YYYY-MM-DD HH24:MI:SS')";
        } else {
            return val;
        }
    }
    /**
     * 根据数据库类型获取转换数值型sql
     */
    /**
     * Wrap a column expression so the target database compares it numerically.
     *
     * @param dbType target database dialect
     * @param key    column name or expression to convert
     * @return the dialect-specific numeric cast, or the expression unchanged
     * @throws Exception declared for interface compatibility
     */
    private String getToNumberSqlByDBType(DBType dbType, String key) throws Exception {
        switch (dbType) {
            case Mysql:
                return "cast(" + key + " as signed integer)";
            case Oracle:
                return "to_number(" + key + ")";
            default:
                return key;
        }
    }
    /**
     * 根据数据库类型获取分页sql
     *
     * @return
     */
    /**
     * Wrap a query with database-specific pagination.
     *
     * @param dbType target database dialect
     * @param sql    base query to paginate
     * @param start  zero-based row offset
     * @param rows   number of rows to fetch
     * @return the paginated query, or the base query unchanged for dialects
     *         without pagination support here
     * @throws Exception declared for interface compatibility
     */
    private String getPageSqlByDBType(DBType dbType, String sql, int start, int rows) throws Exception {
        switch (dbType) {
            case Mysql:
                return sql + " LIMIT " + start + "," + rows;
            case Oracle:
                // classic ROWNUM windowing: returns rows (start, start + rows]
                return " select * from (select t.*,ROWNUM RSCOM_RN from (" + sql
                        + ") t where ROWNUM<" + (start + rows + 1) + ") where RSCOM_RN>= " + (start + 1);
            default:
                return sql;
        }
    }
    /**
     * 字典全转换成中文
     */
    /**
     * Translate every dictionary-mapped column of the result rows into its
     * Chinese display value (rows are rewritten in place; used before storing
     * to the "origin" MongoDB database).
     *
     * @param list          result rows to rewrite
     * @param colList       column mapping metadata for the dataset
     * @param schemeVersion adapter scheme version used to load dictionaries
     * @return the same list instance, with dictionary columns translated
     * @throws Exception if dictionary data cannot be loaded
     */
    private List<JSONObject> translateDictCN(List<JSONObject> list, JSONArray colList, String schemeVersion) throws Exception {
        // Collect the columns that carry a dictionary mapping.
        List<DtoDictCol> dictCols = new ArrayList<>();
        for (int idx = 0; idx < colList.length(); idx++) {
            JSONObject column = colList.getJSONObject(idx);
            String dictId = column.optString("adapterDictId");
            if (dictId == null || dictId.length() == 0) {
                continue;
            }
            String dictType = column.optString("adapterDataType");
            DtoDictCol dictCol = new DtoDictCol();
            dictCol.setStdMetadataCode(column.optString("stdMetadataCode"));
            dictCol.setStdDictId(dictId);
            // default to code-based translation when no type is configured
            dictCol.setAdapterDataType(dictType.length() > 0 ? dictType : "1");
            // load the dictionary entries for this scheme version
            List dictString = stdService.getDictByScheme(schemeVersion, dictId);
            dictCol.setDictList(new JSONArray(dictString));
            dictCols.add(dictCol);
        }
        // Rewrite each row's dictionary columns.
        for (JSONObject row : list) {
            for (DtoDictCol dictCol : dictCols) {
                String colName = dictCol.getStdMetadataCode();
                String rawValue = row.optString(colName);
                String translated = translateDictValueCN(rawValue, dictCol.getAdapterDataType(), dictCol.getDictList());
                if (translated != null && translated.length() > 0) {
                    row.put(colName, translated);
                }
            }
        }
        return list;
    }
    /**
     * 转译字典成中文
     *
     * @return
     */
    /**
     * Map a dictionary code to its Chinese display name. Type "0" means the
     * column already stores a plain value and is returned untouched; when no
     * dictionary entry matches, the original value is returned.
     *
     * @param oldValue        value to translate (may be null)
     * @param type            adapter data type; "0" = already a value
     * @param dictAdapterList dictionary entries (stdEntryCode -> stdEntryValue)
     * @return the translated display name, or {@code oldValue} when no match
     * @throws Exception declared for interface compatibility
     */
    private String translateDictValueCN(String oldValue, String type, JSONArray dictAdapterList) throws Exception {
        if (type.equals("0")) {
            // already a plain value, nothing to translate
            return oldValue;
        }
        // scan the dictionary entries (code -> name)
        for (int idx = 0; idx < dictAdapterList.length(); idx++) {
            JSONObject entry = dictAdapterList.getJSONObject(idx);
            if (oldValue != null && entry.has("stdEntryCode")
                    && oldValue.equals(entry.getString("stdEntryCode"))) {
                return entry.getString("stdEntryValue"); // display name
            }
        }
        return oldValue;
    }
    /**
     * 字典转换
     *
     * @param list
     * @param colList
     * @return
     * @throws Exception
     */
    /**
     * Translate every dictionary-mapped column of the result rows from the
     * adapter (source) coding to the standard dictionary coding (rows are
     * rewritten in place; used before storing to the standard MongoDB database).
     *
     * @param list          result rows to rewrite
     * @param colList       column mapping metadata for the dataset
     * @param schemeVersion adapter scheme version used to load dictionaries
     * @return the same list instance, with dictionary columns translated
     * @throws Exception if dictionary data cannot be loaded
     */
    private List<JSONObject> translateDict(List<JSONObject> list, JSONArray colList, String schemeVersion) throws Exception {
        // Collect the columns that carry a dictionary mapping.
        List<DtoDictCol> dictCols = new ArrayList<>();
        for (int idx = 0; idx < colList.length(); idx++) {
            JSONObject column = colList.getJSONObject(idx);
            String dictId = column.optString("adapterDictId");
            if (dictId == null || dictId.length() == 0) {
                continue;
            }
            String dictType = column.optString("adapterDataType");
            DtoDictCol dictCol = new DtoDictCol();
            dictCol.setStdMetadataCode(column.optString("stdMetadataCode"));
            dictCol.setStdDictId(dictId);
            // default to code-based translation when no type is configured
            dictCol.setAdapterDataType(dictType.length() > 0 ? dictType : "1");
            // load the dictionary entries for this scheme version
            List dictString = stdService.getDictByScheme(schemeVersion, dictId);
            dictCol.setDictList(new JSONArray(dictString));
            dictCols.add(dictCol);
        }
        // Rewrite each row's dictionary columns.
        for (JSONObject row : list) {
            for (DtoDictCol dictCol : dictCols) {
                String colName = dictCol.getStdMetadataCode();
                String rawValue = row.optString(colName);
                String translated = translateDictValue(rawValue, dictCol.getAdapterDataType(), dictCol.getDictList());
                if (translated != null && translated.length() > 0) {
                    row.put(colName, translated);
                }
            }
        }
        return list;
    }
    /**
     * 转译字典
     *
     * @return
     */
    /**
     * Map an adapter dictionary value to the standard entry code. Type "0"
     * matches on the adapter display value; any other type matches on the
     * adapter code. Returns the empty string when no adapter entry matches.
     *
     * @param oldValue        adapter-side value to translate (may be null)
     * @param type            adapter data type; "0" = match by display value
     * @param dictAdapterList adapter dictionary entries
     * @return the standard entry code, or "" when no mapping is found
     * @throws Exception declared for interface compatibility
     */
    private String translateDictValue(String oldValue, String type, JSONArray dictAdapterList) throws Exception {
        // pick the adapter-side field used for matching
        String matchField = type.equals("0") ? "adapterEntryValue" : "adapterEntryCode";
        for (int idx = 0; idx < dictAdapterList.length(); idx++) {
            JSONObject entry = dictAdapterList.getJSONObject(idx);
            if (oldValue != null && entry.has(matchField)
                    && oldValue.equals(entry.getString(matchField))) {
                return entry.getString("stdEntryCode");
            }
        }
        // no adapter mapping found -> empty string
        return "";
    }
    /**
     * 获取过滤条件
     *
     * @return
     */
    /**
     * Convert a JSON-encoded filter list into a WHERE-clause fragment using the
     * condition parser matching the target database dialect.
     *
     * @param dbType          target database dialect
     * @param conditionString JSON array of {andOr, condition, field, value}
     * @return the parsed condition SQL, or "" when the list is empty
     */
    private String getCondition(DBType dbType, String conditionString) {
        JSONArray array = new JSONArray(conditionString);
        if (array == null || array.length() == 0) {
            return "";
        }
        List<QueryCondition> conditions = new ArrayList<>();
        for (Object item : array) {
            JSONObject obj = (JSONObject) item;
            conditions.add(new QueryCondition(
                    obj.getString("andOr"),
                    obj.getString("condition"),
                    obj.getString("field"),
                    obj.getString("value")));
        }
        // pick the SQL condition parser for the dialect
        ParserSql parser;
        switch (dbType) {
            case Oracle:
                parser = new ParserOracle();
                break;
            case Sqlserver:
                parser = new ParserSqlserver();
                break;
            default:
                parser = new ParserMysql();
        }
        return parser.getConditionSql(conditions);
    }
    /**
     * 获取条件SQL
     *
     * @param dbType
     * @param conditionString
     * @return
     * @throws ParseException
     */
    /**
     * Build a WHERE-clause fragment (each condition prefixed with " and "/" or ")
     * from a JSON-encoded condition list. Supports IN / NOT IN, LIKE, and plain
     * comparisons; DATE-typed values are formatted per dialect.
     *
     * NOTE(review): values are concatenated directly into the SQL — safe only
     * while conditions come from trusted configuration, not end-user input.
     *
     * @param dbType          target database dialect (used for DATE formatting)
     * @param conditionString JSON array of {condition, andOr, field, value, type}
     * @return the assembled condition fragment, "" for an empty list
     * @throws ParseException if a DATE value cannot be parsed
     */
    private String getConditionSql(DBType dbType, String conditionString) throws ParseException {
        StringBuilder conditionSql = new StringBuilder();
        JSONArray conditions = new JSONArray(conditionString);
        for (Object item : conditions) {
            JSONObject condition = (JSONObject) item;
            String logic = condition.getString("condition");
            String andOr = condition.getString("andOr");
            String field = condition.getString("field");
            String value = condition.getString("value");
            String fieldType = condition.getString("type");
            conditionSql.append(andOr.equals(" AND ") ? " and " : " or ");
            String keys;
            if (logic.equals(" IN ") || logic.equals(" NOT IN ")) {
                // value is a comma-separated list -> ('a','b',...)
                StringBuilder quoted = new StringBuilder();
                for (String key : value.split(",")) {
                    quoted.append("'").append(key).append("',");
                }
                keys = " (" + quoted.substring(0, quoted.length() - 1) + ") ";
            } else if (logic.equals(" LIKE ")) {
                keys = " '%" + value + "%' ";
            } else if (fieldType.equals("DATE")) {
                keys = getDateFormatSql(dbType, value);
            } else {
                keys = " '" + value + "' ";
            }
            conditionSql.append(field).append(logic).append(keys);
        }
        return conditionSql.toString();
    }
    /**
     * 获取对应数据库时间格式
     *
     * @param dbType
     * @param key
     * @return
     * @throws ParseException
     */
    /**
     * Format a "yyyy-MM-dd" date string as a datetime literal for the target
     * database dialect (used when embedding DATE condition values).
     *
     * @param dbType target database dialect
     * @param key    date string in "yyyy-MM-dd" form
     * @return a dialect-specific datetime expression; SQL Server keeps the
     *         original string (it compares ISO datetime strings directly)
     * @throws ParseException if {@code key} is not a parseable date
     */
    private String getDateFormatSql(DBType dbType, String key) throws ParseException {
        String dateFormat = "yyyy-MM-dd HH:mm:ss";
        SimpleDateFormat formatDate = new SimpleDateFormat("yyyy-MM-dd");
        Date d = formatDate.parse(key);
        SimpleDateFormat format = new SimpleDateFormat(dateFormat);
        switch (dbType) {
            case Oracle:
                key = "to_date(\'" + format.format(d) + "\',\'YYYY-MM-DD HH24:MI:SS\')";
                break;
            case Sqlserver:
                break;
            default:
                // BUGFIX: MySQL's '%y' is the TWO-digit year; '%Y' (four digits)
                // is required so datetime comparisons are not silently wrong.
                key = "date_format(\'" + format.format(d) + "\',\'%Y-%m-%d %T\')";
        }
        return key;
    }
    /**
     * 采集入库
     *
     * @return
     */
    /**
     * Persist collected rows to MongoDB: first the rows with dictionaries
     * translated to Chinese names into the "origin" database, then the rows
     * with dictionaries translated to standard codes into the standard database.
     * Also ensures the patient index exists on the target collection.
     *
     * @param list           collected rows (may be null or empty; then only the
     *                       index is ensured)
     * @param schemeVersion  adapter scheme version for dictionary lookups
     * @param stdDatasetCode standard dataset code, used as the collection name
     * @param colList        column mapping metadata for the dataset
     * @return "" on success, otherwise an error description
     */
    private String intoMongodb(List<JSONObject> list, String schemeVersion, String stdDatasetCode, JSONArray colList) {
        // Default index fields; overridden per-dataset by the SysConfig mapping.
        String patientIdCode = SqlConstants.PATIENT_ID.toUpperCase();
        String eventNoCode = SqlConstants.EVENT_NO.toUpperCase();
        PatientIdentity patientIdentity = SysConfig.getInstance().getPatientIdentity(stdDatasetCode);
        if (patientIdentity != null) {
            patientIdCode = patientIdentity.getPatientIDCode();
            eventNoCode = patientIdentity.getEventNoCode();
        }
        try {
            if (!mongo.createIndex(stdDatasetCode, "patientIndex", patientIdCode, eventNoCode)) {
                return "Mongodb索引创建失败!(表:" + stdDatasetCode + ")";
            }
            if (list != null && list.size() > 0) {
                // Store to the origin database before code translation (rows carry
                // Chinese dictionary names).
                // NOTE(review): the result of this origin insert is overwritten
                // below and never checked — an origin-store failure is silently
                // ignored; confirm whether that is intentional.
                boolean b = mongoOrigin.insert(stdDatasetCode, translateDictCN(list, colList, schemeVersion));
                // Translate dictionaries to standard codes (rewrites rows in place).
                list = translateDict(list, colList, schemeVersion);
                // Store the standardized rows to the main database.
                b = mongo.insert(stdDatasetCode, list);
                if (!b) {
                    if (mongo.errorMessage != null && mongo.errorMessage.length() > 0) {
                        logger.debug(mongo.errorMessage);
                        return mongo.errorMessage;
                    } else {
                        return "Mongodb保存失败!(表:" + stdDatasetCode + ")";
                    }
                }
            }
        } catch (Exception e) {
            return e.getMessage();
        }
        return "";
    }
    /**
     * 数据库表采集
     *
     * @return
     */
    /**
     * Collect one database-backed dataset: builds a mapped SELECT from the
     * scheme's dataset/column mappings, applies configured filters and the
     * incremental key, pages through the source table, writes each page to
     * MongoDB, and records one {@link RsJobLogDetail} per page.
     *
     * @param ds            dataset/job binding (source, mapping, incremental key)
     * @param schemeVersion adapter scheme version for the mappings
     * @param logId         id of the parent job log
     * @return a human-readable summary of what was collected
     * @throws Exception when the mapping is empty or the source query fails
     */
    private String collectTable(DtoJobDataset ds, String schemeVersion, String logId) throws Exception {
        String message = "";
        String datasetId = ds.getJobDatasetId();
        String jobDatasetName = ds.getJobDatasetName();
        String condition = ds.getJobDatasetCondition();
        String key = ds.getJobDatasetKey();
        String keytype = ds.getJobDatasetKeytype();
        String keyvalue = ds.getJobDatasetKeyvalue();
        String orgCode = ds.getOrgCode();
        String datasourceId = ds.getDatasourceId();
        String config = ds.getConfig(); // database connection configuration
        DBHelper db = new DBHelper(datasourceId, config);
        DBType dbType = db.dbType;
        // dataset (table) mapping for this scheme version
        List datasetString = stdService.getDatasetByScheme(schemeVersion, datasetId);
        JSONArray datasetList = new JSONArray(datasetString);
        if (datasetList != null && datasetList.length() > 0) {
            String stdTableName = datasetList.getJSONObject(0).optString("stdDatasetCode");
            String adapterTableName = datasetList.getJSONObject(0).optString("adapterDatasetCode");
            // column mapping for this dataset
            List colString = stdService.getDatacolByScheme(schemeVersion, datasetId);
            JSONArray colList = new JSONArray(colString);
            if (colList != null && colList.length() > 0) {
                // Build the mapped SELECT: source column AS standard column,
                // with the org code injected as a constant column.
                String strSql = "Select '" + orgCode + "' as RSCOM_ORG_CODE";
                for (int i = 0; i < colList.length(); i++) {
                    JSONObject col = colList.getJSONObject(i);
                    String adapterMetadataCode = col.optString("adapterMetadataCode");
                    if (adapterMetadataCode.length() > 0) {
                        strSql += "," + adapterMetadataCode + " as " + col.optString("stdMetadataCode");
                    }
                }
                strSql += " from " + adapterTableName;
                String strWhere = " where 1=1";
                // configured collection filters
                if (condition != null && condition.length() > 0) {
                    strWhere += getConditionSql(dbType, condition);
                }
                // Incremental collection: only rows with key > last recorded value,
                // ordered by the key so paging is stable.
                String maxKey = "0";
                if (key != null && key.length() > 0) {
                    maxKey = key;
                    if (keytype.toUpperCase().equals("DATE")) // date-typed key
                    {
                        if (keyvalue != null && keyvalue.length() > 0) {
                            Date keyDate = new Date();
                            // parse the stored key value
                            keyDate = DateConvert.toDate(keyvalue);
                            // dialect-specific datetime literal
                            strWhere += " and " + maxKey + ">'" + getDateSqlByDBType(dbType, keyDate) + "'";
                        }
                    } else if (keytype.toUpperCase().equals("VARCHAR")) // string-typed key
                    {
                        // compare numerically via a dialect-specific cast
                        maxKey = getToNumberSqlByDBType(dbType, key);
                        if (keyvalue != null && keyvalue.length() > 0) {
                            strWhere += " and " + maxKey + ">'" + keyvalue + "'";
                        }
                    } else {
                        if (keyvalue != null && keyvalue.length() > 0) {
                            strWhere += " and " + maxKey + ">'" + keyvalue + "'";
                        }
                    }
                    strWhere += " order by " + maxKey;
                }
                strSql += strWhere;
                // total row count and maximum key value for this run
                // NOTE(review): the count subquery has no alias — MySQL requires
                // "... ) t"; confirm DBHelper compensates for this.
                String sqlCount = "select count(1) as COUNT from (" + strSql + ")";
                String sqlMax = "select max(" + maxKey + ") as MAX_KEYVALUE from " + adapterTableName + strWhere;
                JSONObject objCount = db.load(sqlCount);
                if (objCount == null) {
                    if (db.errorMessage.length() > 0) {
                        throw new Exception(db.errorMessage);
                    } else {
                        throw new Exception("查询异常:" + sqlCount);
                    }
                } else {
                    int count = objCount.getInt("COUNT");
                    if (count == 0) // nothing to collect
                    {
                        message = "0条记录,无需采集。";
                    } else {
                        // fetch the maximum key value of the rows about to be collected
                        JSONObject objMax = db.load(sqlMax);
                        int successCount = 0;
                        String maxKeyvalue = objMax.optString("MAX_KEYVALUE");
                        // Advance the incremental watermark BEFORE collecting, so the
                        // next run starts after this batch.
                        if (maxKeyvalue != null && maxKeyvalue.length() > 0) {
                            datacollectLogDao.updateJobDatasetKeyvalue(ds.getId(), maxKeyvalue);
                            logger.info("修改任务数据集最大值为" + maxKeyvalue + "。"); //文本日志
                        }
                        // page the result set in chunks of maxNum rows
                        int countPage = 1;
                        if (count > maxNum) // paged collection
                        {
                            countPage = count / maxNum + 1;
                        }
                        for (int i = 0; i < countPage; i++) {
                            int rows = maxNum;
                            if (i + 1 == countPage) {
                                // last page carries the remainder
                                rows = count - i * maxNum;
                            }
                            String sql = getPageSqlByDBType(dbType, strSql, i * maxNum, rows); // page query
                            // one log-detail record per page, with everything needed
                            // to repeat the page later (see repeatJob)
                            RsJobLogDetail detail = new RsJobLogDetail();
                            detail.setStartTime(new Date());
                            detail.setJobLogId(logId);
                            detail.setDatasourceId(datasourceId);
                            detail.setConfig(config);
                            detail.setStdDatasetCode(stdTableName);
                            detail.setJobDatasetId(datasetId);
                            detail.setJobDatasetName(ds.getJobDatasetName());
                            detail.setJobId(ds.getJobId());
                            detail.setJobSql(sql);
                            detail.setJobNum(i + 1);
                            detail.setJobDatasetRows(rows);
                            detail.setSchemeVersion(schemeVersion);
                            List<JSONObject> list = db.query(sql);
                            String msg = "";
                            if (list != null) {
                                msg = intoMongodb(list, schemeVersion, stdTableName, colList); // "" = success
                            } else {
                                if (db.errorMessage.length() > 0) {
                                    msg = db.errorMessage;
                                } else {
                                    msg = "查询数据为空!";
                                }
                            }
                            if (msg.length() > 0) {
                                // page failed: status "0" marks it repeatable
                                detail.setJobStatus("0");
                                detail.setJobContent(msg);
                                logger.info(msg); // text log
                            } else {
                                detail.setJobStatus("1");
                                detail.setJobContent("采集成功!");
                                successCount += rows;
                            }
                            detail.setEndTime(new Date());
                            datacollectLogDao.saveEntity(detail);
                        }
                        message = jobDatasetName + "采集成功" + successCount + "条数据,总条数" + count + "条。";
                    }
                }
            } else {
                throw new Exception(jobDatasetName + "数据集字段映射为空!");
            }
        } else {
            throw new Exception(jobDatasetName + "数据集映射为空!");
        }
        logger.info(message);
        return message;
    }
    /**
     * XML转JSONList
     *
     * @return
     */
    /**
     * Parse a webservice XML payload into JSON rows: each {@code <Data>}
     * element under the root becomes one JSONObject whose keys are the
     * upper-cased child element names.
     *
     * @param xml UTF-8 XML document text
     * @return one JSONObject per {@code <Data>} element (possibly empty)
     * @throws Exception if the XML cannot be parsed
     */
    private List<JSONObject> getListFromXml(String xml) throws Exception {
        SAXReader reader = new SAXReader();
        Document doc = reader.read(new ByteArrayInputStream(xml.getBytes("UTF-8")));
        List<JSONObject> rows = new ArrayList<>();
        // iterate the <Data> record elements
        Iterator dataIter = doc.getRootElement().elementIterator("Data");
        while (dataIter.hasNext()) {
            Element dataEl = (Element) dataIter.next();
            JSONObject row = new JSONObject();
            // each child element is one column: NAME -> text value
            Iterator colIter = dataEl.elementIterator();
            while (colIter.hasNext()) {
                Element colEl = (Element) colIter.next();
                row.put(colEl.getName().toUpperCase(), colEl.getStringValue());
            }
            rows.add(row);
        }
        return rows;
    }
    /**
     * webservice采集
     *
     * @return
     */
    private String collectWebservice(DtoJobDataset ds, String schemeVersion, String logId) throws Exception {
        String message = "";
        String datasetId = ds.getJobDatasetId();
        String jobDatasetName = ds.getJobDatasetName();
        String condition = ds.getJobDatasetCondition();
        String key = ds.getJobDatasetKey();
        String keytype = ds.getJobDatasetKeytype();
        String keyvalue = ds.getJobDatasetKeyvalue();
        String orgCode = ds.getOrgCode();
        String datasourceId = ds.getDatasourceId();
        String config = ds.getConfig(); //数据库连接
        DBType dbType = DBType.Oracle;//********** 先定死Oracle ****************************
        //webservice地址
        Map<String, String> mapConfig = objectMapper.readValue(config, Map.class);
        if (mapConfig.containsKey("protocol") && mapConfig.containsKey("url")) {
            String url = mapConfig.get("protocol") + "://" + mapConfig.get("url");
            //获取数据集映射
            List datasetString = stdService.getDatasetByScheme(schemeVersion, datasetId);
            JSONArray datasetList = new JSONArray(datasetString);
            if (datasetList != null && datasetList.length() > 0) {
                String stdTableName = datasetList.getJSONObject(0).optString("stdDatasetCode");
                String adapterTableName = datasetList.getJSONObject(0).optString("adapterDatasetCode");
                //获取数据集字段映射结构
                List colString = stdService.getDatacolByScheme(schemeVersion, datasetId);
                JSONArray colList = new JSONArray(colString);
                if (colList != null && colList.length() > 0) {
                    //拼接查询sql
                    String strSql = "Select '" + orgCode + "' as RSCOM_ORG_CODE";
                    for (int i = 0; i < colList.length(); i++) {
                        JSONObject col = colList.getJSONObject(i);
                        String adapterMetadataCode = col.optString("adapterMetadataCode");
                        if (adapterMetadataCode.length() > 0) {
                            strSql += "," + adapterMetadataCode + " as " + col.optString("stdMetadataCode");
                        }
                    }
                    strSql += " from " + adapterTableName;
                    String strWhere = " where 1=1";
                    //采集范围
                    if (condition != null && condition.length() > 0) {
                        strWhere += getConditionSql(dbType, condition);
                    }
                    //增量采集
                    String maxKey = "0";
                    String keyValue = ds.getJobDatasetKeyvalue();
                    if (key != null && key.length() > 0) {
                        maxKey = key;
                        if (keytype.toUpperCase().equals("DATE")) //时间类型
                        {
                            Date keyDate = new Date();
                            if (keyvalue != null && keyvalue.length() > 0) {
                                //字符串转时间
                                keyDate = DateConvert.toDate(keyvalue);
                                //根据数据库类型获取时间sql
                                strWhere += " and " + key + ">'" + getDateSqlByDBType(dbType, keyDate) + "'";
                                strWhere += " order by " + key;
                            }
                        } else if (keytype.toUpperCase().equals("VARCHAR")) //字符串类型
                        {
                            maxKey = getToNumberSqlByDBType(dbType, key);
                            if (keyvalue != null && keyvalue.length() > 0) {
                                strWhere += " and " + maxKey + ">'" + keyvalue + "'";
                                strWhere += " order by " + maxKey;
                            }
                        } else {
                            if (keyvalue != null && keyvalue.length() > 0) {
                                strWhere += " and " + key + ">'" + keyvalue + "'";
                                strWhere += " order by " + key;
                            }
                        }
                    }
                    strSql += strWhere;
                    //总条数和最大值查询
                    String sqlCount = "select count(1) as COUNT from (" + strSql + ")";
                    String sqlMax = "select max(" + maxKey + ") as MAX_KEYVALUE from " + adapterTableName + strWhere;
                    //webservice获取数据总条数
                    String strCount = "";//WebserviceUtil.request(url, "ExcuteSQL", new Object[]{"", sqlCount});
                    List<JSONObject> dataCount = getListFromXml(strCount);
                    if (dataCount != null && dataCount.size() > 0) {
                        Integer count = Integer.parseInt(dataCount.get(0).getString("COUNT"));
                        if (count == 0) //0条记录,无需采集
                        {
                            message = "0条记录,无需采集。";
                        } else {
                            //webservice获取最大值
                            String strMax = ""; //WebserviceUtil.request(url, "ExcuteSQL", new Object[]{"", sqlMax});
                            List<JSONObject> dataMax = getListFromXml(strCount);
                            int successCount = 0;
                            String maxKeyvalue = dataMax.get(0).getString("MAX_KEYVALUE");
                            //修改最大值
                            if (maxKeyvalue != null && maxKeyvalue.length() > 0) {
                                datacollectLogDao.updateJobDatasetKeyvalue(ds.getId(), maxKeyvalue);
                                logger.info("修改任务数据集最大值为" + maxKeyvalue + "。"); //文本日志
                            }
                            int countPage = 1;
                            if (count > maxNum) //分页采集
                            {
                                countPage = count / maxNum + 1;
                            }
                            for (int i = 0; i < countPage; i++) {
                                int rows = maxNum;
                                if (i + 1 == countPage) {
                                    rows = count - i * maxNum;
                                }
                                String sql = getPageSqlByDBType(dbType, strSql, i * maxNum, rows); //获取分页sql语句
                                RsJobLogDetail detail = new RsJobLogDetail();
                                detail.setStartTime(new Date());
                                detail.setJobLogId(logId);
                                detail.setDatasourceId(datasourceId);
                                detail.setConfig(config);
                                detail.setStdDatasetCode(stdTableName);
                                detail.setJobDatasetId(datasetId);
                                detail.setJobDatasetName(ds.getJobDatasetName());
                                detail.setJobId(ds.getJobId());
                                detail.setJobSql(sql);
                                detail.setJobNum(i + 1);
                                detail.setJobDatasetRows(rows);
                                detail.setSchemeVersion(schemeVersion);
                                String msg = "";
                                try {
                                    //获取分页数据
                                    String strList = ""; //WebserviceUtil.request(url, "ExcuteSQL", new Object[]{"", sql});
                                    List<JSONObject> list = getListFromXml(strList);
                                    if (list != null) {
                                        msg = intoMongodb(list, schemeVersion, stdTableName, colList); //返回信息
                                    } else {
                                        msg = "查询数据为空!";
                                    }
                                    if (msg.length() > 0) {
                                        //任务日志细表异常操作
                                        detail.setJobStatus("0");
                                        detail.setJobContent(msg);
                                        logger.info(msg); //文本日志
                                    } else {
                                        detail.setJobStatus("1");
                                        detail.setJobContent("采集成功!");
                                        successCount += rows;
                                    }
                                } catch (Exception ex) {
                                    msg = ex.getMessage();
                                }
                                detail.setEndTime(new Date());
                                datacollectLogDao.saveEntity(detail);
                            }
                            message = jobDatasetName + "采集成功" + successCount + "条数据,总条数" + count + "条。";
                        }
                    }
                } else {
                    throw new Exception(jobDatasetName + "数据集字段映射为空!");
                }
            } else {
                throw new Exception(jobDatasetName + "数据集映射为空!");
            }
        } else {
            throw new Exception("非法webservice路径!");
        }
        logger.info(message);
        return message;
    }
    /**
     * Persists collected rows into MongoDB, with special handling for BLOB
     * content fields (unstructured datasets are uploaded to GridFS first and
     * the row's CONTENT field is replaced by the stored ObjectId).
     *
     * @param list          collected rows (one JSONObject per source row)
     * @param schemeVersion adapter scheme version used for dictionary translation
     * @param stdDatasetCode standard dataset (collection) name
     * @param colList       dataset column mapping used for dictionary translation
     * @return empty string on success, otherwise an error message for the job log
     */
    private String intoMongodb2(List<JSONObject> list, String schemeVersion, String stdDatasetCode, JSONArray colList) {
        String patientIdCode = SqlConstants.PATIENT_ID.toUpperCase();
        String eventNoCode = SqlConstants.EVENT_NO.toUpperCase();
        // Dataset-specific patient identity columns override the defaults when configured.
        PatientIdentity patientIdentity = SysConfig.getInstance().getPatientIdentity(stdDatasetCode);
        if (patientIdentity != null) {
            patientIdCode = patientIdentity.getPatientIDCode();
            eventNoCode = patientIdentity.getEventNoCode();
        }
        try {
            if (!mongo.createIndex(stdDatasetCode, "patientIndex", patientIdCode, eventNoCode)) {
                return "Mongodb索引创建失败!(表:" + stdDatasetCode + ")";
            }
            if (list != null && list.size() > 0) {
                //TODO TOSET 判断是否是非结构化数据集
                if ("unstructured".equals(stdDatasetCode)) {
                    for (JSONObject jsonObject : list) {
                        // File content goes to GridFS; the detail row keeps only the file's ObjectId.
                        Blob blob = (Blob) jsonObject.get("CONTENT");
                        String type = (String) jsonObject.get("FILE_TYPE");
                        String patientId = (String) jsonObject.get("patient_id");
                        String eventNo = (String) jsonObject.get("event_no");
                        Map<String, Object> params = new HashMap<>();
                        params.put("patient_id", patientId);
                        params.put("event_no", eventNo);
                        try {
                            ObjectId objectId = GridFSUtil.uploadFile("files", blob, type, params);
                            jsonObject.put("CONTENT", objectId);
                        } catch (Exception e) {
                            // Best-effort per row: keep processing the batch, but record the
                            // failure in the log instead of printStackTrace().
                            logger.error("GridFS upload failed (dataset:" + stdDatasetCode
                                    + ", patient_id:" + patientId + ", event_no:" + eventNo + ")", e);
                        }
                    }
                }
                // Raw (pre-dictionary-translation) copy goes to the origin store;
                // its return value is intentionally not treated as fatal.
                boolean b = mongoOrigin.insert(stdDatasetCode, translateDictCN(list, colList, schemeVersion));
                // Dictionary translation, then insert into the main store.
                list = translateDict(list, colList, schemeVersion);
                b = mongo.insert(stdDatasetCode, list);
                if (!b) {
                    if (mongo.errorMessage != null && mongo.errorMessage.length() > 0) {
                        logger.error(mongo.errorMessage);
                        return mongo.errorMessage;
                    } else {
                        return "Mongodb保存失败!(表:" + stdDatasetCode + ")";
                    }
                }
            }
        } catch (Exception e) {
            logger.error("intoMongodb2 failed (表:" + stdDatasetCode + ")", e);
            // getMessage() may be null (e.g. NPE); callers test msg.length(), so
            // never return null here.
            return e.getMessage() == null ? e.getClass().getName() : e.getMessage();
        }
        return "";
    }
    /**
     * Database collection for datasets containing BLOB-typed columns.
     * Builds an adapted SELECT from the scheme's dataset/column mappings,
     * applies the incremental-key filter, pages through the result set and
     * stores each page via {@link #intoMongodb2}, writing one
     * {@code RsJobLogDetail} row per page.
     *
     * @param ds            job dataset configuration (source, key, condition, ...)
     * @param schemeVersion adapter scheme version
     * @param logId         parent job-log id for the detail rows
     * @return human-readable summary of the collection result
     * @throws Exception when the dataset/column mapping is empty or the count query fails
     */
    private String collectBlobTable(DtoJobDataset ds, String schemeVersion, String logId) throws Exception {
        String message = "";
        String datasetId = ds.getJobDatasetId();
        String jobDatasetName = ds.getJobDatasetName();
        String condition = ds.getJobDatasetCondition();
        String key = ds.getJobDatasetKey();
        String keytype = ds.getJobDatasetKeytype();
        String keyvalue = ds.getJobDatasetKeyvalue();
        String orgCode = ds.getOrgCode();
        String datasourceId = ds.getDatasourceId();
        String config = ds.getConfig(); // database connection config
        DBHelper db = new DBHelper(datasourceId, config);
        DBType dbType = db.dbType;
        // Dataset mapping for this scheme version.
        List datasetString = stdService.getDatasetByScheme(schemeVersion, datasetId);
        JSONArray datasetList = new JSONArray(datasetString);
        if (datasetList != null && datasetList.length() > 0) {
            String stdTableName = datasetList.getJSONObject(0).optString("stdDatasetCode");
            String adapterTableName = datasetList.getJSONObject(0).optString("adapterDatasetCode");
            // Column mapping for this dataset.
            List colString = stdService.getDatacolByScheme(schemeVersion, datasetId);
            JSONArray colList = new JSONArray(colString);
            if (colList != null && colList.length() > 0) {
                // Build the adapted SELECT: adapter column AS standard column.
                String strSql = "Select '" + orgCode + "' as RSCOM_ORG_CODE";
                for (int i = 0; i < colList.length(); i++) {
                    JSONObject col = colList.getJSONObject(i);
                    String adapterMetadataCode = col.optString("adapterMetadataCode");
                    if (adapterMetadataCode.length() > 0) {
                        strSql += "," + adapterMetadataCode + " as " + col.optString("stdMetadataCode");
                    }
                }
                strSql += " from " + adapterTableName;
                String strWhere = " where 1=1";
                // Configured collection scope.
                if (condition != null && condition.length() > 0) {
                    strWhere += getConditionSql(dbType, condition);
                }
                // Incremental collection on the configured key column.
                // NOTE(review): keyvalue is concatenated into SQL; it originates
                // from our own job table, but parameterizing would be safer.
                String maxKey = "0";
                if (key != null && key.length() > 0) {
                    maxKey = key;
                    if (keytype.toUpperCase().equals("DATE")) // date-typed key
                    {
                        if (keyvalue != null && keyvalue.length() > 0) {
                            Date keyDate = new Date();
                            // Parse the stored string into a date.
                            keyDate = DateConvert.toDate(keyvalue);
                            // DB-specific date literal syntax.
                            strWhere += " and " + maxKey + ">'" + getDateSqlByDBType(dbType, keyDate) + "'";
                        }
                    } else if (keytype.toUpperCase().equals("VARCHAR")) // string-typed key, compared numerically
                    {
                        maxKey = getToNumberSqlByDBType(dbType, key);
                        if (keyvalue != null && keyvalue.length() > 0) {
                            strWhere += " and " + maxKey + ">'" + keyvalue + "'";
                        }
                    } else {
                        if (keyvalue != null && keyvalue.length() > 0) {
                            strWhere += " and " + maxKey + ">'" + keyvalue + "'";
                        }
                    }
                    strWhere += " order by " + maxKey;
                }
                strSql += strWhere;
                // Total row count and max key value.
                String sqlCount = "select count(1) as COUNT from (" + strSql + ")";
                String sqlMax = "select max(" + maxKey + ") as MAX_KEYVALUE from " + adapterTableName + strWhere;
                JSONObject objCount = db.load(sqlCount);
                if (objCount == null) {
                    if (db.errorMessage.length() > 0) {
                        throw new Exception(db.errorMessage);
                    } else {
                        throw new Exception("查询异常:" + sqlCount);
                    }
                } else {
                    int count = objCount.getInt("COUNT");
                    if (count == 0) // nothing to collect
                    {
                        message = "0条记录,无需采集。";
                    } else {
                        // Advance the incremental key to the current max value.
                        JSONObject objMax = db.load(sqlMax);
                        int successCount = 0;
                        // Guard: db.load may return null on query failure.
                        String maxKeyvalue = objMax == null ? "" : objMax.optString("MAX_KEYVALUE");
                        if (maxKeyvalue != null && maxKeyvalue.length() > 0) {
                            datacollectLogDao.updateJobDatasetKeyvalue(ds.getId(), maxKeyvalue);
                            logger.info("修改任务数据集最大值为" + maxKeyvalue + "。"); // text log
                        }
                        // Ceiling division: the previous count/maxNum + 1 produced a
                        // spurious empty page (rows == 0) when count was an exact
                        // multiple of maxNum, which was then logged as a failed batch.
                        int countPage = (count + maxNum - 1) / maxNum;
                        for (int i = 0; i < countPage; i++) {
                            int rows = maxNum;
                            if (i + 1 == countPage) {
                                rows = count - i * maxNum; // last page may be partial
                            }
                            String sql = getPageSqlByDBType(dbType, strSql, i * maxNum, rows); // DB-specific paging SQL
                            RsJobLogDetail detail = new RsJobLogDetail();
                            detail.setStartTime(new Date());
                            detail.setJobLogId(logId);
                            detail.setDatasourceId(datasourceId);
                            detail.setConfig(config);
                            detail.setStdDatasetCode(stdTableName);
                            detail.setJobDatasetId(datasetId);
                            detail.setJobDatasetName(ds.getJobDatasetName());
                            detail.setJobId(ds.getJobId());
                            detail.setJobSql(sql);
                            detail.setJobNum(i + 1);
                            detail.setJobDatasetRows(rows);
                            detail.setSchemeVersion(schemeVersion);
                            List<JSONObject> list = db.query(sql);
                            String msg = "";
                            if (list != null) {
                                msg = intoMongodb2(list, schemeVersion, stdTableName, colList); // empty string == success
                            } else {
                                if (db.errorMessage.length() > 0) {
                                    msg = db.errorMessage;
                                } else {
                                    msg = "查询数据为空!";
                                }
                            }
                            if (msg.length() > 0) {
                                // Record the page as failed in the detail log.
                                detail.setJobStatus("0");
                                detail.setJobContent(msg);
                            } else {
                                detail.setJobStatus("1");
                                detail.setJobContent("采集成功!");
                                successCount += rows;
                            }
                            detail.setEndTime(new Date());
                            datacollectLogDao.saveEntity(detail);
                        }
                        message = jobDatasetName + "采集成功" + successCount + "条数据,总条数" + count + "条。";
                    }
                }
            } else {
                throw new Exception(jobDatasetName + "数据集字段映射为空!");
            }
        } else {
            throw new Exception(jobDatasetName + "数据集映射为空!");
        }
        return message;
    }
}

+ 1 - 1
hos-rest/src/main/java/com/yihu/hos/rest/services/standard/StdService.java

@ -29,7 +29,7 @@ import java.util.Map;
/**
 * Created by hzp on 2016/1/20.
 */
@Service
@Service("StdService")
public class StdService {
    @Resource(name = StdDatasetService.BEAN_ID)

+ 0 - 1
hos-rest/src/main/java/com/yihu/hos/rest/services/standard/adapter/AdapterDatasetService.java

@ -27,7 +27,6 @@ import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.util.*;
@Transactional
@Service("AdapterDatasetService")
public class AdapterDatasetService extends SQLGeneralDAO {

+ 85 - 0
hos-rest/src/main/resources/hibernate/RsJobConfig.hbm.xml

@ -0,0 +1,85 @@
<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE hibernate-mapping PUBLIC "-//Hibernate/Hibernate Mapping DTD 3.0//EN"
"http://hibernate.sourceforge.net/hibernate-mapping-3.0.dtd">
<!-- 
    Mapping file autogenerated by MyEclipse Persistence Tools
-->
<hibernate-mapping>
    <class name="com.yihu.hos.rest.models.rs.RsJobConfig" table="rs_job_config" >
        <id name="id" type="java.lang.String">
            <column name="id" length="50" />
            <generator class="uuid" />
        </id>
        <property name="jobName" type="java.lang.String">
            <column name="job_name" length="50" not-null="true">
                <comment>任务名称</comment>
            </column>
        </property>
        <property name="jobInfo" type="java.lang.String">
            <column name="job_info" length="200">
                <comment>任务描述</comment>
            </column>
        </property>
        <property name="jobType" type="java.lang.String">
            <column name="job_type" length="10">
                <comment>任务类型(0--单次执行 1--周期执行 2--监听任务)</comment>
            </column>
        </property>
        <property name="schemeId" type="java.lang.String">
            <column name="scheme_id" length="50">
                <comment>适配方案</comment>
            </column>
        </property>
        <property name="schemeVersion" type="java.lang.String">
            <column name="scheme_version" length="50">
                <comment>适配方案版本</comment>
            </column>
        </property>
        <property name="jobNextTime" type="java.util.Date">
            <column name="job_next_time">
                <comment>任务下次执行时间</comment>
            </column>
        </property>
        <property name="valid" type="java.lang.String">
            <column name="valid" length="10" not-null="true">
                <comment>是否有效</comment>
            </column>
        </property>
        <property name="jobContent" type="java.lang.String">
            <column name="job_content" length="50">
                <comment>任务内容</comment>
            </column>
        </property>
        <property name="jobContentType" type="java.lang.String">
            <column name="job_content_type" length="10">
                <comment>任务内容类型</comment>
            </column>
        </property>
        <property name="repeatStartTime" type="java.util.Date">
            <column name="repeat_Start_Time">
                <comment>采集开始时间</comment>
            </column>
        </property>
        <property name="repeatEndTime" type="java.util.Date">
            <column name="repeat_End_Time">
                <comment>采集结束时间</comment>
            </column>
        </property>
        <property name="delayTime" type="java.lang.Integer">
            <column name="delay_Time">
                <comment>延迟时间</comment>
            </column>
        </property>
        <property name="flowId" type="java.lang.Integer">
            <column name="flow_id">
                <comment>流程ID</comment>
            </column>
        </property>
        <property name="flowTempId" type="java.lang.Integer">
            <column name="flow_temp_id">
                <comment>流程模板ID</comment>
            </column>
        </property>
    </class>
</hibernate-mapping>

+ 56 - 0
hos-rest/src/main/resources/hibernate/RsJobDataset.hbm.xml

@ -0,0 +1,56 @@
<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE hibernate-mapping PUBLIC "-//Hibernate/Hibernate Mapping DTD 3.0//EN"
"http://hibernate.sourceforge.net/hibernate-mapping-3.0.dtd">
<!-- 
    Mapping file autogenerated by MyEclipse Persistence Tools
-->
<hibernate-mapping>
    <class name="com.yihu.hos.rest.models.rs.RsJobDataset" table="rs_job_dataset" >
        <id name="id" type="java.lang.String">
            <column name="id" length="50" />
            <generator class="uuid" />
        </id>
        <property name="jobId" type="java.lang.String">
            <column name="job_id" length="50" not-null="true">
                <comment>任务id</comment>
            </column>
        </property>
        <property name="jobDatasetId" type="java.lang.String">
            <column name="job_dataset_id" length="50" not-null="true">
                <comment>数据集id</comment>
            </column>
        </property>
        <property name="jobDatasetName" type="java.lang.String">
            <column name="job_dataset_name" length="50">
                <comment>数据集中文名(冗余)</comment>
            </column>
        </property>
        <property name="jobDatasetKeyvalue" type="java.lang.String">
            <column name="job_dataset_keyvalue" length="50">
                <comment>数据集关联主键值(最大值)</comment>
            </column>
        </property>
        <property name="jobDatasetCondition" type="java.lang.String">
            <column name="job_dataset_condition" length="1000">
                <comment>数据集过滤条件</comment>
            </column>
        </property>
        <property name="sort" type="java.lang.Integer">
            <column name="sort">
                <comment>排序</comment>
            </column>
        </property>
        <property name="jobDatasetKey" type="java.lang.String">
            <column name="job_dataset_key" length="50">
                <comment>数据集增量字段</comment>
            </column>
        </property>
        <property name="jobDatasetKeytype" type="java.lang.String">
            <column name="job_dataset_keytype" length="50">
                <comment>数据集增量字段类型</comment>
            </column>
        </property>
    </class>
</hibernate-mapping>

+ 44 - 0
hos-rest/src/main/resources/hibernate/RsJobLog.hbm.xml

@ -0,0 +1,44 @@
<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE hibernate-mapping PUBLIC "-//Hibernate/Hibernate Mapping DTD 3.0//EN"
"http://hibernate.sourceforge.net/hibernate-mapping-3.0.dtd">
<!-- 
    Mapping file autogenerated by MyEclipse Persistence Tools
-->
<hibernate-mapping>
    <class name="com.yihu.hos.rest.models.rs.RsJobLog" table="rs_job_log" >
        <id name="id" type="java.lang.String">
            <column name="id" length="50" />
            <generator class="uuid" />
        </id>
        <property name="jobStartTime" type="java.util.Date">
            <column name="job_start_time" >
                <comment>任务开始执行时间</comment>
            </column>
        </property>
        <property name="jobEndTime" type="java.util.Date">
            <column name="job_end_time" >
                <comment>任务结束执行时间</comment>
            </column>
        </property>
        <property name="jobId" type="java.lang.String">
            <column name="job_id" length="50" not-null="true">
                <comment>任务id</comment>
            </column>
        </property>
        <property name="jobContent" type="java.lang.String">
            <column name="job_content" length="1000">
                <comment>任务执行情况</comment>
            </column>
        </property>
        <property name="jobDatasetCount" type="java.lang.Integer">
            <column name="Job_dataset_count">
                <comment>执行数据集总数</comment>
            </column>
        </property>
        <property name="jobDatasetSuccess" type="java.lang.Integer">
            <column name="Job_dataset_success">
                <comment>成功数据集总数</comment>
            </column>
        </property>
    </class>
</hibernate-mapping>

+ 104 - 0
hos-rest/src/main/resources/hibernate/RsJobLogDetail.hbm.xml

@ -0,0 +1,104 @@
<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE hibernate-mapping PUBLIC "-//Hibernate/Hibernate Mapping DTD 3.0//EN"
"http://hibernate.sourceforge.net/hibernate-mapping-3.0.dtd">
<!-- 
    Mapping file autogenerated by MyEclipse Persistence Tools
-->
<hibernate-mapping>
    <class name="com.yihu.hos.rest.models.rs.RsJobLogDetail" table="rs_job_log_detail" >
        <id name="id" type="java.lang.String">
            <column name="id" length="50" />
            <generator class="uuid" />
        </id>
        <property name="startTime" type="java.util.Date">
            <column name="start_time">
                <comment>开始执行时间</comment>
            </column>
        </property>
        <property name="endTime" type="java.util.Date">
            <column name="end_time">
                <comment>结束执行时间</comment>
            </column>
        </property>
        <property name="jobLogId" type="java.lang.String">
            <column name="job_log_id" length="10">
                <comment>主日志ID</comment>
            </column>
        </property>
        <property name="jobStatus" type="java.lang.String">
            <column name="job_status" length="10">
                <comment>是否执行成功</comment>
            </column>
        </property>
        <property name="jobContent" type="java.lang.String">
            <column name="job_content" length="1000">
                <comment>任务执行情况</comment>
            </column>
        </property>
        <property name="datasourceId" type="java.lang.String">
            <column name="datasource_id" length="50">
                <comment>数据源ID</comment>
            </column>
        </property>
        <property name="config" type="java.lang.String">
            <column name="config" length="200">
                <comment>数据源配置</comment>
            </column>
        </property>
        <property name="stdDatasetCode" type="java.lang.String">
            <column name="std_dataset_code" length="50">
                <comment>标准数据集名称</comment>
            </column>
        </property>
        <property name="jobDatasetId" type="java.lang.String">
            <column name="job_dataset_id" length="50">
                <comment>数据集ID</comment>
            </column>
        </property>
        <property name="jobId" type="java.lang.String">
            <column name="job_id" length="50">
                <comment>任务ID</comment>
            </column>
        </property>
        <property name="jobDatasetName" type="java.lang.String">
            <column name="job_dataset_name" length="50">
                <comment>数据集中文名</comment>
            </column>
        </property>
        <property name="jobSql" type="java.lang.String">
            <column name="job_sql" length="4000">
                <comment>采集sql语句</comment>
            </column>
        </property>
        <property name="jobNum" type="java.lang.Integer">
            <column name="job_num">
                <comment>分页采集页码</comment>
            </column>
        </property>
        <property name="repeatStartTime" type="java.util.Date">
            <column name="repeat_start_time">
                <comment>补采开始时间</comment>
            </column>
        </property>
        <property name="repeatEndTime" type="java.util.Date">
            <column name="repeat_end_time">
                <comment>补采结束时间</comment>
            </column>
        </property>
        <property name="repeatJobContent" type="java.lang.String">
            <column name="repeat_job_content" length="4000">
                <comment>补采执行情况</comment>
            </column>
        </property>
        <property name="schemeVersion" type="java.lang.String">
            <column name="scheme_version" length="50">
                <comment>标准版本号</comment>
            </column>
        </property>
        <property name="jobDatasetRows" type="java.lang.Integer">
            <column name="job_dataset_rows">
                <comment>采集行数</comment>
            </column>
        </property>
    </class>
</hibernate-mapping>

+ 3 - 3
hos-rest/src/main/resources/spring/applicationContext.xml

@ -24,9 +24,9 @@
    <bean id="dataSource" class="org.apache.commons.dbcp2.BasicDataSource" destroy-method="close">
        <property name="driverClassName" value="com.mysql.jdbc.Driver"/>
        <property name="url" value="jdbc:mysql://172.19.103.71:3306/esb?useUnicode=true&amp;characterEncoding=UTF-8"/>
        <property name="username" value="hos"/>
        <property name="password" value="hos"/>
        <property name="url" value="jdbc:mysql://192.168.1.220:3306/esb?useUnicode=true&amp;characterEncoding=UTF-8"/>
        <property name="username" value="hos2"/>
        <property name="password" value="hos2"/>
        <property name="initialSize" value="1"/>
        <property name="maxTotal" value="100"/>
        <property name="maxIdle" value="50"/>

+ 4 - 2
hos-web-framework/src/main/java/com/yihu/hos/web/framework/dao/SQLGeneralDAO.java

@ -22,6 +22,8 @@ import org.springframework.jdbc.core.BeanPropertyRowMapper;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.RowMapper;
import org.springframework.orm.hibernate4.HibernateTemplate;
import org.springframework.stereotype.Repository;
import org.springframework.transaction.annotation.Transactional;
import javax.annotation.Resource;
import java.io.IOException;
@ -45,8 +47,8 @@ import java.util.Map;
 * @version 1.0
 * Created at 2016/8/8.
 */
//@Transactional
//@Repository("sqlGeneralDAO")
@Transactional
@Repository("sqlGeneralDAO")
public class SQLGeneralDAO implements XSQLGeneralDAO {
    static private final Logger logger = LoggerFactory.getLogger(SQLGeneralDAO.class);

+ 1 - 1
src/main/webapp/WEB-INF/ehr/jsp/common/indexJs.jsp

@ -105,7 +105,7 @@
                {id: 92, pid: 9, text: '字典管理', url: '${contextRoot}/dict/initial'},
                {id: 93, pid: 9, text: '菜单配置', url: '${contextRoot}/menu/initial'},
                {id: 94, pid: 9, text: '菜单按钮配置', url: '${contextRoot}/menu/menuAction/initial'},
                <%--{id: 95, pid: 9, text: '数据源配置', url: '${contextRoot}/datasource/configSources'},--%>
                {id: 95, pid: 9, text: '数据源配置', url: '${contextRoot}/datasource/configSources'},
            ];
            me.menuTree = $('#ulTree').ligerTree({
                data: menu,

+ 1 - 1
src/main/webapp/WEB-INF/ehr/jsp/system/datasource/configSourcesJs.jsp

@ -14,7 +14,7 @@
                url: '${contextRoot}/datasource/getDatasource',
                columns: [
                    {display:'数据源名称',name:'name', width: '25%'},
                    {display:'机构代码',name:'orgId', width: '25%', dict:true,dictName:"SYSTEM_ORGANIZATION"},
                    {display:'机构名称',name:'orgId', width: '25%', dict:true,dictName:"SYSTEM_ORGANIZATION"},
                    {display:'数据源类型',name:'type', width: '25%', dict:true,dictName:"DATASOURCE_TYPE"},
                    {display:'操作', name: 'operator', width: '25%', render: function (row) {
                        var html = '<div class="m-inline-buttons" style="width:80px;">';