wangweiqun 6 anni fa
commit
287ebddb0c
57 ha cambiato i file con 12597 aggiunte e 0 eliminazioni
  1. 1159 0
      ehr-cloud/pom.xml
  2. 606 0
      ehr-ms-parent-pom/pom.xml
  3. 154 0
      pom.xml
  4. 53 0
      src/main/java/com/yihu/ehr/SvrPackAnalyzer.java
  5. 48 0
      src/main/java/com/yihu/ehr/analyze/config/RequireDatasetsConfig.java
  6. 42 0
      src/main/java/com/yihu/ehr/analyze/config/SchedulerConfig.java
  7. 27 0
      src/main/java/com/yihu/ehr/analyze/config/TenantConfiguration.java
  8. 115 0
      src/main/java/com/yihu/ehr/analyze/controller/DailyReportEndPoint.java
  9. 83 0
      src/main/java/com/yihu/ehr/analyze/controller/PackAnalyzeEndPoint.java
  10. 412 0
      src/main/java/com/yihu/ehr/analyze/controller/PackQcReportEndPoint.java
  11. 153 0
      src/main/java/com/yihu/ehr/analyze/controller/PackStatisticsEndPoint.java
  12. 61 0
      src/main/java/com/yihu/ehr/analyze/controller/SchedulerEndPoint.java
  13. 310 0
      src/main/java/com/yihu/ehr/analyze/controller/dataQuality/DataQualityStatisticsEndPoint.java
  14. 1090 0
      src/main/java/com/yihu/ehr/analyze/controller/dataQuality/ExportEndPoint.java
  15. 165 0
      src/main/java/com/yihu/ehr/analyze/controller/dataQuality/WarningRecordEndPoint.java
  16. 595 0
      src/main/java/com/yihu/ehr/analyze/controller/dataQuality/WarningSettingEndPoint.java
  17. 42 0
      src/main/java/com/yihu/ehr/analyze/controller/qc/QcRuleCheckEndpoint.java
  18. 23 0
      src/main/java/com/yihu/ehr/analyze/dao/DqDatasetWarningDao.java
  19. 17 0
      src/main/java/com/yihu/ehr/analyze/dao/DqPaltformReceiveWarningDao.java
  20. 23 0
      src/main/java/com/yihu/ehr/analyze/dao/DqPaltformResourceWarningDao.java
  21. 17 0
      src/main/java/com/yihu/ehr/analyze/dao/DqPaltformUploadWarningDao.java
  22. 11 0
      src/main/java/com/yihu/ehr/analyze/dao/DqWarningRecordDao.java
  23. 107 0
      src/main/java/com/yihu/ehr/analyze/feign/HosAdminServiceClient.java
  24. 34 0
      src/main/java/com/yihu/ehr/analyze/feign/PackageMgrClient.java
  25. 28 0
      src/main/java/com/yihu/ehr/analyze/feign/RedisServiceClient.java
  26. 58 0
      src/main/java/com/yihu/ehr/analyze/feign/StandardServiceClient.java
  27. 65 0
      src/main/java/com/yihu/ehr/analyze/job/PackDatasetDetailsJob.java
  28. 30 0
      src/main/java/com/yihu/ehr/analyze/job/PackageAnalyzeJob.java
  29. 100 0
      src/main/java/com/yihu/ehr/analyze/job/QcDataSetDetailJob.java
  30. 26 0
      src/main/java/com/yihu/ehr/analyze/job/WarningQuestionJob.java
  31. 139 0
      src/main/java/com/yihu/ehr/analyze/model/AdapterDatasetModel.java
  32. 187 0
      src/main/java/com/yihu/ehr/analyze/model/AdapterMetadataModel.java
  33. 334 0
      src/main/java/com/yihu/ehr/analyze/model/ZipPackage.java
  34. 71 0
      src/main/java/com/yihu/ehr/analyze/service/RedisService.java
  35. 1355 0
      src/main/java/com/yihu/ehr/analyze/service/dataQuality/DataQualityStatisticsService.java
  36. 53 0
      src/main/java/com/yihu/ehr/analyze/service/dataQuality/DqDatasetWarningService.java
  37. 98 0
      src/main/java/com/yihu/ehr/analyze/service/dataQuality/DqPaltformReceiveWarningService.java
  38. 90 0
      src/main/java/com/yihu/ehr/analyze/service/dataQuality/DqPaltformResourceWarningService.java
  39. 90 0
      src/main/java/com/yihu/ehr/analyze/service/dataQuality/DqPaltformUploadWarningService.java
  40. 66 0
      src/main/java/com/yihu/ehr/analyze/service/dataQuality/WarningProblemService.java
  41. 824 0
      src/main/java/com/yihu/ehr/analyze/service/dataQuality/WarningQuestionService.java
  42. 258 0
      src/main/java/com/yihu/ehr/analyze/service/dataQuality/WarningRecordService.java
  43. 80 0
      src/main/java/com/yihu/ehr/analyze/service/dataQuality/WarningSettingService.java
  44. 859 0
      src/main/java/com/yihu/ehr/analyze/service/pack/PackQcReportService.java
  45. 988 0
      src/main/java/com/yihu/ehr/analyze/service/pack/PackStatisticsService.java
  46. 182 0
      src/main/java/com/yihu/ehr/analyze/service/pack/PackageAnalyzeService.java
  47. 25 0
      src/main/java/com/yihu/ehr/analyze/service/pack/PackageAnalyzer.java
  48. 149 0
      src/main/java/com/yihu/ehr/analyze/service/pack/StdPackageAnalyzer.java
  49. 107 0
      src/main/java/com/yihu/ehr/analyze/service/qc/DataElementValue.java
  50. 239 0
      src/main/java/com/yihu/ehr/analyze/service/qc/PackageQcService.java
  51. 255 0
      src/main/java/com/yihu/ehr/analyze/service/qc/QcRuleCheckService.java
  52. 57 0
      src/main/java/com/yihu/ehr/analyze/service/qc/StatusReportService.java
  53. 126 0
      src/main/java/com/yihu/ehr/analyze/service/scheduler/SchedulerService.java
  54. 100 0
      src/main/java/com/yihu/ehr/analyze/service/scheduler/WarningSchedulerService.java
  55. 159 0
      src/main/resources/application.yml
  56. 29 0
      src/main/resources/banner.txt
  57. 23 0
      src/main/resources/bootstrap.yml

+ 1159 - 0
ehr-cloud/pom.xml

@@ -0,0 +1,1159 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <groupId>com.yihu.ehr</groupId>
    <artifactId>ehr-cloud</artifactId>
    <version>1.2.0</version>
    <packaging>pom</packaging>
    <name>ehr-cloud</name>
    <description>EHR parent pom for all</description>
    <url>http://ehr.yihu.com</url>
    <organization>
        <name>JKZL Software, Inc.</name>
        <url>http://www.yihu.com</url>
    </organization>
    <licenses>
        <license>
            <name>Apache License, Version 2.0</name>
            <url>http://www.apache.org/licenses/LICENSE-2.0</url>
        </license>
    </licenses>
    <developers>
        <developer>
            <id>sand</id>
            <name>Sand Wen</name>
            <email>sand.fj.wen@gmail.com</email>
            <organization>JKZL Software, Inc.</organization>
            <organizationUrl>http://www.yihu.com</organizationUrl>
            <roles>
                <role>Project lead</role>
                <role>Project designer</role>
                <role>Project programmer</role>
            </roles>
        </developer>
        <developer>
            <id>yzh</id>
            <name>叶泽华</name>
            <email>yzh@qq.com</email>
            <organization>JKZL Software, Inc.</organization>
            <organizationUrl>http://www.yihu.com</organizationUrl>
            <roles>
                <role>Project programmer</role>
            </roles>
        </developer>
        <developer>
            <id>cws</id>
            <name>陈维山</name>
            <email>hill9868@qq.com</email>
            <organization>JKZL Software, Inc.</organization>
            <organizationUrl>http://www.yihu.com</organizationUrl>
            <roles>
                <role>Project programmer</role>
            </roles>
        </developer>
        <developer>
            <id>hzy</id>
            <name>黄志勇</name>
            <email>hzy@qq.com</email>
            <organization>JKZL Software, Inc.</organization>
            <organizationUrl>http://www.yihu.com</organizationUrl>
            <roles>
                <role>Project programmer</role>
            </roles>
        </developer>
    </developers>
    <properties>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <version.spring-framework>4.3.13.RELEASE</version.spring-framework>
        <version.spring-boot>1.5.9.RELEASE</version.spring-boot>
        <version.spring-cloud-starter>1.3.2.RELEASE</version.spring-cloud-starter>
        <version.spring-cloud>1.4.2.RELEASE</version.spring-cloud>
        <version.spring-security>4.2.3.RELEASE</version.spring-security>
        <version.spring-oauth2>2.0.14.RELEASE</version.spring-oauth2>
        <version.spring-session>1.3.1.RELEASE</version.spring-session>
        <version.spring-session-hazelcast>2.0.1.RELEASE</version.spring-session-hazelcast>
        <version.spring-data-commons>1.13.9.RELEASE</version.spring-data-commons>
        <version.spring-data-hadoop>2.2.0.RELEASE</version.spring-data-hadoop>
        <version.spring-data-solr>2.1.3.RELEASE</version.spring-data-solr>
        <version.spring-data-redis>1.7.1.RELEASE</version.spring-data-redis>
        <version.spring-data-jpa>1.11.10.RELEASE</version.spring-data-jpa>
        <version.spring-kafka>1.0.5.RELEASE</version.spring-kafka>
        <version.commons-bean-utils>1.9.2</version.commons-bean-utils>
        <version.commons-codec>1.9</version.commons-codec>
        <version.commons-collections>3.2.1</version.commons-collections>
        <version.commons-compress>1.9</version.commons-compress>
        <version.commons-dbcp2>2.1.1</version.commons-dbcp2>
        <version.commons-dbutils>1.6</version.commons-dbutils>
        <version.commons-io>2.4</version.commons-io>
        <version.commons-lang3>3.2.1</version.commons-lang3>
        <version.commons-pool2>2.4.2</version.commons-pool2>
        <version.zookeeper>3.4.6</version.zookeeper>
        <version.hadoop-client>2.6.5</version.hadoop-client>
        <version.hbase-client>1.1.1</version.hbase-client>
        <version.solr>5.5.4</version.solr>
        <version.hibernate>4.3.11.Final</version.hibernate>
        <version.hibernate-validator>6.0.10.Final</version.hibernate-validator>
        <version.hibernate-jpa-api>1.0.0.Final</version.hibernate-jpa-api>
        <version.http-core>4.4.3</version.http-core>
        <version.http-client>4.5.1</version.http-client>
        <version.http-mime>4.5.1</version.http-mime>
        <version.io-dropwizard-metrics>3.1.2</version.io-dropwizard-metrics>
        <version.java>1.8</version.java>
        <version.jackson>2.6.6</version.jackson>
        <version.jedis>2.9.0</version.jedis>
        <version.jcl-over-slf4j>1.7.19</version.jcl-over-slf4j>
        <version.jul-over-slf4j>1.7.21</version.jul-over-slf4j>
        <version.joda-time>2.8.2</version.joda-time>
        <version.junit>4.12</version.junit>
        <version.logging>1.2</version.logging>
        <version.log4j>1.2.17</version.log4j>
        <version.log4j2>2.4.1</version.log4j2>
        <version.logback>1.1.7</version.logback>
        <version.mysql>5.1.45</version.mysql>
        <version.pinyin4j>2.5.0</version.pinyin4j>
        <version.quartz>2.2.3</version.quartz>
        <version.servlet-api>3.1.0</version.servlet-api>
        <version.slf4j>1.7.21</version.slf4j>
        <version.statsd-client>3.1.0</version.statsd-client>
        <version.swagger>2.7.0</version.swagger>
        <version.swagger-ui>2.7.0</version.swagger-ui>
        <version.thrift>0.9.1</version.thrift>
        <version.tomcat-embed>8.5.27</version.tomcat-embed>
        <version.websocket-api>1.1</version.websocket-api>
        <version.zip4j>1.3.2</version.zip4j>
        <version.poi>3.12</version.poi>
        <version.scala>2.10.6</version.scala>
        <version.elasticsearch>2.1.0</version.elasticsearch>
        <version.elasticsearch-sql>2.4.1.0</version.elasticsearch-sql>
        <version.jest>2.4.0</version.jest>
        <version.alibaba-druid>1.0.15</version.alibaba-druid>
        <version.feign>9.5.0</version.feign>
        <version.hystrix>1.5.10</version.hystrix>
        <version.archaius>0.7.5</version.archaius>
        <version.ehr>1.2.0</version.ehr>
        <version.eip>1.3.1</version.eip>
        <version.json>20160212</version.json>
        <version.json-lib>2.4</version.json-lib>
        <version.fastjson>1.2.17</version.fastjson>
        <version.commons-net>3.3</version.commons-net>
        <version.jxl>2.6</version.jxl>
        <version.fastdfs>1.27</version.fastdfs>
        <version.spring.boot.admin>1.5.7</version.spring.boot.admin>
        <version.jettison>1.3.7</version.jettison>
    </properties>
    <dependencyManagement>
        <dependencies>
            <!--<dependency>-->
                <!--<groupId>org.springframework.boot</groupId>-->
                <!--<artifactId>spring-boot-dependencies</artifactId>-->
                <!--<version>1.5.9.RELEASE</version>-->
                <!--<type>pom</type>-->
                <!--<scope>import</scope>-->
            <!--</dependency>-->
            <!--<dependency>-->
                <!--<groupId>org.springframework.cloud</groupId>-->
                <!--<artifactId>spring-cloud-dependencies</artifactId>-->
                <!--<version>Finchley.M5</version>-->
                <!--<type>pom</type>-->
                <!--<scope>import</scope>-->
            <!--</dependency>-->
            <!-- Base libraries -->
            <dependency>
                <groupId>javax.servlet</groupId>
                <artifactId>javax.servlet-api</artifactId>
                <version>${version.servlet-api}</version>
                <scope>provided</scope>
            </dependency>
            <dependency>
                <groupId>junit</groupId>
                <artifactId>junit</artifactId>
                <version>${version.junit}</version>
                <scope>test</scope>
            </dependency>
            <!-- Spring framework family -->
            <dependency>
                <groupId>org.springframework</groupId>
                <artifactId>spring-aop</artifactId>
                <version>${version.spring-framework}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework</groupId>
                <artifactId>spring-aspects</artifactId>
                <version>${version.spring-framework}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework</groupId>
                <artifactId>spring-beans</artifactId>
                <version>${version.spring-framework}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework</groupId>
                <artifactId>spring-context</artifactId>
                <version>${version.spring-framework}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework</groupId>
                <artifactId>spring-context-support</artifactId>
                <version>${version.spring-framework}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework</groupId>
                <artifactId>spring-core</artifactId>
                <version>${version.spring-framework}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework</groupId>
                <artifactId>spring-expression</artifactId>
                <version>${version.spring-framework}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework</groupId>
                <artifactId>spring-jdbc</artifactId>
                <version>${version.spring-framework}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework</groupId>
                <artifactId>spring-messaging</artifactId>
                <version>${version.spring-framework}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework</groupId>
                <artifactId>spring-orm</artifactId>
                <version>${version.spring-framework}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework</groupId>
                <artifactId>spring-oxm</artifactId>
                <version>${version.spring-framework}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework</groupId>
                <artifactId>spring-test</artifactId>
                <version>${version.spring-framework}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework</groupId>
                <artifactId>spring-tx</artifactId>
                <version>${version.spring-framework}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework</groupId>
                <artifactId>spring-web</artifactId>
                <version>${version.spring-framework}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework</groupId>
                <artifactId>spring-webmvc</artifactId>
                <version>${version.spring-framework}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <!-- https://mvnrepository.com/artifact/org.springframework.kafka/spring-kafka -->
            <dependency>
                <groupId>org.springframework.kafka</groupId>
                <artifactId>spring-kafka</artifactId>
                <version>${version.spring-kafka}</version>
            </dependency>
            <!-- Spring boot family -->
            <dependency>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot</artifactId>
                <version>${version.spring-boot}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-actuator</artifactId>
                <version>${version.spring-boot}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-autoconfigure</artifactId>
                <version>${version.spring-boot}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-devtools</artifactId>
                <version>${version.spring-boot}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-starter</artifactId>
                <version>${version.spring-boot}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-starter-actuator</artifactId>
                <version>${version.spring-boot}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-starter-aop</artifactId>
                <version>${version.spring-boot}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-starter-data-jpa</artifactId>
                <version>${version.spring-boot}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-starter-data-redis</artifactId>
                <version>${version.spring-boot}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-starter-batch</artifactId>
                <version>${version.spring-boot}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-starter-jdbc</artifactId>
                <version>${version.spring-boot}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-starter-security</artifactId>
                <version>${version.spring-boot}</version>
                <scope>${dependency.scope}</scope>
                <optional>true</optional>
            </dependency>
            <dependency>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-starter-thymeleaf</artifactId>
                <version>${version.spring-boot}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-starter-test</artifactId>
                <version>${version.spring-boot}</version>
                <scope>test</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-starter-web</artifactId>
                <version>${version.spring-boot}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-starter-tomcat</artifactId>
                <version>${version.spring-boot}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-starter-data-mongodb</artifactId>
                <version>${version.spring-boot}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <!-- Spring cloud family -->
            <dependency>
                <groupId>org.springframework.cloud</groupId>
                <artifactId>spring-cloud-starter</artifactId>
                <version>${version.spring-cloud-starter}</version>
            </dependency>
            <dependency>
                <groupId>org.springframework.cloud</groupId>
                <artifactId>spring-cloud-config-server</artifactId>
                <version>${version.spring-cloud}</version>
            </dependency>
            <dependency>
                <groupId>org.springframework.cloud</groupId>
                <artifactId>spring-cloud-starter-archaius</artifactId>
                <version>${version.spring-cloud}</version>
            </dependency>
            <dependency>
                <groupId>org.springframework.cloud</groupId>
                <artifactId>spring-cloud-starter-config</artifactId>
                <version>${version.spring-cloud}</version>
            </dependency>
            <dependency>
                <groupId>org.springframework.cloud</groupId>
                <artifactId>spring-cloud-starter-eureka</artifactId>
                <version>${version.spring-cloud}</version>
            </dependency>
            <dependency>
                <groupId>org.springframework.cloud</groupId>
                <artifactId>spring-cloud-starter-eureka-server</artifactId>
                <version>${version.spring-cloud}</version>
            </dependency>
            <dependency>
                <groupId>org.springframework.cloud</groupId>
                <artifactId>spring-cloud-netflix-eureka</artifactId>
                <version>${version.spring-cloud}</version>
            </dependency>
            <dependency>
                <groupId>org.springframework.cloud</groupId>
                <artifactId>spring-cloud-starter-feign</artifactId>
                <version>${version.spring-cloud}</version>
            </dependency>
            <dependency>
                <groupId>org.springframework.cloud</groupId>
                <artifactId>spring-cloud-starter-ribbon</artifactId>
                <version>${version.spring-cloud}</version>
            </dependency>
            <dependency>
                <groupId>org.springframework.cloud</groupId>
                <artifactId>spring-cloud-starter-zuul</artifactId>
                <version>${version.spring-cloud}</version>
            </dependency>
            <dependency>
                <groupId>org.springframework.cloud</groupId>
                <artifactId>spring-cloud-starter-hystrix</artifactId>
                <version>${version.spring-cloud}</version>
            </dependency>
            <dependency>
                <groupId>org.springframework.cloud</groupId>
                <artifactId>spring-cloud-starter-hystrix-dashboard</artifactId>
                <version>${version.spring-cloud}</version>
            </dependency>
            <!-- Feign -->
            <dependency>
                <groupId>io.github.openfeign</groupId>
                <artifactId>feign-core</artifactId>
                <version>${version.feign}</version>
            </dependency>
            <!-- Hystrix -->
            <dependency>
                <groupId>com.netflix.hystrix</groupId>
                <artifactId>hystrix-core</artifactId>
                <version>${version.hystrix}</version>
            </dependency>
            <!-- Archaius -->
            <dependency>
                <groupId>com.netflix.archaius</groupId>
                <artifactId>archaius-core</artifactId>
                <version>${version.archaius}</version>
            </dependency>
            <!-- Spring data family -->
            <dependency>
                <groupId>org.springframework.data</groupId>
                <artifactId>spring-data-commons</artifactId>
                <version>${version.spring-data-commons}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework.data</groupId>
                <artifactId>spring-data-hadoop-hbase</artifactId>
                <version>${version.spring-data-hadoop}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework.data</groupId>
                <artifactId>spring-data-solr</artifactId>
                <version>${version.spring-data-solr}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <!-- Spring session family-->
            <dependency>
                <groupId>org.springframework.session</groupId>
                <artifactId>spring-session-hazelcast</artifactId>
                <version>${version.spring-session-hazelcast}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework.session</groupId>
                <artifactId>spring-session</artifactId>
                <version>${version.spring-session}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework.session</groupId>
                <artifactId>spring-session-data-redis</artifactId>
                <version>${version.spring-session}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <!-- Spring Security -->
            <!--<dependency>
                <groupId>org.springframework.security</groupId>
                <artifactId>spring-security-config</artifactId>
                <version>${version.spring-security}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework.security</groupId>
                <artifactId>spring-security-core</artifactId>
                <version>${version.spring-security}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework.security</groupId>
                <artifactId>spring-security-crypto</artifactId>
                <version>${version.spring-security}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework.security</groupId>
                <artifactId>spring-security-web</artifactId>
                <version>${version.spring-security}</version>
                <scope>${dependency.scope}</scope>
            </dependency>-->
            <!-- Oauth2 -->
            <dependency>
                <groupId>org.springframework.security.oauth</groupId>
                <artifactId>spring-security-oauth2</artifactId>
                <version>${version.spring-oauth2}</version>
                <scope>${dependency.scope}</scope>
                <exclusions>
                    <exclusion>
                        <groupId>org.springframework.security</groupId>
                        <artifactId>*</artifactId>
                    </exclusion>
                </exclusions>
            </dependency>
            <!--Jackson library -->
            <dependency>
                <groupId>com.fasterxml.jackson.core</groupId>
                <artifactId>jackson-annotations</artifactId>
                <version>${version.jackson}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>com.fasterxml.jackson.core</groupId>
                <artifactId>jackson-core</artifactId>
                <version>${version.jackson}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>com.fasterxml.jackson.core</groupId>
                <artifactId>jackson-databind</artifactId>
                <version>${version.jackson}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <!-- Http library -->
            <dependency>
                <groupId>org.apache.httpcomponents</groupId>
                <artifactId>httpcore</artifactId>
                <version>${version.http-core}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.apache.httpcomponents</groupId>
                <artifactId>httpclient</artifactId>
                <version>${version.http-client}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.apache.httpcomponents</groupId>
                <artifactId>httpmime</artifactId>
                <version>${version.http-mime}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <!-- XML parsing helpers (Woodstox, Noggit) and Apache Commons libraries -->
            <dependency>
                <groupId>org.codehaus.woodstox</groupId>
                <artifactId>stax2-api</artifactId>
                <version>3.1.4</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.codehaus.woodstox</groupId>
                <artifactId>woodstox-core-asl</artifactId>
                <version>4.4.1</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.noggit</groupId>
                <artifactId>noggit</artifactId>
                <version>0.6</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.apache.commons</groupId>
                <artifactId>commons-lang3</artifactId>
                <version>${version.commons-lang3}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.apache.poi</groupId>
                <artifactId>poi</artifactId>
                <version>${version.poi}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>commons-beanutils</groupId>
                <artifactId>commons-beanutils</artifactId>
                <version>${version.commons-bean-utils}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.apache.commons</groupId>
                <artifactId>commons-pool2</artifactId>
                <version>${version.commons-pool2}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>commons-dbutils</groupId>
                <artifactId>commons-dbutils</artifactId>
                <version>${version.commons-dbutils}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.apache.commons</groupId>
                <artifactId>commons-dbcp2</artifactId>
                <version>${version.commons-dbcp2}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>commons-io</groupId>
                <artifactId>commons-io</artifactId>
                <version>${version.commons-io}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>commons-collections</groupId>
                <artifactId>commons-collections</artifactId>
                <version>${version.commons-collections}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>commons-codec</groupId>
                <artifactId>commons-codec</artifactId>
                <version>${version.commons-codec}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.apache.commons</groupId>
                <artifactId>commons-compress</artifactId>
                <version>${version.commons-compress}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <!-- Mysql library -->
            <dependency>
                <groupId>mysql</groupId>
                <artifactId>mysql-connector-java</artifactId>
                <version>${version.mysql}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <!-- Hibernate framework library -->
            <dependency>
                <groupId>org.hibernate</groupId>
                <artifactId>hibernate-core</artifactId>
                <version>${version.hibernate}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <!-- NOTE(review): hibernate-validator is the only Hibernate artifact
                 here without <scope>${dependency.scope}</scope>; if that is
                 intentional (validator needed at compile scope in all modules),
                 a short comment here would prevent someone "fixing" it later -
                 otherwise add the scope for consistency. -->
            <dependency>
                <groupId>org.hibernate</groupId>
                <artifactId>hibernate-validator</artifactId>
                <version>${version.hibernate-validator}</version>
            </dependency>
            <dependency>
                <groupId>org.hibernate</groupId>
                <artifactId>hibernate-entitymanager</artifactId>
                <version>${version.hibernate}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.hibernate.javax.persistence</groupId>
                <artifactId>hibernate-jpa-2.1-api</artifactId>
                <version>${version.hibernate-jpa-api}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <!-- Zookeeper library -->
            <dependency>
                <groupId>org.apache.zookeeper</groupId>
                <artifactId>zookeeper</artifactId>
                <version>${version.zookeeper}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <!-- Hadoop library -->
            <dependency>
                <groupId>org.apache.hadoop</groupId>
                <artifactId>hadoop-annotations</artifactId>
                <version>${version.hadoop-client}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.apache.hadoop</groupId>
                <artifactId>hadoop-auth</artifactId>
                <version>${version.hadoop-client}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.apache.hadoop</groupId>
                <artifactId>hadoop-common</artifactId>
                <version>${version.hadoop-client}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.apache.hadoop</groupId>
                <artifactId>hadoop-distcp</artifactId>
                <version>${version.hadoop-client}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.apache.hadoop</groupId>
                <artifactId>hadoop-hdfs</artifactId>
                <version>${version.hadoop-client}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.apache.hadoop</groupId>
                <artifactId>hadoop-mapreduce-client-common</artifactId>
                <version>${version.hadoop-client}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.apache.hadoop</groupId>
                <artifactId>hadoop-mapreduce-client-core</artifactId>
                <version>${version.hadoop-client}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.apache.hadoop</groupId>
                <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
                <version>${version.hadoop-client}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.apache.hadoop</groupId>
                <artifactId>hadoop-mapreduce-client-shuffle</artifactId>
                <version>${version.hadoop-client}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.apache.hadoop</groupId>
                <artifactId>hadoop-streaming</artifactId>
                <version>${version.hadoop-client}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.apache.hadoop</groupId>
                <artifactId>hadoop-yarn-api</artifactId>
                <version>${version.hadoop-client}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.apache.hadoop</groupId>
                <artifactId>hadoop-yarn-client</artifactId>
                <version>${version.hadoop-client}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.apache.hadoop</groupId>
                <artifactId>hadoop-yarn-common</artifactId>
                <version>${version.hadoop-client}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.apache.hadoop</groupId>
                <artifactId>hadoop-yarn-server-common</artifactId>
                <version>${version.hadoop-client}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.apache.hadoop</groupId>
                <artifactId>hadoop-yarn-server-nodemanager</artifactId>
                <version>${version.hadoop-client}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.apache.hbase</groupId>
                <artifactId>hbase-client</artifactId>
                <version>${version.hbase-client}</version>
                <scope>${dependency.scope}</scope>
                <exclusions>
                    <exclusion>
                        <groupId>org.apache.hadoop</groupId>
                        <artifactId>*</artifactId>
                    </exclusion>
                    <exclusion>
                        <groupId>org.slf4j</groupId>
                        <artifactId>slf4j-log4j12</artifactId>
                    </exclusion>
                </exclusions>
            </dependency>
            <!-- NOTE(review): unlike hbase-client above, hbase-common and
                 hbase-protocol declare no <scope>${dependency.scope}</scope>,
                 and only hbase-client excludes the org.apache.hadoop transitive
                 tree - confirm whether the asymmetry is intentional; otherwise
                 align all three HBase artifacts. slf4j-log4j12 is excluded from
                 each to avoid binding two SLF4J backends (logback is the one
                 managed in this POM). -->
            <dependency>
                <groupId>org.apache.hbase</groupId>
                <artifactId>hbase-common</artifactId>
                <version>${version.hbase-client}</version>
                <exclusions>
                    <exclusion>
                        <groupId>org.slf4j</groupId>
                        <artifactId>slf4j-log4j12</artifactId>
                    </exclusion>
                </exclusions>
            </dependency>
            <dependency>
                <groupId>org.apache.hbase</groupId>
                <artifactId>hbase-protocol</artifactId>
                <version>${version.hbase-client}</version>
                <exclusions>
                    <exclusion>
                        <groupId>org.slf4j</groupId>
                        <artifactId>slf4j-log4j12</artifactId>
                    </exclusion>
                </exclusions>
            </dependency>
            <!-- export library -->
            <dependency>
                <groupId>net.sourceforge.jexcelapi</groupId>
                <artifactId>jxl</artifactId>
                <version>${version.jxl}</version>
            </dependency>
            <!-- Google library -->
            <!-- NOTE(review): these five artifacts hard-code literal versions,
                 breaking the ${version.*} property convention used by the rest
                 of this dependencyManagement section - consider extracting them
                 to properties. Guava 18.0 (2014-era) is also far behind current
                 releases; check downstream compatibility before bumping, but a
                 review of known advisories for this version is advisable. -->
            <dependency>
                <groupId>com.google.guava</groupId>
                <artifactId>guava</artifactId>
                <version>18.0</version>
            </dependency>
            <dependency>
                <groupId>com.google.code.findbugs</groupId>
                <artifactId>jsr305</artifactId>
                <version>3.0.1</version>
            </dependency>
            <dependency>
                <groupId>com.google.code.gson</groupId>
                <artifactId>gson</artifactId>
                <version>2.6.2</version>
            </dependency>
            <dependency>
                <groupId>com.google.protobuf</groupId>
                <artifactId>protobuf-java</artifactId>
                <version>2.5.0</version>
            </dependency>
            <dependency>
                <groupId>com.google.inject</groupId>
                <artifactId>guice</artifactId>
                <version>4.1.0</version>
            </dependency>
            <!-- Solr library -->
            <dependency>
                <groupId>org.apache.solr</groupId>
                <artifactId>solr-core</artifactId>
                <version>${version.solr}</version>
                <scope>${dependency.scope}</scope>
                <exclusions>
                    <exclusion>
                        <groupId>commons-lang</groupId>
                        <artifactId>commons-lang</artifactId>
                    </exclusion>
                </exclusions>
            </dependency>
            <dependency>
                <groupId>org.apache.solr</groupId>
                <artifactId>solr-solrj</artifactId>
                <version>${version.solr}</version>
                <exclusions>
                    <exclusion>
                        <groupId>commons-lang</groupId>
                        <artifactId>commons-lang</artifactId>
                    </exclusion>
                </exclusions>
            </dependency>
            <!-- Elasticsearch -->
            <dependency>
                <groupId>org.elasticsearch</groupId>
                <artifactId>elasticsearch</artifactId>
                <version>${version.elasticsearch}</version>
            </dependency>
            <dependency>
                <groupId>org.nlpcn</groupId>
                <artifactId>elasticsearch-sql</artifactId>
                <version>${version.elasticsearch-sql}</version>
            </dependency>
            <!-- Jest -->
            <dependency>
                <groupId>io.searchbox</groupId>
                <artifactId>jest</artifactId>
                <version>${version.jest}</version>
            </dependency>
            <!-- Redis library -->
            <dependency>
                <groupId>redis.clients</groupId>
                <artifactId>jedis</artifactId>
                <version>${version.jedis}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <!-- FastDFS library -->
            <dependency>
                <groupId>org.csource</groupId>
                <artifactId>fastdfs-client-java</artifactId>
                <version>${version.fastdfs}</version>
            </dependency>
            <!-- Quartz library -->
            <dependency>
                <groupId>org.quartz-scheduler</groupId>
                <artifactId>quartz</artifactId>
                <version>${version.quartz}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.quartz-scheduler</groupId>
                <artifactId>quartz-jobs</artifactId>
                <version>${version.quartz}</version>
            </dependency>
            <!-- Zip library -->
            <dependency>
                <groupId>net.lingala.zip4j</groupId>
                <artifactId>zip4j</artifactId>
                <version>${version.zip4j}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <!-- Pinyin library -->
            <dependency>
                <groupId>com.belerweb</groupId>
                <artifactId>pinyin4j</artifactId>
                <version>${version.pinyin4j}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <!-- Swagger-ui library -->
            <dependency>
                <groupId>io.springfox</groupId>
                <artifactId>springfox-swagger2</artifactId>
                <version>${version.swagger}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>io.springfox</groupId>
                <artifactId>springfox-swagger-ui</artifactId>
                <version>${version.swagger-ui}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <!-- date Time util library -->
            <dependency>
                <groupId>joda-time</groupId>
                <artifactId>joda-time</artifactId>
                <version>${version.joda-time}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.apache.thrift</groupId>
                <artifactId>libthrift</artifactId>
                <version>${version.thrift}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>com.timgroup</groupId>
                <artifactId>java-statsd-client</artifactId>
                <version>${version.statsd-client}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>io.dropwizard.metrics</groupId>
                <artifactId>metrics-core</artifactId>
                <version>${version.io-dropwizard-metrics}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <!-- Log framework library -->
            <dependency>
                <groupId>commons-logging</groupId>
                <artifactId>commons-logging</artifactId>
                <version>${version.logging}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>log4j</groupId>
                <artifactId>log4j</artifactId>
                <version>${version.log4j}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.slf4j</groupId>
                <artifactId>slf4j-api</artifactId>
                <version>${version.slf4j}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>ch.qos.logback</groupId>
                <artifactId>logback-classic</artifactId>
                <version>${version.logback}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <!-- Alibaba -->
            <dependency>
                <groupId>com.alibaba</groupId>
                <artifactId>druid</artifactId>
                <version>${version.alibaba-druid}</version>
            </dependency>
            <dependency>
                <groupId>com.alibaba</groupId>
                <artifactId>fastjson</artifactId>
                <version>${version.fastjson}</version>
            </dependency>
            <!-- Spring Boot Admin -->
            <dependency>
                <groupId>de.codecentric</groupId>
                <artifactId>spring-boot-admin-starter-server</artifactId>
                <version>${version.spring.boot.admin}</version>
            </dependency>
            <dependency>
                <groupId>de.codecentric</groupId>
                <artifactId>spring-boot-admin-server-ui</artifactId>
                <version>${version.spring.boot.admin}</version>
            </dependency>
            <!-- Extend library-->
            <dependency>
                <groupId>org.codehaus.jettison</groupId>
                <artifactId>jettison</artifactId>
                <version>${version.jettison}</version>
            </dependency>
            <dependency>
                <groupId>org.json</groupId>
                <artifactId>json</artifactId>
                <version>${version.json}</version>
            </dependency>
            <dependency>
                <groupId>net.sf.json-lib</groupId>
                <artifactId>json-lib</artifactId>
                <version>${version.json-lib}</version>
            </dependency>
            <dependency>
                <groupId>commons-net</groupId>
                <artifactId>commons-net</artifactId>
                <version>${version.commons-net}</version>
            </dependency>
            <!-- Getui (个推) push-notification SDK artifacts -->
            <dependency>
                <groupId>com.gexin.platform</groupId>
                <artifactId>gexin-rp-sdk-http</artifactId>
                <version>4.0.1.17</version>
            </dependency>
            <dependency>
                <groupId>com.gexin.platform</groupId>
                <artifactId>gexin-rp-fastjson</artifactId>
                <version>1.0.0.1</version>
            </dependency>
            <dependency>
                <groupId>com.gexin.platform</groupId>
                <artifactId>gexin-rp-sdk-base</artifactId>
                <version>4.0.0.22</version>
            </dependency>
            <dependency>
                <groupId>com.gexin.platform</groupId>
                <artifactId>gexin-rp-sdk-template</artifactId>
                <version>4.0.0.16</version>
            </dependency>
            <!-- NOTE(review): com.gexin.platform:protobuf-java appears to be a
                 republished copy of com.google.protobuf:protobuf-java 2.5.0,
                 which is also managed earlier in this POM. Maven treats the two
                 groupIds as distinct, so a module depending on both gets the
                 same classes twice on the classpath - confirm only one of the
                 two is actually pulled in by any single module. -->
            <dependency>
                <groupId>com.gexin.platform</groupId>
                <artifactId>protobuf-java</artifactId>
                <version>2.5.0</version>
            </dependency>
        </dependencies>
    </dependencyManagement>
    <!-- NOTE(review): both repository blocks point at an internal Nexus mirror
         over plain HTTP at a hard-coded LAN address (172.19.103.43). Builds
         outside that network cannot resolve artifacts, and unencrypted HTTP
         exposes the build to artifact tampering in transit (recent Maven
         versions block http:// mirrors by default). Consider serving the
         mirror over HTTPS and moving the URL into settings.xml / a mirror
         definition instead of committing it to the POM. -->
    <repositories>
        <repository>
            <id>public</id>
            <name>public</name>
            <url>http://172.19.103.43:8081/nexus/content/groups/public/</url>
            <releases>
                <enabled>true</enabled>
            </releases>
            <snapshots>
                <enabled>false</enabled>
            </snapshots>
        </repository>
    </repositories>
    <pluginRepositories>
        <pluginRepository>
            <id>public</id>
            <name>public</name>
            <url>http://172.19.103.43:8081/nexus/content/groups/public/</url>
            <releases>
                <enabled>true</enabled>
            </releases>
            <snapshots>
                <enabled>false</enabled>
            </snapshots>
        </pluginRepository>
    </pluginRepositories>
    <build>
        <extensions>
            <extension>
                <groupId>org.apache.maven.wagon</groupId>
                <artifactId>wagon-ssh</artifactId>
                <version>2.10</version>
            </extension>
            <extension>
                <groupId>org.apache.maven.wagon</groupId>
                <artifactId>wagon-http-lightweight</artifactId>
                <version>2.10</version>
            </extension>
        </extensions>
        <plugins>
            <!-- Compile for Java 8 against the JDK 8 runtime classes.
                 The bootclasspath pins compilation against the running JDK's
                 rt.jar/jce.jar so cross-compilation warnings are suppressed. -->
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <configuration>
                    <source>1.8</source>
                    <target>1.8</target>
                    <compilerArguments>
                        <verbose/>
                        <!-- FIX: use ${path.separator} rather than a hard-coded
                             ':' so the bootclasspath also resolves on Windows,
                             where path entries are ';'-separated. -->
                        <bootclasspath>${java.home}/lib/rt.jar${path.separator}${java.home}/lib/jce.jar</bootclasspath>
                    </compilerArguments>
                </configuration>
                <version>3.1</version>
            </plugin>
        </plugins>
    </build>
</project>

[File separator - next file in commit: ehr-ms-parent-pom/pom.xml, newly added (+606 lines, shown from line 1)]
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <groupId>com.yihu.ehr</groupId>
        <artifactId>ehr-cloud</artifactId>
        <version>1.2.0</version>
        <relativePath>../ehr-cloud/pom.xml</relativePath>
    </parent>
    <artifactId>ehr-ms-parent-pom</artifactId>
    <packaging>pom</packaging>
    <description>EHR micro service parent pom</description>
    <build>
        <pluginManagement>
            <plugins>
                <plugin>
                    <groupId>org.jooq</groupId>
                    <artifactId>jooq-codegen-maven</artifactId>
                    <version>${jooq.version}</version>
                </plugin>
                <plugin>
                    <groupId>org.springframework.boot</groupId>
                    <artifactId>spring-boot-maven-plugin</artifactId>
                    <version>${spring-boot.version}</version>
                </plugin>
                <plugin>
                    <groupId>org.apache.maven.plugins</groupId>
                    <artifactId>maven-antrun-plugin</artifactId>
                    <version>1.7</version>
                </plugin>
                <plugin>
                    <groupId>org.apache.maven.plugins</groupId>
                    <artifactId>maven-assembly-plugin</artifactId>
                    <version>2.5.1</version>
                    <configuration>
                        <recompressZippedFiles>false</recompressZippedFiles>
                    </configuration>
                </plugin>
                <plugin>
                    <groupId>org.apache.maven.plugins</groupId>
                    <artifactId>maven-clean-plugin</artifactId>
                    <version>2.5</version>
                </plugin>
                <plugin>
                    <groupId>org.apache.maven.plugins</groupId>
                    <artifactId>maven-deploy-plugin</artifactId>
                    <version>2.8.2</version>
                </plugin>
                <plugin>
                    <groupId>org.apache.maven.plugins</groupId>
                    <artifactId>maven-dependency-plugin</artifactId>
                    <version>2.10</version>
                </plugin>
                <plugin>
                    <groupId>org.apache.maven.plugins</groupId>
                    <artifactId>maven-eclipse-plugin</artifactId>
                    <version>2.9</version>
                </plugin>
                <plugin>
                    <groupId>org.apache.maven.plugins</groupId>
                    <artifactId>maven-failsafe-plugin</artifactId>
                    <version>2.18</version>
                </plugin>
                <plugin>
                    <groupId>org.apache.maven.plugins</groupId>
                    <artifactId>maven-install-plugin</artifactId>
                    <version>2.5.2</version>
                </plugin>
                <plugin>
                    <groupId>org.apache.maven.plugins</groupId>
                    <artifactId>maven-help-plugin</artifactId>
                    <version>2.2</version>
                </plugin>
                <plugin>
                    <groupId>org.apache.maven.plugins</groupId>
                    <artifactId>maven-jar-plugin</artifactId>
                    <version>2.5</version>
                </plugin>
                <plugin>
                    <groupId>org.apache.maven.plugins</groupId>
                    <artifactId>maven-javadoc-plugin</artifactId>
                    <version>2.10.1</version>
                </plugin>
                <plugin>
                    <groupId>org.apache.maven.plugins</groupId>
                    <artifactId>maven-resources-plugin</artifactId>
                    <version>2.7</version>
                </plugin>
                <plugin>
                    <groupId>org.apache.maven.plugins</groupId>
                    <artifactId>maven-shade-plugin</artifactId>
                    <version>2.3</version>
                </plugin>
                <plugin>
                    <groupId>org.apache.maven.plugins</groupId>
                    <artifactId>maven-site-plugin</artifactId>
                    <version>3.3</version>
                    <dependencies>
                        <dependency>
                            <groupId>org.apache.maven.doxia</groupId>
                            <artifactId>doxia-module-markdown</artifactId>
                            <version>1.5</version>
                        </dependency>
                    </dependencies>
                </plugin>
                <plugin>
                    <groupId>org.apache.maven.plugins</groupId>
                    <artifactId>maven-source-plugin</artifactId>
                    <version>2.4</version>
                </plugin>
                <plugin>
                    <groupId>org.apache.maven.plugins</groupId>
                    <artifactId>maven-surefire-plugin</artifactId>
                    <version>2.18.1</version>
                </plugin>
                <plugin>
                    <groupId>org.apache.maven.plugins</groupId>
                    <artifactId>maven-war-plugin</artifactId>
                    <version>2.5</version>
                </plugin>
                <plugin>
                    <groupId>org.codehaus.mojo</groupId>
                    <artifactId>build-helper-maven-plugin</artifactId>
                    <version>1.9.1</version>
                </plugin>
                <plugin>
                    <groupId>org.codehaus.mojo</groupId>
                    <artifactId>exec-maven-plugin</artifactId>
                    <version>1.3.2</version>
                </plugin>
                <plugin>
                    <groupId>org.codehaus.mojo</groupId>
                    <artifactId>versions-maven-plugin</artifactId>
                    <version>2.2</version>
                </plugin>
                <plugin>
                    <groupId>pl.project13.maven</groupId>
                    <artifactId>git-commit-id-plugin</artifactId>
                    <version>2.1.11</version>
                </plugin>
            </plugins>
        </pluginManagement>
        <plugins>
            <plugin>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-maven-plugin</artifactId>
                <!-- NOTE(review): this entry pins ${version.spring-boot} while
                     the pluginManagement entry for the very same plugin above
                     uses ${spring-boot.version}. Only one of the two properties
                     is likely declared in the parent's <properties>; confirm
                     which, and use it in both places - an undefined property
                     here fails resolution for every child module. -->
                <version>${version.spring-boot}</version>
                <executions>
                    <execution>
                        <!-- repackage rewrites each module's jar into an
                             executable Spring Boot fat jar at package time -->
                        <goals>
                            <goal>repackage</goal>
                        </goals>
                    </execution>
                </executions>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-checkstyle-plugin</artifactId>
                <version>2.17</version>
                <executions>
                    <execution>
                        <id>checkstyle-validation</id>
                        <phase>validate</phase>
                        <configuration>
                            <!-- NOTE(review): <skip>true</skip> disables this
                                 execution entirely, so the detailed config
                                 below (including failsOnError) is dead weight.
                                 Either remove the execution or flip skip to
                                 false once the checkstyle config files under
                                 src/checkstyle/ actually exist. -->
                            <skip>true</skip>
                            <configLocation>src/checkstyle/checkstyle.xml</configLocation>
                            <suppressionsLocation>src/checkstyle/checkstyle-suppressions.xml</suppressionsLocation>
                            <headerLocation>src/checkstyle/checkstyle-header.txt</headerLocation>
                            <propertyExpansion>checkstyle.build.directory=${project.build.directory}</propertyExpansion>
                            <encoding>UTF-8</encoding>
                            <consoleOutput>true</consoleOutput>
                            <failsOnError>true</failsOnError>
                            <includeTestSourceDirectory>true</includeTestSourceDirectory>
                        </configuration>
                        <goals>
                            <goal>check</goal>
                        </goals>
                    </execution>
                </executions>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <version>3.1</version>
                <configuration>
                    <source>${version.java}</source>
                    <target>${version.java}</target>
                </configuration>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-enforcer-plugin</artifactId>
                <version>1.4</version>
                <executions>
                    <execution>
                        <id>enforce-rules</id>
                        <goals>
                            <goal>enforce</goal>
                        </goals>
                        <configuration>
                            <rules>
                                <requireJavaVersion>
                                    <version>[1.8,)</version>
                                </requireJavaVersion>
                                <!--<requireProperty>
                                    <property>main.basedir</property>
                                </requireProperty>-->
                                <!--<requireProperty>
                                    <property>project.organization.name</property>
                                </requireProperty>-->
                                <!--<requireProperty>
                                    <property>project.name</property>
                                </requireProperty>-->
                                <!--<requireProperty>
                                    <property>project.description</property>
                                </requireProperty>-->
                            </rules>
                            <fail>true</fail>
                        </configuration>
                    </execution>
                </executions>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-jar-plugin</artifactId>
                <configuration>
                    <archive>
                        <manifest>
                            <addDefaultImplementationEntries>true</addDefaultImplementationEntries>
                            <addDefaultSpecificationEntries>true</addDefaultSpecificationEntries>
                        </manifest>
                    </archive>
                </configuration>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-surefire-plugin</artifactId>
                <configuration>
                    <includes>
                        <include>**/*Tests.java</include>
                    </includes>
                    <excludes>
                        <exclude>**/Abstract*.java</exclude>
                    </excludes>
                    <!--<systemPropertyVariables>-->
                    <!--<java.security.egd>file:/dev/./urandom</java.security.egd>-->
                    <!--<java.awt.headless>true</java.awt.headless>-->
                    <!--</systemPropertyVariables>-->
                    <argLine>-Xmx1024m</argLine>
                </configuration>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-war-plugin</artifactId>
                <configuration>
                    <failOnMissingWebXml>false</failOnMissingWebXml>
                </configuration>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-source-plugin</artifactId>
                <executions>
                    <execution>
                        <id>attach-sources</id>
                        <goals>
                            <goal>jar-no-fork</goal>
                        </goals>
                    </execution>
                </executions>
            </plugin>
            <!--<plugin>
                <groupId>org.codehaus.mojo</groupId>
                <artifactId>animal-sniffer-maven-plugin</artifactId>
                <configuration>
                    <skip>${disable.checks}</skip>
                    <signature>
                        <groupId>org.codehaus.mojo.signature</groupId>
                        <artifactId>java16</artifactId>
                        <version>1.0</version>
                    </signature>
                    <annotations>
                        <annotation>org.springframework.lang.UsesJava8</annotation>
                        <annotation>org.springframework.lang.UsesJava7</annotation>
                        <annotation>org.springframework.boot.lang.UsesUnsafeJava</annotation>
                    </annotations>
                </configuration>
                <executions>
                    <execution>
                        <id>enforce-java-6</id>
                        <phase>test</phase>
                        <goals>
                            <goal>check</goal>
                        </goals>
                    </execution>
                </executions>
            </plugin>-->
        </plugins>
    </build>
    <dependencyManagement>
        <dependencies>
            <dependency>
                <groupId>com.yihu.ehr</groupId>
                <artifactId>commons-admin-gateway-model</artifactId>
                <version>${version.ehr}</version>
            </dependency>
            <dependency>
                <groupId>com.yihu.ehr</groupId>
                <artifactId>commons-data-elasticsearch</artifactId>
                <version>${version.ehr}</version>
            </dependency>
            <dependency>
                <groupId>com.yihu.ehr</groupId>
                <artifactId>commons-data-fastdfs</artifactId>
                <version>${version.ehr}</version>
            </dependency>
            <dependency>
                <groupId>com.yihu.ehr</groupId>
                <artifactId>commons-data-hbase</artifactId>
                <version>${version.ehr}</version>
            </dependency>
            <dependency>
                <groupId>com.yihu.ehr</groupId>
                <artifactId>commons-data-mysql</artifactId>
                <version>${version.ehr}</version>
            </dependency>
            <dependency>
                <groupId>com.yihu.ehr</groupId>
                <artifactId>commons-data-query</artifactId>
                <version>${version.ehr}</version>
            </dependency>
            <dependency>
                <groupId>com.yihu.ehr</groupId>
                <artifactId>commons-data-redis</artifactId>
                <version>${version.ehr}</version>
            </dependency>
            <dependency>
                <groupId>com.yihu.ehr</groupId>
                <artifactId>commons-data-solr</artifactId>
                <version>${version.ehr}</version>
            </dependency>
            <dependency>
                <groupId>com.yihu.ehr</groupId>
                <artifactId>commons-ehr-constants</artifactId>
                <version>${version.ehr}</version>
            </dependency>
            <dependency>
                <groupId>com.yihu.ehr</groupId>
                <artifactId>commons-entity</artifactId>
                <version>${version.ehr}</version>
            </dependency>
            <!--<dependency>-->
            <!--<groupId>com.yihu.ehr</groupId>-->
            <!--<artifactId>commons-metrics</artifactId>-->
            <!--<version>${version.ehr}</version>-->
            <!--</dependency>-->
            <dependency>
                <groupId>com.yihu.ehr</groupId>
                <artifactId>commons-profile-core</artifactId>
                <version>${version.ehr}</version>
            </dependency>
            <dependency>
                <groupId>com.yihu.ehr</groupId>
                <artifactId>commons-redis-mq</artifactId>
                <version>${version.ehr}</version>
            </dependency>
            <dependency>
                <groupId>com.yihu.ehr</groupId>
                <artifactId>commons-rest-model</artifactId>
                <version>${version.ehr}</version>
            </dependency>
            <dependency>
                <groupId>com.yihu.hos</groupId>
                <artifactId>common-rest-model</artifactId>
                <version>${version.eip}</version>
            </dependency>
            <dependency>
                <groupId>com.yihu.ehr</groupId>
                <artifactId>commons-ui-swagger</artifactId>
                <version>${version.ehr}</version>
            </dependency>
            <dependency>
                <groupId>com.yihu.ehr</groupId>
                <artifactId>commons-util</artifactId>
                <version>${version.ehr}</version>
            </dependency>
            <dependency>
                <groupId>com.yihu.ehr</groupId>
                <artifactId>commons-web</artifactId>
                <version>${version.ehr}</version>
            </dependency>
            <!--<dependency>
                <groupId>com.yihu.ehr</groupId>
                <artifactId>commons-metrics</artifactId>
                <version>${version.ehr}</version>
            </dependency>-->
        </dependencies>
    </dependencyManagement>
    <!--
    profiles分为三种场景(dev,test,prod),三种部署模式(jar,war,docker).预计是9种模式,
    但目前仅使用到dev,test,prod的jar,test的war.若有需要可以组合配置这些部署模式.
    - dev的可执行jar包,在本机调试,不需要配置wagon参数。
    - test,prod的可执行jar包,需要在编译后传送到服务器上部署,故需配置wagon参数,参数可根据服务需要自行配置。
    - dev,test,prod的war包,编译后使用tomcat api部署,故需配置tomcat参数,参数可根据服务需要自行配置。
    -->
    <profiles>
        <profile>
            <id>dev-jar</id>
            <activation>
                <activeByDefault>true</activeByDefault>
            </activation>
            <properties>
                <skipTests>true</skipTests>
                <packaging.type>jar</packaging.type>
                <dependency.scope>compile</dependency.scope>
            </properties>
            <dependencies>
                <dependency>
                    <groupId>org.springframework.boot</groupId>
                    <artifactId>spring-boot-starter-web</artifactId>
                </dependency>
                <dependency>
                    <groupId>org.springframework.boot</groupId>
                    <artifactId>spring-boot-starter-tomcat</artifactId>
                </dependency>
                <!--<dependency>
                    <groupId>org.springframework.boot</groupId>
                    <artifactId>spring-boot-devtools</artifactId>
                    <scope>${dependency.scope}</scope>
                </dependency>-->
            </dependencies>
        </profile>
        <profile>
            <id>test-jar</id>
            <properties>
                <skipTests>true</skipTests>
                <packaging.type>jar</packaging.type>
                <dependency.scope>compile</dependency.scope>
                <wagonServerId/>
                <wagonUrl/>
            </properties>
            <build>
                <plugins>
                    <plugin>
                        <groupId>org.codehaus.mojo</groupId>
                        <artifactId>wagon-maven-plugin</artifactId>
                        <version>1.0</version>
                        <configuration>
                            <serverId>${wagonServerId}</serverId>
                            <fromFile>${project.build.directory}/${project.build.finalName}.jar</fromFile>
                            <url>${wagonUrl}</url>
                            <commands>
                                <!-- Stop any running instance of this service before redeploying. -->
                                <command>pkill -f ${project.build.finalName}.jar</command>
                                <!-- FIX: "nohub" was a typo for the POSIX "nohup" utility; the
                                     remote restart command would have failed with command-not-found. -->
                                <command>nohup java -Djava.security.egd=file:/dev/./urandom -jar
                                    ${project.build.finalName}.jar &amp;
                                </command>
                            </commands>
                            <displayCommandOutputs>true</displayCommandOutputs>
                        </configuration>
                    </plugin>
                </plugins>
            </build>
            <dependencies>
                <dependency>
                    <groupId>org.springframework.boot</groupId>
                    <artifactId>spring-boot-starter-web</artifactId>
                    <exclusions>
                        <exclusion>
                            <groupId>org.springframework.boot</groupId>
                            <artifactId>spring-boot-starter-logging</artifactId>
                        </exclusion>
                    </exclusions>
                </dependency>
                <dependency>
                    <groupId>org.springframework.boot</groupId>
                    <artifactId>spring-boot-devtools</artifactId>
                    <scope>${dependency.scope}</scope>
                </dependency>
            </dependencies>
        </profile>
        <profile>
            <!-- WAR deployment to a test Tomcat via the manager API (see the
                 profiles comment above the <profiles> element). -->
            <id>test-war</id>
            <activation>
                <property>
                    <name>spring.profiles.active</name>
                    <value>default,test</value>
                </property>
            </activation>
            <properties>
                <skipTests>true</skipTests>
                <packaging.type>war</packaging.type>
                <!-- Servlet container provides these at runtime, so scope them "provided". -->
                <dependency.scope>provided</dependency.scope>
            </properties>
            <dependencies>
                <dependency>
                    <groupId>org.springframework.boot</groupId>
                    <artifactId>spring-boot-starter-web</artifactId>
                    <scope>${dependency.scope}</scope>
                    <exclusions>
                        <exclusion>
                            <groupId>org.springframework.boot</groupId>
                            <artifactId>spring-boot-starter-logging</artifactId>
                        </exclusion>
                    </exclusions>
                </dependency>
                <dependency>
                    <groupId>org.springframework.boot</groupId>
                    <artifactId>spring-boot-starter-tomcat</artifactId>
                    <scope>${dependency.scope}</scope>
                </dependency>
                <dependency>
                    <groupId>org.springframework.boot</groupId>
                    <artifactId>spring-boot-devtools</artifactId>
                    <scope>${dependency.scope}</scope>
                </dependency>
            </dependencies>
            <build>
                <plugins>
                    <plugin>
                        <groupId>org.apache.tomcat.maven</groupId>
                        <artifactId>tomcat7-maven-plugin</artifactId>
                        <version>2.2</version>
                        <configuration>
                            <url>http://localhost:8080/manager/text</url>
                            <server>tomcat8</server>
                            <!-- SECURITY NOTE(review): Tomcat manager credentials are committed
                                 in plain text; move them to the <servers> section of
                                 ~/.m2/settings.xml (referenced by the <server> id above). -->
                            <username>deployer</username>
                            <password>jkzldeployer</password>
                            <path>/${project.artifactId}</path>
                            <update>true</update>
                        </configuration>
                    </plugin>
                </plugins>
            </build>
        </profile>
        <profile>
            <!-- NOTE(review): this profile's id is "prod-jar" yet <packaging.type> is set
                 to "war", unlike "test-jar" which packages a jar. Looks like a copy/paste
                 slip - confirm whether prod is really meant to build a war here. -->
            <id>prod-jar</id>
            <activation>
                <property>
                    <name>spring.profiles.active</name>
                    <value>default,prod</value>
                </property>
            </activation>
            <properties>
                <skipTests>true</skipTests>
                <packaging.type>war</packaging.type>
                <dependency.scope>compile</dependency.scope>
                <wagonServerId>11.1.2.21</wagonServerId>
                <!-- SECURITY NOTE(review): "user:password" appears to be a placeholder;
                     supply the real credentials via ~/.m2/settings.xml <servers> rather
                     than committing them into the POM. -->
                <wagonUrl>scp://user:password@11.1.2.21/home/root/ehr-release</wagonUrl>
            </properties>
            <dependencies>
                <dependency>
                    <groupId>org.springframework.boot</groupId>
                    <artifactId>spring-boot-starter-web</artifactId>
                    <scope>${dependency.scope}</scope>
                    <exclusions>
                        <exclusion>
                            <groupId>org.springframework.boot</groupId>
                            <artifactId>spring-boot-starter-logging</artifactId>
                        </exclusion>
                    </exclusions>
                </dependency>
            </dependencies>
        </profile>
        <profile>
            <id>prod-war</id>
            <properties>
                <skipTests>true</skipTests>
                <packaging.type>war</packaging.type>
                <dependency.scope>provided</dependency.scope>
            </properties>
            <dependencies>
                <dependency>
                    <groupId>org.springframework.boot</groupId>
                    <artifactId>spring-boot-starter-web</artifactId>
                    <scope>${dependency.scope}</scope>
                    <exclusions>
                        <exclusion>
                            <groupId>org.springframework.boot</groupId>
                            <artifactId>spring-boot-starter-logging</artifactId>
                        </exclusion>
                    </exclusions>
                </dependency>
                <dependency>
                    <groupId>org.springframework.boot</groupId>
                    <artifactId>spring-boot-starter-tomcat</artifactId>
                    <scope>${dependency.scope}</scope>
                </dependency>
                <dependency>
                    <groupId>org.springframework.boot</groupId>
                    <artifactId>spring-boot-devtools</artifactId>
                    <scope>${dependency.scope}</scope>
                </dependency>
            </dependencies>
            <build>
                <plugins>
                </plugins>
            </build>
        </profile>
    </profiles>
</project>

+ 154 - 0
pom.xml

@ -0,0 +1,154 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- Module POM for the package analyzer microservice (svr-pack-analyzer). -->
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <groupId>com.yihu.ehr</groupId>
        <artifactId>ehr-ms-parent-pom</artifactId>
        <version>1.2.0</version>
        <relativePath>ehr-ms-parent-pom/pom.xml</relativePath>
    </parent>
    <artifactId>svr-pack-analyzer</artifactId>
    <!--<packaging>${packaging.type}</packaging>-->
    <packaging>war</packaging>
    <properties>
        <wagonServerId>192.168.1.220</wagonServerId>
        <!-- SECURITY NOTE(review): plain-text SSH credentials are committed here; move
             them to the <servers> section of ~/.m2/settings.xml keyed by serverId. -->
        <wagonUrl>scp://sand:timeneverstop@192.168.1.221/home/sand/ehr-release</wagonUrl>
    </properties>
    <dependencies>
        <!-- Test support -->
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-test</artifactId>
            <scope>test</scope>
        </dependency>
        <!-- Spring Cloud infrastructure (config server, Eureka discovery, Feign clients). -->
        <dependency>
            <groupId>org.springframework.cloud</groupId>
            <artifactId>spring-cloud-starter-config</artifactId>
            <scope>${dependency.scope}</scope>
        </dependency>
        <dependency>
            <groupId>org.springframework.cloud</groupId>
            <artifactId>spring-cloud-starter-eureka</artifactId>
            <scope>${dependency.scope}</scope>
        </dependency>
        <dependency>
            <groupId>org.springframework.cloud</groupId>
            <artifactId>spring-cloud-starter-feign</artifactId>
            <scope>${dependency.scope}</scope>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-actuator</artifactId>
        </dependency>
        <dependency>
            <groupId>com.timgroup</groupId>
            <artifactId>java-statsd-client</artifactId>
            <scope>${dependency.scope}</scope>
        </dependency>
        <dependency>
            <groupId>javax.servlet</groupId>
            <artifactId>javax.servlet-api</artifactId>
            <scope>${dependency.scope}</scope>
        </dependency>
        <dependency>
            <groupId>org.quartz-scheduler</groupId>
            <artifactId>quartz</artifactId>
            <scope>${dependency.scope}</scope>
        </dependency>
        <!-- Shared EHR commons libraries (versions managed by the parent POM). -->
        <dependency>
            <groupId>com.yihu.ehr</groupId>
            <artifactId>commons-data-elasticsearch</artifactId>
            <scope>compile</scope>
        </dependency>
        <dependency>
            <groupId>com.yihu.ehr</groupId>
            <artifactId>commons-data-fastdfs</artifactId>
            <scope>compile</scope>
        </dependency>
        <dependency>
            <groupId>com.yihu.ehr</groupId>
            <artifactId>commons-ehr-constants</artifactId>
            <scope>compile</scope>
        </dependency>
        <dependency>
            <groupId>com.yihu.ehr</groupId>
            <artifactId>commons-entity</artifactId>
            <scope>compile</scope>
        </dependency>
        <!-- FIX: commons-util was declared twice (once with scope "compile", once
             without); the duplicate declaration has been removed - Maven would warn
             and silently pick one of them. -->
        <dependency>
            <groupId>com.yihu.ehr</groupId>
            <artifactId>commons-util</artifactId>
            <scope>compile</scope>
        </dependency>
        <dependency>
            <groupId>com.yihu.ehr</groupId>
            <artifactId>commons-data-mysql</artifactId>
            <scope>compile</scope>
        </dependency>
        <dependency>
            <groupId>com.yihu.ehr</groupId>
            <artifactId>commons-rest-model</artifactId>
            <scope>compile</scope>
        </dependency>
        <dependency>
            <groupId>com.yihu.ehr</groupId>
            <artifactId>commons-data-redis</artifactId>
            <scope>compile</scope>
        </dependency>
        <dependency>
            <groupId>com.yihu.ehr</groupId>
            <artifactId>commons-ui-swagger</artifactId>
            <scope>compile</scope>
        </dependency>
        <dependency>
            <groupId>com.yihu.ehr</groupId>
            <artifactId>commons-web</artifactId>
            <scope>compile</scope>
        </dependency>
        <dependency>
            <groupId>com.yihu.ehr</groupId>
            <artifactId>commons-profile-core</artifactId>
            <scope>compile</scope>
        </dependency>
        <dependency>
            <groupId>com.yihu.ehr</groupId>
            <artifactId>commons-redis-mq</artifactId>
            <scope>compile</scope>
        </dependency>
        <dependency>
            <groupId>com.yihu.ehr</groupId>
            <artifactId>commons-data-solr</artifactId>
        </dependency>
        <dependency>
            <groupId>com.alibaba</groupId>
            <artifactId>fastjson</artifactId>
        </dependency>
        <!-- POI 3.17: Word export needs the Paragraph interface, which is missing in 3.12. -->
        <dependency>
            <groupId>org.apache.poi</groupId>
            <artifactId>poi</artifactId>
            <version>3.17</version>
        </dependency>
        <dependency>
            <groupId>org.apache.poi</groupId>
            <artifactId>poi-ooxml</artifactId>
            <version>3.17</version>
        </dependency>
        <!-- Excel export library -->
        <dependency>
            <groupId>net.sourceforge.jexcelapi</groupId>
            <artifactId>jxl</artifactId>
        </dependency>
    </dependencies>
</project>

+ 53 - 0
src/main/java/com/yihu/ehr/SvrPackAnalyzer.java

@ -0,0 +1,53 @@
package com.yihu.ehr;
import com.yihu.ehr.analyze.config.SchedulerConfig;
import com.yihu.ehr.analyze.service.scheduler.SchedulerService;
import com.yihu.ehr.analyze.service.scheduler.WarningSchedulerService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.CommandLineRunner;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.autoconfigure.security.SecurityAutoConfiguration;
import org.springframework.boot.builder.SpringApplicationBuilder;
import org.springframework.boot.web.support.SpringBootServletInitializer;
import org.springframework.cloud.client.discovery.EnableDiscoveryClient;
import org.springframework.cloud.netflix.feign.EnableFeignClients;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.web.config.EnableSpringDataWebSupport;
import org.springframework.scheduling.annotation.EnableAsync;
import org.springframework.scheduling.annotation.EnableScheduling;
/**
 * Bootstrap class for the package analyzer service (svr-pack-analyzer).
 *
 * <p>Supports both executable-jar startup ({@link #main(String[])}) and WAR
 * deployment ({@link #configure(SpringApplicationBuilder)}). Security
 * auto-configuration is excluded; discovery, Feign, scheduling and async
 * support are enabled.
 */
@Configuration
@EnableAutoConfiguration(exclude = {
        SecurityAutoConfiguration.class})
@ComponentScan
@EnableDiscoveryClient
@EnableFeignClients
@EnableScheduling
@EnableAsync
@EnableSpringDataWebSupport
public class SvrPackAnalyzer extends SpringBootServletInitializer implements CommandLineRunner {

    @Autowired
    private SchedulerService schedulerService;
    @Autowired
    private SchedulerConfig schedulerConfig;
    @Autowired
    private WarningSchedulerService warningSchedulerService;

    public static void main(String[] args) {
        // FIX: dropped the spurious "throws NoSuchFieldException, IllegalAccessException"
        // clause - SpringApplication.run declares no checked exceptions.
        SpringApplication.run(SvrPackAnalyzer.class, args);
    }

    /**
     * CommandLineRunner hook: registers the analyzer quartz jobs (minSize jobs with the
     * configured cron expression) and initializes the warning scheduler once the
     * application context is up.
     */
    @Override
    public void run(String... strings) throws Exception {
        schedulerService.addJob(schedulerConfig.getJobMinSize(), schedulerConfig.getCronExp());
        warningSchedulerService.init();
    }

    /** WAR deployment entry point (this module packages as a war - see the POM). */
    @Override
    protected SpringApplicationBuilder configure(SpringApplicationBuilder application) {
        return application.sources(SvrPackAnalyzer.class);
    }
}

+ 48 - 0
src/main/java/com/yihu/ehr/analyze/config/RequireDatasetsConfig.java

@ -0,0 +1,48 @@
package com.yihu.ehr.analyze.config;
import com.yihu.ehr.profile.EventType;
import com.yihu.ehr.profile.exception.AnalyzerException;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.stereotype.Component;
import java.util.ArrayList;
import java.util.List;
/**
 * 就诊事件必要数据集配置
 * Created by progr1mmer on 2018/7/23.
 */
/**
 * Holds the data-set codes that are mandatory for each visit (event) type,
 * bound from configuration under the "ehr.require-data-sets" prefix.
 * Created by progr1mmer on 2018/7/23.
 */
@Component
@ConfigurationProperties(prefix = "ehr.require-data-sets")
public class RequireDatasetsConfig {

    // Required data-set codes per event type; populated by Spring's relaxed binding.
    private List<String> clinic = new ArrayList<>();
    private List<String> resident = new ArrayList<>();
    private List<String> medicalExam = new ArrayList<>();

    /**
     * Resolves the mandatory data-set list for the given event type.
     *
     * @param eventType visit event type; must not be null
     * @return the configured data-set codes for that event type
     * @throws AnalyzerException if the event type is not a supported kind
     */
    public List<String> getRequireDataset(EventType eventType) {
        // Enum equals() is identity comparison; invoking it on eventType keeps the
        // original switch's NullPointerException behaviour for a null argument.
        if (eventType.equals(EventType.Clinic)) {
            return clinic;
        }
        if (eventType.equals(EventType.Resident)) {
            return resident;
        }
        if (eventType.equals(EventType.MedicalExam)) {
            return medicalExam;
        }
        throw new AnalyzerException("Unknown event type " + eventType);
    }

    public List<String> getClinic() {
        return clinic;
    }

    public List<String> getResident() {
        return resident;
    }

    public List<String> getMedicalExam() {
        return medicalExam;
    }
}

+ 42 - 0
src/main/java/com/yihu/ehr/analyze/config/SchedulerConfig.java

@ -0,0 +1,42 @@
package com.yihu.ehr.analyze.config;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.quartz.SchedulerFactoryBean;
/**
 * @author Airhead
 * @version 1.0
 * @created 2018.01.16
 */
/**
 * Quartz scheduler settings for the package analyzer, read from the
 * "analyze.job.*" configuration properties.
 *
 * @author Airhead
 * @version 1.0
 * @created 2018.01.16
 */
@Configuration
public class SchedulerConfig {

    @Value("${analyze.job.maxSize}")
    private int jobMaxSize;

    @Value("${analyze.job.minSize}")
    private int jobMinSize;

    @Value("${analyze.job.cronExp}")
    private String cronExp;

    /**
     * Quartz scheduler hosting the analyzer jobs; started automatically when
     * the application context refreshes.
     */
    @Bean
    SchedulerFactoryBean schedulerFactoryBean() {
        SchedulerFactoryBean factory = new SchedulerFactoryBean();
        factory.setSchedulerName("PackageAnalyzerScheduler");
        factory.setAutoStartup(true);
        return factory;
    }

    /** @return configured upper bound for the analyzer job count */
    public int getJobMaxSize() {
        return jobMaxSize;
    }

    /** @return configured lower bound for the analyzer job count (used at startup) */
    public int getJobMinSize() {
        return jobMinSize;
    }

    /** @return cron expression that drives the analyzer jobs */
    public String getCronExp() {
        return cronExp;
    }
}

+ 27 - 0
src/main/java/com/yihu/ehr/analyze/config/TenantConfiguration.java

@ -0,0 +1,27 @@
package com.yihu.ehr.analyze.config;
import feign.RequestInterceptor;
import feign.RequestTemplate;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
/**
 *   ehr使用 feginClient 调用eip(涉及到app应用,标准等) 时,需要此类,增加请求头信息
 */
/**
 * Adds the tenant header to every Feign request so EHR services can call EIP
 * services (apps, standards, etc.) that require tenant identification.
 */
@Configuration
public class TenantConfiguration {

    @Value("${eip.tenant}")
    private String tenant;

    /**
     * Interceptor applied to all Feign clients in this context: stamps the
     * configured tenant name onto the "tenant_name" request header.
     */
    @Bean
    public RequestInterceptor tenantInterceptor() {
        // Idiom: RequestInterceptor is a functional interface, so a lambda replaces
        // the anonymous inner class (the build enforces Java 8+).
        return requestTemplate -> requestTemplate.header("tenant_name", tenant);
    }
}

+ 115 - 0
src/main/java/com/yihu/ehr/analyze/controller/DailyReportEndPoint.java

@ -0,0 +1,115 @@
package com.yihu.ehr.analyze.controller;
import com.yihu.ehr.constants.ApiVersion;
import com.yihu.ehr.constants.ServiceApi;
import com.yihu.ehr.controller.EnvelopRestEndPoint;
import com.yihu.ehr.elasticsearch.ElasticSearchUtil;
import com.yihu.ehr.util.rest.Envelop;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import java.util.List;
import java.util.Map;
@RestController
@RequestMapping(value = ApiVersion.Version1_0, produces = MediaType.APPLICATION_JSON_UTF8_VALUE)
@Api(value = "DailyReportEndPoint", description = "档案日报上传", tags = {"档案分析服务-档案日报上传"})
public class DailyReportEndPoint extends EnvelopRestEndPoint {

    public static final String INDEX = "qc";
    public static final String TYPE = "daily_report";

    private static final Logger log = LoggerFactory.getLogger(DailyReportEndPoint.class);

    /**
     * Required report fields in validation order, each paired with the error
     * fragment appended to the log message when the field is null or empty.
     * Replaces the original copy/pasted per-field checks; order and texts
     * are unchanged.
     */
    private static final String[][] REQUIRED_FIELDS = {
            {"org_code", "机构代码不能为空、"},
            {"event_date", "事件时间不能为空、"},
            {"HSI07_01_001", "总诊疗人数不能为空、"},
            {"HSI07_01_002", "门急诊人数不能为空、"},
            {"HSI07_01_004", "健康检查人数不能为空、"},
            {"HSI07_01_011", "入院人数不能为空、"},
            {"HSI07_01_012", "出院人数不能为空、"},
    };

    @Autowired
    private ElasticSearchUtil elasticSearchUtil;

    /**
     * Uploads a batch of daily reports. Every report is validated for the
     * required fields; on success, any existing document with the same
     * (event_date, org_code) pair is deleted before indexing (re-upload case).
     *
     * @param report JSON array of daily report objects
     * @return success(true), or failed("参数校验失败") when validation fails
     * @throws Exception on JSON parsing or Elasticsearch errors
     */
    @ApiOperation(value = "档案日报上传")
    @RequestMapping(value = ServiceApi.PackageAnalyzer.DailyReport, method = RequestMethod.POST)
    public Envelop dailyReport(
            @ApiParam(name = "report", value = "日报json对象", required = true)
            @RequestParam(value = "report") String report) throws Exception {
        List<Map<String, Object>> list = objectMapper.readValue(report, List.class);
        String msg = validateReports(list);
        if (StringUtils.isNotEmpty(msg)) {
            log.error(msg);
            return failed("参数校验失败");
        }
        for (Map<String, Object> map : list) {
            // Re-upload: remove any previously indexed document for the same date/org.
            // NOTE(review): "org_code?" uses '?' where event_date uses '=' - presumably
            // an operator of the ES util's filter DSL; confirm it is not a typo for '='.
            String filter = "event_date=" + map.get("event_date") + ";org_code?" + map.get("org_code");
            List<Map<String, Object>> res = elasticSearchUtil.list(INDEX, TYPE, filter);
            if (res != null && !res.isEmpty()) {
                for (Map<String, Object> m : res) {
                    elasticSearchUtil.delete(INDEX, TYPE, m.get("_id").toString());
                }
            }
            elasticSearchUtil.index(INDEX, TYPE, map);
        }
        return success(true);
    }

    /**
     * Collects the missing-field error message for the whole batch using a
     * StringBuilder (the original concatenated Strings in a loop). Returns an
     * empty string when all reports are complete.
     */
    private String validateReports(List<Map<String, Object>> list) {
        StringBuilder msg = new StringBuilder();
        for (Map<String, Object> map : list) {
            for (String[] field : REQUIRED_FIELDS) {
                Object value = map.get(field[0]);
                if (value == null || "".equals(value)) {
                    msg.append(field[1]);
                }
            }
        }
        return msg.toString();
    }

    /** Lists daily reports matching the given filter expression (null filter lists all). */
    @ApiOperation(value = "档案日报查询")
    @RequestMapping(value = ServiceApi.PackageAnalyzer.List, method = RequestMethod.POST)
    public Envelop list(
            @ApiParam(name = "filter", value = "过滤条件")
            @RequestParam(value = "filter", required = false) String filter) throws Exception {
        List<Map<String, Object>> list = elasticSearchUtil.list(INDEX, TYPE, filter);
        return success(list);
    }

    /** Finds daily reports whose given field matches the given value. */
    @ApiOperation(value = "根据某个字段查询档案")
    @RequestMapping(value = ServiceApi.PackageAnalyzer.FindByField, method = RequestMethod.POST)
    public Envelop findByField(
            @ApiParam(name = "field", value = "字段", required = true)
            @RequestParam(value = "field") String field,
            @ApiParam(name = "value", value = "字段值", required = true)
            @RequestParam(value = "value") String value) {
        List<Map<String, Object>> list = elasticSearchUtil.findByField(INDEX, TYPE, field, value);
        return success(list);
    }

    /** Runs an SQL-style query against the index, returning the raw row maps. */
    @ApiOperation(value = "根据sql查询")
    @RequestMapping(value = ServiceApi.PackageAnalyzer.FindBySql, method = RequestMethod.POST)
    public List<Map<String, Object>> findBySql(
            @ApiParam(name = "field", value = "字段列表", required = true)
            @RequestParam(value = "field") String field,
            @ApiParam(name = "sql", value = "sql", required = true)
            @RequestParam(value = "sql") String sql) throws Exception {
        return elasticSearchUtil.findBySql(objectMapper.readValue(field, List.class), sql);
    }
}

+ 83 - 0
src/main/java/com/yihu/ehr/analyze/controller/PackAnalyzeEndPoint.java

@@ -0,0 +1,83 @@
package com.yihu.ehr.analyze.controller;
import com.yihu.ehr.analyze.feign.PackageMgrClient;
import com.yihu.ehr.analyze.service.pack.PackageAnalyzeService;
import com.yihu.ehr.analyze.model.ZipPackage;
import com.yihu.ehr.constants.ApiVersion;
import com.yihu.ehr.constants.ServiceApi;
import com.yihu.ehr.controller.EnvelopRestEndPoint;
import com.yihu.ehr.elasticsearch.ElasticSearchUtil;
import com.yihu.ehr.model.packs.EsSimplePackage;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.http.MediaType;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@RestController
@RequestMapping(value = ApiVersion.Version1_0, produces = MediaType.APPLICATION_JSON_UTF8_VALUE)
@Api(value = "AnalyzerEndPoint", description = "档案分析服务", tags = {"档案分析服务-档案分析"})
public class PackAnalyzeEndPoint extends EnvelopRestEndPoint {

    // ES index/type holding raw JSON archive packages; hoisted so the literals
    // are not repeated (and cannot drift apart) inside the handlers below.
    private static final String JSON_ARCHIVES_INDEX = "json_archives";
    private static final String JSON_ARCHIVES_TYPE = "info";

    @Autowired
    private PackageAnalyzeService packageAnalyzeService;
    @Autowired
    private ElasticSearchUtil elasticSearchUtil;
    @Autowired
    private PackageMgrClient packageMgrClient;

    /**
     * ES数据保存 — persists an uploaded batch of data into the given ES index/type.
     *
     * @param index    target ES index
     * @param type     target ES type
     * @param dataList the data batch to store (serialized form expected by the service)
     * @return always {@code true}; the service signals failure by throwing
     */
    @ApiOperation(value = "ES数据保存")
    @RequestMapping(value = ServiceApi.PackageAnalyzer.EsSaveData, method = RequestMethod.POST)
    public boolean esSaveData(
            @ApiParam(name = "index", value = "ES index", required = true)
            @RequestParam(value = "index") String index,
            @ApiParam(name = "type", value = "ES type", required = true)
            @RequestParam(value = "type") String type,
            @ApiParam(name = "dataList", value = "上传的数据集", required = true)
            @RequestParam(value = "dataList") String dataList) throws Exception {
        packageAnalyzeService.esSaveData(index, type, dataList);
        return true;
    }

    /**
     * 根据条件批量修改档案包状态 — selects one page of archive packages by filter and
     * bulk-updates their {@code analyze_status} field.
     *
     * @param filters filter expression selecting the packages
     * @param status  new analyze_status value to write
     * @param page    page number to process
     * @param size    page size
     * @return number of documents on the processed page (i.e. how many were updated)
     */
    @RequestMapping(value = ServiceApi.PackageAnalyzer.UpdateStatus, method = RequestMethod.PUT)
    @ApiOperation(value = "根据条件批量修改档案包状态", notes = "修改档案包状态")
    public Integer update(
            @ApiParam(name = "filters", value = "条件", required = true)
            @RequestParam(value = "filters") String filters,
            @ApiParam(name = "status", value = "状态", required = true)
            @RequestParam(value = "status") String status,
            @ApiParam(name = "page", value = "page", required = true)
            @RequestParam(value = "page") Integer page,
            @ApiParam(name = "size", value = "size", required = true)
            @RequestParam(value = "size") Integer size) throws Exception {
        Page<Map<String, Object>> result =
                elasticSearchUtil.page(JSON_ARCHIVES_INDEX, JSON_ARCHIVES_TYPE, filters, page, size);
        // Build one partial-update document per hit: only _id + the new status.
        List<Map<String, Object>> updateSourceList = new ArrayList<>(result.getNumberOfElements());
        result.forEach(item -> {
            Map<String, Object> updateSource = new HashMap<>();
            updateSource.put("_id", item.get("_id"));
            updateSource.put("analyze_status", status);
            updateSourceList.add(updateSource);
        });
        elasticSearchUtil.bulkUpdate(JSON_ARCHIVES_INDEX, JSON_ARCHIVES_TYPE, updateSourceList);
        return result.getNumberOfElements();
    }

    /**
     * 分析档案包 — fetches the package descriptor by id and runs the analyzer on it.
     *
     * @param id archive package id; declared optional in the binding, but
     *           NOTE(review): a null id is passed straight to the mgr client — confirm
     *           the client tolerates it, otherwise this should be required.
     * @return the analyzed package
     */
    @RequestMapping(value = ServiceApi.PackageAnalyzer.Analyzer, method = RequestMethod.PUT)
    @ApiOperation(value = "分析档案包", notes = "分析档案包")
    public ZipPackage analyzer(
            @ApiParam(name = "id", value = "档案包ID")
            @RequestParam(value = "id", required = false) String id) throws Throwable {
        EsSimplePackage esSimplePackage = packageMgrClient.getPackage(id);
        return packageAnalyzeService.analyze(esSimplePackage);
    }
}

+ 412 - 0
src/main/java/com/yihu/ehr/analyze/controller/PackQcReportEndPoint.java

@@ -0,0 +1,412 @@
package com.yihu.ehr.analyze.controller;
import com.yihu.ehr.analyze.feign.HosAdminServiceClient;
import com.yihu.ehr.analyze.model.AdapterDatasetModel;
import com.yihu.ehr.analyze.model.AdapterMetadataModel;
import com.yihu.ehr.analyze.service.dataQuality.DqDatasetWarningService;
import com.yihu.ehr.analyze.service.pack.PackQcReportService;
import com.yihu.ehr.constants.ApiVersion;
import com.yihu.ehr.constants.ServiceApi;
import com.yihu.ehr.controller.EnvelopRestEndPoint;
import com.yihu.ehr.elasticsearch.ElasticSearchUtil;
import com.yihu.ehr.entity.quality.DqDatasetWarning;
import com.yihu.ehr.model.quality.MProfileInfo;
import com.yihu.ehr.redis.client.RedisClient;
import com.yihu.ehr.util.rest.Envelop;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import org.apache.commons.lang.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.data.domain.Page;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import java.util.*;
/**
 * @Author: zhengwei
 * @Date: 2018/5/31 16:20
 * @Description: 质控报表
 */
@RestController
@RequestMapping(ApiVersion.Version1_0)
@Api(value = "PackQcReportEndPoint", description = "档案分析服务", tags = {"档案分析服务-新质控管理报表"})
public class PackQcReportEndPoint extends EnvelopRestEndPoint {

    @Autowired
    private PackQcReportService packQcReportService;
    @Autowired
    private DqDatasetWarningService dqDatasetWarningService;
    @Autowired
    private ElasticSearchUtil elasticSearchUtil;
    @Autowired
    private HosAdminServiceClient hosAdminServiceClient;
    // Org code that represents the whole platform ("cloud"); it means "no org filter".
    @Value("${quality.cloud}")
    private String cloud;
    @Autowired
    private RedisClient redisClient;

    /**
     * 获取医院数据 — daily quality report for one hospital over a date range.
     */
    @RequestMapping(value = ServiceApi.PackQcReport.dailyReport, method = RequestMethod.GET)
    @ApiOperation(value = "获取医院数据")
    public Envelop dailyReport(
            @ApiParam(name = "startDate", value = "开始日期")
            @RequestParam(name = "startDate") String startDate,
            @ApiParam(name = "endDate", value = "结束日期")
            @RequestParam(name = "endDate") String endDate,
            @ApiParam(name = "orgCode", value = "医院代码", required = false)
            // FIX: swagger documented orgCode as optional, but the binding made it
            // mandatory; align @RequestParam with the documented contract.
            @RequestParam(name = "orgCode", required = false) String orgCode) throws Exception {
        return packQcReportService.dailyReport(startDate, endDate, orgCode);
    }

    /**
     * 预警数据集列表 — pages through dataset-warning configurations by type, optionally
     * restricted to one organization.
     */
    @RequestMapping(value = ServiceApi.PackQcReport.datasetWarningList, method = RequestMethod.GET)
    @ApiOperation(value = "预警数据集列表")
    public Envelop datasetWarningList(
            @ApiParam(name = "orgCode", value = "机构编码")
            @RequestParam(name = "orgCode", required = false) String orgCode,
            @ApiParam(name = "type", value = "类型(1平台接收,2平台上传)")
            @RequestParam(name = "type") String type,
            @ApiParam(name = "page", value = "页码", required = true, defaultValue = "1")
            @RequestParam(value = "page") int page,
            @ApiParam(name = "size", value = "分页大小", required = true, defaultValue = "15")
            @RequestParam(value = "size") int size) {
        Envelop envelop = new Envelop();
        try {
            String filters = "type=" + type;
            // The cloud (platform-wide) org code means "all orgs" — skip the org filter.
            if (!StringUtils.isEmpty(orgCode) && !cloud.equals(orgCode)) {
                filters += ";orgCode=" + orgCode;
            }
            List<DqDatasetWarning> list = dqDatasetWarningService.search(null, filters, "", page, size);
            int count = (int) dqDatasetWarningService.getCount(filters);
            envelop = getPageResult(list, count, page, size);
        } catch (Exception e) {
            // NOTE(review): prefer a logger over printStackTrace; kept as-is to avoid
            // introducing a logging dependency in this change.
            e.printStackTrace();
            envelop.setSuccessFlg(false);
            envelop.setErrorMsg(e.getMessage());
        }
        return envelop;
    }

    /**
     * 资源化成功的计数统计 — counts successfully resourcified packages in a date range.
     */
    @RequestMapping(value = ServiceApi.PackQcReport.resourceSuccess, method = RequestMethod.GET)
    @ApiOperation(value = "资源化成功的计数统计")
    public Envelop resourceSuccess(
            @ApiParam(name = "startDate", value = "开始日期")
            @RequestParam(name = "startDate") String startDate,
            @ApiParam(name = "endDate", value = "结束日期")
            @RequestParam(name = "endDate") String endDate,
            @ApiParam(name = "orgCode", value = "医院代码")
            @RequestParam(name = "orgCode", required = false) String orgCode) throws Exception {
        return packQcReportService.resourceSuccess(startDate, endDate, orgCode);
    }

    /**
     * 资源化成功的计数统计(分页) — paged variant of {@link #resourceSuccess}.
     */
    @RequestMapping(value = ServiceApi.PackQcReport.resourceSuccessPage, method = RequestMethod.GET)
    @ApiOperation(value = "资源化成功的计数统计(分页)")
    public Envelop resourceSuccessPage(
            @ApiParam(name = "startDate", value = "开始日期")
            @RequestParam(name = "startDate") String startDate,
            @ApiParam(name = "endDate", value = "结束日期")
            @RequestParam(name = "endDate") String endDate,
            @ApiParam(name = "orgCode", value = "医院代码")
            @RequestParam(name = "orgCode", required = false) String orgCode,
            // FIX: primitive int with required=false failed binding when the parameter
            // was omitted; supply the documented defaults at the binding level.
            @ApiParam(name = "size", value = "分页大小", defaultValue = "15")
            @RequestParam(value = "size", required = false, defaultValue = "15") int size,
            @ApiParam(name = "page", value = "页码", defaultValue = "1")
            @RequestParam(value = "page", required = false, defaultValue = "1") int page) throws Exception {
        return packQcReportService.resourceSuccess(startDate, endDate, orgCode, size, page);
    }

    /**
     * 获取接收档案数据 — received-archive statistics for the date range.
     */
    @RequestMapping(value = ServiceApi.PackQcReport.archiveReport, method = RequestMethod.GET)
    @ApiOperation(value = "获取接收档案数据")
    public Envelop archiveReport(
            @ApiParam(name = "startDate", value = "开始日期")
            @RequestParam(name = "startDate") String startDate,
            @ApiParam(name = "endDate", value = "结束日期")
            @RequestParam(name = "endDate") String endDate,
            @ApiParam(name = "orgCode", value = "医院代码")
            @RequestParam(name = "orgCode", required = false) String orgCode) throws Exception {
        return packQcReportService.archiveReport(startDate, endDate, orgCode);
    }

    /**
     * 获取接收数据集列表 — received dataset list for the date range.
     */
    @RequestMapping(value = ServiceApi.PackQcReport.dataSetList, method = RequestMethod.GET)
    @ApiOperation(value = "获取接收数据集列表")
    public Envelop dataSetList(
            @ApiParam(name = "startDate", value = "开始日期")
            @RequestParam(name = "startDate") String startDate,
            @ApiParam(name = "endDate", value = "结束日期")
            @RequestParam(name = "endDate") String endDate,
            @ApiParam(name = "orgCode", value = "医院代码")
            @RequestParam(name = "orgCode", required = false) String orgCode) throws Exception {
        return packQcReportService.dataSetList(startDate, endDate, orgCode);
    }

    /**
     * 获取接收数据集列表(分页) — paged variant of {@link #dataSetList}.
     */
    @RequestMapping(value = ServiceApi.PackQcReport.dataSetListPage, method = RequestMethod.GET)
    @ApiOperation(value = "获取接收数据集列表")
    public Envelop dataSetListPage(
            @ApiParam(name = "startDate", value = "开始日期")
            @RequestParam(name = "startDate") String startDate,
            @ApiParam(name = "endDate", value = "结束日期")
            @RequestParam(name = "endDate") String endDate,
            @ApiParam(name = "orgCode", value = "医院代码")
            @RequestParam(name = "orgCode", required = false) String orgCode,
            @ApiParam(name = "size", value = "分页大小", defaultValue = "15")
            @RequestParam(value = "size", required = false, defaultValue = "15") int size,
            @ApiParam(name = "page", value = "页码", defaultValue = "1")
            @RequestParam(value = "page", required = false, defaultValue = "1") int page) throws Exception {
        return packQcReportService.getDataSetListPage(startDate, endDate, orgCode, size, page);
    }

    /**
     * 获取资源化解析失败 — packages whose resourcification failed, in the date range.
     */
    @RequestMapping(value = ServiceApi.PackQcReport.archiveFailed, method = RequestMethod.GET)
    @ApiOperation(value = "获取资源化解析失败")
    public Envelop archiveFailed(
            @ApiParam(name = "startDate", value = "开始日期")
            @RequestParam(name = "startDate") String startDate,
            @ApiParam(name = "endDate", value = "结束日期")
            @RequestParam(name = "endDate") String endDate,
            @ApiParam(name = "orgCode", value = "医院代码")
            @RequestParam(name = "orgCode", required = false) String orgCode) throws Exception {
        return packQcReportService.archiveFailed(startDate, endDate, orgCode);
    }

    /**
     * 获取资源化解析失败(分页) — paged variant of {@link #archiveFailed}.
     */
    @RequestMapping(value = ServiceApi.PackQcReport.archiveFailedPage, method = RequestMethod.GET)
    @ApiOperation(value = "获取资源化解析失败")
    public Envelop archiveFailedPage(
            @ApiParam(name = "startDate", value = "开始日期")
            @RequestParam(name = "startDate") String startDate,
            @ApiParam(name = "endDate", value = "结束日期")
            @RequestParam(name = "endDate") String endDate,
            @ApiParam(name = "orgCode", value = "医院代码")
            @RequestParam(name = "orgCode", required = false) String orgCode,
            @ApiParam(name = "size", value = "分页大小", defaultValue = "15")
            @RequestParam(value = "size", required = false, defaultValue = "15") int size,
            @ApiParam(name = "page", value = "页码", defaultValue = "1")
            @RequestParam(value = "page", required = false, defaultValue = "1") int page) throws Exception {
        return packQcReportService.archiveFailed(startDate, endDate, orgCode, size, page);
    }

    /**
     * 获取解析异常 — parse-exception statistics for one processing step.
     */
    @RequestMapping(value = ServiceApi.PackQcReport.metadataError, method = RequestMethod.GET)
    @ApiOperation(value = "获取解析异常")
    public Envelop metadataError(
            @ApiParam(name = "step", value = "异常环节")
            @RequestParam(name = "step") String step,
            @ApiParam(name = "startDate", value = "开始日期")
            @RequestParam(name = "startDate") String startDate,
            @ApiParam(name = "endDate", value = "结束日期")
            @RequestParam(name = "endDate") String endDate,
            @ApiParam(name = "orgCode", value = "医院代码")
            @RequestParam(name = "orgCode", required = false) String orgCode) throws Exception {
        return packQcReportService.metadataError(step, startDate, endDate, orgCode);
    }

    /**
     * 获取解析异常(分页) — paged variant of {@link #metadataError}.
     */
    @RequestMapping(value = ServiceApi.PackQcReport.metadataErrorPage, method = RequestMethod.GET)
    @ApiOperation(value = "获取解析异常")
    public Envelop metadataErrorPage(
            @ApiParam(name = "step", value = "异常环节")
            @RequestParam(name = "step") String step,
            @ApiParam(name = "startDate", value = "开始日期")
            @RequestParam(name = "startDate") String startDate,
            @ApiParam(name = "endDate", value = "结束日期")
            @RequestParam(name = "endDate") String endDate,
            @ApiParam(name = "orgCode", value = "医院代码")
            @RequestParam(name = "orgCode", required = false) String orgCode,
            @ApiParam(name = "size", value = "分页大小", defaultValue = "15")
            @RequestParam(value = "size", required = false, defaultValue = "15") int size,
            @ApiParam(name = "page", value = "页码", defaultValue = "1")
            @RequestParam(value = "page", required = false, defaultValue = "1") int page) throws Exception {
        return packQcReportService.metadataError(step, startDate, endDate, orgCode, size, page);
    }

    /**
     * 解析失败问题查询 — lists packages whose analyze or archive step failed
     * (status 2), with optional extra filters.
     */
    @RequestMapping(value = ServiceApi.PackQcReport.analyzeErrorList, method = RequestMethod.GET)
    @ApiOperation(value = "解析失败问题查询")
    public Envelop analyzeErrorList(
            @ApiParam(name = "filters", value = "过滤")
            @RequestParam(value = "filters", required = false) String filters,
            @ApiParam(name = "sorts", value = "排序")
            @RequestParam(value = "sorts", required = false) String sorts,
            @ApiParam(name = "page", value = "页码", required = true, defaultValue = "1")
            @RequestParam(value = "page") int page,
            @ApiParam(name = "size", value = "分页大小", required = true, defaultValue = "15")
            @RequestParam(value = "size") int size) throws Exception {
        // Always restrict to failed analyze/archive status; user filters are appended.
        if (StringUtils.isNotEmpty(filters)) {
            filters = "analyze_status=2||archive_status=2;" + filters;
        } else {
            filters = "analyze_status=2||archive_status=2";
        }
        Page<Map<String, Object>> result = packQcReportService.analyzeErrorList(filters, sorts, page, size);
        return getPageResult(result.getContent(), (int) result.getTotalElements(), page, size);
    }

    /**
     * 异常数据元列表 — paged list of metadata (data-element) errors.
     */
    @RequestMapping(value = ServiceApi.PackQcReport.metadataErrorList, method = RequestMethod.GET)
    @ApiOperation(value = "异常数据元列表")
    public Envelop metadataErrorList(
            @ApiParam(name = "filters", value = "过滤")
            @RequestParam(value = "filters", required = false) String filters,
            @ApiParam(name = "sorts", value = "排序")
            @RequestParam(value = "sorts", required = false) String sorts,
            @ApiParam(name = "page", value = "页码", required = true, defaultValue = "1")
            @RequestParam(value = "page") int page,
            @ApiParam(name = "size", value = "分页大小", required = true, defaultValue = "15")
            @RequestParam(value = "size") int size) throws Exception {
        Page<Map<String, Object>> result = packQcReportService.metadataErrorList(filters, sorts, page, size);
        return getPageResult(result.getContent(), (int) result.getTotalElements(), page, size);
    }

    /**
     * 异常数据元详情 — detail view for one metadata error by id.
     */
    @RequestMapping(value = ServiceApi.PackQcReport.metadataErrorDetail, method = RequestMethod.GET)
    @ApiOperation(value = "异常数据元详情")
    public Envelop metadataErrorDetail(
            @ApiParam(name = "id", value = "主键", required = true)
            @RequestParam(value = "id") String id) throws Exception {
        return packQcReportService.metadataErrorDetail(id);
    }

    /**
     * 档案包列表 — paged list of archive packages.
     */
    @RequestMapping(value = ServiceApi.PackQcReport.archiveList, method = RequestMethod.GET)
    @ApiOperation(value = "档案包列表")
    public Envelop archiveList(
            @ApiParam(name = "filters", value = "过滤")
            @RequestParam(value = "filters", required = false) String filters,
            @ApiParam(name = "sorts", value = "排序")
            @RequestParam(value = "sorts", required = false) String sorts,
            @ApiParam(name = "page", value = "页码", required = true, defaultValue = "1")
            @RequestParam(value = "page") int page,
            @ApiParam(name = "size", value = "分页大小", required = true, defaultValue = "15")
            @RequestParam(value = "size") int size) throws Exception {
        // Unlike analyzeErrorList, no implicit archive_status filter is applied here;
        // the caller controls the full filter expression.
        Page<Map<String, Object>> result = packQcReportService.archiveList(filters, sorts, page, size);
        return getPageResult(result.getContent(), (int) result.getTotalElements(), page, size);
    }

    /**
     * 档案详情 — detail view for one archive package by id.
     */
    @RequestMapping(value = ServiceApi.PackQcReport.archiveDetail, method = RequestMethod.GET)
    @ApiOperation(value = "档案详情")
    public Envelop archiveDetail(
            @ApiParam(name = "id", value = "主键", required = true)
            @RequestParam(value = "id") String id) throws Exception {
        return packQcReportService.archiveDetail(id);
    }

    /**
     * 上传记录列表 — paged list of upload records.
     */
    @RequestMapping(value = ServiceApi.PackQcReport.uploadRecordList, method = RequestMethod.GET)
    @ApiOperation(value = "上传记录列表")
    public Envelop uploadRecordList(
            @ApiParam(name = "filters", value = "过滤")
            @RequestParam(value = "filters", required = false) String filters,
            @ApiParam(name = "sorts", value = "排序")
            @RequestParam(value = "sorts", required = false) String sorts,
            @ApiParam(name = "page", value = "页码", required = true, defaultValue = "1")
            @RequestParam(value = "page") int page,
            @ApiParam(name = "size", value = "分页大小", required = true, defaultValue = "15")
            @RequestParam(value = "size") int size) throws Exception {
        Page<Map<String, Object>> result = packQcReportService.uploadRecordList(filters, sorts, page, size);
        return getPageResult(result.getContent(), (int) result.getTotalElements(), page, size);
    }

    /**
     * 上传记录详情 — detail view for one upload record by id.
     */
    @RequestMapping(value = ServiceApi.PackQcReport.uploadRecordDetail, method = RequestMethod.GET)
    @ApiOperation(value = "上传记录详情")
    public Envelop uploadRecordDetail(
            @ApiParam(name = "id", value = "主键", required = true)
            @RequestParam(value = "id") String id) throws Exception {
        return packQcReportService.uploadRecordDetail(id);
    }

    /**
     * 上传数据集列表 — adapter dataset list for a scheme version, restricted to
     * entries flagged needCrawer=1.
     */
    @RequestMapping(value = "/packQcReport/adapterDatasetList", method = RequestMethod.GET)
    @ApiOperation(value = "上传数据集列表")
    public Envelop adapterDatasetList(
            @ApiParam(name = "version", value = "版本号")
            @RequestParam(value = "version", required = true) String version,
            @ApiParam(name = "filters", value = "过滤")
            @RequestParam(value = "filters", required = false) String filters,
            @ApiParam(name = "sorts", value = "排序")
            @RequestParam(value = "sorts", required = false) String sorts,
            @ApiParam(name = "page", value = "页码", required = true, defaultValue = "1")
            @RequestParam(value = "page") int page,
            @ApiParam(name = "size", value = "分页大小", required = true, defaultValue = "15")
            @RequestParam(value = "size") int size) {
        String effectiveFilters = withNeedCrawerFilter(filters);
        ResponseEntity<Collection<AdapterDatasetModel>> res =
                hosAdminServiceClient.adapterDatasetList(version, null, effectiveFilters, sorts, page, size);
        List<AdapterDatasetModel> list = (List<AdapterDatasetModel>) res.getBody();
        int totalCount = getTotalCount(res);
        return getPageResult(list, totalCount, page, size);
    }

    /**
     * 上传数据元列表 — adapter metadata list for a scheme version, restricted to
     * entries flagged needCrawer=1.
     */
    @RequestMapping(value = "/packQcReport/adapterMetadataList", method = RequestMethod.GET)
    @ApiOperation(value = "上传数据元列表")
    public Envelop adapterMetadataList(
            @ApiParam(name = "version", value = "版本号")
            @RequestParam(value = "version", required = true) String version,
            @ApiParam(name = "filters", value = "过滤")
            @RequestParam(value = "filters", required = false) String filters,
            @ApiParam(name = "sorts", value = "排序")
            @RequestParam(value = "sorts", required = false) String sorts,
            @ApiParam(name = "page", value = "页码", required = true, defaultValue = "1")
            @RequestParam(value = "page") int page,
            @ApiParam(name = "size", value = "分页大小", required = true, defaultValue = "15")
            @RequestParam(value = "size") int size) {
        String effectiveFilters = withNeedCrawerFilter(filters);
        ResponseEntity<Collection<AdapterMetadataModel>> res =
                hosAdminServiceClient.adapterMetadataList(version, null, effectiveFilters, sorts, page, size);
        List<AdapterMetadataModel> list = (List<AdapterMetadataModel>) res.getBody();
        int totalCount = getTotalCount(res);
        return getPageResult(list, totalCount, page, size);
    }

    /**
     * Prepends the mandatory "needCrawer=1" predicate to a user-supplied filter
     * expression (shared by the two adapter list endpoints above).
     */
    private String withNeedCrawerFilter(String filters) {
        if (StringUtils.isNotEmpty(filters)) {
            return "needCrawer=1;" + filters;
        }
        return "needCrawer=1;";
    }

    /**
     * 抽取数据集 — dataset extraction detail for one day.
     */
    @RequestMapping(value = "/packQcReport/datasetDetail", method = RequestMethod.GET)
    @ApiOperation(value = "抽取数据集")
    public Envelop datasetDetail(
            @ApiParam(name = "date", value = "日期", required = true)
            @RequestParam(value = "date") String date) throws Exception {
        return packQcReportService.datasetDetail(date);
    }

    /**
     * 设置抽取时间 — stores the extraction start date in redis under "start_date".
     */
    @RequestMapping(value = "/packQcReport/setStartTime", method = RequestMethod.GET)
    @ApiOperation(value = "设置抽取时间")
    public Envelop setStartTime(
            @ApiParam(name = "date", value = "日期", required = true)
            @RequestParam(value = "date") String date) throws Exception {
        Envelop envelop = new Envelop();
        redisClient.set("start_date", date);
        envelop.setSuccessFlg(true);
        return envelop;
    }

    /**
     * 获取档案信息 — profile info for visits in the date range, optionally per org.
     * (FIX: swagger value was a copy-paste of setStartTime's "设置抽取时间".)
     */
    @RequestMapping(value = "/packQcReport/getProfileInfo", method = RequestMethod.GET)
    @ApiOperation(value = "获取档案信息")
    public Envelop getProfileInfo(@ApiParam(name = "startDate", value = "就诊开始日期")
                                  @RequestParam(name = "startDate") String startDate,
                                  @ApiParam(name = "endDate", value = "就诊结束日期")
                                  @RequestParam(name = "endDate") String endDate,
                                  @ApiParam(name = "orgCode", value = "医院代码")
                                  @RequestParam(name = "orgCode", required = false) String orgCode) throws Exception {
        List<MProfileInfo> profileInfo = packQcReportService.getProfileInfo(startDate, endDate, orgCode);
        Envelop envelop = new Envelop();
        envelop.setSuccessFlg(true);
        envelop.setDetailModelList(profileInfo);
        return envelop;
    }
}

+ 153 - 0
src/main/java/com/yihu/ehr/analyze/controller/PackStatisticsEndPoint.java

@@ -0,0 +1,153 @@
package com.yihu.ehr.analyze.controller;
import com.yihu.ehr.analyze.service.pack.PackQcReportService;
import com.yihu.ehr.analyze.service.pack.PackStatisticsService;
import com.yihu.ehr.constants.ApiVersion;
import com.yihu.ehr.constants.ServiceApi;
import com.yihu.ehr.controller.EnvelopRestEndPoint;
import com.yihu.ehr.util.datetime.DateUtil;
import com.yihu.ehr.util.rest.Envelop;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import java.util.*;
/**
 * 质控报表
 *
 * @author zhengwei
 * @created 2018.04.24
 */
@RestController
@RequestMapping(ApiVersion.Version1_0)
@Api(value = "PackStatisticsEndPoint", description = "档案分析服务", tags = {"档案分析服务-质量监控报表"})
public class PackStatisticsEndPoint extends EnvelopRestEndPoint {

    // Milliseconds per calendar day, as a long literal (replaces the inline
    // 1000 * 3600 * 24 magic expression in getArchiveReportAll).
    private static final long MILLIS_PER_DAY = 1000L * 3600 * 24;

    @Autowired
    private PackStatisticsService statisticService;
    @Autowired
    private PackQcReportService packQcReportService;

    /**
     * 获取一段时间内数据解析情况 — per-day archive parsing counts over the inclusive
     * range [startDate, endDate]; one map entry per day.
     */
    @RequestMapping(value = ServiceApi.StasticReport.GetArchiveReportAll, method = RequestMethod.GET)
    @ApiOperation(value = "获取一段时间内数据解析情况")
    public Envelop getArchiveReportAll(
            @ApiParam(name = "startDate", value = "开始日期")
            @RequestParam(name = "startDate") String startDate,
            @ApiParam(name = "endDate", value = "结束日期")
            @RequestParam(name = "endDate") String endDate,
            @ApiParam(name = "orgCode", value = "医院代码")
            @RequestParam(name = "orgCode", required = false) String orgCode) throws Exception {
        Envelop envelop = new Envelop();
        Date start = DateUtil.formatCharDateYMD(startDate);
        Date end = DateUtil.formatCharDateYMD(endDate);
        // Inclusive day count of the range.
        // NOTE(review): millisecond division ignores DST transitions — confirm the
        // dates are parsed in a fixed-offset zone before relying on exact counts.
        int day = (int) ((end.getTime() - start.getTime()) / MILLIS_PER_DAY) + 1;
        List<Map<String, List<Map<String, Object>>>> res = new ArrayList<>(Math.max(day, 0));
        for (int i = 0; i < day; i++) {
            Date date = DateUtil.addDate(i, start);
            Map<String, List<Map<String, Object>>> map = new HashMap<>();
            List<Map<String, Object>> list = statisticService.getArchivesCount(DateUtil.toString(date), orgCode);
            map.put(DateUtil.toString(date), list);
            res.add(map);
        }
        envelop.setSuccessFlg(true);
        envelop.setDetailModelList(res);
        return envelop;
    }

    /**
     * 根据接收日期统计各个医院的数据解析情况 — per-hospital parsing stats for one receive date.
     */
    @RequestMapping(value = ServiceApi.StasticReport.GetRecieveOrgCount, method = RequestMethod.GET)
    @ApiOperation(value = "根据接收日期统计各个医院的数据解析情况")
    public Envelop getRecieveOrgCount(
            @ApiParam(name = "date", value = "日期")
            @RequestParam(name = "date") String date) throws Exception {
        Envelop envelop = new Envelop();
        List<Map<String, Object>> list = statisticService.getRecieveOrgCount(date);
        envelop.setSuccessFlg(true);
        envelop.setDetailModelList(list);
        return envelop;
    }

    /**
     * 获取某天数据新增情况 — data growth for one day, optionally per org.
     */
    @RequestMapping(value = ServiceApi.StasticReport.GetArchivesInc, method = RequestMethod.GET)
    @ApiOperation(value = "获取某天数据新增情况")
    public Envelop getArchivesInc(
            @ApiParam(name = "date", value = "日期")
            @RequestParam(name = "date") String date,
            @ApiParam(name = "orgCode", value = "医院代码")
            @RequestParam(name = "orgCode", required = false) String orgCode) throws Exception {
        Envelop envelop = new Envelop();
        List<Map<String, Object>> list = statisticService.getArchivesInc(date, orgCode);
        envelop.setSuccessFlg(true);
        envelop.setDetailModelList(list);
        return envelop;
    }

    /**
     * 完整性分析 — completeness analysis over a date range.
     */
    @RequestMapping(value = ServiceApi.StasticReport.GetArchivesFull, method = RequestMethod.GET)
    @ApiOperation(value = "完整性分析")
    public Envelop getArchivesFull(
            @ApiParam(name = "startDate", value = "开始日期")
            @RequestParam(name = "startDate") String startDate,
            @ApiParam(name = "endDate", value = "结束日期")
            @RequestParam(name = "endDate") String endDate,
            @ApiParam(name = "orgCode", value = "医院代码")
            @RequestParam(name = "orgCode", required = false) String orgCode) throws Exception {
        return statisticService.getArchivesFull(startDate, endDate, orgCode);
    }

    /**
     * 及时性分析 — timeliness analysis over a date range.
     */
    @RequestMapping(value = ServiceApi.StasticReport.GetArchivesTime, method = RequestMethod.GET)
    @ApiOperation(value = "及时性分析")
    public Envelop getArchivesTime(
            @ApiParam(name = "startDate", value = "开始日期")
            @RequestParam(name = "startDate") String startDate,
            @ApiParam(name = "endDate", value = "结束日期")
            @RequestParam(name = "endDate") String endDate,
            @ApiParam(name = "orgCode", value = "医院代码")
            @RequestParam(name = "orgCode", required = false) String orgCode) throws Exception {
        return statisticService.getArchivesTime(startDate, endDate, orgCode);
    }

    /**
     * 获取数据集数量 — dataset counts for one day (delegates to the QC report's
     * dataset list with start == end == date).
     */
    @RequestMapping(value = ServiceApi.StasticReport.GetDataSetCount, method = RequestMethod.GET)
    @ApiOperation(value = "获取数据集数量")
    public Envelop getDataSetCount(
            @ApiParam(name = "date", value = "日期")
            @RequestParam(name = "date") String date,
            @ApiParam(name = "orgCode", value = "医院代码")
            @RequestParam(name = "orgCode", required = false) String orgCode) throws Exception {
        return packQcReportService.dataSetList(date, date, orgCode);
    }

    /**
     * 准确性分析 — accuracy analysis over a date range.
     */
    @RequestMapping(value = ServiceApi.StasticReport.GetArchivesRight, method = RequestMethod.GET)
    @ApiOperation(value = "准确性分析")
    public Envelop getArchivesRight(
            @ApiParam(name = "startDate", value = "开始日期")
            @RequestParam(name = "startDate") String startDate,
            @ApiParam(name = "endDate", value = "结束日期")
            @RequestParam(name = "endDate") String endDate,
            @ApiParam(name = "orgCode", value = "医院代码")
            @RequestParam(name = "orgCode", required = false) String orgCode) throws Exception {
        return statisticService.getArchivesRight(startDate, endDate, orgCode);
    }

    /**
     * app接口 — per-day statistics for the mobile app.
     */
    @RequestMapping(value = ServiceApi.StasticReport.GetStasticByDay, method = RequestMethod.GET)
    @ApiOperation(value = "app接口")
    public Envelop getStasticByDay(
            @ApiParam(name = "date", value = "日期")
            @RequestParam(name = "date") String date) throws Exception {
        return statisticService.getStasticByDay(date);
    }

    /**
     * 获取采集数据 — received-package counts over a date range.
     */
    @RequestMapping(value = "/stasticReport/getReceiveNum", method = RequestMethod.GET)
    @ApiOperation(value = "获取采集数据")
    public Envelop getReceiveNum(
            @ApiParam(name = "startDate", value = "开始日期")
            @RequestParam(name = "startDate") String startDate,
            @ApiParam(name = "endDate", value = "结束日期")
            @RequestParam(name = "endDate") String endDate) throws Exception {
        return statisticService.getReceiveNum(startDate, endDate);
    }
}

+ 61 - 0
src/main/java/com/yihu/ehr/analyze/controller/SchedulerEndPoint.java

@ -0,0 +1,61 @@
package com.yihu.ehr.analyze.controller;
import com.yihu.ehr.analyze.service.scheduler.SchedulerService;
import com.yihu.ehr.constants.ApiVersion;
import com.yihu.ehr.constants.ServiceApi;
import com.yihu.ehr.controller.EnvelopRestEndPoint;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
/**
 * REST endpoints that control the archive-analysis job scheduler:
 * querying the current job count, adding/removing jobs, and pausing or
 * resuming the scheduler. All work is delegated to {@link SchedulerService}.
 *
 * @author Airhead
 * @version 1.0
 * @created 2016.01.18
 */
@RestController
@RequestMapping(value = ApiVersion.Version1_0, produces = MediaType.APPLICATION_JSON_UTF8_VALUE)
@Api(value = "SchedulerEndPoint", description = "档案分析任务", tags = {"档案分析服务-档案分析任务"})
public class SchedulerEndPoint extends EnvelopRestEndPoint {

    /** Service that owns the underlying job scheduling state. */
    @Autowired
    private SchedulerService schedulerService;

    /** Returns the number of analysis jobs currently registered. */
    @RequestMapping(value = ServiceApi.PackageAnalyzer.Scheduler, method = RequestMethod.GET)
    @ApiOperation(value = "获取当前任务数量")
    public ResponseEntity<Integer> count() {
        return schedulerService.count();
    }

    /** Adjusts the number of jobs; the response carries the system's limit. */
    @RequestMapping(value = ServiceApi.PackageAnalyzer.Scheduler, method = RequestMethod.POST)
    @ApiOperation(value = "调整当前任务数量,返回当前系统最大任务限制数")
    public ResponseEntity<Integer> addJob(
            @ApiParam(name = "count", value = "任务数量(不要超过系统设定值)", required = true, defaultValue = "4")
            @RequestParam(value = "count") int count,
            @ApiParam(name = "cronExp", value = "触发器CRON表达式", required = true, defaultValue = "0/4 * * * * ?")
            @RequestParam(value = "cronExp") String cronExp) {
        ResponseEntity<Integer> response = schedulerService.addJob(count, cronExp);
        return response;
    }

    /** Pauses (pause=true) or resumes (pause=false) the scheduler. */
    @RequestMapping(value = ServiceApi.PackageAnalyzer.Scheduler, method = RequestMethod.PUT)
    @ApiOperation(value = "设置任务调度器状态")
    public ResponseEntity<String> updateScheduler(
            @ApiParam(name = "pause", value = "true:暂停 , false:执行", required = true, defaultValue = "true")
            @RequestParam(value = "pause") boolean pause) {
        ResponseEntity<String> response = schedulerService.updateScheduler(pause);
        return response;
    }

    /** Removes the given number of analysis jobs. */
    @RequestMapping(value = ServiceApi.PackageAnalyzer.Scheduler, method = RequestMethod.DELETE)
    @ApiOperation(value = "删除解析任务")
    public ResponseEntity<String> removeJob(
            @ApiParam(name = "count", value = "任务数量", required = true, defaultValue = "4")
            @RequestParam(value = "count") int count) {
        ResponseEntity<String> response = schedulerService.removeJob(count);
        return response;
    }
}

+ 310 - 0
src/main/java/com/yihu/ehr/analyze/controller/dataQuality/DataQualityStatisticsEndPoint.java

@ -0,0 +1,310 @@
package com.yihu.ehr.analyze.controller.dataQuality;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.yihu.ehr.analyze.service.dataQuality.DataQualityStatisticsService;
import com.yihu.ehr.analyze.service.dataQuality.WarningProblemService;
import com.yihu.ehr.constants.ApiVersion;
import com.yihu.ehr.constants.ServiceApi;
import com.yihu.ehr.controller.EnvelopRestEndPoint;
import com.yihu.ehr.util.datetime.DateTimeUtil;
import com.yihu.ehr.util.rest.Envelop;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import org.apache.commons.collections.map.HashedMap;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.http.MediaType;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import java.util.*;
/**
 * Data-quality statistics endpoints: upload/receive monitoring, archive
 * package counts, per-organization report data and paged upload statistics.
 *
 * @author yeshijie on 2018/6/1.
 */
@RestController
@RequestMapping(value = ApiVersion.Version1_0, produces = MediaType.APPLICATION_JSON_UTF8_VALUE)
@Api(value = "DataQualityStatisticsEndPoint", description = "质控-统计", tags = {"档案分析服务-质控-统计"})
public class DataQualityStatisticsEndPoint extends EnvelopRestEndPoint {
    // Default organization code for quality statistics, injected from config.
    @Value("${quality.orgCode}")
    private String defaultQualityOrgCode;
    // Org code representing the whole-platform ("cloud") aggregate — TODO confirm.
    @Value("${quality.cloud}")
    private String defaultCloud;
    @Autowired
    private DataQualityStatisticsService dataQualityStatisticsService;
    @Autowired
    private WarningProblemService warningProblemService;
    // Quality monitoring — province platform upload statistics.
    // NOTE(review): "Paltform" in the method name is a typo of "Platform".
    @RequestMapping(value = ServiceApi.DataQuality.QualityMonitorProvincePlatform, method = RequestMethod.GET)
    @ApiOperation(value = "质量监控查询--省平台上传")
    public Envelop provincePaltformUpload(
            @ApiParam(name = "start", value = "开始时间")
            @RequestParam(value = "start", required = false) String start,
            @ApiParam(name = "end", value = "结束时间", defaultValue = "")
            @RequestParam(value = "end", required = false) String end,
            @ApiParam(name = "toPlatForm", value = "上传目标平台", defaultValue = "")
            @RequestParam(value = "toPlatForm", required = false) String toPlatForm) throws Exception {
        Envelop envelop = new Envelop();
        try {
            return success(dataQualityStatisticsService.findUploadStatistics(start,end,toPlatForm));
        }catch (Exception e){
            // On failure, return an unsuccessful Envelop instead of propagating.
            // NOTE(review): prefer a logger over printStackTrace.
            e.printStackTrace();
            envelop.setSuccessFlg(false);
            envelop.setErrorMsg(e.getMessage());
        }
        return envelop;
    }
    // Quality monitoring — platform-receive (dataset) statistics for a range
    // and optional visit type (0 outpatient, 1 inpatient, 2 physical exam).
    @RequestMapping(value = ServiceApi.DataQuality.QualityMonitoringList, method = RequestMethod.GET)
    @ApiOperation(value = "质量监控查询--平台接收")
    public Envelop qualityMonitoringList(
            @ApiParam(name = "start", value = "开始时间")
            @RequestParam(value = "start", required = false) String start,
            @ApiParam(name = "end", value = "结束时间", defaultValue = "")
            @RequestParam(value = "end", required = false) String end,
            @ApiParam(name = "eventType", value = "就诊类型 0门诊 1住院 2体检,null全部", defaultValue = "")
            @RequestParam(value = "eventType", required = false) Integer eventType) throws Exception {
        Envelop envelop = new Envelop();
        try {
            return success(dataQualityStatisticsService.dataset(start, end, eventType));
        }catch (Exception e){
            e.printStackTrace();
            envelop.setSuccessFlg(false);
            envelop.setErrorMsg(e.getMessage());
        }
        return envelop;
    }
    // Quality monitoring — reception status (timeliness and integrity rates).
    @RequestMapping(value = ServiceApi.DataQuality.ReceptionList, method = RequestMethod.GET)
    @ApiOperation(value = "质量监控查询--接收情况")
    public Envelop receptionList(
            @ApiParam(name = "start", value = "开始时间")
            @RequestParam(value = "start", required = false) String start,
            @ApiParam(name = "end", value = "结束时间", defaultValue = "")
            @RequestParam(value = "end", required = false) String end) throws Exception {
        Envelop envelop = new Envelop();
        try {
            return success(dataQualityStatisticsService.inTimeAndIntegrityRate(start,end));
        }catch (Exception e){
            e.printStackTrace();
            envelop.setSuccessFlg(false);
            envelop.setErrorMsg(e.getMessage());
        }
        return envelop;
    }
    // Paged list of archive-package counts collected in time / completely.
    // NOTE(review): the "type" parameter is accepted but never forwarded to the
    // service call below — confirm whether type filtering is missing. Also
    // @ApiParam declares required=false for "type" while @RequestParam defaults
    // to required=true, so omitting it rejects the request — TODO confirm intent.
    @RequestMapping(value = ServiceApi.DataQuality.ReceivedPacketNumList, method = RequestMethod.GET)
    @ApiOperation(value = "及时/完整采集的档案包数量集合")
    public Envelop receivedPacketNumList(
            @ApiParam(name = "pageIndex", value = "第几页", required = true)
            @RequestParam(name = "pageIndex") Integer pageIndex,
            @ApiParam(name = "pageSize", value = "每页数", required = true)
            @RequestParam(name = "pageSize") Integer pageSize,
            @ApiParam(name = "type", value = "类型,1及时率,2完整率", required = false)
            @RequestParam(name = "type") String type,
            @ApiParam(name = "orgCode", value = "机构编码", required = true)
            @RequestParam(name = "orgCode") String orgCode,
            @ApiParam(name = "eventDateStart", value = "就诊时间(起始),格式 yyyy-MM-dd", required = true)
            @RequestParam(name = "eventDateStart") String eventDateStart,
            @ApiParam(name = "eventDateEnd", value = "就诊时间(截止),格式 yyyy-MM-dd", required = true)
            @RequestParam(name = "eventDateEnd") String eventDateEnd,
            @ApiParam(name = "eventType", value = "就诊类型,0门诊、1住院、2体检,不传则查全部就诊类型的")
            @RequestParam(name = "eventType", required = false) Integer eventType) {
        Envelop envelop = new Envelop();
        try {
            Map<String,Object> re = dataQualityStatisticsService.receivedPacketNumList(pageIndex, pageSize, orgCode, eventDateStart, eventDateEnd, eventType);
            // Service returns "list" (page rows) and "count" (total row count).
            List<Map<String, Object>> resultList = (List<Map<String, Object>>)re.get("list");
            int count = Integer.valueOf(re.get("count").toString());
            return getPageResult(resultList, count, pageIndex, pageSize);
        }catch (Exception e){
            e.printStackTrace();
            envelop.setSuccessFlg(false);
            envelop.setErrorMsg(e.getMessage());
        }
        return envelop;
    }
    // Report data for archive-package reception across the given organizations.
    @RequestMapping(value = ServiceApi.DataQuality.ReceivedPacketReportData, method = RequestMethod.GET)
    @ApiOperation(value = "档案包接收情况报告数据接口")
    public Envelop receivedPacketReportData(
            @ApiParam(name = "reporter", value = "报告人", required = true)
            @RequestParam(name = "reporter") String reporter,
            @ApiParam(name = "orgInfoList", value = "机构编码、名称,例:[{\"orgName\":\"xx\",\"orgCode\":\"jkzl\"}]。", required = true)
            @RequestParam(name = "orgInfoList") String orgInfoList,
            @ApiParam(name = "eventDateStart", value = "就诊时间(起始),格式 yyyy-MM-dd", required = true)
            @RequestParam(name = "eventDateStart") String eventDateStart,
            @ApiParam(name = "eventDateEnd", value = "就诊时间(截止),格式 yyyy-MM-dd", required = true)
            @RequestParam(name = "eventDateEnd") String eventDateEnd) throws Exception {
        Envelop envelop = new Envelop();
        try{
            // Widen the day-granular inputs to full-day datetime bounds.
            eventDateStart = eventDateStart + " 00:00:00";
            eventDateEnd = eventDateEnd + " 23:59:59";
            Map<String, Object> resultMap = new HashMap<>();
            // Parse the JSON array of {orgCode, orgName} pairs into maps.
            JSONArray jsonArray = JSON.parseArray(orgInfoList);
            List<Map<String, String>> list = new ArrayList<>();
            for(int i=0;i<jsonArray.size();i++){
                JSONObject json = jsonArray.getJSONObject(i);
                Map<String, String> map = new HashedMap();
                map.put("orgCode",json.getString("orgCode"));
                map.put("orgName",json.getString("orgName"));
                list.add(map);
            }
            // Total number of received archive packages.
            Long receivedCount = dataQualityStatisticsService.packetCount(list, null, eventDateStart, eventDateEnd);
            // Total number of successfully parsed archive packages ("3" = parsed).
            Long successfulAnalysisCount = dataQualityStatisticsService.packetCount(list, "3", eventDateStart, eventDateEnd);
            // Per-organization archive package report summary.
            List<Map<String, Object>> orgPackReportDataList = dataQualityStatisticsService.orgPackReportData(list, eventDateStart, eventDateEnd);
            // NOTE(review): the dates already carry the appended " 00:00:00" /
            // " 23:59:59" suffixes here, so the range label includes times — confirm.
            resultMap.put("searchedDateRange", eventDateStart.replace("-", "") + "-" + eventDateEnd.replace("-", ""));
            resultMap.put("reportDate", DateTimeUtil.simpleDateFormat(new Date()));
            resultMap.put("reporter", reporter);
            resultMap.put("receivedCount", receivedCount);
            resultMap.put("successfulAnalysisCount", successfulAnalysisCount);
            resultMap.put("orgPackReportDataList", orgPackReportDataList);
            return success(resultMap);
        }catch (Exception e){
            e.printStackTrace();
            envelop.setSuccessFlg(false);
            envelop.setErrorMsg(e.getMessage());
        }
        return envelop;
    }
    // Paged dataset-reception view for one organization and one day.
    // NOTE(review): pagination is done in memory over the full result list;
    // fine for small lists, revisit if the dataset list grows large.
    @RequestMapping(value = ServiceApi.DataQuality.ReceiveDataset, method = RequestMethod.GET)
    @ApiOperation(value = "档案包接收情况报告数据接口")
    public Envelop receiveDataset(
            @ApiParam(name = "orgCode", value = "机构编码", required = true)
            @RequestParam(name = "orgCode") String orgCode,
            @ApiParam(name = "date", value = "时间 - 精确到天yyyy-MM-dd", required = true)
            @RequestParam(name = "date") String date,
            @ApiParam(name = "page", value = "页码", required = true)
            @RequestParam(name = "page") Integer page,
            @ApiParam(name = "size", value = "页数", required = true)
            @RequestParam(name = "size") Integer size) throws Exception {
        List<Map<String, Object>> list = warningProblemService.receiveDataset(orgCode, date);
        Envelop envelop = new Envelop();
        envelop.setSuccessFlg(true);
        envelop.setCurrPage(page);
        envelop.setPageSize(size);
        // Ceiling division: one extra page when the last page is partial.
        envelop.setTotalPage(list.size() % size > 0 ? list.size() / size + 1 : list.size() / size);
        envelop.setTotalCount(list.size());
        // Slice the in-memory list for the requested page (1-based page index).
        List result = new ArrayList();
        for (int i = (page - 1) * size; i < page * size; i ++) {
            if (i > list.size() - 1) {
                break;
            }
            result.add(list.get(i));
        }
        envelop.setDetailModelList(result);
        return envelop;
    }
    // Upload statistics — successful archive uploads (unpaged).
    @RequestMapping(value = ServiceApi.DataQuality.uploadRecordDetail, method = RequestMethod.GET)
    @ApiOperation(value = "【上传统计】-- 获取上传档案统计数据")
    public Envelop uploadRecordList(
            @ApiParam(name = "startDate", value = "开始日期")
            @RequestParam(name = "startDate") String startDate,
            @ApiParam(name = "endDate", value = "结束日期")
            @RequestParam(name = "endDate") String endDate,
            @ApiParam(name = "orgCode", value = "医院代码")
            @RequestParam(name = "orgCode", required = false) String orgCode) throws Exception {
        Envelop envelop = new Envelop();
        List<Map<String, Object>> list = dataQualityStatisticsService.getUploadSuccessList(startDate, endDate, orgCode);
        envelop.setSuccessFlg(true);
        envelop.setDetailModelList(list);
        return envelop;
    }
    // Upload statistics — successful archive uploads (paged).
    // NOTE(review): "size"/"page" are primitive int with required=false; if a
    // client omits them Spring cannot bind null to int and the request fails —
    // consider Integer or a defaultValue.
    @RequestMapping(value = ServiceApi.DataQuality.uploadRecordDetailPage, method = RequestMethod.GET)
    @ApiOperation(value = "【上传统计】-- 获取上传档案统计数据")
    public Envelop uploadRecordListPage(
            @ApiParam(name = "startDate", value = "开始日期")
            @RequestParam(name = "startDate") String startDate,
            @ApiParam(name = "endDate", value = "结束日期")
            @RequestParam(name = "endDate") String endDate,
            @ApiParam(name = "orgCode", value = "医院代码")
            @RequestParam(name = "orgCode", required = false) String orgCode,
            @ApiParam(name = "size", value = "分页大小", defaultValue = "15")
            @RequestParam(value = "size", required = false) int size,
            @ApiParam(name = "page", value = "页码", defaultValue = "1")
            @RequestParam(value = "page", required = false) int page) throws Exception {
        return dataQualityStatisticsService.getUploadSuccessListPage(startDate, endDate, orgCode,size,page);
    }
    // Upload statistics — uploaded data sets (unpaged).
    @RequestMapping(value = ServiceApi.DataQuality.UploadDataSetList, method = RequestMethod.GET)
    @ApiOperation(value = "【上传统计】-- 获取上传数据集统计数据")
    public Envelop uploadDataSetList(
            @ApiParam(name = "startDate", value = "开始日期")
            @RequestParam(name = "startDate") String startDate,
            @ApiParam(name = "endDate", value = "结束日期")
            @RequestParam(name = "endDate") String endDate,
            @ApiParam(name = "orgCode", value = "医院代码")
            @RequestParam(name = "orgCode", required = false) String orgCode) throws Exception {
        Envelop envelop = new Envelop();
        List<Map<String, Object>> list = dataQualityStatisticsService.getUploadDataSetList(startDate, endDate, orgCode);
        envelop.setSuccessFlg(true);
        envelop.setDetailModelList(list);
        return envelop;
    }
    // Upload statistics — uploaded data sets (paged).
    // NOTE(review): same primitive-int/required=false binding hazard as
    // uploadRecordListPage above.
    @RequestMapping(value = ServiceApi.DataQuality.UploadDataSetListPage, method = RequestMethod.GET)
    @ApiOperation(value = "【上传统计】-- 获取上传数据集统计数据")
    public Envelop uploadDataSetListPage(
            @ApiParam(name = "startDate", value = "开始日期")
            @RequestParam(name = "startDate") String startDate,
            @ApiParam(name = "endDate", value = "结束日期")
            @RequestParam(name = "endDate") String endDate,
            @ApiParam(name = "orgCode", value = "医院代码")
            @RequestParam(name = "orgCode", required = false) String orgCode,
            @ApiParam(name = "size", value = "分页大小", defaultValue = "15")
            @RequestParam(value = "size", required = false) int size,
            @ApiParam(name = "page", value = "页码", defaultValue = "1")
            @RequestParam(value = "page", required = false) int page) throws Exception {
        return dataQualityStatisticsService.getUploadDataSetListPage(startDate,endDate,orgCode,size,page);
    }
    // Upload statistics — failed archive uploads (unpaged).
    @RequestMapping(value = ServiceApi.DataQuality.UploadErrorList, method = RequestMethod.GET)
    @ApiOperation(value = "【上传统计】-- 获取上传失败档案统计数据")
    public Envelop uploadErrorList(
            @ApiParam(name = "startDate", value = "开始日期")
            @RequestParam(name = "startDate") String startDate,
            @ApiParam(name = "endDate", value = "结束日期")
            @RequestParam(name = "endDate") String endDate,
            @ApiParam(name = "orgCode", value = "医院代码")
            @RequestParam(name = "orgCode", required = false) String orgCode) throws Exception {
        Envelop envelop = new Envelop();
        List<Map<String, Object>> list = dataQualityStatisticsService.getUploadErrorList(startDate, endDate, orgCode);
        envelop.setSuccessFlg(true);
        envelop.setDetailModelList(list);
        return envelop;
    }
    // Upload statistics — failed archive uploads (paged).
    // NOTE(review): same primitive-int/required=false binding hazard as
    // uploadRecordListPage above.
    @RequestMapping(value = ServiceApi.DataQuality.UploadErrorListPage, method = RequestMethod.GET)
    @ApiOperation(value = "【上传统计】-- 获取上传失败档案统计数据")
    public Envelop uploadErrorListPage(
            @ApiParam(name = "startDate", value = "开始日期")
            @RequestParam(name = "startDate") String startDate,
            @ApiParam(name = "endDate", value = "结束日期")
            @RequestParam(name = "endDate") String endDate,
            @ApiParam(name = "orgCode", value = "医院代码")
            @RequestParam(name = "orgCode", required = false) String orgCode,
            @ApiParam(name = "size", value = "分页大小", defaultValue = "15")
            @RequestParam(value = "size", required = false) int size,
            @ApiParam(name = "page", value = "页码", defaultValue = "1")
            @RequestParam(value = "page", required = false) int page) throws Exception {
        return dataQualityStatisticsService.getUploadErrorListPage(startDate, endDate, orgCode,size,page);
    }
}

+ 1090 - 0
src/main/java/com/yihu/ehr/analyze/controller/dataQuality/ExportEndPoint.java

@ -0,0 +1,1090 @@
package com.yihu.ehr.analyze.controller.dataQuality;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.yihu.ehr.analyze.service.dataQuality.DataQualityStatisticsService;
import com.yihu.ehr.analyze.service.dataQuality.WarningRecordService;
import com.yihu.ehr.analyze.service.pack.PackQcReportService;
import com.yihu.ehr.constants.ApiVersion;
import com.yihu.ehr.constants.ServiceApi;
import com.yihu.ehr.controller.EnvelopRestEndPoint;
import com.yihu.ehr.elasticsearch.ElasticSearchUtil;
import com.yihu.ehr.entity.quality.DqWarningRecord;
import com.yihu.ehr.redis.client.RedisClient;
import com.yihu.ehr.util.datetime.DateTimeUtil;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import jxl.Workbook;
import jxl.format.CellFormat;
import jxl.write.*;
import org.apache.commons.collections.map.HashedMap;
import org.apache.commons.lang.ObjectUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.poi.ss.usermodel.Cell;
import org.apache.poi.ss.usermodel.Row;
import org.apache.poi.ss.usermodel.Sheet;
import org.apache.poi.xssf.streaming.SXSSFWorkbook;
import org.apache.poi.xwpf.usermodel.*;
import org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTblWidth;
import org.openxmlformats.schemas.wordprocessingml.x2006.main.STTblWidth;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.data.domain.Page;
import org.springframework.http.MediaType;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.OutputStream;
import java.math.BigInteger;
import java.sql.ResultSet;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
/**
 * 导出
 * @author yeshijie on 2018/6/13.
 */
@RestController
@RequestMapping(value = ApiVersion.Version1_0, produces = MediaType.APPLICATION_JSON_UTF8_VALUE)
@Api(value = "ExportEndPoint", description = "质控-导出", tags = {"档案分析服务-质控-导出"})
public class ExportEndPoint extends EnvelopRestEndPoint {
    @Autowired
    private WarningRecordService warningRecordService;
    @Autowired
    private PackQcReportService packQcReportService;
    @Autowired
    private ElasticSearchUtil elasticSearchUtil;
    @Autowired
    private DataQualityStatisticsService dataQualityStatisticsService;
    @Autowired
    private RedisClient redisClient;
    @Value("${quality.cloud}")
    private String defaultCloud;
    public static int maxRowSize = 60000;
    @RequestMapping(value = ServiceApi.DataQuality.ExportQualityMonitoringListToExcel, method = RequestMethod.GET)
    @ApiOperation(value = "生成报告")
    public void exportQualityMonitoringListToExcel(
            @ApiParam(name = "reporter", value = "报告人", required = true)
            @RequestParam(name = "reporter") String reporter,
            @ApiParam(name = "orgInfoList", value = "机构编码、名称,例:[{\"orgName\":\"xx\",\"orgCode\":\"jkzl\"}]。", required = true)
            @RequestParam(name = "orgInfoList") String orgInfoList,
            @ApiParam(name = "eventDateStart", value = "接收时间(起始),格式 yyyy-MM-dd", required = true)
            @RequestParam(name = "eventDateStart") String eventDateStart,
            @ApiParam(name = "eventDateEnd", value = "接收时间(截止),格式 yyyy-MM-dd", required = true)
            @RequestParam(name = "eventDateEnd") String eventDateEnd,
            HttpServletRequest request,
            HttpServletResponse response) {
        OutputStream ostream = null;
        XWPFDocument document = null;
        try{
            String title = eventDateStart.replace("-", "") + "-" + eventDateEnd.replace("-", "")+"接收数据报告";
            response.setContentType("octets/stream");
            response.setHeader("Content-Disposition", "attachment; filename="
                    + new String( title.getBytes("gb2312"), "ISO8859-1" )+".doc");
//            //输出文件
//            response.setCharacterEncoding("UTF-8");
//            response.setContentType("application/msword");//导出word格式
//            response.addHeader("Content-Disposition", "attachment;filename=" +
//                    new String((title + ".doc").getBytes(),"UTF-8"));
            eventDateStart = eventDateStart + " 00:00:00";
            eventDateEnd = eventDateEnd + " 23:59:59";
            JSONArray jsonArray = JSON.parseArray(orgInfoList);
            List<Map<String, String>> list = new ArrayList<>();
            List<Map<String, String>> alllist = new ArrayList<>();
            for(int i=0;i<jsonArray.size();i++){
                JSONObject json = jsonArray.getJSONObject(i);
                Map<String, String> map = new HashedMap();
                map.put("orgCode",json.getString("orgCode"));
                map.put("orgName",json.getString("orgName"));
                list.add(map);
                if(defaultCloud.equals(json.getString("orgCode"))){
                    alllist.add(map);
                }
            }
            if(alllist.size()==0){
                alllist = list;
            }
            // 接收档案包总量
            Long receivedCount = dataQualityStatisticsService.packetCount(alllist, null, eventDateStart, eventDateEnd);
            // 成功解析档案包总量
            Long successfulAnalysisCount = dataQualityStatisticsService.packetCount(alllist, "3", eventDateStart, eventDateEnd);
            // 机构档案包报告汇总
            List<Map<String, Object>> orgPackReportDataList = dataQualityStatisticsService.orgPackReportData(list, eventDateStart, eventDateEnd);
            //设置word
            document = new XWPFDocument();
            //添加标题
            XWPFParagraph titleParagraph = document.createParagraph();
            //设置段落居中
            titleParagraph.setAlignment(ParagraphAlignment.CENTER);
            XWPFRun titleParagraphRun = titleParagraph.createRun();
            titleParagraphRun.setText(title);
            titleParagraphRun.setColor("000000");
            titleParagraphRun.setFontSize(20);
            titleParagraphRun.setBold(true);
            addEmptyRow(document);
            //段落
            String reportDate = DateTimeUtil.simpleDateFormat(new Date());
            XWPFParagraph paragraph1 = document.createParagraph();
            XWPFRun run1 = paragraph1.createRun();
            String text1 = "                    统计时间:"+reportDate+"\n\r" +
                    "                    报告时间:"+reportDate+"\n\r" +
                    "                    报告人: "+reporter;
            run1.setText(text1);
            run1.setFontSize(12);
            addEmptyRow(document);
            XWPFParagraph paragraph2 = document.createParagraph();
            XWPFRun run2 = paragraph2.createRun();
            String text2 = "接收总量:"+receivedCount+"\n\r" +
                    "成功解析:"+successfulAnalysisCount;
            run2.setText(text2);
            run2.setFontSize(18);
            run2.setBold(true);
            addEmptyRow(document);
            int h = 0;
            for (Map<String, Object> map:orgPackReportDataList){
                int i = 0;
                i++;
                h++;
                XWPFParagraph orgParagraph1 = document.createParagraph();
                XWPFRun orgRun1 = orgParagraph1.createRun();
                String orgText1 = h+"."+map.get("orgCode")+"(" + map.get("orgName")+")";
                orgRun1.setText(orgText1);
                orgRun1.setFontSize(16);
                orgRun1.setBold(true);
                XWPFParagraph orgParagraph2 = document.createParagraph();
                XWPFRun orgRun2 = orgParagraph2.createRun();
                String orgText2 = i+".数据接收情况";
                orgRun2.setText(orgText2);
                orgRun2.setFontSize(12);
                orgRun2.setBold(true);
                int j = 1;
                XWPFParagraph orgParagraph3 = document.createParagraph();
                XWPFRun orgRun3 = orgParagraph3.createRun();
                String orgText3 = i+"."+j+"医院上报及采集情况";
                orgRun3.setText(orgText3);
                orgRun3.setFontSize(12);
                //设置表格
                XWPFTable table1 = document.createTable();
                //列宽自动分割
                CTTblWidth width1 = table1.getCTTbl().addNewTblPr().addNewTblW();
                width1.setType(STTblWidth.DXA);
                width1.setW(BigInteger.valueOf(9072));
                //表格第一行
                XWPFTableRow table11RowTitle = table1.getRow(0);
                table11RowTitle.getCell(0).setText("环节");
                table11RowTitle.addNewTableCell().setText("门诊档案数");
                table11RowTitle.addNewTableCell().setText("住院档案数");
                table11RowTitle.addNewTableCell().setText("体检档案数");
                table11RowTitle.addNewTableCell().setText("总计");
                List<Map<String, Object>> reportedNumList1 = (List<Map<String, Object>>)map.get("reportedNumList1");
                if(reportedNumList1.size()>0){
                    Map<String, Object> orgMap = reportedNumList1.get(0);
                    XWPFTableRow tableRow = table1.createRow();
                    tableRow.getCell(0).setText("医院上报");
                    tableRow.getCell(1).setText(Double.valueOf(orgMap.get("outpatientNum").toString()).intValue()+"");
                    tableRow.getCell(2).setText(Double.valueOf(orgMap.get("hospitalDischargeNum").toString()).intValue()+"");
                    tableRow.getCell(3).setText(Double.valueOf(orgMap.get("healthExaminationNum").toString()).intValue()+"");
                    tableRow.getCell(4).setText(Double.valueOf(orgMap.get("total").toString()).intValue()+"");
                }
                double receiveAcrhive = 0;
                Map<String, Object> collectionMap = (Map<String, Object>)map.get("collectionMap");
                if(collectionMap.size()>0){
                    XWPFTableRow tableRow = table1.createRow();
                    tableRow.getCell(0).setText("平台接收");
                    tableRow.getCell(1).setText(Double.valueOf(collectionMap.get("outpatientNum").toString()).intValue()+"");
                    tableRow.getCell(2).setText(Double.valueOf(collectionMap.get("hospitalDischargeNum").toString()).intValue()+"");
                    tableRow.getCell(3).setText(Double.valueOf(collectionMap.get("healthExaminationNum").toString()).intValue()+"");
                    receiveAcrhive = Double.valueOf(collectionMap.get("total").toString());
                    tableRow.getCell(4).setText(Double.valueOf(collectionMap.get("total").toString()).intValue()+"");
                }
                addEmptyRow(document);
                j++;
                XWPFParagraph orgParagraph4 = document.createParagraph();
                XWPFRun orgRun4 = orgParagraph4.createRun();
                String orgText4 = i+"."+j+"采集内容";
                orgRun4.setText(orgText4);
                orgRun4.setFontSize(12);
                //设置表格
                XWPFTable table2 = document.createTable();
                //列宽自动分割
                CTTblWidth width2 = table2.getCTTbl().addNewTblPr().addNewTblW();
                width2.setType(STTblWidth.DXA);
                width2.setW(BigInteger.valueOf(9072));
                //表格第一行
                XWPFTableRow table12RowTitle = table2.getRow(0);
                table12RowTitle.getCell(0).setText("日期");
                table12RowTitle.addNewTableCell().setText("门诊档案数");
                table12RowTitle.addNewTableCell().setText("住院档案数");
                table12RowTitle.addNewTableCell().setText("体检档案数");
                table12RowTitle.addNewTableCell().setText("总计");
                List<Map<String, Object>> reportedNumList3 = (List<Map<String, Object>>)map.get("reportedNumList3");
                if(reportedNumList3.size()>0){
                    reportedNumList3.forEach(item->{
                        XWPFTableRow tableRow = table2.createRow();
                        tableRow.getCell(0).setText(item.get("receiveDate").toString());
                        tableRow.getCell(1).setText(Double.valueOf(item.get("outpatientNum").toString()).intValue()+"");
                        tableRow.getCell(2).setText(Double.valueOf(item.get("hospitalDischargeNum").toString()).intValue()+"");
                        tableRow.getCell(3).setText(Double.valueOf(item.get("healthExaminationNum").toString()).intValue()+"");
                        tableRow.getCell(4).setText(Double.valueOf(item.get("total").toString()).intValue()+"");
                    });
                }
                addEmptyRow(document);
                i++;
                XWPFParagraph orgParagraph5 = document.createParagraph();
                XWPFRun orgRun5 = orgParagraph5.createRun();
                String orgText5 = i+".数据解析情况";
                orgRun5.setText(orgText5);
                orgRun5.setFontSize(12);
                orgRun5.setBold(true);
                //设置表格
                XWPFTable table3 = document.createTable();
                //列宽自动分割
                CTTblWidth width3 = table3.getCTTbl().addNewTblPr().addNewTblW();
                width3.setType(STTblWidth.DXA);
                width3.setW(BigInteger.valueOf(9072));
                //表格第一行
                XWPFTableRow table13RowTitle = table3.getRow(0);
                table13RowTitle.getCell(0).setText("接收总量");
                table13RowTitle.addNewTableCell().setText("解析成功");
                table13RowTitle.addNewTableCell().setText("解析失败");
                table13RowTitle.addNewTableCell().setText("未解析");
                Map<String, Object> archiveMap = (Map<String, Object>)map.get("archiveMap");
                if(collectionMap.size()>0){
                    XWPFTableRow tableRow = table3.createRow();
                    int archive_status3 = Double.valueOf(archiveMap.get("archive_status3").toString()).intValue();
                    int archive_status2 = Double.valueOf(archiveMap.get("archive_status2").toString()).intValue();
                    int archive_status1 = Double.valueOf(archiveMap.get("archive_status1").toString()).intValue();
                    int archive_status0 = Double.valueOf(archiveMap.get("archive_status0").toString()).intValue();
                    tableRow.getCell(0).setText((archive_status3+archive_status2+archive_status1+archive_status0)+"");//0未解析 1正在解析 2解析失败 3解析完成
                    tableRow.getCell(1).setText(archive_status3+"");
                    tableRow.getCell(2).setText(archive_status2+"");
                    tableRow.getCell(3).setText(archive_status0+"");
                }
                addEmptyRow(document);
                i++;
                XWPFParagraph orgParagraph6 = document.createParagraph();
                XWPFRun orgRun6 = orgParagraph6.createRun();
                //设置表格
                XWPFTable table4 = document.createTable();
                //列宽自动分割
                CTTblWidth width4 = table4.getCTTbl().addNewTblPr().addNewTblW();
                width4.setType(STTblWidth.DXA);
                width4.setW(BigInteger.valueOf(9072));
                //表格第一行
                XWPFTableRow table14RowTitle = table4.getRow(0);
                table14RowTitle.getCell(0).setText("数据集编码");
                table14RowTitle.addNewTableCell().setText("数据集名称");
                table14RowTitle.addNewTableCell().setText("总数");
                table14RowTitle.addNewTableCell().setText("行数");
                List<Map<String, Object>> reportedNumList5 = (List<Map<String, Object>>)map.get("reportedNumList5");
                Double datasetnum = 0d;
                if(reportedNumList5.size()>0){
                    for(Map<String, Object> item:reportedNumList5){
                        XWPFTableRow tableRow = table4.createRow();
                        tableRow.getCell(0).setText(item.get("dataset").toString());
                        tableRow.getCell(1).setText(item.get("name").toString());
                        tableRow.getCell(2).setText(item.get("count").toString());
                        tableRow.getCell(3).setText(item.get("row").toString());
                        datasetnum++;
                    }
                }
                String orgText6 = i+".数据集总量("+datasetnum.longValue()+")";
                orgRun6.setText(orgText6);
                orgRun6.setFontSize(12);
                orgRun6.setBold(true);
                addEmptyRow(document);
                i++;
                XWPFParagraph orgParagraph7 = document.createParagraph();
                XWPFRun orgRun7 = orgParagraph7.createRun();
                String orgText7 = i+".分析";
                orgRun7.setText(orgText7);
                orgRun7.setFontSize(12);
                orgRun7.setBold(true);
                XWPFParagraph orgParagraph8 = document.createParagraph();
                XWPFRun orgRun8 = orgParagraph8.createRun();
                String orgText8 = i+".1解析失败分析";
                orgRun8.setText(orgText8);
                orgRun8.setFontSize(12);
                //设置表格
                XWPFTable table5 = document.createTable();
                //列宽自动分割
                CTTblWidth width5 = table5.getCTTbl().addNewTblPr().addNewTblW();
                width5.setType(STTblWidth.DXA);
                width5.setW(BigInteger.valueOf(9072));
                //表格第一行
                XWPFTableRow table15RowTitle = table5.getRow(0);
                table15RowTitle.getCell(0).setText("错误原因");
                table15RowTitle.addNewTableCell().setText("数量");
                List<Map<String, Object>> reportedNumList6 = (List<Map<String, Object>>)map.get("reportedNumList6");
                if(reportedNumList6.size()>0){
                    reportedNumList6.forEach(item->{
                        XWPFTableRow tableRow = table5.createRow();
                        tableRow.getCell(0).setText(getErrorType(item.get("error_type").toString()));
                        tableRow.getCell(1).setText(item.get("error_count").toString());
                    });
                }
            }
            ostream = response.getOutputStream();
            document.write(ostream);
            ostream.flush();
        }catch (Exception e){
            e.printStackTrace();
        }finally {
            if(document!=null){
                try {
                    document.close();
                }catch (Exception e){
                    e.printStackTrace();
                }
            }
            if(ostream!=null){
                try {
                    ostream.close();
                }catch (Exception e){
                    e.printStackTrace();
                }
            }
        }
    }
    /**
     * Appends an empty paragraph to the document, acting as vertical spacing
     * between the tables/sections of the generated report.
     *
     * @param document the Word document being assembled
     */
    private void addEmptyRow (XWPFDocument document){
        XWPFRun spacerRun = document.createParagraph().createRun();
        spacerRun.setText("\r");
    }
    /**
     * 导出预警问题列表 — streams the warning-record list as an .xls download.
     * Records are fetched from the service in pages of 10000 and written with jxl.
     *
     * @param orgCode   optional organisation code filter
     * @param quota     optional quota filter (mapped to the warningType field)
     * @param status    optional status filter (1 = unresolved, 2 = resolved)
     * @param type      mandatory warning category (1 = receive, 2 = resource, 3 = upload)
     * @param startTime optional inclusive lower bound on recordTime
     * @param endTime   optional inclusive upper bound on recordTime
     * @param response  HTTP response the workbook is written to
     */
    @RequestMapping(value = ServiceApi.DataQuality.ExportWarningRecordToExcel, method = RequestMethod.GET)
    @ApiOperation(value = "导出预警问题列表")
    public void exportToExcel(@ApiParam(name = "orgCode", value = "机构code", defaultValue = "jkzl")
                              @RequestParam(value = "orgCode", required = false) String orgCode,
                              @ApiParam(name = "quota", value = "指标(传warningType)", defaultValue = "101")
                              @RequestParam(value = "quota", required = false) String quota,
                              @ApiParam(name = "status", value = "状态(1未解决,2已解决)", defaultValue = "1")
                              @RequestParam(value = "status", required = false) String status,
                              @ApiParam(name = "type", value = "类型(1接收,2资源化,3上传)", defaultValue = "1")
                              @RequestParam(value = "type", required = true) String type,
                              @ApiParam(name = "startTime", value = "开始时间", defaultValue = "2018-06-11")
                              @RequestParam(value = "startTime", required = false) String startTime,
                              @ApiParam(name = "endTime", value = "结束时间", defaultValue = "2018-06-11")
                              @RequestParam(value = "endTime", required = false) String endTime,
                              HttpServletResponse response){
        WritableWorkbook wwb = null;
        OutputStream os = null;
        try {
            String fileName = "预警问题列表";
            //设置下载 — file name must be ISO8859-1 re-encoded for the header
            response.setContentType("octets/stream");
            response.setHeader("Content-Disposition", "attachment; filename="
                    + new String( fileName.getBytes("gb2312"), "ISO8859-1" )+".xls");
            os = response.getOutputStream();
            // Build the semicolon-separated filter expression understood by the service layer.
            StringBuilder filterBuilder = new StringBuilder("type=").append(type);
            if (StringUtils.isNotBlank(orgCode)) {
                filterBuilder.append(";orgCode=").append(orgCode);
            }
            if (StringUtils.isNotBlank(quota)) {
                filterBuilder.append(";warningType=").append(quota);
            }
            if (StringUtils.isNotBlank(status)) {
                filterBuilder.append(";status=").append(status);
            }
            if (StringUtils.isNotBlank(startTime)) {
                filterBuilder.append(";recordTime>=").append(startTime);
            }
            if (StringUtils.isNotBlank(endTime)) {
                filterBuilder.append(";recordTime<=").append(endTime);
            }
            String filters = filterBuilder.toString();
            String sorts = "-warningTime";
            List<DqWarningRecord> list = new ArrayList<>();
            int pageSize = 10000;
            int count = (int) warningRecordService.getCount(filters);
            // Ceiling division: number of pages needed to fetch every matching record.
            int pageNum = count % pageSize > 0 ? count / pageSize + 1 : count / pageSize;
            for (int i = 0; i < pageNum; i++) {
                list.addAll(warningRecordService.search(null, filters, sorts, i + 1, pageSize));
            }
            //写excel
            wwb = Workbook.createWorkbook(os);
            //创建Excel工作表 指定名称和位置
            WritableSheet ws = wwb.createSheet(fileName, 0);
            // The static header row differs per warning category.
            if ("1".equals(type)) {
                warningRecordService.addReceiveStaticCell(ws);
            } else if ("2".equals(type)) {
                warningRecordService.addResourceStaticCell(ws);
            } else if ("3".equals(type)) {
                warningRecordService.addUploadStaticCell(ws);
            }
            WritableCellFormat wc = new WritableCellFormat();
            wc.setBorder(jxl.format.Border.ALL, jxl.format.BorderLineStyle.THIN, Colour.SKY_BLUE);//边框
            for (int i = 0; i < list.size(); i++) {
                // Row 0 holds the header, so detail rows start at index 1.
                warningRecordService.addRow(type, wc, ws, list.get(i), i + 1);
            }
            //写入工作表
            wwb.write();
            os.flush();
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            // FIX: the original catch blocks called e.getMessage() and discarded
            // the result, silently swallowing close failures.
            if (wwb != null) {
                try {
                    wwb.close();
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
            if (os != null) {
                try {
                    os.close();
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }
    }
    /**
     * 导出平台接收列表 — streams per-organisation quality-monitoring counters
     * (hospital vs. received archive/dataset counts, resource-parse results)
     * as an .xls download.
     *
     * @param start     optional start of the reporting period
     * @param end       optional end of the reporting period
     * @param eventType visit type: 0 outpatient, 1 inpatient, 2 physical exam; null/"null" = all
     * @param response  HTTP response the workbook is written to
     */
    @RequestMapping(value = ServiceApi.DataQuality.ExportQualityMonitoring, method = RequestMethod.GET)
    @ApiOperation(value = "导出平台接收列表")
    public void exportQualityMonitoring( @ApiParam(name = "start", value = "开始时间")
                                         @RequestParam(value = "start", required = false) String start,
                                         @ApiParam(name = "end", value = "结束时间", defaultValue = "")
                                         @RequestParam(value = "end", required = false) String end,
                                         @ApiParam(name = "eventType", value = "就诊类型 0门诊 1住院 2体检,null全部", defaultValue = "")
                                         @RequestParam(value = "eventType", required = false) String eventType,
                                         HttpServletResponse response){
        WritableWorkbook wwb = null;
        OutputStream os = null;
        try {
            String fileName = "平台接收列表";
            //设置下载
            response.setContentType("octets/stream");
            response.setHeader("Content-Disposition", "attachment; filename="
                    + new String( fileName.getBytes("gb2312"), "ISO8859-1" )+".xls");
            os = response.getOutputStream();
            // The literal string "null" may arrive from the front end; treat it as absent.
            Integer type = null;
            if (StringUtils.isNotEmpty(eventType) && !"null".equals(eventType)) {
                type = Integer.parseInt(eventType);
            }
            List<Map<String, Object>> list = dataQualityStatisticsService.dataset(start, end, type);
            //写excel
            wwb = Workbook.createWorkbook(os);
            //创建Excel工作表 指定名称和位置
            WritableSheet ws = wwb.createSheet(fileName, 0);
            //添加固定信息,题头等
            addCell(ws, 0, 0, "机构");
            addCell(ws, 1, 0, "医院档案数");
            addCell(ws, 2, 0, "医院数据集");
            addCell(ws, 3, 0, "接收档案数");
            addCell(ws, 4, 0, "接收数据集");
            addCell(ws, 5, 0, "接收质量异常数");
            addCell(ws, 6, 0, "资源化解析成功");
            addCell(ws, 7, 0, "资源化解析失败");
            addCell(ws, 8, 0, "资源化解析异常");
            WritableCellFormat wc = new WritableCellFormat();
            wc.setBorder(jxl.format.Border.ALL, jxl.format.BorderLineStyle.THIN, Colour.SKY_BLUE);//边框
            for (int i = 0; i < list.size(); i++) {
                int row = i + 1; // row 0 is the header
                Map<String, Object> record = list.get(i);
                //添加列表明细
                addCell(ws, 0, row, ObjectUtils.toString(record.get("orgName")), wc);
                addCell(ws, 1, row, ObjectUtils.toString(record.get("hospitalArchives")), wc);
                addCell(ws, 2, row, ObjectUtils.toString(record.get("hospitalDataset")), wc);
                addCell(ws, 3, row, ObjectUtils.toString(record.get("receiveArchives")), wc);
                addCell(ws, 4, row, ObjectUtils.toString(record.get("receiveDataset")), wc);
                addCell(ws, 5, row, ObjectUtils.toString(record.get("receiveException")), wc);
                addCell(ws, 6, row, ObjectUtils.toString(record.get("resourceSuccess")), wc);
                addCell(ws, 7, row, ObjectUtils.toString(record.get("resourceFailure")), wc);
                addCell(ws, 8, row, ObjectUtils.toString(record.get("resourceException")), wc);
            }
            //写入工作表
            wwb.write();
            os.flush();
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            // FIX: release workbook/stream even when an exception interrupts the
            // export — the original closed them only on the success path.
            if (wwb != null) {
                try {
                    wwb.close();
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
            if (os != null) {
                try {
                    os.close();
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }
    }
    /**
     * 导出接收情况列表 — streams per-organisation in-time and integrity rates
     * (overall / outpatient / inpatient / physical-exam) as an .xls download.
     *
     * @param start    optional start of the reporting period
     * @param end      optional end of the reporting period
     * @param response HTTP response the workbook is written to
     */
    @RequestMapping(value = ServiceApi.DataQuality.ExportReceptionList, method = RequestMethod.GET)
    @ApiOperation(value = "导出接收情况列表")
    public void exportReceptionList( @ApiParam(name = "start", value = "开始时间")
                                     @RequestParam(value = "start", required = false) String start,
                                     @ApiParam(name = "end", value = "结束时间", defaultValue = "")
                                     @RequestParam(value = "end", required = false) String end,
                                     HttpServletResponse response){
        WritableWorkbook wwb = null;
        OutputStream os = null;
        try {
            String fileName = "接收情况列表";
            //设置下载
            response.setContentType("octets/stream");
            response.setHeader("Content-Disposition", "attachment; filename="
                    + new String( fileName.getBytes("gb2312"), "ISO8859-1" )+".xls");
            os = response.getOutputStream();
            List<Map<String, Object>> list = dataQualityStatisticsService.inTimeAndIntegrityRate(start, end);
            //写excel
            wwb = Workbook.createWorkbook(os);
            //创建Excel工作表 指定名称和位置
            WritableSheet ws = wwb.createSheet(fileName, 0);
            //添加固定信息,题头等
            addCell(ws, 0, 0, "机构");
            addCell(ws, 1, 0, "及时率-就诊");
            addCell(ws, 2, 0, "及时率-门诊");
            addCell(ws, 3, 0, "及时率-住院");
            addCell(ws, 4, 0, "及时率-体检");
            addCell(ws, 5, 0, "完整率-就诊");
            addCell(ws, 6, 0, "完整率-门诊");
            addCell(ws, 7, 0, "完整率-住院");
            addCell(ws, 8, 0, "完整率-体检");
            WritableCellFormat wc = new WritableCellFormat();
            wc.setBorder(jxl.format.Border.ALL, jxl.format.BorderLineStyle.THIN, Colour.SKY_BLUE);//边框
            for (int i = 0; i < list.size(); i++) {
                int row = i + 1; // row 0 is the header
                Map<String, Object> record = list.get(i);
                //添加列表明细
                addCell(ws, 0, row, ObjectUtils.toString(record.get("orgName")), wc);
                addCell(ws, 1, row, ObjectUtils.toString(record.get("visitIntimeRate")), wc);
                addCell(ws, 2, row, ObjectUtils.toString(record.get("outpatientInTimeRate")), wc);
                addCell(ws, 3, row, ObjectUtils.toString(record.get("hospitalInTimeRate")), wc);
                addCell(ws, 4, row, ObjectUtils.toString(record.get("peInTimeRate")), wc);
                addCell(ws, 5, row, ObjectUtils.toString(record.get("visitIntegrityRate")), wc);
                addCell(ws, 6, row, ObjectUtils.toString(record.get("outpatientIntegrityRate")), wc);
                addCell(ws, 7, row, ObjectUtils.toString(record.get("hospitalIntegrityRate")), wc);
                // FIX: the "完整率-体检" column previously repeated visitIntegrityRate
                // (copy-paste error). peIntegrityRate mirrors peInTimeRate above —
                // TODO confirm the service exposes this key.
                addCell(ws, 8, row, ObjectUtils.toString(record.get("peIntegrityRate")), wc);
            }
            //写入工作表
            wwb.write();
            os.flush();
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            // FIX: release workbook/stream even when an exception interrupts the
            // export — the original closed them only on the success path.
            if (wwb != null) {
                try {
                    wwb.close();
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
            if (os != null) {
                try {
                    os.close();
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }
    }
    /**
     * 导出解析失败问题列表 — exports archive packages whose quality check or
     * parse failed (analyze_status=2 or archive_status=2) as an .xlsx download.
     * Output is paged, one sheet per maxRowSize records, using a streaming
     * SXSSF workbook to keep memory bounded.
     *
     * @param filters  optional extra filter expression, ANDed with the failure condition
     * @param sorts    optional sort expression
     * @param response HTTP response the workbook is written to
     */
    @RequestMapping(value = ServiceApi.DataQuality.ExportAnalyzeErrorList, method = RequestMethod.GET)
    @ApiOperation(value = "导出解析失败问题列表")
    public void exportAnalyzeErrorList( @ApiParam(name = "filters", value = "过滤")
                                        @RequestParam(value = "filters", required = false) String filters,
                                        @ApiParam(name = "sorts", value = "排序")
                                        @RequestParam(value = "sorts", required = false) String sorts,
                                        HttpServletResponse response){
        // Always restrict to failed packages (status 2 in either stage).
        if (StringUtils.isNotEmpty(filters)) {
            filters = "analyze_status=2||archive_status=2;" + filters;
        } else {
            filters = "analyze_status=2||archive_status=2";
        }
        SXSSFWorkbook wwb = null;
        OutputStream os = null;
        try {
            String fileName = "解析失败问题列表";
            //设置下载
            response.setContentType("octets/stream");
            response.setHeader("Content-Disposition", "attachment; filename="
                    + new String( fileName.getBytes("gb2312"), "ISO8859-1" )+".xlsx");
            os = response.getOutputStream();
            //写excel — streaming workbook keeps at most 100 rows in memory
            wwb = new SXSSFWorkbook(100);
            wwb.setCompressTempFiles(true);
            String[] title = {"接收时间","解析时间","医疗机构","序列号","失败原因","环节","失败信息"};
            int count = (int) elasticSearchUtil.count("json_archives", "info", filters);
            // Ceiling division (was stored in a double for no reason): one sheet per page.
            int pageNum = count % maxRowSize > 0 ? count / maxRowSize + 1 : count / maxRowSize;
            for (int i = 0; i < pageNum; i++) {
                Page<Map<String, Object>> result = packQcReportService.analyzeErrorList(filters, sorts, i + 1, maxRowSize);
                //创建Excel工作表 指定名称和位置
                Sheet sheet = wwb.createSheet("Sheet" + (i + 1));
                //添加固定信息,题头等
                Row titleRow = sheet.createRow(0);
                for (int t = 0; t < title.length; t++) {
                    titleRow.createCell(t).setCellValue(title[t]);
                }
                for (int j = 0; j < result.getNumberOfElements(); j++) {
                    Row row = sheet.createRow(j + 1);
                    Map<String, Object> record = result.getContent().get(j);
                    //添加列表明细
                    row.createCell(0).setCellValue(ObjectUtils.toString(record.get("receive_date")));
                    row.createCell(1).setCellValue(ObjectUtils.toString(record.get("analyze_date")));
                    row.createCell(2).setCellValue(ObjectUtils.toString(record.get("org_name")));
                    row.createCell(3).setCellValue(ObjectUtils.toString(record.get("_id")));
                    row.createCell(4).setCellValue(getErrorType(ObjectUtils.toString(record.get("error_type"))));
                    // NOTE(review): assumes analyze_status is stored as a String;
                    // if the source returns a Number this branch never matches — confirm.
                    if ("2".equals(record.get("analyze_status"))) {
                        row.createCell(5).setCellValue("质控");
                    } else {
                        row.createCell(5).setCellValue("解析");
                    }
                    row.createCell(6).setCellValue(ObjectUtils.toString(record.get("message")));
                }
            }
            wwb.write(os);
            os.flush();
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            // FIX: close in finally so SXSSF temp files and the stream are
            // released even when the export fails part-way.
            if (wwb != null) {
                try {
                    wwb.close();
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
            if (os != null) {
                try {
                    os.close();
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }
    }
    /**
     * 导出异常详情列表 — exports the distinct (organisation, dataset, metadata,
     * error type) combinations that have quality errors as an .xlsx download.
     * Dataset/metadata display names are resolved from Redis by standard version.
     * Note: the method name keeps its historical "exprot" typo because the
     * public contract is the request mapping, not the Java identifier.
     *
     * @param filters  optional filter expression, translated into SQL by getWhere()
     * @param sorts    optional sort expression (not applied to the query here)
     * @param response HTTP response the workbook is written to
     */
    @RequestMapping(value = ServiceApi.DataQuality.ExportMetadataErrorList, method = RequestMethod.GET)
    @ApiOperation(value = "导出异常详情列表")
    public void exprotMetadataErrorList( @ApiParam(name = "filters", value = "过滤")
                                         @RequestParam(value = "filters", required = false) String filters,
                                         @ApiParam(name = "sorts", value = "排序")
                                         @RequestParam(value = "sorts", required = false) String sorts,
                                         HttpServletResponse response){
        SXSSFWorkbook wwb = null;
        OutputStream os = null;
        try {
            String fileName = "异常详情列表";
            //设置下载
            response.setContentType("octets/stream");
            response.setHeader("Content-Disposition", "attachment; filename="
                    + new String( fileName.getBytes("gb2312"), "ISO8859-1" )+".xlsx");
            os = response.getOutputStream();
            List<Map<String, Object>> orgs = packQcReportService.getOrgs();
            //写excel — streaming workbook keeps at most 100 rows in memory
            wwb = new SXSSFWorkbook(100);
            wwb.setCompressTempFiles(true);
            String[] title = {"医疗机构","数据集","数据集名称","数据元","数据元名称","错误原因"};
            // NOTE(review): the WHERE clause is built by string concatenation from the
            // request's filters parameter — verify getWhere() sanitises its input.
            String sql = "SELECT org_code, dataset, metadata, qc_error_type ,version FROM json_archives_qc/qc_metadata_info" +
                    " where "+getWhere(filters)+"" +
                    " group by org_code,dataset,metadata,qc_error_type,version";
            ResultSet resultSet = elasticSearchUtil.findBySql(sql);
            //创建Excel工作表 指定名称和位置
            Sheet sheet = wwb.createSheet("Sheet1");
            //添加固定信息,题头等
            Row titleRow = sheet.createRow(0);
            for (int t = 0; t < title.length; t++) {
                titleRow.createCell(t).setCellValue(title[t]);
            }
            int rowIndex = 1; // row 0 is the header
            while (resultSet.next()) {
                Row row = sheet.createRow(rowIndex);
                //添加列表明细 — resolve display names from the standard-dictionary cache
                String datasetName = redisClient.get("std_data_set_" + resultSet.getString("version") + ":" + resultSet.getString("dataset") + ":name");
                String metadataName = redisClient.get("std_meta_data_" + resultSet.getString("version") + ":" + resultSet.getString("dataset")+"."+ resultSet.getString("metadata")+ ":name");
                row.createCell(0).setCellValue(packQcReportService.getOrgName(orgs, resultSet.getString("org_code")));
                row.createCell(1).setCellValue(resultSet.getString("dataset"));
                row.createCell(2).setCellValue(datasetName);
                row.createCell(3).setCellValue(resultSet.getString("metadata"));
                row.createCell(4).setCellValue(metadataName);
                row.createCell(5).setCellValue(getExceptionType(resultSet.getString("qc_error_type")));
                rowIndex++;
            }
            wwb.write(os);
            os.flush();
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            // FIX: close in finally so SXSSF temp files and the stream are
            // released even when the export fails part-way.
            if (wwb != null) {
                try {
                    wwb.close();
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
            if (os != null) {
                try {
                    os.close();
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }
    }
    @RequestMapping(value = ServiceApi.DataQuality.ExportArchiveList, method = RequestMethod.GET)
    @ApiOperation(value = "导出档案包列表")
    public void exportArchiveList( @ApiParam(name = "filters", value = "过滤")
                                   @RequestParam(value = "filters", required = false) String filters,
                                   @ApiParam(name = "sorts", value = "排序")
                                   @RequestParam(value = "sorts", required = false) String sorts,
                                   HttpServletResponse response){
        // Streams the archive package list (ES index "json_archives"/"info") to the
        // client as an xlsx attachment, one sheet per page of maxRowSize records.
        SXSSFWorkbook wwb = null;
        try {
            String fileName = "接收包列表";
            // Mark the response as a binary download; the file name is re-encoded
            // (gb2312 -> ISO8859-1) so non-ASCII characters survive the HTTP header.
            response.setContentType("octets/stream");
            response.setHeader("Content-Disposition", "attachment; filename="
                    + new String( fileName.getBytes("gb2312"), "ISO8859-1" )+".xlsx");
            OutputStream os = response.getOutputStream();
            // Streaming workbook: only 100 rows are kept in memory, the rest spills
            // to compressed temp files so large exports cannot exhaust the heap.
            wwb = new SXSSFWorkbook(100);
            wwb.setCompressTempFiles(true);
            String[] title = {"接收时间","解析状态","医疗机构","序列号","患者姓名","证件号","就诊时间","就诊类型"};
            long starttime = System.currentTimeMillis();
            int count = (int) elasticSearchUtil.count("json_archives", "info", filters);
            // Ceiling division: one extra page when count is not a multiple of maxRowSize.
            double pageNum = count % maxRowSize > 0 ? count / maxRowSize + 1 : count / maxRowSize;
            for (int i = 0; i < pageNum; i++) {
                Page<Map<String, Object>> result = packQcReportService.archiveList(filters, sorts, i + 1, maxRowSize);
                logger.info("查询耗时:" + (System.currentTimeMillis() - starttime) + "ms");
                // One sheet per page keeps each sheet below the row limit.
                Sheet sheet = wwb.createSheet("Sheet" + (i + 1));
                // Header row.
                Row titleRow = sheet.createRow(0);
                for (int t = 0; t < title.length; t++) {
                    titleRow.createCell(t).setCellValue(title[t]);
                }
                // Detail rows, one per record of the current page.
                for (int j = 0; j < result.getNumberOfElements(); j++) {
                    Row row = sheet.createRow(j + 1);
                    Map<String, Object> record = result.getContent().get(j);
                    row.createCell(0).setCellValue(ObjectUtils.toString(record.get("receive_date")));
                    row.createCell(1).setCellValue(getAnalyzerStatus(record.get("analyze_status") + "", record.get("archive_status") + ""));
                    row.createCell(2).setCellValue(ObjectUtils.toString(record.get("org_name")));
                    row.createCell(3).setCellValue(ObjectUtils.toString(record.get("_id")));
                    row.createCell(4).setCellValue(ObjectUtils.toString(record.get("patient_name")));
                    row.createCell(5).setCellValue(ObjectUtils.toString(record.get("demographic_id")));
                    row.createCell(6).setCellValue(ObjectUtils.toString(record.get("event_date")));
                    row.createCell(7).setCellValue(getEventType(ObjectUtils.toString(record.get("event_type"))));
                }
            }
            wwb.write(os);
            os.flush();
            logger.info("导出耗时:" + (System.currentTimeMillis() - starttime) + "ms");
        } catch (Exception e) {
            // Log to the service log instead of printStackTrace so failures are visible.
            logger.error("导出档案包列表失败", e);
        } finally {
            // Always release workbook resources, even when the export fails midway.
            if (wwb != null) {
                try {
                    wwb.close();
                } catch (Exception ignored) {
                    // best effort — nothing more we can do during cleanup
                }
                // Delete the SXSSF temp files; close() alone may leave them on disk.
                wwb.dispose();
            }
        }
    }
    @RequestMapping(value = ServiceApi.DataQuality.ExportUploadRecordList, method = RequestMethod.GET)
    @ApiOperation(value = "导出上传纪录列表")
    public void exportUploadRecordList( @ApiParam(name = "filters", value = "过滤")
                                        @RequestParam(value = "filters", required = false) String filters,
                                        @ApiParam(name = "sorts", value = "排序")
                                        @RequestParam(value = "sorts", required = false) String sorts,
                                        HttpServletResponse response){
        // Streams the upload record list (ES index "upload"/"record") to the client
        // as an xlsx attachment, one sheet per page of maxRowSize records.
        SXSSFWorkbook wwb = null;
        try {
            String fileName = "上传纪录列表";
            // Mark the response as a binary download; the file name is re-encoded
            // (gb2312 -> ISO8859-1) so non-ASCII characters survive the HTTP header.
            response.setContentType("octets/stream");
            response.setHeader("Content-Disposition", "attachment; filename="
                    + new String( fileName.getBytes("gb2312"), "ISO8859-1" )+".xlsx");
            OutputStream os = response.getOutputStream();
            // Streaming workbook: only 100 rows are kept in memory, the rest spills
            // to compressed temp files so large exports cannot exhaust the heap.
            wwb = new SXSSFWorkbook(100);
            wwb.setCompressTempFiles(true);
            String[] title = {"上传时间","接收平台","医疗机构","序列号","患者姓名","证件号","就诊时间","就诊类型","数据集数量"};
            int count = (int) elasticSearchUtil.count("upload", "record", filters);
            // Ceiling division: one extra page when count is not a multiple of maxRowSize.
            double pageNum = count % maxRowSize > 0 ? count / maxRowSize + 1 : count / maxRowSize;
            for (int i = 0; i < pageNum; i++) {
                Page<Map<String, Object>> result = packQcReportService.uploadRecordList(filters, sorts, i + 1, maxRowSize);
                // One sheet per page keeps each sheet below the row limit.
                Sheet sheet = wwb.createSheet("Sheet" + (i + 1));
                // Header row.
                Row titleRow = sheet.createRow(0);
                for (int t = 0; t < title.length; t++) {
                    titleRow.createCell(t).setCellValue(title[t]);
                }
                // Detail rows, one per record of the current page.
                for (int j = 0; j < result.getNumberOfElements(); j++) {
                    Row row = sheet.createRow(j + 1);
                    Map<String, Object> record = result.getContent().get(j);
                    row.createCell(0).setCellValue(ObjectUtils.toString(record.get("analyze_date")));
                    row.createCell(1).setCellValue(getPlatform(ObjectUtils.toString(record.get("to_platform"))));
                    row.createCell(2).setCellValue(ObjectUtils.toString(record.get("org_name")));
                    row.createCell(3).setCellValue(ObjectUtils.toString(record.get("_id")));
                    row.createCell(4).setCellValue(ObjectUtils.toString(record.get("patient_name")));
                    row.createCell(5).setCellValue(ObjectUtils.toString(record.get("idcard_no")));
                    row.createCell(6).setCellValue(ObjectUtils.toString(record.get("event_date")));
                    row.createCell(7).setCellValue(getEventType(ObjectUtils.toString(record.get("event_type"))));
                    row.createCell(8).setCellValue(ObjectUtils.toString(record.get("dataset_count")));
                }
            }
            wwb.write(os);
            os.flush();
        } catch (Exception e) {
            // Log to the service log instead of printStackTrace so failures are visible.
            logger.error("导出上传纪录列表失败", e);
        } finally {
            // Always release workbook resources, even when the export fails midway.
            if (wwb != null) {
                try {
                    wwb.close();
                } catch (Exception ignored) {
                    // best effort — nothing more we can do during cleanup
                }
                // Delete the SXSSF temp files; close() alone may leave them on disk.
                wwb.dispose();
            }
        }
    }
    @RequestMapping(value = ServiceApi.DataQuality.ExportQualityUpload, method = RequestMethod.GET)
    @ApiOperation(value = "导出平台上传统计列表")
    public void exportQualityUpload( @ApiParam(name = "start", value = "开始时间")
                                         @RequestParam(value = "start", required = false) String start,
                                         @ApiParam(name = "end", value = "结束时间", defaultValue = "")
                                         @RequestParam(value = "end", required = false) String end,
                                         @ApiParam(name = "toPlatform", value = "上传平台代码", defaultValue = "jiangxi_001")
                                         @RequestParam(value = "toPlatform", required = false) String toPlatform,
                                         HttpServletResponse response){
        // Exports the per-organisation upload statistics for the given time range
        // and target platform as a legacy .xls (jxl) download.
        try {
            String fileName = "平台上传列表";
            // Mark the response as a binary download; the file name is re-encoded
            // (gb2312 -> ISO8859-1) so non-ASCII characters survive the HTTP header.
            response.setContentType("octets/stream");
            response.setHeader("Content-Disposition", "attachment; filename="
                    + new String( fileName.getBytes("gb2312"), "ISO8859-1" )+".xls");
            OutputStream os = response.getOutputStream();
            List<Map<String, Object>> list = dataQualityStatisticsService.findUploadStatistics(start,end,toPlatform);
            // jxl writes the workbook directly to the response stream.
            WritableWorkbook wwb = Workbook.createWorkbook(os);
            WritableSheet ws = wwb.createSheet(fileName,0);
            // Fixed header row (row 0).
            addCell(ws,0,0,"机构");
            addCell(ws,1,0,"总档案数");
            addCell(ws,2,0,"门诊档案数");
            addCell(ws,3,0,"住院档案数");
            addCell(ws,4,0,"体检档案数");
            addCell(ws,5,0,"上传数据集数");
            addCell(ws,6,0,"上传异常数");
            // Shared cell style: thin sky-blue border on every data cell.
            WritableCellFormat wc = new WritableCellFormat();
            wc.setBorder(jxl.format.Border.ALL, jxl.format.BorderLineStyle.THIN, Colour.SKY_BLUE);
            for(int i=0;i<list.size();i++) {
                int j=i+1; // data rows start below the header
                Map<String,Object> record = list.get(i);
                addCell(ws,0,j,ObjectUtils.toString(record.get("orgName")),wc);
                addCell(ws,1,j,ObjectUtils.toString(record.get("total")),wc);
                addCell(ws,2,j,ObjectUtils.toString(record.get("outPatient")),wc);
                addCell(ws,3,j,ObjectUtils.toString(record.get("inPatient")),wc);
                addCell(ws,4,j,ObjectUtils.toString(record.get("exam")),wc);
                addCell(ws,5,j,ObjectUtils.toString(record.get("dataset")),wc);
                addCell(ws,6,j,ObjectUtils.toString(record.get("error")),wc);
            }
            // Flush the workbook into the response.
            wwb.write();
            wwb.close();
            os.flush();
            os.close();
        } catch (Exception e) {
            // Log to the service log instead of printStackTrace so failures are visible.
            logger.error("导出平台上传统计列表失败", e);
        }
    }
    /**
     * Adds a plain text cell to the given jxl worksheet.
     * Failures are printed and swallowed so a single bad cell does not abort
     * the whole export (the cell is simply left empty).
     * @param ws     target worksheet
     * @param column zero-based column index
     * @param row    zero-based row index
     * @param data   cell text
     */
    public void addCell(WritableSheet ws,int column,int row,String data){
        try {
            Label label = new Label(column,row,data);
            ws.addCell(label);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
    /**
     * Adds a styled text cell to the given jxl worksheet.
     * Failures are printed and swallowed so a single bad cell does not abort
     * the whole export (the cell is simply left empty).
     * @param ws         target worksheet
     * @param column     zero-based column index
     * @param row        zero-based row index
     * @param data       cell text
     * @param cellFormat style to apply to the cell (borders, colours, ...)
     */
    public void addCell(WritableSheet ws,int column,int row,String data,CellFormat cellFormat){
        try {
            Label label = new Label(column,row,data,cellFormat);
            ws.addCell(label);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
    /**
     * Maps a package-processing error code to its human readable description.
     * @param errorType error code as a string (must not be null)
     * @return the description, or an empty string for unknown codes
     */
    public String getErrorType(String errorType){
        switch (errorType) {
            case "-1": return "质控服务内部出错";
            case "-2": return "解析服务内部出错";
            case "1":  return "压缩包错误";
            case "2":  return "Json文件错误";
            case "3":  return "Json数据错误";
            case "4":  return "数据元非空错误";
            case "5":  return "数据元超出值域错误";
            case "6":  return "字段类型错误";
            case "7":  return "字段格式错误";
            case "21": return "数据缓存错误";
            default:   return "";
        }
    }
    /**
     * Maps a QC exception code to its human readable description.
     * @param exceptionType exception code as a string (must not be null)
     * @return the description, or an empty string for unknown codes
     */
    public String getExceptionType(String exceptionType){
        switch (exceptionType) {
            case "1": return "字段值为空";
            case "2": return "值域超出";
            case "3": return "类型错误";
            case "4": return "格式错误";
            case "5": return "资源适配错误";
            case "6": return "字典适配错误";
            default:  return "";
        }
    }
    /**
     * Resolves the combined analyze/archive status pair to a display label.
     * When the QC phase is finished (analyzerStatus == "3") the label reflects
     * the parse phase (archiveStatus); otherwise it reflects the QC phase.
     * @param analyzerStatus QC status code ("0".."3")
     * @param archiveStatus  parse status code ("0".."3")
     * @return the label, or an empty string for unknown combinations
     */
    public String getAnalyzerStatus(String analyzerStatus, String archiveStatus){
        if ("3".equals(analyzerStatus)) {
            return "0".equals(archiveStatus) ? "待解析"
                 : "1".equals(archiveStatus) ? "正在解析"
                 : "2".equals(archiveStatus) ? "解析失败"
                 : "3".equals(archiveStatus) ? "解析完成"
                 : "";
        }
        return "0".equals(analyzerStatus) ? "待质控"
             : "1".equals(analyzerStatus) ? "正在质控"
             : "2".equals(analyzerStatus) ? "质控失败"
             : "";
    }
    /**
     * Maps a visit (event) type code to its display name.
     * @param eventType visit type code as a string (must not be null)
     * @return the display name, or an empty string for unknown codes
     */
    public String getEventType(String eventType){
        switch (eventType) {
            case "0": return "门诊";
            case "1": return "住院";
            case "2": return "体检";
            default:  return "";
        }
    }
    /**
     * Maps a receiving platform code to its display name.
     * @param platform platform code as a string (must not be null)
     * @return the display name, or an empty string for unknown codes
     */
    public String getPlatform(String platform){
        switch (platform) {
            case "jiangxi_001": return "省平台";
            default:            return "";
        }
    }
    /**
     * Converts a semicolon separated filter string into a SQL WHERE fragment,
     * e.g. "a>=1;b=2" becomes " a>='1' and b='2'".
     *
     * Two-character operators are checked before their one-character prefixes
     * so "a>=1" is not mis-parsed as "a>" with value "=1". The split uses
     * limit 2 so operator characters inside the value are preserved (the old
     * unbounded split silently dropped them and threw on an empty value).
     *
     * NOTE(review): values are concatenated into SQL without escaping — only
     * safe for trusted, internally generated filter strings; verify callers.
     *
     * @param filters semicolon separated conditions; entries without a
     *                recognised operator are skipped
     * @return the WHERE fragment (no leading "and"), or "" when nothing matched
     */
    private String getWhere(String filters){
        StringBuilder whereStr = new StringBuilder();
        for (String filter : filters.split(";")) {
            String op = null;
            if (filter.contains(">=")) {
                op = ">=";
            } else if (filter.contains(">")) {
                op = ">";
            } else if (filter.contains("<=")) {
                op = "<=";
            } else if (filter.contains("<")) {
                op = "<";
            } else if (filter.contains("=")) {
                op = "=";
            }
            if (op != null) {
                // limit 2: everything after the first operator is the value.
                String[] condition = filter.split(op, 2);
                whereStr.append(" and ").append(condition[0]).append(op)
                        .append('\'').append(condition[1]).append('\'');
            }
        }
        if (StringUtils.isNotEmpty(whereStr.toString())) {
            // substring(4) deliberately keeps the original leading space.
            return whereStr.substring(4);
        } else {
            return "";
        }
    }
}

+ 165 - 0
src/main/java/com/yihu/ehr/analyze/controller/dataQuality/WarningRecordEndPoint.java

@ -0,0 +1,165 @@
package com.yihu.ehr.analyze.controller.dataQuality;
import com.yihu.ehr.analyze.service.dataQuality.WarningQuestionService;
import com.yihu.ehr.analyze.service.dataQuality.WarningRecordService;
import com.yihu.ehr.analyze.service.scheduler.WarningSchedulerService;
import com.yihu.ehr.constants.ApiVersion;
import com.yihu.ehr.constants.ServiceApi;
import com.yihu.ehr.controller.EnvelopRestEndPoint;
import com.yihu.ehr.entity.quality.DqWarningRecord;
import com.yihu.ehr.model.quality.MDqWarningRecord;
import com.yihu.ehr.util.rest.Envelop;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import org.apache.commons.lang.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
import org.springframework.web.bind.annotation.*;
import java.util.ArrayList;
import java.util.List;
/**
 * REST endpoints for data quality warning records (质控-预警问题):
 * generating warning records for a day, running the warning job, paging
 * through records, and marking a record as solved.
 *
 * @author yeshijie on 2018/6/12.
 */
@RestController
@RequestMapping(value = ApiVersion.Version1_0, produces = MediaType.APPLICATION_JSON_UTF8_VALUE)
@Api(value = "WarningRecordEndPoint", description = "质控-预警问题", tags = {"档案分析服务-质控-预警问题"})
public class WarningRecordEndPoint extends EnvelopRestEndPoint {

    @Autowired
    private WarningRecordService warningRecordService;
    @Autowired
    private WarningQuestionService warningQuestionService;
    @Autowired
    private WarningSchedulerService warningSchedulerService;

    /**
     * Generates the warning records for the given day.
     * @param dateStr day to analyze (yyyy-MM-dd); behaviour for a missing value
     *                is defined by WarningQuestionService.analyze
     */
    @RequestMapping(value = ServiceApi.DataQuality.WarningQuestionAnalyze, method = RequestMethod.POST)
    @ApiOperation(value = "生成指定日期的预警记录")
    public Envelop warningQuestionAnalyze(@ApiParam(name = "dateStr", value = "指定日期生成某天的预警信息", defaultValue = "2018-01-01")
                                          @RequestParam(value = "dateStr", required = false) String dateStr) {
        Envelop envelop = new Envelop();
        try {
            warningQuestionService.analyze(dateStr);
            return success(null);
        }catch (Exception e){
            e.printStackTrace();
            envelop.setSuccessFlg(false);
            envelop.setErrorMsg(e.getMessage());
        }
        return envelop;
    }

    /**
     * Manually (re)initialises the warning question scheduler job.
     */
    @RequestMapping(value = ServiceApi.DataQuality.WarningQuestionJob, method = RequestMethod.POST)
    @ApiOperation(value = "手动启动预警问题job")
    public Envelop warningQuestionJob() {
        Envelop envelop = new Envelop();
        try {
            warningSchedulerService.init();
            return success(null);
        }catch (Exception e){
            e.printStackTrace();
            envelop.setSuccessFlg(false);
            envelop.setErrorMsg(e.getMessage());
        }
        return envelop;
    }

    /**
     * Pages through warning records matching the given criteria, most recent
     * warning first.
     * FIX: size/page are primitive ints declared required=false — without a
     * defaultValue a request omitting them could not bind (null into int);
     * defaults now match the documented Swagger defaults.
     */
    @RequestMapping(value = ServiceApi.DataQuality.WarningRecordList, method = RequestMethod.GET)
    @ApiOperation(value = "预警问题列表")
    public Envelop warningRecordList(
            @ApiParam(name = "orgCode", value = "机构code", defaultValue = "jkzl")
            @RequestParam(value = "orgCode", required = false) String orgCode,
            @ApiParam(name = "quota", value = "指标(传warningType)", defaultValue = "101")
            @RequestParam(value = "quota", required = false) String quota,
            @ApiParam(name = "status", value = "状态(1未解决,2已解决)", defaultValue = "1")
            @RequestParam(value = "status", required = false) String status,
            @ApiParam(name = "type", value = "类型(1接收,2资源化,3上传)", defaultValue = "1")
            @RequestParam(value = "type", required = true) String type,
            @ApiParam(name = "startTime", value = "开始时间", defaultValue = "2018-06-11")
            @RequestParam(value = "startTime", required = false) String startTime,
            @ApiParam(name = "endTime", value = "结束时间", defaultValue = "2018-06-11")
            @RequestParam(value = "endTime", required = false) String endTime,
            @ApiParam(name = "size", value = "分页大小", defaultValue = "15")
            @RequestParam(value = "size", required = false, defaultValue = "15") int size,
            @ApiParam(name = "page", value = "页码", defaultValue = "1")
            @RequestParam(value = "page", required = false, defaultValue = "1") int page) {
        Envelop envelop = new Envelop();
        try {
            // Build the service-layer filter string from the optional criteria.
            String filters = "type="+type;
            if(StringUtils.isNotBlank(orgCode)){
                filters += ";orgCode="+orgCode;
            }
            if(StringUtils.isNotBlank(quota)){
                filters += ";warningType="+quota;
            }
            if(StringUtils.isNotBlank(status)){
                filters += ";status="+status;
            }
            if(StringUtils.isNotBlank(startTime)){
                filters += ";recordTime>="+startTime;
            }
            if(StringUtils.isNotBlank(endTime)){
                filters += ";recordTime<="+endTime;
            }
            String sorts = "-warningTime";
            List<DqWarningRecord> list = warningRecordService.search(null, filters, sorts, page, size);
            List<MDqWarningRecord> records = (List<MDqWarningRecord>) convertToModels(list, new ArrayList<>(list.size()), MDqWarningRecord.class, null);
            return getPageResult(records, (int) warningRecordService.getCount(filters), page, size);
        }catch (Exception e){
            e.printStackTrace();
            envelop.setSuccessFlg(false);
            envelop.setErrorMsg(e.getMessage());
        }
        return envelop;
    }

    /**
     * Returns a single warning record by id.
     * NOTE(review): uses @PathVariable, so ServiceApi.DataQuality.WarningRecord
     * must contain an "{id}" template — confirm against the constant.
     */
    @RequestMapping(value = ServiceApi.DataQuality.WarningRecord, method = RequestMethod.GET)
    @ApiOperation(value = "根据id查询平台接收预警")
    public Envelop warningRecord(
            @ApiParam(name = "id", value = "id", defaultValue = "")
            @PathVariable(value = "id") String id) {
        Envelop envelop = new Envelop();
        try {
            DqWarningRecord warning =  warningRecordService.findById(id);
            return success(convertToModel(warning, MDqWarningRecord.class));
        }catch (Exception e){
            e.printStackTrace();
            envelop.setSuccessFlg(false);
            envelop.setErrorMsg(e.getMessage());
        }
        return envelop;
    }

    /**
     * Marks a warning record as handled.
     * The service returns -1 when the record does not exist, which is reported
     * as a failed envelop rather than an exception.
     */
    @RequestMapping(value = ServiceApi.DataQuality.WarningRecordUpd, method = RequestMethod.POST)
    @ApiOperation(value = "处理问题")
    public Envelop warningRecordUpd(
            @ApiParam(name = "solveTime", value = "解决时间", defaultValue = "2018-06-12")
            @RequestParam(value = "solveTime", required = true) String solveTime,
            @ApiParam(name = "solveId", value = "解决人id", defaultValue = "101")
            @RequestParam(value = "solveId", required = true) String solveId,
            @ApiParam(name = "solveName", value = "解决人姓名", defaultValue = "1")
            @RequestParam(value = "solveName", required = true) String solveName,
            @ApiParam(name = "solveType", value = "解决方式(1已解决,2忽略,3无法解决,4不是问题)", defaultValue = "1")
            @RequestParam(value = "solveType", required = true) String solveType,
            @ApiParam(name = "id", value = "id", defaultValue = "1")
            @RequestParam(value = "id", required = true) String id) {
        Envelop envelop = new Envelop();
        try {
            int re = warningRecordService.warningRecordUpd(solveTime, solveId, solveName, solveType, id);
            if(re==-1){
                envelop.setSuccessFlg(false);
                envelop.setErrorMsg("记录不存在");
            }else {
                return success(null);
            }
        }catch (Exception e){
            e.printStackTrace();
            envelop.setSuccessFlg(false);
            envelop.setErrorMsg(e.getMessage());
        }
        return envelop;
    }
}

+ 595 - 0
src/main/java/com/yihu/ehr/analyze/controller/dataQuality/WarningSettingEndPoint.java

@ -0,0 +1,595 @@
package com.yihu.ehr.analyze.controller.dataQuality;
import com.yihu.ehr.analyze.feign.StandardServiceClient;
import com.yihu.ehr.analyze.service.dataQuality.DqDatasetWarningService;
import com.yihu.ehr.analyze.service.dataQuality.DqPaltformReceiveWarningService;
import com.yihu.ehr.analyze.service.dataQuality.DqPaltformResourceWarningService;
import com.yihu.ehr.analyze.service.dataQuality.DqPaltformUploadWarningService;
import com.yihu.ehr.constants.ApiVersion;
import com.yihu.ehr.constants.ServiceApi;
import com.yihu.ehr.controller.EnvelopRestEndPoint;
import com.yihu.ehr.entity.quality.DqDatasetWarning;
import com.yihu.ehr.entity.quality.DqPaltformReceiveWarning;
import com.yihu.ehr.entity.quality.DqPaltformResourceWarning;
import com.yihu.ehr.entity.quality.DqPaltformUploadWarning;
import com.yihu.ehr.model.quality.MDqDatasetWarning;
import com.yihu.ehr.model.quality.MDqPaltformReceiveWarning;
import com.yihu.ehr.model.quality.MDqPaltformResourceWarning;
import com.yihu.ehr.model.quality.MDqPaltformUploadWarning;
import com.yihu.ehr.util.rest.Envelop;
import com.yihu.hos.model.standard.MStdDataSet;
import com.yihu.hos.model.standard.MStdMetaData;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import jxl.Sheet;
import jxl.Workbook;
import org.apache.commons.lang.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.multipart.MultipartFile;
import java.util.ArrayList;
import java.util.List;
/**
 * @author yeshijie on 2018/5/28.
 */
@RestController
@RequestMapping(value = ApiVersion.Version1_0, produces = MediaType.APPLICATION_JSON_UTF8_VALUE)
@Api(value = "WarningSettingEndPoint", description = "质控-预警设置", tags = {"档案分析服务-质控-预警设置"})
public class WarningSettingEndPoint extends EnvelopRestEndPoint {
    @Autowired
    private DqDatasetWarningService dqDatasetWarningService;
    @Autowired
    private DqPaltformReceiveWarningService dqPaltformReceiveWarningService;
    @Autowired
    private DqPaltformResourceWarningService dqPaltformResourceWarningService;
    @Autowired
    private DqPaltformUploadWarningService dqPaltformUploadWarningService;
    @Value("${quality.orgCode}")
    private String defaultOrgCode;
    @Value("${quality.version}")
    private String defaultQualityVersion;
    @Autowired
    private StandardServiceClient standardServiceClient;
    @RequestMapping(value = ServiceApi.DataQuality.PaltformReceiveWarningList, method = RequestMethod.GET)
    @ApiOperation(value = "平台接收预警列表")
    public Envelop paltformReceiveWarningList(
            @ApiParam(name = "orgCode", value = "机构code", defaultValue = "jkzl")
            @RequestParam(value = "orgCode", required = false) String orgCode,
            @ApiParam(name = "size", value = "分页大小", defaultValue = "15")
            @RequestParam(value = "size", required = false) int size,
            @ApiParam(name = "page", value = "页码", defaultValue = "1")
            @RequestParam(value = "page", required = false) int page){
        Envelop envelop = new Envelop();
        try {
            String filters = null;
            if(StringUtils.isNotBlank(orgCode)){
                filters = "orgCode="+orgCode;
            }else {
                filters = "orgCode<>"+defaultOrgCode;
            }
            String sorts = "-updateTime";
            List<DqPaltformReceiveWarning> list = dqPaltformReceiveWarningService.search(null, filters, sorts, page, size);
            List<MDqPaltformReceiveWarning> warnings = (List<MDqPaltformReceiveWarning>)convertToModels(list, new ArrayList<>(list.size()), MDqPaltformReceiveWarning.class, null);
            return getPageResult(warnings, (int)dqPaltformReceiveWarningService.getCount(filters), page, size);
        }catch (Exception e){
            e.printStackTrace();
            envelop.setSuccessFlg(false);
            envelop.setErrorMsg(e.getMessage());
        }
        return envelop;
    }
    @RequestMapping(value = ServiceApi.DataQuality.PaltformResourceWarningList, method = RequestMethod.GET)
    @ApiOperation(value = "平台资源化预警列表")
    public Envelop paltformResourceWarningList(
            @ApiParam(name = "orgCode", value = "机构code", defaultValue = "jkzl")
            @RequestParam(value = "orgCode", required = false) String orgCode,
            @ApiParam(name = "size", value = "分页大小", defaultValue = "15")
            @RequestParam(value = "size", required = false) int size,
            @ApiParam(name = "page", value = "页码", defaultValue = "1")
            @RequestParam(value = "page", required = false) int page){
        Envelop envelop = new Envelop();
        try {
            String filters = null;
            if(StringUtils.isNotBlank(orgCode)){
                filters = "orgCode="+orgCode;
            }
            String sorts = "-updateTime";
            List<DqPaltformResourceWarning> list = dqPaltformResourceWarningService.search(null, filters, sorts, page, size);
            List<MDqPaltformResourceWarning> warnings = (List<MDqPaltformResourceWarning>)convertToModels(list, new ArrayList<>(list.size()), MDqPaltformResourceWarning.class, null);
            return getPageResult(warnings, (int)dqPaltformResourceWarningService.getCount(filters), page, size);
        }catch (Exception e){
            e.printStackTrace();
            envelop.setSuccessFlg(false);
            envelop.setErrorMsg(e.getMessage());
        }
        return envelop;
    }
    @RequestMapping(value = ServiceApi.DataQuality.PaltformUploadWarningList, method = RequestMethod.GET)
    @ApiOperation(value = "平台上传预警列表")
    public Envelop paltformUploadWarningList(
            @ApiParam(name = "orgCode", value = "机构code", defaultValue = "jkzl")
            @RequestParam(value = "orgCode", required = false) String orgCode,
            @ApiParam(name = "size", value = "分页大小", defaultValue = "15")
            @RequestParam(value = "size", required = false) int size,
            @ApiParam(name = "page", value = "页码", defaultValue = "1")
            @RequestParam(value = "page", required = false) int page) {
        Envelop envelop = new Envelop();
        try {
            String filters = null;
            if(StringUtils.isNotBlank(orgCode)){
                filters = "orgCode="+orgCode;
            }
            String sorts = "-updateTime";
            List<DqPaltformUploadWarning> list = dqPaltformUploadWarningService.search(null, filters, sorts, page, size);
            List<MDqPaltformUploadWarning> warnings = (List<MDqPaltformUploadWarning>)convertToModels(list, new ArrayList<>(list.size()), MDqPaltformUploadWarning.class, null);
            return getPageResult(warnings, (int)dqPaltformUploadWarningService.getCount(filters), page, size);
        }catch (Exception e){
            e.printStackTrace();
            envelop.setSuccessFlg(false);
            envelop.setErrorMsg(e.getMessage());
        }
        return envelop;
    }
    @RequestMapping(value = ServiceApi.DataQuality.PaltformReceiveWarningDefault, method = RequestMethod.GET)
    @ApiOperation(value = "查找默认的平台接收预警")
    public Envelop paltformReceiveWarningDefault() {
        Envelop envelop = new Envelop();
        try {
            DqPaltformReceiveWarning warning =  dqPaltformReceiveWarningService.findByOrgCode(defaultOrgCode);
            List<DqDatasetWarning> warningList = dqDatasetWarningService.findByOrgCodeAndType(warning.getOrgCode(),"1");
            warning.setDatasetWarningNum(warningList.size());
            warning.setDatasetWarningList(warningList);
            return success(convertToModel(warning, MDqPaltformReceiveWarning.class));
        }catch (Exception e){
            e.printStackTrace();
            envelop.setSuccessFlg(false);
            envelop.setErrorMsg(e.getMessage());
        }
        return envelop;
    }
    @RequestMapping(value = ServiceApi.DataQuality.PaltformReceiveWarning, method = RequestMethod.GET)
    @ApiOperation(value = "根据id查询平台接收预警")
    public Envelop getMDqPaltformReceiveWarningById(
            @ApiParam(name = "id", value = "id", defaultValue = "")
            @PathVariable(value = "id") Long id) {
        Envelop envelop = new Envelop();
        try {
            DqPaltformReceiveWarning warning =  dqPaltformReceiveWarningService.findById(id);
            List<DqDatasetWarning> warningList = dqDatasetWarningService.findByOrgCodeAndType(warning.getOrgCode(),"1");
            warning.setDatasetWarningNum(warningList.size());
            return success(convertToModel(warning, MDqPaltformReceiveWarning.class),warningList);
        }catch (Exception e){
            e.printStackTrace();
            envelop.setSuccessFlg(false);
            envelop.setErrorMsg(e.getMessage());
        }
        return envelop;
    }
    @RequestMapping(value = ServiceApi.DataQuality.PaltformReceiveWarningIsExist, method = RequestMethod.GET)
    @ApiOperation(value = "验证机构是否存在")
    public Envelop paltformReceiveWarningIsExist(
            @ApiParam(name = "orgCode", value = "机构code", defaultValue = "")
            @RequestParam(value = "orgCode") String orgCode) {
        Envelop envelop = new Envelop();
        try {
            DqPaltformReceiveWarning warning =  dqPaltformReceiveWarningService.findByOrgCode(orgCode);
            if(warning!=null){
                return success(1);
            }else {
                return success(0);
            }
        }catch (Exception e){
            e.printStackTrace();
            envelop.setSuccessFlg(false);
            envelop.setErrorMsg(e.getMessage());
        }
        return envelop;
    }
    @RequestMapping(value = ServiceApi.DataQuality.PaltformUploadWarningIsExist, method = RequestMethod.GET)
    @ApiOperation(value = "验证机构是否存在")
    public Envelop paltformUploadWarningIsExist(
            @ApiParam(name = "orgCode", value = "机构code", defaultValue = "")
            @RequestParam(value = "orgCode") String orgCode) {
        // Returns 1 when a platform-upload warning setting already exists for the
        // organization, 0 otherwise.
        Envelop result = new Envelop();
        try {
            DqPaltformUploadWarning existing = dqPaltformUploadWarningService.findByOrgCode(orgCode);
            return success(existing == null ? 0 : 1);
        } catch (Exception e) {
            e.printStackTrace();
            result.setSuccessFlg(false);
            result.setErrorMsg(e.getMessage());
            return result;
        }
    }
    @RequestMapping(value = ServiceApi.DataQuality.PaltformResourceWarning, method = RequestMethod.GET)
    @ApiOperation(value = "根据id查询平台资源化预警")
    public Envelop getMDqPaltformResourceWarningById(
            @ApiParam(name = "id", value = "id", defaultValue = "")
            @PathVariable(value = "id") Long id) {
        // Looks up a single platform resourcing warning setting by primary key.
        Envelop result = new Envelop();
        try {
            DqPaltformResourceWarning found = dqPaltformResourceWarningService.findById(id);
            return success(convertToModel(found, MDqPaltformResourceWarning.class));
        } catch (Exception e) {
            e.printStackTrace();
            result.setSuccessFlg(false);
            result.setErrorMsg(e.getMessage());
            return result;
        }
    }
    @RequestMapping(value = ServiceApi.DataQuality.PaltformUploadWarning, method = RequestMethod.GET)
    @ApiOperation(value = "根据id查询平台上传预警")
    public Envelop getMDqPaltformUploadWarningById(
            @ApiParam(name = "id", value = "id", defaultValue = "")
            @PathVariable(value = "id") Long id) {
        // Fetches one platform-upload warning setting by primary key, together with
        // its attached warning datasets (type "2" = platform upload).
        Envelop envelop = new Envelop();
        try {
            DqPaltformUploadWarning warning = dqPaltformUploadWarningService.findById(id);
            if (warning == null) {
                // Previously a missing id fell through to a NullPointerException on
                // warning.getOrgCode(); report a clear error instead.
                envelop.setSuccessFlg(false);
                envelop.setErrorMsg("未找到id为" + id + "的平台上传预警");
                return envelop;
            }
            List<DqDatasetWarning> warningList = dqDatasetWarningService.findByOrgCodeAndType(warning.getOrgCode(), "2");
            warning.setDatasetWarningNum(warningList.size());
            return success(convertToModel(warning, MDqPaltformUploadWarning.class), warningList);
        } catch (Exception e) {
            e.printStackTrace();
            envelop.setSuccessFlg(false);
            envelop.setErrorMsg(e.getMessage());
        }
        return envelop;
    }
    @RequestMapping(value = ServiceApi.DataQuality.DatasetWarningListNoPage, method = RequestMethod.GET)
    @ApiOperation(value = "预警数据集列表(不分页)")
    public Envelop datasetWarningListNoPage(
            @ApiParam(name = "orgCode", value = "机构code", defaultValue = "jkzl")
            @RequestParam(value = "orgCode", required = true) String orgCode,
            @ApiParam(name = "type", value = "类型(1平台接收,2平台上传)", defaultValue = "1")
            @RequestParam(value = "type", required = true) String type) {
        // Unpaged list of warning datasets for one organization and type
        // ("1" = platform receive, "2" = platform upload).
        // The redundant "throws Exception" was removed: every exception is caught
        // below and reported through the Envelop, so nothing can propagate.
        Envelop envelop = new Envelop();
        try {
            List<DqDatasetWarning> list = dqDatasetWarningService.findByOrgCodeAndType(orgCode, type);
            List<MDqDatasetWarning> warnings = (List<MDqDatasetWarning>) convertToModels(list, new ArrayList<>(list.size()), MDqDatasetWarning.class, null);
            return success(warnings);
        } catch (Exception e) {
            e.printStackTrace();
            envelop.setSuccessFlg(false);
            envelop.setErrorMsg(e.getMessage());
        }
        return envelop;
    }
    @RequestMapping(value = ServiceApi.DataQuality.DatasetWarningList, method = RequestMethod.GET)
    @ApiOperation(value = "预警数据集列表")
    // Paged list of warning datasets for one organization and type
    // ("1" = platform receive, "2" = platform upload), newest dataset codes first.
    public Envelop datasetWarningList(
            @ApiParam(name = "orgCode", value = "机构code", defaultValue = "jkzl")
            @RequestParam(value = "orgCode", required = true) String orgCode,
            @ApiParam(name = "type", value = "类型(1平台接收,2平台上传)", defaultValue = "1")
            @RequestParam(value = "type", required = true) String type,
            @ApiParam(name = "size", value = "分页大小", defaultValue = "15")
            @RequestParam(value = "size", required = false) int size,
            @ApiParam(name = "page", value = "页码", defaultValue = "1")
            @RequestParam(value = "page", required = false) int page){
        Envelop envelop = new Envelop();
        try{
            // NOTE(review): the filter string is built by plain concatenation; an
            // orgCode containing ';' or '=' would alter the query — confirm the
            // search() filter syntax escapes or rejects such values.
            String filters = "orgCode="+orgCode+";type="+type;
            String sorts = "-code";
            List<DqDatasetWarning> list = dqDatasetWarningService.search(null, filters, sorts, page, size);
            List<MDqDatasetWarning> warnings = (List<MDqDatasetWarning>)convertToModels(list, new ArrayList<>(list.size()), MDqDatasetWarning.class, null);
            // Total count is queried separately with the same filters for paging metadata.
            return getPageResult(warnings,(int)dqDatasetWarningService.getCount(filters), page, size);
        }catch (Exception e){
            e.printStackTrace();
            envelop.setSuccessFlg(false);
            envelop.setErrorMsg(e.getMessage());
        }
        return envelop;
    }
    @RequestMapping(value = ServiceApi.DataQuality.PaltformReceiveWarningAdd, method = RequestMethod.POST)
    @ApiOperation(value = "新增平台接收预警")
    public Envelop paltformReceiveWarningAdd(
            @ApiParam(name = "jsonData", value = "对象JSON结构体",  defaultValue = "")
            @RequestParam(value = "jsonData", required = true) String jsonData) {
        // Creates a platform-receive warning setting; at most one per organization.
        Envelop result = new Envelop();
        try {
            DqPaltformReceiveWarning warning = toEntity(jsonData, DqPaltformReceiveWarning.class);
            // Reject duplicates: only one warning record is allowed per org code.
            if (dqPaltformReceiveWarningService.findByOrgCode(warning.getOrgCode()) != null) {
                result.setSuccessFlg(false);
                result.setErrorMsg("该机构已存在预警记录");
                return result;
            }
            DqPaltformReceiveWarning saved = dqPaltformReceiveWarningService.paltformReceiveWarningAdd(warning);
            return success(convertToModel(saved, MDqPaltformReceiveWarning.class));
        } catch (Exception e) {
            e.printStackTrace();
            result.setSuccessFlg(false);
            result.setErrorMsg(e.getMessage());
            return result;
        }
    }
    @RequestMapping(value = ServiceApi.DataQuality.PaltformResourceWarningAdd, method = RequestMethod.POST)
    @ApiOperation(value = "新增平台资源化预警")
    public Envelop paltformResourceWarningAdd(
            @ApiParam(name = "jsonData", value = "对象JSON结构体",  defaultValue = "")
            @RequestParam(value = "jsonData", required = true) String jsonData) {
        // Creates a platform-resourcing warning setting from its JSON representation.
        // NOTE(review): unlike the receive/upload variants, no duplicate-org check is
        // performed here — confirm whether multiple records per org are intended.
        Envelop result = new Envelop();
        try {
            DqPaltformResourceWarning saved = dqPaltformResourceWarningService
                    .paltformResourceWarningAdd(toEntity(jsonData, DqPaltformResourceWarning.class));
            return success(convertToModel(saved, MDqPaltformResourceWarning.class));
        } catch (Exception e) {
            e.printStackTrace();
            result.setSuccessFlg(false);
            result.setErrorMsg(e.getMessage());
            return result;
        }
    }
    @RequestMapping(value = ServiceApi.DataQuality.PaltformUploadWarningAdd, method = RequestMethod.POST)
    @ApiOperation(value = "新增平台上传预警")
    public Envelop paltformUploadWarningAdd(
            @ApiParam(name = "jsonData", value = "对象JSON结构体",  defaultValue = "")
            @RequestParam(value = "jsonData", required = true) String jsonData){
        // Creates a platform-upload warning setting; at most one per organization.
        Envelop result = new Envelop();
        try {
            DqPaltformUploadWarning warning = toEntity(jsonData, DqPaltformUploadWarning.class);
            // Reject duplicates: only one warning record is allowed per org code.
            if (dqPaltformUploadWarningService.findByOrgCode(warning.getOrgCode()) != null) {
                result.setSuccessFlg(false);
                result.setErrorMsg("该机构已存在预警记录");
                return result;
            }
            DqPaltformUploadWarning saved = dqPaltformUploadWarningService.paltformUploadWarningAdd(warning);
            return success(convertToModel(saved, MDqPaltformUploadWarning.class));
        } catch (Exception e) {
            e.printStackTrace();
            result.setSuccessFlg(false);
            result.setErrorMsg(e.getMessage());
            return result;
        }
    }
    @RequestMapping(value = ServiceApi.DataQuality.PaltformReceiveWarningDel, method = RequestMethod.POST)
    @ApiOperation(value = "删除平台接收预警")
    public Envelop paltformReceiveWarningDel(
            @ApiParam(name = "id", value = "1",  defaultValue = "")
            @RequestParam Long id) {
        // Deletes a platform-receive warning setting by id.
        // The redundant "throws Exception" was removed: every exception is caught
        // below and reported through the Envelop.
        Envelop envelop = new Envelop();
        try {
            dqPaltformReceiveWarningService.deleteWarning(id);
            return success(null);
        } catch (Exception e) {
            e.printStackTrace();
            envelop.setSuccessFlg(false);
            envelop.setErrorMsg(e.getMessage());
        }
        return envelop;
    }
    @RequestMapping(value = ServiceApi.DataQuality.PaltformResourceWarningDel, method = RequestMethod.POST)
    @ApiOperation(value = "删除平台资源化预警")
    public Envelop paltformResourceWarningDel(
            @ApiParam(name = "id", value = "1",  defaultValue = "")
            @RequestParam Long id) {
        // Deletes a platform-resourcing warning setting by id.
        // The redundant "throws Exception" was removed: every exception is caught
        // below and reported through the Envelop.
        Envelop envelop = new Envelop();
        try {
            dqPaltformResourceWarningService.delete(id);
            return success(null);
        } catch (Exception e) {
            e.printStackTrace();
            envelop.setSuccessFlg(false);
            envelop.setErrorMsg(e.getMessage());
        }
        return envelop;
    }
    @RequestMapping(value = ServiceApi.DataQuality.PaltformUploadWarningDel, method = RequestMethod.POST)
    @ApiOperation(value = "删除平台上传预警")
    public Envelop paltformUploadWarningDel(
            @ApiParam(name = "id", value = "1",  defaultValue = "")
            @RequestParam Long id) {
        // Deletes a platform-upload warning setting by id.
        // The redundant "throws Exception" was removed: every exception is caught
        // below and reported through the Envelop.
        Envelop envelop = new Envelop();
        try {
            dqPaltformUploadWarningService.deleteWarning(id);
            return success(null);
        } catch (Exception e) {
            e.printStackTrace();
            envelop.setSuccessFlg(false);
            envelop.setErrorMsg(e.getMessage());
        }
        return envelop;
    }
    @RequestMapping(value = ServiceApi.DataQuality.PaltformReceiveWarningUpd, method = RequestMethod.POST)
    @ApiOperation(value = "修改平台接收预警")
    public Envelop paltformReceiveWarningUpd(
            @ApiParam(name = "jsonData", value = "对象JSON结构体",  defaultValue = "")
            @RequestParam(value = "jsonData", required = true) String jsonData) {
        // Updates a platform-receive warning setting from its JSON representation.
        Envelop result = new Envelop();
        try {
            DqPaltformReceiveWarning updated = dqPaltformReceiveWarningService
                    .paltformReceiveWarningUpd(toEntity(jsonData, DqPaltformReceiveWarning.class));
            return success(convertToModel(updated, MDqPaltformReceiveWarning.class));
        } catch (Exception e) {
            e.printStackTrace();
            result.setSuccessFlg(false);
            result.setErrorMsg(e.getMessage());
            return result;
        }
    }
    @RequestMapping(value = ServiceApi.DataQuality.PaltformResourceWarningUpd, method = RequestMethod.POST)
    @ApiOperation(value = "修改平台资源化预警")
    public Envelop paltformResourceWarningUpd(
            @ApiParam(name = "jsonData", value = "对象JSON结构体",  defaultValue = "")
            @RequestParam(value = "jsonData", required = true) String jsonData) {
        // Updates a platform-resourcing warning setting from its JSON representation.
        Envelop result = new Envelop();
        try {
            DqPaltformResourceWarning updated = dqPaltformResourceWarningService
                    .paltformResourceWarningUpd(toEntity(jsonData, DqPaltformResourceWarning.class));
            return success(convertToModel(updated, MDqPaltformResourceWarning.class));
        } catch (Exception e) {
            e.printStackTrace();
            result.setSuccessFlg(false);
            result.setErrorMsg(e.getMessage());
            return result;
        }
    }
    @RequestMapping(value = ServiceApi.DataQuality.PaltformUploadWarningUpd, method = RequestMethod.POST)
    @ApiOperation(value = "修改平台上传预警")
    public Envelop paltformUploadWarningUpd(
            @ApiParam(name = "jsonData", value = "对象JSON结构体",  defaultValue = "")
            @RequestParam(value = "jsonData", required = true) String jsonData) {
        // Updates a platform-upload warning setting from its JSON representation.
        Envelop result = new Envelop();
        try {
            DqPaltformUploadWarning updated = dqPaltformUploadWarningService
                    .paltformUploadWarningUpd(toEntity(jsonData, DqPaltformUploadWarning.class));
            return success(convertToModel(updated, MDqPaltformUploadWarning.class));
        } catch (Exception e) {
            e.printStackTrace();
            result.setSuccessFlg(false);
            result.setErrorMsg(e.getMessage());
            return result;
        }
    }
    @RequestMapping(value = ServiceApi.DataQuality.DatasetWarningAdd, method = RequestMethod.POST)
    @ApiOperation(value = "新增数据集")
    public Envelop datasetWarningAdd(
            @ApiParam(name = "jsonData", value = "对象JSON结构体",  defaultValue = "")
            @RequestParam(value = "jsonData", required = true) String jsonData) {
        // Registers a new warning dataset from its JSON representation.
        Envelop result = new Envelop();
        try {
            DqDatasetWarning saved = dqDatasetWarningService.save(toEntity(jsonData, DqDatasetWarning.class));
            return success(convertToModel(saved, MDqDatasetWarning.class));
        } catch (Exception e) {
            e.printStackTrace();
            result.setSuccessFlg(false);
            result.setErrorMsg(e.getMessage());
            return result;
        }
    }
    @RequestMapping(value = ServiceApi.DataQuality.DatasetWarningDel, method = RequestMethod.POST)
    @ApiOperation(value = "删除数据集")
    public Envelop datasetWarningDel(
            @ApiParam(name = "id", value = "1",  defaultValue = "")
            @RequestParam Long id) {
        // Deletes a warning dataset by id.
        // The redundant "throws Exception" was removed: every exception is caught
        // below and reported through the Envelop.
        Envelop envelop = new Envelop();
        try {
            dqDatasetWarningService.delete(id);
            return success(null);
        } catch (Exception e) {
            e.printStackTrace();
            envelop.setSuccessFlg(false);
            envelop.setErrorMsg(e.getMessage());
        }
        return envelop;
    }
    @RequestMapping(value = ServiceApi.DataQuality.Meta_datas, method = RequestMethod.GET)
    @ApiOperation(value = "获取数据元")
    public Envelop meta_datas(
            @ApiParam(name = "fields", value = "返回的字段,为空返回全部字段", defaultValue = "")
            @RequestParam(value = "fields", required = false) String fields,
            @ApiParam(name = "filters", value = "过滤器,为空检索所有条件", defaultValue = "")
            @RequestParam(value = "filters", required = false) String filters,
            @ApiParam(name = "sorts", value = "排序,规则参见说明文档", defaultValue = "")
            @RequestParam(value = "sorts", required = false) String sorts,
            @ApiParam(name = "size", value = "分页大小", defaultValue = "15")
            @RequestParam(value = "size", required = false) int size,
            @ApiParam(name = "page", value = "页码", defaultValue = "1")
            @RequestParam(value = "page", required = false) int page) {
        // Pages through standard meta-data entries of the default quality standard
        // version via the standard service; paging total comes from the response headers.
        Envelop result = new Envelop();
        try {
            ResponseEntity<List<MStdMetaData>> response =
                    standardServiceClient.searchOrgMetaDatas(fields, filters, sorts, size, page, defaultQualityVersion);
            List<MStdMetaData> metaDataList = response.getBody();
            return getPageResult(metaDataList, getTotalCount(response), page, size);
        } catch (Exception e) {
            e.printStackTrace();
            result.setSuccessFlg(false);
            result.setErrorMsg(e.getMessage());
            return result;
        }
    }
    @RequestMapping(value = ServiceApi.DataQuality.DatasetList, method = RequestMethod.GET)
    @ApiOperation(value = "获取数据集")
    public Envelop datasetList(
            @ApiParam(name = "fields", value = "返回的字段,为空返回全部字段", defaultValue = "")
            @RequestParam(value = "fields", required = false) String fields,
            @ApiParam(name = "filters", value = "过滤器,为空检索所有条件", defaultValue = "")
            @RequestParam(value = "filters", required = false) String filters,
            @ApiParam(name = "sorts", value = "排序,规则参见说明文档", defaultValue = "")
            @RequestParam(value = "sorts", required = false) String sorts,
            @ApiParam(name = "size", value = "分页大小", defaultValue = "15")
            @RequestParam(value = "size", required = false) int size,
            @ApiParam(name = "page", value = "页码", defaultValue = "1")
            @RequestParam(value = "page", required = false) int page) {
        // Pages through standard data sets of the default quality standard version
        // via the standard service; paging total comes from the response headers.
        Envelop result = new Envelop();
        try {
            ResponseEntity<List<MStdDataSet>> response =
                    standardServiceClient.searchDataSets(fields, filters, sorts, size, page, defaultQualityVersion);
            List<MStdDataSet> dataSetList = response.getBody();
            return getPageResult(dataSetList, getTotalCount(response), page, size);
        } catch (Exception e) {
            e.printStackTrace();
            result.setSuccessFlg(false);
            result.setErrorMsg(e.getMessage());
            return result;
        }
    }
    @RequestMapping(value = ServiceApi.DataQuality.ImportDatasetExcel, method = RequestMethod.POST)
    @ApiOperation(value = "数据集导入")
    public Envelop importDatasetExcel(MultipartFile file) {
        // Imports dataset codes from the first sheet of an uploaded Excel file
        // (column 0, skipping the header row) and registers them as warning datasets.
        Envelop envelop = new Envelop();
        Workbook wb = null;
        try {
            wb = Workbook.getWorkbook(file.getInputStream());
            Sheet sheet = wb.getSheets()[0];
            int rows = sheet.getRows();
            List<String> codeList = new ArrayList<>(rows);
            // Row 0 is the header; collect non-blank codes from the first column.
            // (The loop condition already handles sheets with <= 1 row.)
            for (int row = 1; row < rows; row++) {
                String code = sheet.getCell(0, row).getContents();
                if (StringUtils.isNotBlank(code)) {
                    codeList.add(code);
                }
            }
            List<DqDatasetWarning> list = dqDatasetWarningService.importDatasetExcel(codeList);
            List<MDqDatasetWarning> warnings = (List<MDqDatasetWarning>) convertToModels(list, new ArrayList<>(list.size()), MDqDatasetWarning.class, null);
            return success(warnings);
        } catch (Exception e) {
            e.printStackTrace();
            envelop.setSuccessFlg(false);
            envelop.setErrorMsg(e.getMessage());
        } finally {
            // The original leaked the jxl Workbook (it holds the upload stream open);
            // always release it.
            if (wb != null) {
                wb.close();
            }
        }
        return envelop;
    }
}

+ 42 - 0
src/main/java/com/yihu/ehr/analyze/controller/qc/QcRuleCheckEndpoint.java

@ -0,0 +1,42 @@
package com.yihu.ehr.analyze.controller.qc;
import com.yihu.ehr.analyze.service.qc.QcRuleCheckService;
import com.yihu.ehr.constants.ApiVersion;
import com.yihu.ehr.constants.ServiceApi;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
/**
 * QC rule check endpoints: empty / type / format / value checks on an incoming
 * message body.
 *
 * NOTE(review): every handler body is commented out, so these endpoints
 * currently accept the POSTed message and do nothing — confirm whether the
 * checks were intentionally disabled (e.g. moved into the analyze pipeline)
 * before relying on them.
 *
 * @author Airhead
 * @created 2018-01-19
 */
@RestController
@RequestMapping(value = ApiVersion.Version1_0, produces = MediaType.APPLICATION_JSON_UTF8_VALUE)
public class QcRuleCheckEndpoint {
    @Autowired
    private QcRuleCheckService service;
    // Empty-value check; delegation to the service is disabled.
    @RequestMapping(value = ServiceApi.PackageAnalyzer.QcEmpty, method = RequestMethod.POST, produces = "application/json;charset=UTF-8")
    public void emptyCheck(@RequestBody String message) throws Exception {
        //service.emptyCheck(message);
    }
    // Data-type check; delegation to the service is disabled.
    @RequestMapping(value = ServiceApi.PackageAnalyzer.QcType, method = RequestMethod.POST, produces = "application/json;charset=UTF-8")
    public void typeCheck(@RequestBody String message) throws Exception {
        //service.typeCheck(message);
    }
    // Format check; delegation to the service is disabled.
    @RequestMapping(value = ServiceApi.PackageAnalyzer.QcFormat, method = RequestMethod.POST, produces = "application/json;charset=UTF-8")
    public void formatCheck(@RequestBody String message) throws Exception {
        //service.formatCheck(message);
    }
    // Value-range check; delegation to the service is disabled.
    @RequestMapping(value = ServiceApi.PackageAnalyzer.QcValue, method = RequestMethod.POST, produces = "application/json;charset=UTF-8")
    public void valueCheck(@RequestBody String message) throws Exception {
        //service.valueCheck(message);
    }
}

+ 23 - 0
src/main/java/com/yihu/ehr/analyze/dao/DqDatasetWarningDao.java

@ -0,0 +1,23 @@
package com.yihu.ehr.analyze.dao;
import com.yihu.ehr.entity.quality.DqDatasetWarning;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.PagingAndSortingRepository;
import java.util.List;
/**
 * Data quality — warning thresholds attached to datasets.
 * (Original header: 数据质量-数据集预警值.)
 *
 * @author yeshijie on 2018/5/28.
 */
public interface DqDatasetWarningDao extends PagingAndSortingRepository<DqDatasetWarning, Long> {
    // type "1" = platform receive, "2" = platform upload (per the callers in this service).
    List<DqDatasetWarning> findByOrgCodeAndType(String orgCode,String type);
    List<DqDatasetWarning> findByType(String type);
    // Bulk delete of all warnings for one org/type pair; returns the number of rows removed.
    // NOTE(review): @Modifying queries must run inside a transaction — confirm the
    // calling service method is @Transactional.
    @Modifying
    @Query("delete DqDatasetWarning a where a.orgCode = ?1 and a.type=?2")
    int deleteByOrgCodeAndType(String orgCode,String type);
}

+ 17 - 0
src/main/java/com/yihu/ehr/analyze/dao/DqPaltformReceiveWarningDao.java

@ -0,0 +1,17 @@
package com.yihu.ehr.analyze.dao;
import com.yihu.ehr.entity.quality.DqPaltformReceiveWarning;
import org.springframework.data.repository.PagingAndSortingRepository;
import java.util.List;
/**
 * Data quality — platform receive warning thresholds.
 * (Original header: 平台接收预警值. "Paltform" is a pervasive typo for "Platform"
 * kept for compatibility with the existing entity/table names.)
 *
 * @author yeshijie on 2018/5/28.
 */
public interface DqPaltformReceiveWarningDao extends PagingAndSortingRepository<DqPaltformReceiveWarning, Long> {
    // At most one warning record per organization (enforced by the endpoint layer).
    DqPaltformReceiveWarning findByOrgCode(String orgCode);
    List<DqPaltformReceiveWarning> findAll();
}

+ 23 - 0
src/main/java/com/yihu/ehr/analyze/dao/DqPaltformResourceWarningDao.java

@ -0,0 +1,23 @@
package com.yihu.ehr.analyze.dao;
import com.yihu.ehr.entity.quality.DqPaltformResourceWarning;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.PagingAndSortingRepository;
import java.util.List;
/**
 * Data quality — platform resourcing warning thresholds.
 * (Original header: 数据质量-平台资源化预警值.)
 * Unused imports (GovernmentBrowseLog, @Param) were removed.
 *
 * @author yeshijie on 2018/5/28.
 */
public interface DqPaltformResourceWarningDao extends PagingAndSortingRepository<DqPaltformResourceWarning, Long> {
    DqPaltformResourceWarning findByOrgCode(String orgCode);
    List<DqPaltformResourceWarning> findAll();
    // Most recently created warning record, or null when the table is empty.
    @Query(value = "select a.* from dq_paltform_resource_warning a order by a.create_time desc limit 1", nativeQuery = true)
    DqPaltformResourceWarning findByFirst();
}

+ 17 - 0
src/main/java/com/yihu/ehr/analyze/dao/DqPaltformUploadWarningDao.java

@ -0,0 +1,17 @@
package com.yihu.ehr.analyze.dao;
import com.yihu.ehr.entity.quality.DqPaltformUploadWarning;
import org.springframework.data.repository.PagingAndSortingRepository;
import java.util.List;
/**
 * Data quality — platform upload warning thresholds.
 * (Original header: 数据质量-平台上传预警值.)
 *
 * @author yeshijie on 2018/5/28.
 */
public interface DqPaltformUploadWarningDao extends PagingAndSortingRepository<DqPaltformUploadWarning, Long> {
    // At most one warning record per organization (enforced by the endpoint layer).
    DqPaltformUploadWarning findByOrgCode(String orgCode);
    List<DqPaltformUploadWarning> findAll();
}

+ 11 - 0
src/main/java/com/yihu/ehr/analyze/dao/DqWarningRecordDao.java

@ -0,0 +1,11 @@
package com.yihu.ehr.analyze.dao;
import com.yihu.ehr.entity.quality.DqWarningRecord;
import org.springframework.data.repository.PagingAndSortingRepository;
/**
 * Data quality — warning occurrence records, keyed by String id.
 * Only the inherited CRUD/paging operations are used; no derived queries.
 *
 * @author yeshijie on 2018/6/12.
 */
public interface DqWarningRecordDao extends PagingAndSortingRepository<DqWarningRecord, String> {
}

+ 107 - 0
src/main/java/com/yihu/ehr/analyze/feign/HosAdminServiceClient.java

@ -0,0 +1,107 @@
package com.yihu.ehr.analyze.feign;
import com.yihu.ehr.analyze.model.AdapterDatasetModel;
import com.yihu.ehr.analyze.model.AdapterMetadataModel;
import com.yihu.ehr.constants.ApiVersion;
import com.yihu.ehr.constants.MicroServices;
import com.yihu.ehr.constants.ServiceApi;
import io.swagger.annotations.ApiParam;
import org.springframework.cloud.netflix.feign.FeignClient;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import springfox.documentation.annotations.ApiIgnore;
import java.util.Collection;
/**
 * Feign client for the standard-redis service: read-only lookups of standard
 * metadata / dictionary entries cached in Redis, plus adapter dataset/metadata
 * paging from the adapter center.
 * Unused imports (MAdapterDataSet, Envelop, MStdMetaData) were removed, and the
 * redundant "public" modifier on the first interface method was dropped.
 *
 * @author zjj
 * @created 2018.03.20
 */
@ApiIgnore
@FeignClient(name = MicroServices.StdRedis)
@RequestMapping(ApiVersion.Version1_0)
public interface HosAdminServiceClient {
    // Comma-separated(?) metadata codes of a dataset in a standard version —
    // NOTE(review): the exact string format is defined by the remote service.
    @RequestMapping(value = ServiceApi.Redis.StdMetadataCodes, method = RequestMethod.GET)
    String getMetadataCodes(@RequestParam("version") String version,
                            @RequestParam("datasetCode") String datasetCode);
    // Declared data type of one metadata field.
    @RequestMapping(value = ServiceApi.Redis.StdMetadataType, method = RequestMethod.GET)
    String getMetaDataType(
            @RequestParam("version") String version,
            @RequestParam("dataSetCode") String dataSetCode,
            @RequestParam("innerCode") String innerCode);
    // Declared format (e.g. a pattern) of one metadata field.
    @RequestMapping(value = ServiceApi.Redis.StdMetadataFormat, method = RequestMethod.GET)
    String getMetaDataFormat(
            @RequestParam("version") String version,
            @RequestParam("dataSetCode") String dataSetCode,
            @RequestParam("innerCode") String innerCode);
    // Whether one metadata field may be empty.
    @RequestMapping(value = ServiceApi.Redis.StdMetadataNullable, method = RequestMethod.GET)
    Boolean isMetaDataNullable(
            @RequestParam("version") String version,
            @RequestParam("datasetCode") String datasetCode,
            @RequestParam("metadataCode") String metadataCode);
    // Dictionary id bound to one metadata field.
    @RequestMapping(value = ServiceApi.Redis.StdMetadataDict, method = RequestMethod.GET)
    String getMetaDataDict(
            @RequestParam("version") String version,
            @RequestParam("dataSetCode") String dataSetCode,
            @RequestParam("innerCode") String innerCode);
    // Display value of a dictionary entry code.
    @RequestMapping(value = ServiceApi.Redis.StdDictEntryValue, method = RequestMethod.GET)
    String getDictEntryValue(
            @RequestParam("version") String version,
            @RequestParam("dictId") String dictId,
            @RequestParam("entryCode") String entryCode);
    // Existence checks against a dictionary, by value and by code.
    @RequestMapping(value = ServiceApi.Redis.StdDictEntryValueExist, method = RequestMethod.GET)
    Boolean isDictValueExist(
            @RequestParam("version") String version,
            @RequestParam("dictId") String dictId,
            @RequestParam("entryValue") String entryValue);
    @RequestMapping(value = ServiceApi.Redis.StdDictEntryCodeExist, method = RequestMethod.GET)
    Boolean isDictCodeExist(
            @RequestParam("version") String version,
            @RequestParam("dictId") String dictId,
            @RequestParam("entryCode") String entryCode);
    // Paged adapter dataset list for one adapter-scheme version.
    // NOTE(review): the @ApiParam descriptions for "page"/"size" are swapped in
    // the original and kept as-is; the parameter names themselves are correct.
    @RequestMapping(value = "/adapterCenter/dataset/pageList", method = RequestMethod.GET)
    ResponseEntity<Collection<AdapterDatasetModel>> adapterDatasetList(
            @ApiParam(name = "version", value = "版本号")
            @RequestParam(value = "version", required = true) String version,
            @ApiParam(name = "fields", value = "字段")
            @RequestParam(value = "fields", required = true) String fields,
            @ApiParam(name = "filters", value = "过滤")
            @RequestParam(value = "filters", required = false) String filters,
            @ApiParam(name = "sorts", value = "排序")
            @RequestParam(value = "sorts", required = false) String sorts,
            @ApiParam(name = "page", value = "分页大小", required = true, defaultValue = "1")
            @RequestParam(value = "page") int page,
            @ApiParam(name = "size", value = "页码", required = true, defaultValue = "15")
            @RequestParam(value = "size") int size);
    // Paged adapter metadata list for one adapter-scheme version.
    @RequestMapping(value = "/adapterCenter/metadata/pageList", method = RequestMethod.GET)
    ResponseEntity<Collection<AdapterMetadataModel>> adapterMetadataList(
            @ApiParam(name = "version", value = "版本号")
            @RequestParam(value = "version", required = true) String version,
            @ApiParam(name = "fields", value = "字段")
            @RequestParam(value = "fields", required = true) String fields,
            @ApiParam(name = "filters", value = "过滤")
            @RequestParam(value = "filters", required = false) String filters,
            @ApiParam(name = "sorts", value = "排序")
            @RequestParam(value = "sorts", required = false) String sorts,
            @ApiParam(name = "page", value = "分页大小", required = true, defaultValue = "1")
            @RequestParam(value = "page") int page,
            @ApiParam(name = "size", value = "页码", required = true, defaultValue = "15")
            @RequestParam(value = "size") int size);
}

+ 34 - 0
src/main/java/com/yihu/ehr/analyze/feign/PackageMgrClient.java

@ -0,0 +1,34 @@
package com.yihu.ehr.analyze.feign;
import com.yihu.ehr.constants.ApiVersion;
import com.yihu.ehr.constants.MicroServices;
import com.yihu.ehr.constants.ServiceApi;
import com.yihu.ehr.model.packs.EsSimplePackage;
import com.yihu.ehr.profile.AnalyzeStatus;
import org.springframework.cloud.netflix.feign.FeignClient;
import org.springframework.web.bind.annotation.*;
import springfox.documentation.annotations.ApiIgnore;
/**
 * Feign client for the package-management service: fetch a package by id and
 * report back the analyze status of a package.
 * The unused ArchiveStatus import was removed and imports regrouped.
 *
 * @author Sand
 * @version 1.0
 * @created 2016.03.28 9:27
 */
@ApiIgnore
@FeignClient(name = MicroServices.Package)
@RequestMapping(ApiVersion.Version1_0)
public interface PackageMgrClient {
    // Fetches one package descriptor by its id.
    @RequestMapping(value = ServiceApi.Packages.Package, method = RequestMethod.GET)
    EsSimplePackage getPackage(
            @PathVariable(value = "id") String id);
    // Updates the analyze status of a package; "message" carries error details.
    @RequestMapping(value = ServiceApi.PackageAnalyzer.Status, method = {RequestMethod.PUT})
    boolean analyzeStatus(
            @PathVariable(value = "id") String id,
            @RequestParam(value = "status") AnalyzeStatus status,
            @RequestParam(value = "errorType") int errorType,
            @RequestBody String message) throws Exception;
}

+ 28 - 0
src/main/java/com/yihu/ehr/analyze/feign/RedisServiceClient.java

@ -0,0 +1,28 @@
package com.yihu.ehr.analyze.feign;
import com.yihu.ehr.constants.ApiVersion;
import com.yihu.ehr.constants.MicroServices;
import com.yihu.ehr.constants.ServiceApi;
import com.yihu.ehr.util.rest.Envelop;
import org.springframework.cloud.netflix.feign.FeignClient;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import springfox.documentation.annotations.ApiIgnore;
/**
 * Feign client for the redis service's message-queue channel: publishes a
 * message on a channel on behalf of a publisher application.
 *
 * @author hzp
 * @created 2017.04.28
 */
@ApiIgnore
@FeignClient(name = MicroServices.Redis)
@RequestMapping(ApiVersion.Version1_0)
public interface RedisServiceClient {
    // Publishes "message" on "channel" as application "publisherAppId";
    // the returned Envelop carries the remote success flag / error message.
    @RequestMapping(value = ServiceApi.Redis.MqChannel.SendMessage, method = RequestMethod.POST)
    Envelop sendMessage(
            @RequestParam(value = "publisherAppId") String publisherAppId,
            @RequestParam(value = "channel") String channel,
            @RequestParam(value = "message") String message);
}

+ 58 - 0
src/main/java/com/yihu/ehr/analyze/feign/StandardServiceClient.java

@ -0,0 +1,58 @@
package com.yihu.ehr.analyze.feign;
import com.yihu.ehr.constants.ApiVersion;
import com.yihu.ehr.constants.MicroServices;
import com.yihu.ehr.constants.ServiceApi;
import com.yihu.hos.model.standard.MStdDataSet;
import com.yihu.hos.model.standard.MStdMetaData;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import org.springframework.cloud.netflix.feign.FeignClient;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import springfox.documentation.annotations.ApiIgnore;
import java.util.List;
/**
 * Feign client for the Standard microservice: searches standard data sets
 * and standard metadata (data element) definitions.
 *
 * @author yeshijie on 2018/6/14.
 */
@ApiIgnore
@FeignClient(name = MicroServices.Standard)
@RequestMapping(ApiVersion.Version1_0)
public interface StandardServiceClient {

    /**
     * Searches standard data sets.
     *
     * @param fields  fields to return; null/empty returns all fields
     * @param filters filter expression; null/empty matches all records
     * @param sorts   sort expression (see the service documentation)
     * @param size    page size; null omits the parameter so the service default applies
     * @param page    page number; null omits the parameter so the service default applies
     * @param version standard version (required)
     * @return the matching data sets
     */
    @RequestMapping(value = ServiceApi.Standards.DataSets, method = RequestMethod.GET)
    @ApiOperation("查询数据集的方法")
    ResponseEntity<List<MStdDataSet>> searchDataSets(
            @ApiParam(name = "fields", value = "返回的字段,为空返回全部字段", defaultValue = "")
            @RequestParam(value = "fields", required = false) String fields,
            @ApiParam(name = "filters", value = "过滤器,为空检索所有条件", defaultValue = "")
            @RequestParam(value = "filters", required = false) String filters,
            @ApiParam(name = "sorts", value = "排序,规则参见说明文档", defaultValue = "")
            @RequestParam(value = "sorts", required = false) String sorts,
            // BUGFIX: was primitive int with required = false — a primitive can
            // never represent "parameter omitted"; the Integer wrapper lets
            // callers pass null so the query parameter is left out entirely.
            // Existing callers passing int literals still compile via autoboxing.
            @ApiParam(name = "size", value = "分页大小", defaultValue = "15")
            @RequestParam(value = "size", required = false) Integer size,
            @ApiParam(name = "page", value = "页码", defaultValue = "1")
            @RequestParam(value = "page", required = false) Integer page,
            @ApiParam(name = "version", value = "版本", defaultValue = "")
            @RequestParam(value = "version") String version);

    /**
     * Searches standard metadata (data elements).
     *
     * @param fields  fields to return; null/empty returns all fields
     * @param filters filter expression; null/empty matches all records
     * @param sorts   sort expression (see the service documentation)
     * @param size    page size; null omits the parameter so the service default applies
     * @param page    page number; null omits the parameter so the service default applies
     * @param version standard version (required)
     * @return the matching metadata records
     */
    @RequestMapping(value = ServiceApi.Standards.MetaDatas, method = RequestMethod.GET)
    @ApiOperation(value = "查找数据元")
    ResponseEntity<List<MStdMetaData>> searchOrgMetaDatas(
            @ApiParam(name = "fields", value = "返回的字段,为空返回全部字段", defaultValue = "")
            @RequestParam(value = "fields", required = false) String fields,
            @ApiParam(name = "filters", value = "过滤器,为空检索所有条件", defaultValue = "")
            @RequestParam(value = "filters", required = false) String filters,
            @ApiParam(name = "sorts", value = "排序,规则参见说明文档", defaultValue = "")
            @RequestParam(value = "sorts", required = false) String sorts,
            // Same int -> Integer fix as searchDataSets above.
            @ApiParam(name = "size", value = "分页大小", defaultValue = "15")
            @RequestParam(value = "size", required = false) Integer size,
            @ApiParam(name = "page", value = "页码", defaultValue = "1")
            @RequestParam(value = "page", required = false) Integer page,
            @ApiParam(name = "version", value = "版本", defaultValue = "")
            @RequestParam(value = "version") String version);
}

+ 65 - 0
src/main/java/com/yihu/ehr/analyze/job/PackDatasetDetailsJob.java

@ -0,0 +1,65 @@
package com.yihu.ehr.analyze.job;
import com.yihu.ehr.analyze.service.qc.PackageQcService;
import com.yihu.ehr.elasticsearch.ElasticSearchUtil;
import com.yihu.ehr.redis.client.RedisClient;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Periodically flushes the in-memory dataset QC tallies collected by
 * {@link PackageQcService#getDatasetDetails()} into the
 * "json_archives_qc/qc_dataset_detail" Elasticsearch type, merging counters
 * into documents that already exist.
 *
 * Created by progr1mmer on 2018/7/18.
 */
@Component
public class PackDatasetDetailsJob {

    @Autowired
    private RedisClient redisClient;

    @Autowired
    private ElasticSearchUtil elasticSearchUtil;

    /**
     * Runs 60 seconds after the previous execution completes.
     *
     * Each map entry key is "orgCode;receiveDate;version;eventType;dataset"
     * and each value is "count;row" — inferred from how the split parts are
     * consumed below; confirm against PackageQcService if the format changes.
     *
     * @throws Exception propagated from the Elasticsearch utility
     */
    @Scheduled(fixedDelay = 60000)
    public void delayUpdate() throws Exception {
        Map<String, String> datasetDetails = PackageQcService.getDatasetDetails();
        if (datasetDetails.isEmpty()) {
            return;
        }
        List<Map<String, Object>> inserts = new ArrayList<>();
        List<Map<String, Object>> updates = new ArrayList<>();
        datasetDetails.forEach((key, val) -> {
            String[] keyParts = key.split(";");
            String[] valParts = val.split(";");
            Map<String, Object> map = new HashMap<>();
            // Document id: orgCode_receiveDate_eventType_dataset (version is
            // deliberately excluded — it is only used for the name lookup).
            String id = keyParts[0] + "_" + keyParts[1] + "_" + keyParts[3] + "_" + keyParts[4];
            Map<String, Object> source = elasticSearchUtil.findById("json_archives_qc", "qc_dataset_detail", id);
            map.put("_id", id);
            if (source != null) {
                // Existing document: accumulate the counters.
                map.put("count", Integer.parseInt(source.get("count").toString()) + Integer.parseInt(valParts[0]));
                map.put("row", Integer.parseInt(source.get("row").toString()) + Integer.parseInt(valParts[1]));
                updates.add(map);
            } else {
                // New document: materialize the full record.
                map.put("org_code", keyParts[0]);
                map.put("receive_date", keyParts[1] + " 00:00:00");
                map.put("event_type", Integer.parseInt(keyParts[3]));
                map.put("dataset", keyParts[4]);
                map.put("dataset_name", redisClient.get("std_data_set_" + keyParts[2] + ":" + keyParts[4] + ":name"));
                map.put("count", Integer.parseInt(valParts[0]));
                map.put("row", Integer.parseInt(valParts[1]));
                inserts.add(map);
            }
        });
        // BUGFIX: only issue bulk requests that actually contain actions.
        // Previously both calls ran unconditionally; an empty bulk request is
        // at best a wasted round trip and is rejected by the Elasticsearch
        // client's request validation.
        if (!inserts.isEmpty()) {
            elasticSearchUtil.bulkIndex("json_archives_qc", "qc_dataset_detail", inserts);
        }
        if (!updates.isEmpty()) {
            elasticSearchUtil.bulkUpdate("json_archives_qc", "qc_dataset_detail", updates);
        }
    }
}

+ 30 - 0
src/main/java/com/yihu/ehr/analyze/job/PackageAnalyzeJob.java

@ -0,0 +1,30 @@
package com.yihu.ehr.analyze.job;
import com.yihu.ehr.analyze.service.pack.PackageAnalyzeService;
import com.yihu.ehr.lang.SpringContext;
import org.quartz.InterruptableJob;
import org.quartz.JobExecutionContext;
import org.quartz.UnableToInterruptJobException;
import org.springframework.stereotype.Component;
/**
 * Archive-package analysis job.
 *
 * Uses the simplest possible approach to parse zip packages into HBase as
 * intermediate health-record data, with no special business logic. The parsed
 * output can later be resourced and quality-controlled; its current primary
 * purpose is data quality control.
 *
 * @author Airhead
 * @version 1.0
 * @created 2018.01.15
 */
@Component
public class PackageAnalyzeJob implements InterruptableJob {

    /** No-op: this job does not support interruption. */
    @Override
    public void interrupt() throws UnableToInterruptJobException {
    }

    /** Delegates each trigger firing straight to the analysis service. */
    @Override
    public void execute(JobExecutionContext context) {
        SpringContext.getService(PackageAnalyzeService.class).analyze();
    }
}

+ 100 - 0
src/main/java/com/yihu/ehr/analyze/job/QcDataSetDetailJob.java

@ -0,0 +1,100 @@
package com.yihu.ehr.analyze.job;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.yihu.ehr.analyze.feign.RedisServiceClient;
import com.yihu.ehr.elasticsearch.ElasticSearchUtil;
import com.yihu.ehr.redis.client.RedisClient;
import com.yihu.ehr.util.datetime.DateUtil;
import org.apache.commons.lang.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
import java.util.*;
/**
 * Nightly backfill job that aggregates per-organization dataset statistics
 * from "json_archives_qc/qc_dataset_info" into "qc_dataset_detail",
 * processing one historical day per run and walking backwards in time via
 * the "start_date" cursor kept in Redis.
 *
 * @Author: zhengwei
 * @Date: 2018/7/17 11:47
 * @Description:
 */
@Component
public class QcDataSetDetailJob {

    @Autowired
    private RedisClient redisClient;

    @Autowired
    private ElasticSearchUtil elasticSearchUtil;

    @Autowired
    protected ObjectMapper objectMapper;

    /**
     * Runs at 18:00 every day. Aggregates one day's dataset counts per
     * organization, then moves the Redis cursor one day back. Stops once the
     * cursor reaches 2018-04-14 (the backfill horizon).
     *
     * @throws Exception propagated from Elasticsearch / JSON parsing
     */
    @Scheduled(cron = "0 0 18 * * ?")
    public void dataSetTask() throws Exception {
        String date = redisClient.get("start_date");
        if (StringUtils.isEmpty(date)) {
            // First run: seed the cursor.
            date = "2018-07-18";
            redisClient.set("start_date", date);
        }
        // Backfill complete — nothing earlier than this date is processed.
        if ("2018-04-14".equals(date)) {
            return;
        }
        List<String> field = new ArrayList<>();
        field.add("org_code");
        String sqlOrg = "SELECT org_code FROM json_archives/info where receive_date>= '" + date
                + " 00:00:00' AND receive_date<='" + date + " 23:59:59' group by org_code";
        List<Map<String, Object>> orgList = elasticSearchUtil.findBySql(field, sqlOrg);
        for (Map<String, Object> orgMap : orgList) {
            String orgCode = orgMap.get("org_code") + "";
            List<Map<String, Object>> res = new ArrayList<>();
            StringBuilder filters = new StringBuilder();
            filters.append("qc_step=1;");
            filters.append("receive_date>=").append(date).append(" 00:00:00;");
            // BUGFIX: was "receive_date<", which silently dropped any record
            // received during the day's final second; "<=" matches the
            // same-day upper bound used by sqlOrg above.
            filters.append("receive_date<=").append(date).append(" 23:59:59;");
            filters.append("org_code=").append(orgCode);
            long startTime = System.currentTimeMillis();
            int count = (int) elasticSearchUtil.count("json_archives_qc", "qc_dataset_info", filters.toString());
            // ceil(count / 1000): number of 1000-row pages (was a double,
            // which only obscured the integer arithmetic).
            int pageNum = count % 1000 > 0 ? count / 1000 + 1 : count / 1000;
            for (int i = 0; i < pageNum; i++) {
                Page<Map<String, Object>> result = elasticSearchUtil.page("json_archives_qc", "qc_dataset_info", filters.toString(), i + 1, 1000);
                System.out.println("查询耗时:" + (System.currentTimeMillis() - startTime) + "ms");
                for (Map<String, Object> map : result) {
                    // "details" is a JSON array of {datasetCode: rowCount} maps.
                    List<Map<String, Object>> dataSets = objectMapper.readValue(map.get("details").toString(), List.class);
                    String eventType = map.get("event_type").toString();
                    for (Map<String, Object> dataSet : dataSets) {
                        for (Map.Entry<String, Object> entry : dataSet.entrySet()) {
                            getDataSets(map.get("version") + "", entry.getKey(), (int) entry.getValue(), res, date, orgCode, eventType);
                        }
                    }
                }
            }
            elasticSearchUtil.bulkIndex("json_archives_qc", "qc_dataset_detail", res);
            System.out.println("统计耗时:" + (System.currentTimeMillis() - startTime) + "ms");
        }
        // Step the cursor back one day for the next run.
        Date beginDate = DateUtil.strToDate(date);
        Date addDate = DateUtil.addDate(-1, beginDate);
        String endDate = DateUtil.toString(addDate);
        redisClient.set("start_date", endDate);
    }

    /**
     * Accumulates one (dataset, eventType) observation into {@code res}:
     * if a matching entry exists its row/count totals are incremented,
     * otherwise a new aggregate record is appended.
     *
     * @param version   standard version, used only for the dataset-name lookup
     * @param dataSet   dataset code
     * @param row       number of rows in this observation
     * @param res       accumulator of aggregate records (mutated in place)
     * @param date      day being processed (yyyy-MM-dd)
     * @param orgCode   organization code
     * @param eventType event type discriminator
     */
    public void getDataSets(String version, String dataSet, int row, List<Map<String, Object>> res, String date, String orgCode, String eventType) {
        for (Map<String, Object> map : res) {
            if (dataSet.equals(map.get("dataset")) && eventType.equals(map.get("event_type"))) {
                map.put("row", (int) map.get("row") + row);
                map.put("count", (int) map.get("count") + 1);
                return;
            }
        }
        Map<String, Object> map = new HashMap<>();
        map.put("org_code", orgCode);
        map.put("event_type", eventType);
        map.put("receive_date", date + " 00:00:00");
        map.put("dataset", dataSet);
        map.put("dataset_name", redisClient.get("std_data_set_" + version + ":" + dataSet + ":name"));
        map.put("row", row);
        map.put("count", 1);
        res.add(map);
    }
}

+ 26 - 0
src/main/java/com/yihu/ehr/analyze/job/WarningQuestionJob.java

@ -0,0 +1,26 @@
package com.yihu.ehr.analyze.job;
import com.yihu.ehr.analyze.service.dataQuality.WarningQuestionService;
import com.yihu.ehr.lang.SpringContext;
import org.quartz.InterruptableJob;
import org.quartz.JobExecutionContext;
import org.quartz.UnableToInterruptJobException;
import org.springframework.stereotype.Component;
/**
 * Job that generates warning questions.
 *
 * @author yeshijie on 2018/6/11.
 */
@Component
public class WarningQuestionJob implements InterruptableJob {

    /** No-op: this job does not support interruption. */
    @Override
    public void interrupt() throws UnableToInterruptJobException {
    }

    /** Delegates each trigger firing to the warning-question service (null = no date filter). */
    @Override
    public void execute(JobExecutionContext context) {
        SpringContext.getService(WarningQuestionService.class).analyze(null);
    }
}

+ 139 - 0
src/main/java/com/yihu/ehr/analyze/model/AdapterDatasetModel.java

@ -0,0 +1,139 @@
package com.yihu.ehr.analyze.model;
import org.hibernate.annotations.GenericGenerator;
import javax.persistence.Column;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import java.io.Serializable;
/**
 * Mapping model between a standard data set and its adapted (site-local)
 * data set.
 *
 * NOTE(review): the class carries JPA column annotations but no visible
 * {@code @Entity}/{@code @Table}; confirm whether it is mapped elsewhere or
 * the annotations are purely informational.
 *
 * Created by lingfeng on 2015/9/16.
 */
public class AdapterDatasetModel  implements Serializable {

    // FIX: Serializable classes should declare an explicit serialVersionUID
    // so serialized form stays stable across recompiles.
    private static final long serialVersionUID = 1L;

    @Id
    @GeneratedValue(generator = "generator")
    @GenericGenerator(name = "generator", strategy = "increment")
    @Column(name = "id")
    private Long id;
    @Column(name = "scheme_id")
    private Long schemeId;
    @Column(name = "std_dataset_id")
    private Long stdDatasetId;
    @Column(name = "std_dataset_code")
    private String stdDatasetCode;
    @Column(name = "std_dataset_name")
    private String stdDatasetName;
    @Column(name = "adapter_dataset_id")
    private String adapterDatasetId;
    @Column(name = "adapter_dataset_code")
    private String adapterDatasetCode;
    @Column(name = "adapter_dataset_name")
    private String adapterDatasetName;
    @Column(name="main_dataset_code")
    private String mainDatasetCode;
    @Column(name="main_dataset_name")
    private String mainDatasetName;
    @Column(name="is_clone")
    private Integer isClone; // 0 = not a clone (default), 1 = clone
    @Column(name="need_crawer")
    private Integer needCrawer; // 0 = no collection needed, 1 = collection needed (default)

    public Long getId() {
        return id;
    }
    public void setId(Long id) {
        this.id = id;
    }
    public Long getSchemeId() {
        return schemeId;
    }
    public void setSchemeId(Long schemeId) {
        this.schemeId = schemeId;
    }
    public Long getStdDatasetId() {
        return stdDatasetId;
    }
    public void setStdDatasetId(Long stdDatasetId) {
        this.stdDatasetId = stdDatasetId;
    }
    public String getStdDatasetCode() {
        return stdDatasetCode;
    }
    public void setStdDatasetCode(String stdDatasetCode) {
        this.stdDatasetCode = stdDatasetCode;
    }
    public String getStdDatasetName() {
        return stdDatasetName;
    }
    public void setStdDatasetName(String stdDatasetName) {
        this.stdDatasetName = stdDatasetName;
    }
    public String getAdapterDatasetId() {
        return adapterDatasetId;
    }
    public void setAdapterDatasetId(String adapterDatasetId) {
        this.adapterDatasetId = adapterDatasetId;
    }
    public String getAdapterDatasetCode() {
        return adapterDatasetCode;
    }
    public void setAdapterDatasetCode(String adapterDatasetCode) {
        this.adapterDatasetCode = adapterDatasetCode;
    }
    public String getAdapterDatasetName() {
        return adapterDatasetName;
    }
    public void setAdapterDatasetName(String adapterDatasetName) {
        this.adapterDatasetName = adapterDatasetName;
    }
    public String getMainDatasetCode() {
        return mainDatasetCode;
    }
    public void setMainDatasetCode(String mainDatasetCode) {
        this.mainDatasetCode = mainDatasetCode;
    }
    public String getMainDatasetName() {
        return mainDatasetName;
    }
    public void setMainDatasetName(String mainDatasetName) {
        this.mainDatasetName = mainDatasetName;
    }
    public Integer getIsClone() {
        return isClone;
    }
    public void setIsClone(Integer isClone) {
        this.isClone = isClone;
    }
    public Integer getNeedCrawer() {
        return needCrawer;
    }
    public void setNeedCrawer(Integer needCrawer) {
        this.needCrawer = needCrawer;
    }
}

+ 187 - 0
src/main/java/com/yihu/ehr/analyze/model/AdapterMetadataModel.java

@ -0,0 +1,187 @@
package com.yihu.ehr.analyze.model;
import org.hibernate.annotations.GenericGenerator;
import javax.persistence.Column;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import java.io.Serializable;
/**
 * Mapping model between a standard metadata element and its adapted
 * (site-local) metadata element.
 *
 * NOTE(review): the class carries JPA column annotations but no visible
 * {@code @Entity}/{@code @Table}; confirm whether it is mapped elsewhere or
 * the annotations are purely informational.
 *
 * Created by lingfeng on 2015/9/16.
 */
public class AdapterMetadataModel implements Serializable {

    // FIX: Serializable classes should declare an explicit serialVersionUID
    // so serialized form stays stable across recompiles.
    private static final long serialVersionUID = 1L;

    @Id
    @GeneratedValue(generator = "generator")
    @GenericGenerator(name = "generator", strategy = "increment")
    @Column(name = "id")
    private Long id;
    @Column(name = "scheme_id")
    private Long schemeId;
    @Column(name = "std_dataset_id")
    private Long stdDatasetId;
    @Column(name = "std_metadata_id")
    private Long stdMetadataId;
    @Column(name = "std_metadata_code")
    private String stdMetadataCode;
    @Column(name = "std_metadata_name")
    private String stdMetadataName;
    @Column(name = "std_dict_id")
    private Long stdDictId;
    @Column(name = "adapter_dataset_id")
    private Long adapterDatasetId;
    @Column(name = "adapter_metadata_id")
    private Long adapterMetadataId;
    @Column(name = "adapter_metadata_code")
    private String adapterMetadataCode;
    @Column(name = "adapter_metadata_name")
    private String adapterMetadataName;
    @Column(name = "adapter_data_type")
    private Integer adapterDataType;
    @Column(name = "adapter_dict_id")
    private Long adapterDictId;
    @Column(name = "adapter_info")
    private String adapterInfo;
    @Column(name = "belong_adapter_id")
    private Long belongAdapterId;
    @Column(name = "adapter_default")
    private String adapterDefault;
    @Column(name="need_crawer")
    private Integer needCrawer; // 0 = no collection needed, 1 = collection needed (default)

    public String getAdapterInfo() {
        return adapterInfo;
    }
    public void setAdapterInfo(String adapterInfo) {
        this.adapterInfo = adapterInfo;
    }
    public Long getId() {
        return id;
    }
    public void setId(Long id) {
        this.id = id;
    }
    public Long getSchemeId() {
        return schemeId;
    }
    public void setSchemeId(Long schemeId) {
        this.schemeId = schemeId;
    }
    public Long getStdDatasetId() {
        return stdDatasetId;
    }
    public void setStdDatasetId(Long stdDatasetId) {
        this.stdDatasetId = stdDatasetId;
    }
    public Long getStdMetadataId() {
        return stdMetadataId;
    }
    public void setStdMetadataId(Long stdMetadataId) {
        this.stdMetadataId = stdMetadataId;
    }
    public String getStdMetadataCode() {
        return stdMetadataCode;
    }
    public void setStdMetadataCode(String stdMetadataCode) {
        this.stdMetadataCode = stdMetadataCode;
    }
    public String getStdMetadataName() {
        return stdMetadataName;
    }
    public void setStdMetadataName(String stdMetadataName) {
        this.stdMetadataName = stdMetadataName;
    }
    public Long getStdDictId() {
        return stdDictId;
    }
    public void setStdDictId(Long stdDictId) {
        this.stdDictId = stdDictId;
    }
    public Long getAdapterDatasetId() {
        return adapterDatasetId;
    }
    public void setAdapterDatasetId(Long adapterDatasetId) {
        this.adapterDatasetId = adapterDatasetId;
    }
    public Long getAdapterMetadataId() {
        return adapterMetadataId;
    }
    public void setAdapterMetadataId(Long adapterMetadataId) {
        this.adapterMetadataId = adapterMetadataId;
    }
    public String getAdapterMetadataCode() {
        return adapterMetadataCode;
    }
    public void setAdapterMetadataCode(String adapterMetadataCode) {
        this.adapterMetadataCode = adapterMetadataCode;
    }
    public String getAdapterMetadataName() {
        return adapterMetadataName;
    }
    public void setAdapterMetadataName(String adapterMetadataName) {
        this.adapterMetadataName = adapterMetadataName;
    }
    public Integer getAdapterDataType() {
        return adapterDataType;
    }
    public void setAdapterDataType(Integer adapterDataType) {
        this.adapterDataType = adapterDataType;
    }
    public Long getAdapterDictId() {
        return adapterDictId;
    }
    public void setAdapterDictId(Long adapterDictId) {
        this.adapterDictId = adapterDictId;
    }
    public Long getBelongAdapterId() {
        return belongAdapterId;
    }
    public void setBelongAdapterId(Long belongAdapterId) {
        this.belongAdapterId = belongAdapterId;
    }
    public String getAdapterDefault() {
        return adapterDefault;
    }
    public void setAdapterDefault(String adapterDefault) {
        this.adapterDefault = adapterDefault;
    }
    public Integer getNeedCrawer() {
        return needCrawer;
    }
    public void setNeedCrawer(Integer needCrawer) {
        this.needCrawer = needCrawer;
    }
}

+ 334 - 0
src/main/java/com/yihu/ehr/analyze/model/ZipPackage.java

@ -0,0 +1,334 @@
package com.yihu.ehr.analyze.model;
import com.yihu.ehr.analyze.service.pack.PackageAnalyzeService;
import com.yihu.ehr.analyze.service.pack.StdPackageAnalyzer;
import com.yihu.ehr.fastdfs.config.FastDFSConfig;
import com.yihu.ehr.profile.EventType;
import com.yihu.ehr.profile.ProfileType;
import com.yihu.ehr.lang.SpringContext;
import com.yihu.ehr.model.packs.EsSimplePackage;
import com.yihu.ehr.profile.model.PackageDataSet;
import com.yihu.ehr.util.compress.Zipper;
import com.yihu.ehr.util.log.LogService;
import com.yihu.ehr.util.system.LocalTempPathUtil;
import net.lingala.zip4j.exception.ZipException;
import org.apache.commons.io.FileUtils;
import org.springframework.context.ApplicationContext;
import org.springframework.http.*;
import org.springframework.util.CollectionUtils;
import org.springframework.web.client.RestTemplate;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.*;
/**
 * Archive-package resolution model: downloads a zip archive from the file
 * server, unzips it, detects its profile type and — for standard packages —
 * delegates parsing to {@link StdPackageAnalyzer}. Also carries the QC
 * records produced during analysis.
 *
 * @author Airhead
 * @created 2018.01.16
 */
public class ZipPackage {
    public final static String StandardFolder = "standard";
    public final static String OriginFolder = "origin";
    public final static String DocumentsFile = "documents.json";
    public final static String LinkFile = "index";
    private static final RestTemplate REST_TEMPLATE;
    private static final HTTP_HEADERS_TYPE_PLACEHOLDER HTTP_HEADERS_PLACEHOLDER;
    static {
        REST_TEMPLATE = new RestTemplate();
        HTTP_HEADERS = new HttpHeaders();
        HTTP_HEADERS.setAccept(Arrays.asList(MediaType.APPLICATION_OCTET_STREAM));
    }
    // Organization code
    private String orgCode;
    // Organization name
    private String orgName;
    // Organization area/region
    private String orgArea;
    // CDA version
    private String cdaVersion;
    // Event number
    private String eventNo;
    // Event time
    private Date eventDate;
    // Event type
    private EventType eventType;
    // Patient ID
    private String patientId;
    // Department code
    private String deptCode;
    // Re-upload flag
    protected boolean reUploadFlg = false;
    // ICD10 diagnosis code list
    private Set<String> diagnosisCode = new HashSet<>() ;
    // ICD10 diagnosis name list
    private Set<String> diagnosisName = new HashSet<>();
    // The data package descriptor (source of remote path, id, password, receive date)
    private EsSimplePackage esSimplePackage;
    // Zip helper object
    private Zipper zipper = new Zipper();
    // Parsed data sets, keyed by data set code (sorted by key)
    private Map<String, PackageDataSet> dataSets = new TreeMap<>();
    // The downloaded zip archive file (null if download failed — see download())
    private File zipFile;
    // Directory holding the extracted archive contents
    private File packFile;
    //private Set<String> tableSet = new HashSet<>();
    // Data set QC record
    private Map<String, Object> qcDataSetRecord = new HashMap<>();
    // Data element QC records
    private List<Map<String, Object>> qcMetadataRecords = new ArrayList<>();
    public ZipPackage(EsSimplePackage esSimplePackage) {
        this.esSimplePackage = esSimplePackage;
    }
    public String getOrgCode() {
        return orgCode;
    }
    public void setOrgCode(String orgCode) {
        this.orgCode = orgCode;
    }
    public String getOrgName() {
        return orgName;
    }
    public void setOrgName(String orgName) {
        this.orgName = orgName;
    }
    public String getOrgArea() {
        return orgArea;
    }
    public void setOrgArea(String orgArea) {
        this.orgArea = orgArea;
    }
    public String getCdaVersion() {
        return cdaVersion;
    }
    public void setCdaVersion(String cdaVersion) {
        this.cdaVersion = cdaVersion;
    }
    public String getEventNo() {
        return eventNo;
    }
    public void setEventNo(String eventNo) {
        this.eventNo = eventNo;
    }
    public Date getEventDate() {
        return eventDate;
    }
    public void setEventDate(Date eventDate) {
        this.eventDate = eventDate;
    }
    public EventType getEventType() {
        return eventType;
    }
    public void setEventType(EventType eventType) {
        this.eventType = eventType;
    }
    public String getPatientId() {
        return patientId;
    }
    public void setPatientId(String patientId) {
        this.patientId = patientId;
    }
    public String getDeptCode() {
        return deptCode;
    }
    public void setDeptCode(String deptCode) {
        this.deptCode = deptCode;
    }
    public boolean isReUploadFlg() {
        return reUploadFlg;
    }
    public void setReUploadFlg(boolean reUploadFlg) {
        this.reUploadFlg = reUploadFlg;
    }
    public Set<String> getDiagnosisCode() {
        return diagnosisCode;
    }
    public void setDiagnosisCode(Set<String> diagnosisCode) {
        this.diagnosisCode = diagnosisCode;
    }
    public Set<String> getDiagnosisName() {
        return diagnosisName;
    }
    public void setDiagnosisName(Set<String> diagnosisName) {
        this.diagnosisName = diagnosisName;
    }
    public EsSimplePackage getEsSimplePackage() {
        return esSimplePackage;
    }
    public Map<String, PackageDataSet> getDataSets() {
        return dataSets;
    }
    public void insertDataSet(String dataSetCode, PackageDataSet dataSet) {
        this.dataSets.put(dataSetCode, dataSet);
    }
    public File getPackFile() {
        return packFile;
    }
    public Map<String, Object> getQcDataSetRecord() {
        return qcDataSetRecord;
    }
    public void setQcDataSetRecord(Map<String, Object> qcDataSetRecord) {
        this.qcDataSetRecord = qcDataSetRecord;
    }
    public List<Map<String, Object>> getQcMetadataRecords() {
        return qcMetadataRecords;
    }
    public void setQcMetadataRecords(List<Map<String, Object>> qcMetadataRecords) {
        this.qcMetadataRecords = qcMetadataRecords;
    }
    /**
     * Downloads the archive from the FastDFS public server to a local temp
     * file named "&lt;tempPath&gt;&lt;packageId&gt;.zip".
     *
     * NOTE(review): on a non-200 response zipFile is silently set to null;
     * unZip() then returns without extracting and resolve() would hit a
     * null packFile — confirm callers check for this before resolve().
     *
     * @throws IOException if writing the temp file fails
     */
    public void download() throws IOException {
        FastDFSConfig config = SpringContext.getService(FastDFSConfig.class);
        String remotePath = esSimplePackage.getRemote_path();
        // FastDFS remote paths use ':' as the group/path separator; convert to a URL path.
        String url = config.getPublicServer() + "/" + remotePath.replace(":", "/");
        HttpEntity<String> entity = new HttpEntity<>(HTTP_HEADERS);
        ResponseEntity<byte[]> response = REST_TEMPLATE.exchange(
                url,
                HttpMethod.GET, entity, byte[].class);
        if (response.getStatusCode() == HttpStatus.OK) {
            Path path = Files.write(Paths.get(LocalTempPathUtil.getTempPathWithUUIDSuffix() +  esSimplePackage.get_id() + ".zip"), response.getBody());
            zipFile = path.toFile();
        } else {
            zipFile = null;
        }
    }
    /**
     * Extracts the downloaded zip (using the package's password) into a
     * temp directory. No-op when the download produced no file.
     *
     * @throws ZipException if extraction yields no files
     * @throws Exception    propagated from the Zipper utility
     */
    public void unZip() throws Exception {
        if (zipFile == null) {
            return;
        }
        packFile = zipper.unzipFile(zipFile,  LocalTempPathUtil.getTempPathWithUUIDSuffix() + esSimplePackage.get_id(), esSimplePackage.getPwd());
        if (packFile == null || !packFile.isDirectory() || packFile.list().length == 0) {
            throw new ZipException("Invalid package file.");
        }
    }
    /**
     * Detects the package's profile type from the extracted directory layout
     * and, for standard packages only, runs the standard analyzer over it.
     *
     * @return the detected profile type
     * @throws Exception propagated from the analyzer
     */
    public ProfileType resolve() throws Exception {
        ProfileType profileType;
        List<String> directories = CollectionUtils.arrayToList(packFile.list());
        if (directories.contains(StandardFolder) && directories.contains(OriginFolder)) {
            profileType = ProfileType.Standard;
        } else if (directories.contains(DocumentsFile)) {
            profileType = ProfileType.File;
        } else if (directories.size() == 1 && directories.contains(LinkFile)) {
            profileType = ProfileType.Link;
        } else { // Data-set package (only .json data files under the zip root).
            profileType = ProfileType.Simple;
        }
        // Currently only standard archive packages are parsed.
        if (ProfileType.Standard != profileType) {
            return profileType;
        }
        ApplicationContext context = SpringContext.getApplicationContext();
        StdPackageAnalyzer packageAnalyzer = context.getBean(StdPackageAnalyzer.class);
        packageAnalyzer.analyze(this);
        return profileType;
    }
    /**
     * Best-effort cleanup of the downloaded zip and the extraction
     * directory; failures are logged as warnings, never propagated.
     */
    public void houseKeep() {
        try {
            FileUtils.deleteQuietly(zipFile);
            FileUtils.deleteQuietly(packFile);
        } catch (Exception e) {
            LogService.getLogger(PackageAnalyzeService.class).warn("House keep failed after package analyze: " + e.getMessage());
        }
    }
    // Retained legacy code (disabled): HBase persistence of parsed data sets.
    /**
     * Save standard archive data.
     *
     * @throws Exception
     */
   /* public void save() throws Exception {
        Set<String> keySet = dataSets.keySet();
        for (String key : keySet) {
            DataSetRecord dataSetRecord = dataSets.get(key);
            saveDataSet(dataSetRecord);
        }
    }*/
   /* private void saveDataSet(DataSetRecord dataSetRecord) throws Exception {
        String table = dataSetRecord.getCode();
        createTable(table);
        ApplicationContext context = SpringContext.getApplicationContext();
        HBaseDao hBaseDao = context.getBean(HBaseDao.class);
        String rowKeyPrefix = dataSetRecord.getRowKeyPrefix();
        TableBundle bundle = new TableBundle();
        if (dataSetRecord.isReUploadFlg()) {
            String legacyRowKeys[] = hBaseDao.findRowKeys(table, rowKeyPrefix, rowKeyPrefix.substring(0, rowKeyPrefix.length() - 1) + "z", "^" + rowKeyPrefix);
            if (legacyRowKeys != null && legacyRowKeys.length > 0) {
                bundle.addRows(legacyRowKeys);
                hBaseDao.delete(table, bundle);
            }
        }
        Map<String, DataElementRecord> records = dataSetRecord.getRecords();
        records.forEach((key, metaDataRecord) -> {
            String rowKey = dataSetRecord.genRowKey(key);
            String legacy = hBaseDao.get(table, rowKey);
            if (StringUtils.isNotEmpty(legacy)) {
                hBaseDao.delete(table, rowKey);
            }
            Map<String, String> dataGroup = metaDataRecord.getDataGroup();
            String receiveTime = DateUtil.toString(esSimplePackage.getReceive_date(), DateUtil.DEFAULT_YMDHMSDATE_FORMAT);
            dataGroup.put("receiveTime", receiveTime);  // add receive time
            bundle.clear();
            bundle.addValues(
                    rowKey,
                    DATA,
                    dataGroup
            );
            hBaseDao.save(table, bundle);
        });
    }*/
    /*private synchronized void createTable(String table) throws Exception {
        boolean created = tableSet.contains(table);
        if (created) {
            return;
        }
        ApplicationContext context = SpringContext.getApplicationContext();
        HBaseAdmin hBaseAdmin = context.getBean(HBaseAdmin.class);
        if (!hBaseAdmin.isTableExists(table)) {
            hBaseAdmin.createTable(table, DATA);
            tableSet.add(table);
        }
    }*/
}

+ 71 - 0
src/main/java/com/yihu/ehr/analyze/service/RedisService.java

@ -0,0 +1,71 @@
package com.yihu.ehr.analyze.service;
import com.yihu.ehr.redis.schema.*;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
/**
 * Redis access facade: delegates organization and standard-metadata lookups
 * to their respective key schemas.
 *
 * @author hzp add at 20170425
 */
@Service
public class RedisService {

    @Autowired
    private OrgKeySchema orgKeySchema;

    @Autowired
    private StdMetaDataKeySchema stdMetaDataKeySchema;

    /**
     * Looks up an organization's name in Redis.
     *
     * @param key organization key
     * @return the organization name
     */
    public String getOrgName(String key) {
        return orgKeySchema.get(key);
    }

    /**
     * Looks up an organization's area/region in Redis.
     *
     * @param key organization key
     * @return the organization area
     */
    public String getOrgArea(String key) {
        return orgKeySchema.getOrgArea(key);
    }

    /**
     * Looks up the declared type of a standard data element in Redis.
     *
     * @param key parts: standard version, data set code, element inner code
     * @return the element's type
     */
    public String getMetaDataType(String version, String dataSetCode, String innerCode) {
        return stdMetaDataKeySchema.metaDataType(version, dataSetCode, innerCode);
    }

    /**
     * Looks up the declared format of a standard data element in Redis.
     *
     * @param version     standard version
     * @param dataSetCode data set code
     * @param innerCode   element inner code
     * @return the element's format
     */
    public String getMetaDataFormat(String version, String dataSetCode, String innerCode) {
        return stdMetaDataKeySchema.metaDataFormat(version, dataSetCode, innerCode);
    }

    /**
     * Looks up the dictionary bound to a standard data element in Redis.
     *
     * @param version     standard version
     * @param dataSetCode data set code
     * @param innerCode   element inner code
     * @return the element's dictionary id
     */
    public String getMetaDataDict(String version, String dataSetCode, String innerCode) {
        return stdMetaDataKeySchema.metaDataDict(version, dataSetCode, innerCode);
    }

    /**
     * Looks up a dictionary entry's value in Redis.
     *
     * @param version   standard version
     * @param dictId    dictionary id
     * @param entryCode entry code
     * @return the entry's value
     */
    public String getDictEntryValue(String version, String dictId, String entryCode) {
        return stdMetaDataKeySchema.dictEntryValue(version, dictId, entryCode);
    }
}

+ 1355 - 0
src/main/java/com/yihu/ehr/analyze/service/dataQuality/DataQualityStatisticsService.java

@ -0,0 +1,1355 @@
package com.yihu.ehr.analyze.service.dataQuality;
import com.yihu.ehr.analyze.dao.DqPaltformReceiveWarningDao;
import com.yihu.ehr.analyze.service.pack.PackQcReportService;
import com.yihu.ehr.elasticsearch.ElasticSearchPool;
import com.yihu.ehr.elasticsearch.ElasticSearchUtil;
import com.yihu.ehr.entity.quality.DqPaltformReceiveWarning;
import com.yihu.ehr.query.BaseJpaService;
import com.yihu.ehr.util.datetime.DateUtil;
import com.yihu.ehr.util.rest.Envelop;
import org.apache.commons.collections.map.HashedMap;
import org.apache.commons.lang.StringUtils;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.bucket.terms.LongTerms;
import org.elasticsearch.search.aggregations.bucket.terms.StringTerms;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.hibernate.Query;
import org.hibernate.Session;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import java.sql.ResultSet;
import java.text.DecimalFormat;
import java.util.*;
/**
 * Data-quality statistics service: aggregates quality-control figures stored in
 * Elasticsearch (upload, json_archives, json_archives_qc, qc indices) together with
 * warning settings and organization data from the relational database.
 *
 * @author yeshijie on 2018/5/31.
 */
@Service
public class DataQualityStatisticsService extends BaseJpaService {
    private final static Logger logger = LoggerFactory.getLogger(DataQualityStatisticsService.class);
    // Helper for filter-string and SQL-style queries against Elasticsearch.
    @Autowired
    private ElasticSearchUtil elasticSearchUtil;
    // Provides raw transport clients for aggregation queries.
    @Autowired
    private ElasticSearchPool elasticSearchPool;
    @Autowired
    private WarningSettingService warningSettingService;
    @Autowired
    private DqPaltformReceiveWarningDao dqPaltformReceiveWarningDao;
    @Autowired
    private PackQcReportService packQcReportService;
    // Fallback org code used when an org has no dataset-warning rows of its own.
    @Value("${quality.orgCode}")
    private String defaultOrgCode;
    // Code and display name used for the cloud-wide summary row in result lists.
    @Value("${quality.cloud}")
    private String cloud;
    @Value("${quality.cloudName}")
    private String cloudName;
    /**
     * Loads all hospital organizations from the relational database.
     *
     * @return map of organization code to full organization name
     */
    public Map<String, Object> getOrgMap() {
        Map<String, Object> orgMap = new HashedMap();
        Session session = currentSession();
        Query hospitalQuery = session.createSQLQuery("SELECT org_code,full_name from organizations where org_type = 'Hospital' ");
        List<Object[]> rows = hospitalQuery.list();
        for (Object[] row : rows) {
            orgMap.put(row[0].toString(), row[1].toString());
        }
        return orgMap;
    }
    /**
     * Provincial-platform upload statistics per organization.
     *
     * Fix: removed the redundant {@code new String(...)} wrappers around plain string
     * concatenation, and replaced the raw keySet iterator with a for-each loop.
     *
     * @param start      analyze date (start), yyyy-MM-dd; defaults to today when blank
     * @param end        analyze date (end), yyyy-MM-dd; defaults to today when blank
     * @param toPlatForm target platform the data was uploaded to; optional
     * @return one statistics row per organization, with a cloud-wide summary row inserted first
     * @throws Exception if an Elasticsearch query fails
     */
    public List<Map<String, Object>> findUploadStatistics(String start, String end, String toPlatForm) throws Exception {
        List<Map<String, Object>> result = new ArrayList<>();
        String dateStr = DateUtil.toString(new Date());
        if (StringUtils.isBlank(start)) {
            start = dateStr;
        }
        if (StringUtils.isBlank(end)) {
            end = dateStr;
        }
        StringBuilder filter = new StringBuilder();
        filter.append("analyze_date>=").append(start).append(" 00:00:00").append(";");
        filter.append("analyze_date<=").append(end).append(" 23:59:59").append(";");
        // Total uploaded archives per organization.
        Map<String, Long> totalCount = elasticSearchUtil.countByGroup("upload", "record", filter.toString(), "org_code");
        // Outpatient volume (event_type=0).
        String newFilters = filter.toString() + "event_type=0;";
        Map<String, Long> outPatientCount = elasticSearchUtil.countByGroup("upload", "record", newFilters, "org_code");
        // Inpatient volume (event_type=1).
        newFilters = filter.toString() + "event_type=1;";
        Map<String, Long> inPatientCount = elasticSearchUtil.countByGroup("upload", "record", newFilters, "org_code");
        // Physical-exam volume (event_type=2).
        newFilters = filter.toString() + "event_type=2;";
        Map<String, Long> examCount = elasticSearchUtil.countByGroup("upload", "record", newFilters, "org_code");
        // Dataset row totals, optionally restricted to the target platform.
        if (!StringUtils.isEmpty(toPlatForm)) {
            newFilters = filter.toString() + "to_platform=" + toPlatForm + ";";
        } else {
            newFilters = filter.toString();
        }
        Map<String, Double> dataSetCount = elasticSearchUtil.sumByGroup("upload", "qc_dataset_detail", newFilters, "row", "org_code");
        // Upload-error counts key off create_date rather than analyze_date.
        filter = new StringBuilder();
        filter.append("create_date>=").append(start).append(" 00:00:00").append(";");
        filter.append("create_date<=").append(end).append(" 23:59:59").append(";");
        Map<String, Long> errorDataCount = elasticSearchUtil.countByGroup("upload", "info", filter.toString(), "org_code");
        // Organization code -> name lookup.
        Map<String, Object> orgMap = getOrgMap();
        // Assemble per-organization rows while accumulating cloud-wide totals.
        double totalAcrhives = 0;   // cloud total archives
        double totalDataset = 0;    // cloud total dataset rows
        double totalOutPatient = 0; // cloud total outpatient archives
        double totalInPatient = 0;  // cloud total inpatient archives
        double totalExam = 0;       // cloud total exam archives
        double totalError = 0;      // cloud total upload errors
        for (String orgCode : totalCount.keySet()) {
            Map<String, Object> map = new HashMap<>();
            map.put("orgCode", orgCode);
            map.put("orgName", orgMap.get(orgCode));
            map.put("total", getLongValue(totalCount.get(orgCode)));
            map.put("outPatient", getLongValue(outPatientCount.get(orgCode)));
            map.put("inPatient", getLongValue(inPatientCount.get(orgCode)));
            map.put("exam", getLongValue(examCount.get(orgCode)));
            map.put("dataset", getDoubleValue(dataSetCount.get(orgCode)));
            map.put("error", getLongValue(errorDataCount.get(orgCode)));
            result.add(map);
            totalAcrhives += getLongValue(totalCount.get(orgCode));
            totalOutPatient += getLongValue(outPatientCount.get(orgCode));
            totalInPatient += getLongValue(inPatientCount.get(orgCode));
            totalExam += getLongValue(examCount.get(orgCode));
            totalDataset += getDoubleValue(dataSetCount.get(orgCode));
            totalError += getLongValue(errorDataCount.get(orgCode));
        }
        // Cloud-wide summary row goes first.
        Map<String, Object> totalMap = new HashedMap();
        totalMap.put("orgCode", cloud);
        totalMap.put("orgName", cloudName);
        totalMap.put("total", totalAcrhives);       // total uploaded archives
        totalMap.put("outPatient", totalOutPatient);// uploaded outpatient archives
        totalMap.put("inPatient", totalInPatient);  // uploaded inpatient archives
        totalMap.put("exam", totalExam);            // uploaded exam archives
        totalMap.put("error", totalError);          // upload errors
        totalMap.put("dataset", totalDataset);      // uploaded dataset rows
        result.add(0, totalMap);
        return result;
    }
    /**
     * Per-organization data-quality statistics for a receive-date range, covering
     * hospital-reported figures, received archives/datasets, and resourcing results.
     *
     * Fixes: replaced {@code e.printStackTrace()} with SLF4J logging, replaced the
     * deprecated {@code new Double(...)} boxing, removed a large dead commented-out
     * block and the unused local {@code totalReceiveDataset}. Queries and result
     * structure are unchanged.
     *
     * @param start     receive date (start), yyyy-MM-dd; defaults to today when blank
     * @param end       receive date (end), yyyy-MM-dd; defaults to today when blank
     * @param eventType 0 outpatient, 1 inpatient, 2 physical exam; null for all
     * @return one row per organization, preceded by a cloud-wide summary row
     * @throws Exception if an Elasticsearch query fails
     */
    public List<Map<String, Object>> dataset(String start, String end, Integer eventType) throws Exception {
        List<Map<String, Object>> re = new ArrayList<>();
        String dateStr = DateUtil.toString(new Date());
        if (StringUtils.isBlank(start)) {
            start = dateStr;
        }
        if (StringUtils.isBlank(end)) {
            end = dateStr;
        }
        double totalHospitalAcrhives = 0;  // cloud total of hospital-reported archives
        double totalHospitalDataset = 0;   // cloud total of hospital datasets
        double totalReceiveArchives = 0;   // cloud total of received archives
        double totalReceiveException = 0;  // cloud total of receive-stage quality exceptions
        double totalResourceFailure = 0;   // cloud total of resourcing failures
        double totalResourceSuccess = 0;   // cloud total of resourcing successes
        double totalResourceException = 0; // cloud total of resourcing exceptions
        // Per-organization dataset counts from warning settings (type = 1).
        Session session = currentSession();
        Query query = session.createSQLQuery("SELECT org_code,COUNT(*) c from dq_dataset_warning WHERE type = 1 GROUP BY org_code");
        List<Object[]> datasetList = query.list();
        Map<String, Object> datasetMap = new HashedMap();
        datasetList.forEach(one -> {
            String orgCode = one[0].toString();
            Integer num = Integer.valueOf(one[1].toString());
            datasetMap.put(orgCode, num);
        });
        // Cloud-wide distinct dataset count, excluding the default org.
        query = session.createSQLQuery("SELECT count(DISTINCT code) c from dq_dataset_warning WHERE type = 1 and org_code != '" + defaultOrgCode + "'");
        List<Object> tmpList = query.list();
        totalHospitalDataset = Integer.valueOf(tmpList.get(0).toString());
        // Hospital code -> name lookup.
        Query query1 = session.createSQLQuery("SELECT org_code,full_name from organizations where org_type = 'Hospital' ");
        List<Object[]> orgList = query1.list();
        Map<String, Object> orgMap = new HashedMap();
        orgList.forEach(one -> {
            String orgCode = one[0].toString();
            String name = one[1].toString();
            orgMap.put(orgCode, name);
        });
        // Hospital-reported daily figures from qc/daily_report.
        String sql1 = "SELECT sum(HSI07_01_001) s1,sum(HSI07_01_002) s2,sum(HSI07_01_004) s3,sum(HSI07_01_012) s4,org_code FROM qc/daily_report where event_date>= '" + start + "T00:00:00' AND event_date <='" + end + "T23:59:59' group by org_code";
        ResultSet resultSet1 = elasticSearchUtil.findBySql(sql1);
        Map<String, Map<String, Object>> dataMap = new HashMap<>();
        try {
            while (resultSet1.next()) {
                Map<String, Object> dataMap1 = null;
                String orgCode = resultSet1.getString("org_code");
                double HSI07_01_001 = resultSet1.getDouble("s1");// total visits = outpatient + discharge + exam
                double HSI07_01_002 = resultSet1.getDouble("s2");// outpatient/emergency visits
                double HSI07_01_004 = resultSet1.getDouble("s3");// physical exams
                double HSI07_01_012 = resultSet1.getDouble("s4");// discharges
                if (dataMap.containsKey(orgCode)) {
                    dataMap1 = dataMap.get(orgCode);
                } else {
                    dataMap1 = initDataMap(datasetMap, orgMap.get(orgCode), orgCode);
                }
                if (eventType == null) {
                    dataMap1.put("hospitalArchives", HSI07_01_001);
                    totalHospitalAcrhives += HSI07_01_001;
                } else if (eventType == 1) {
                    dataMap1.put("hospitalArchives", HSI07_01_012);
                    totalHospitalAcrhives += HSI07_01_012;
                } else if (eventType == 2) {
                    dataMap1.put("hospitalArchives", HSI07_01_004);
                    totalHospitalAcrhives += HSI07_01_004;
                } else if (eventType == 0) {
                    dataMap1.put("hospitalArchives", HSI07_01_002);
                    totalHospitalAcrhives += HSI07_01_002;
                }
                dataMap.put(orgCode, dataMap1);
            }
        } catch (Exception e) {
            // NOTE(review): a message of exactly "Error" appears to be a benign marker
            // from the ES SQL helper (empty result) — confirm; only real failures are logged.
            if (!"Error".equals(e.getMessage())) {
                logger.error("dataset: daily_report query failed", e);
            }
        }
        // Ensure every organization that received data in the range appears in dataMap.
        String sqlOrg = "SELECT org_code FROM json_archives/info where receive_date>= '" + start + " 00:00:00' AND receive_date<='" + end + " 23:59:59' group by org_code ";
        try {
            ResultSet resultSetOrg = elasticSearchUtil.findBySql(sqlOrg);
            while (resultSetOrg.next()) {
                String orgCode = resultSetOrg.getString("org_code");
                if (!dataMap.containsKey(orgCode)) {
                    dataMap.put(orgCode, initDataMap(datasetMap, orgMap.get(orgCode), orgCode));
                }
            }
        } catch (Exception e) {
            if (!"Error".equals(e.getMessage())) {
                logger.error("dataset: organization listing query failed", e);
            }
        }
        int totalSize = 0;
        for (Map<String, Object> map : dataMap.values()) {
            String orgCode = map.get("orgCode").toString();
            // Received archive count.
            String sql2 = "SELECT count(*) c FROM json_archives/info where receive_date>= '" + start + " 00:00:00' AND receive_date<='" + end + " 23:59:59' AND pack_type=1 and org_code='" + orgCode + "' ";
            if (eventType != null) {
                sql2 += " and event_type = " + eventType;
            }
            try {
                ResultSet resultSet2 = elasticSearchUtil.findBySql(sql2);
                resultSet2.next();
                double total = resultSet2.getDouble("c");
                map.put("receiveArchives", total);
                totalReceiveArchives += total;
            } catch (Exception e) {
                if (!"Error".equals(e.getMessage())) {
                    logger.error("dataset: receiveArchives query failed", e);
                }
            }
            // Receive-stage quality exceptions (qc_step=1).
            String sql3 = "SELECT count(*) c FROM json_archives_qc/qc_metadata_info where receive_date>= '" + start + " 00:00:00' AND receive_date<='" + end + " 23:59:59' and qc_step=1 and org_code='" + orgCode + "' ";
            if (eventType != null) {
                sql3 += " and event_type = " + eventType;
            }
            try {
                ResultSet resultSet3 = elasticSearchUtil.findBySql(sql3);
                resultSet3.next();
                double total = resultSet3.getDouble("c");
                map.put("receiveException", total);
                totalReceiveException += total;
            } catch (Exception e) {
                if (!"Error".equals(e.getMessage())) {
                    logger.error("dataset: receiveException query failed", e);
                }
            }
            // Distinct datasets received.
            StringBuffer sql = new StringBuffer();
            sql.append("SELECT COUNT(DISTINCT dataset) as count from json_archives_qc/qc_dataset_detail ");
            sql.append("WHERE receive_date>='" + start + " 00:00:00' AND receive_date<='" + end + " 23:59:59'");
            sql.append(" AND org_code='" + orgCode + "'");
            if (eventType != null) {
                sql.append(" and event_type = " + eventType);
            }
            ResultSet resultset = elasticSearchUtil.findBySql(sql.toString());
            resultset.next();
            int size = (int) Double.parseDouble(resultset.getObject("count").toString());
            totalSize += size;
            map.put("receiveDataset", size);// number of distinct datasets
            // Resourcing results: archive_status 3 = parsed OK, anything else counts as failure.
            String sql52 = "SELECT count(*) c FROM json_archives/info where receive_date>= '" + start + " 00:00:00' AND receive_date<='" + end + " 23:59:59' AND pack_type=1 and archive_status<>3 and org_code='" + orgCode + "' ";
            String sql53 = "SELECT count(*) c FROM json_archives/info where receive_date>= '" + start + " 00:00:00' AND receive_date<='" + end + " 23:59:59' AND pack_type=1 and archive_status=3 and org_code='" + orgCode + "' ";
            try {
                ResultSet resultSet52 = elasticSearchUtil.findBySql(sql52);
                ResultSet resultSet53 = elasticSearchUtil.findBySql(sql53);
                resultSet52.next();
                resultSet53.next();
                double total2 = resultSet52.getDouble("c");// not parsed successfully (status <> 3)
                double total3 = resultSet53.getDouble("c");// parsed successfully (status = 3)
                map.put("resourceFailure", total2);
                totalResourceFailure += total2;
                map.put("resourceSuccess", total3);
                totalResourceSuccess += total3;
            } catch (Exception e) {
                if (!"Error".equals(e.getMessage())) {
                    logger.error("dataset: resourcing status query failed", e);
                }
            }
            // Resourcing-stage quality exceptions (qc_step=2).
            String sql6 = "SELECT count(*) c FROM json_archives_qc/qc_metadata_info where receive_date>= '" + start + " 00:00:00' AND receive_date<='" + end + " 23:59:59' AND qc_step=2 and org_code='" + orgCode + "'";
            try {
                ResultSet resultSet6 = elasticSearchUtil.findBySql(sql6);
                resultSet6.next();
                double total = resultSet6.getDouble("c");
                map.put("resourceException", total);
                totalResourceException += total;
            } catch (Exception e) {
                if (!"Error".equals(e.getMessage())) {
                    logger.error("dataset: resourceException query failed", e);
                }
            }
        }
        // Cloud-wide summary row goes first.
        Map<String, Object> totalMap = new HashedMap();
        totalMap.put("orgCode", cloud);
        totalMap.put("orgName", cloudName);
        totalMap.put("hospitalArchives", totalHospitalAcrhives);  // hospital-reported archives
        totalMap.put("hospitalDataset", totalHospitalDataset);    // hospital datasets
        totalMap.put("receiveArchives", totalReceiveArchives);    // received archives
        totalMap.put("receiveDataset", totalSize);                // received datasets
        totalMap.put("receiveException", totalReceiveException);  // receive exceptions
        totalMap.put("resourceSuccess", totalResourceSuccess);    // resourcing successes
        totalMap.put("resourceFailure", totalResourceFailure);    // resourcing failures
        totalMap.put("resourceException", totalResourceException);// resourcing exceptions
        re.add(totalMap);
        re.addAll(dataMap.values());
        return re;
    }
    /**
     * Counts received archive packages (pack_type=1) over a receive-date range.
     *
     * Fixes: query failures were silently discarded via a no-op {@code e.getMessage()} —
     * they are now logged; the duplicated query/count logic is extracted into a helper.
     *
     * @param orgInfoList    organization code/name pairs, e.g. [{orgCode:'xx',orgName:'xx'},{...},...];
     *                       a single entry matching the configured cloud code means "all organizations"
     * @param archiveStatus  parse status filter; may be empty/null for all statuses
     * @param eventDateStart receive date (start), format yyyy-MM-dd
     * @param eventDateEnd   receive date (end), format yyyy-MM-dd
     * @return total package count across the requested organizations
     */
    public Long packetCount(List<Map<String, String>> orgInfoList, String archiveStatus, String eventDateStart, String eventDateEnd) {
        Long count = 0L;
        if (orgInfoList.size() == 1 && cloud.equals(orgInfoList.get(0).get("orgCode"))) {
            // Cloud-wide request: a single query with no org_code restriction.
            String sql = "SELECT count(*) c FROM json_archives/info where receive_date>= '" + eventDateStart + "' AND receive_date<='" + eventDateEnd + "' AND pack_type=1 ";
            if (!StringUtils.isEmpty(archiveStatus)) {
                sql += " and archive_status =" + archiveStatus;
            }
            count += countPacketsBySql(sql);
        } else {
            // One query per organization.
            for (Map<String, String> orgInfo : orgInfoList) {
                String sql = "SELECT count(*) c FROM json_archives/info where receive_date>= '" + eventDateStart + "' AND receive_date<='" + eventDateEnd + "' AND pack_type=1 and org_code='" + orgInfo.get("orgCode") + "'";
                if (!StringUtils.isEmpty(archiveStatus)) {
                    sql += " and archive_status =" + archiveStatus;
                }
                count += countPacketsBySql(sql);
            }
        }
        return count;
    }

    /**
     * Runs a count query against Elasticsearch and sums the "c" column.
     * Failures are logged and contribute 0, preserving the original best-effort behavior.
     */
    private long countPacketsBySql(String sql) {
        long count = 0L;
        try {
            ResultSet resultSet = elasticSearchUtil.findBySql(sql);
            while (resultSet.next()) {
                count += (long) resultSet.getDouble("c");
            }
        } catch (Exception e) {
            logger.error("packetCount query failed: " + sql, e);
        }
        return count;
    }
    /**
     * Builds the archive-package receiving report for each organization: hospital-reported
     * numbers, collection totals (overall and per day), parse status breakdown, dataset
     * totals, and a parse-failure error-type aggregation.
     *
     * @param orgInfoList    organization code/name pairs, e.g. [{orgCode:'xx',orgName:'xx'},{...},...];
     *                       the cloud-wide summary record uses the configured cloud code as orgCode
     * @param eventDateStart event date (start), format yyyy-MM-dd
     * @param eventDateEnd   event date (end), format yyyy-MM-dd
     * @return one report map per organization entry
     * @throws Exception if an Elasticsearch query or JSON parse fails
     */
    public List<Map<String, Object>> orgPackReportData(List<Map<String, String>> orgInfoList, String eventDateStart, String eventDateEnd) throws Exception {
        List<Map<String, Object>> reportDataList = new ArrayList<>();
        // NOTE(review): 'filter' is built but never used below — candidate for removal.
        StringBuilder filter = new StringBuilder("event_date[" + eventDateStart + " TO " + eventDateEnd + "]");
        for (Map<String, String> orgInfo : orgInfoList) {
            String orgCode = orgInfo.get("orgCode");
            String orgName = orgInfo.get("orgName");
            Map<String, Object> reportData = new HashMap<>();
            reportData.put("orgName", orgName);
            reportData.put("orgCode", orgCode);
            // Hospital-reported numbers (sums of daily_report indicators).
            String reportedNumSql1 = "SELECT SUM(HSI07_01_001) total,SUM(HSI07_01_002) outpatientNum, SUM(HSI07_01_004) healthExaminationNum, SUM(HSI07_01_012) hospitalDischargeNum FROM qc/daily_report " +
                    "WHERE event_date BETWEEN '" + eventDateStart.replace(" ", "T") + "' AND '" + eventDateEnd.replace(" ", "T") + "'";
            if (!cloud.equals(orgInfo.get("orgCode"))) {
                reportedNumSql1 += " AND org_code='" + orgCode + "'";
            }
            String reportedNumFields1 = "total,outpatientNum,healthExaminationNum,hospitalDischargeNum";
            List<Map<String, Object>> reportedNumList1 = elasticSearchUtil.findBySql(Arrays.asList(reportedNumFields1.split(",")), reportedNumSql1);
            reportData.put("reportedNumList1", reportedNumList1);
            // Collection totals by event type (0 outpatient, 1 discharge, 2 exam).
            String reportedNumSql2 = "SELECT count(*) total FROM json_archives/info where receive_date BETWEEN '" + eventDateStart + "' AND '" + eventDateEnd + "' and pack_type=1 ";
            if (!cloud.equals(orgInfo.get("orgCode"))) {
                reportedNumSql2 += " AND org_code='" + orgCode + "'";
            }
            reportedNumSql2 += " group by event_type";
            String reportedNumFields2 = "event_type,total";
            List<Map<String, Object>> reportedNumList2 = elasticSearchUtil.findBySql(Arrays.asList(reportedNumFields2.split(",")), reportedNumSql2);
            Map<String, Object> collectionMap = new HashedMap();
            collectionMap.put("outpatientNum", 0);
            collectionMap.put("healthExaminationNum", 0);
            collectionMap.put("hospitalDischargeNum", 0);
            double totalCollection = 0;
            for (Map<String, Object> map : reportedNumList2) {
                double total = Double.valueOf(map.get("total").toString());
                String eventType = map.get("event_type").toString();
                totalCollection += total;
                if ("1".equals(eventType)) {
                    collectionMap.put("hospitalDischargeNum", total);
                } else if ("2".equals(eventType)) {
                    collectionMap.put("healthExaminationNum", total);
                } else if ("0".equals(eventType)) {
                    collectionMap.put("outpatientNum", total);
                }
            }
            collectionMap.put("total", totalCollection);
            reportData.put("collectionMap", collectionMap);
            // Collection detail: per-day counts by event type via a date_histogram.
            String reportedNumSql3 = "SELECT count(*) total FROM json_archives/info where receive_date BETWEEN '" + eventDateStart + "' AND '" + eventDateEnd + "' and pack_type=1 ";
            if (!cloud.equals(orgInfo.get("orgCode"))) {
                reportedNumSql3 += " AND org_code='" + orgCode + "'";
            }
            reportedNumSql3 += " group by event_type,date_histogram(field='receive_date','interval'='1d',format='yyyy-MM-dd',alias=receiveDate)";
            String reportedNumFields3 = "event_type,receiveDate,total";
            List<Map<String, Object>> reportedNumList3 = elasticSearchUtil.findBySql(Arrays.asList(reportedNumFields3.split(",")), reportedNumSql3);
            Map<String, Map<String, Object>> collectionMap2 = new HashedMap();
            reportedNumList3.forEach(map -> {
                String receiveDate = map.get("receiveDate").toString();
                String eventType = map.get("event_type").toString();
                double total = Double.valueOf(map.get("total").toString());
                Map<String, Object> tmpMap = null;
                if (collectionMap2.containsKey(receiveDate)) {
                    tmpMap = collectionMap2.get(receiveDate);
                } else {
                    tmpMap = new HashedMap();
                    tmpMap.put("outpatientNum", 0);
                    tmpMap.put("healthExaminationNum", 0);
                    tmpMap.put("hospitalDischargeNum", 0);
                    tmpMap.put("receiveDate", receiveDate);
                }
                if ("1".equals(eventType)) {
                    tmpMap.put("hospitalDischargeNum", total);
                } else if ("2".equals(eventType)) {
                    tmpMap.put("healthExaminationNum", total);
                } else if ("0".equals(eventType)) {
                    tmpMap.put("outpatientNum", total);
                }
                collectionMap2.put(receiveDate, tmpMap);
            });
            // Keep only days that actually collected something.
            List<Map<String, Object>> reportedList3 = new ArrayList<>();
            for (Map<String, Object> map : collectionMap2.values()) {
                double total = Double.valueOf(map.get("outpatientNum").toString()) +
                        Double.valueOf(map.get("healthExaminationNum").toString()) +
                        Double.valueOf(map.get("hospitalDischargeNum").toString());
                if (total > 0) {
                    map.put("total", total);
                    reportedList3.add(map);
                }
            }
            reportData.put("reportedNumList3", reportedList3);
            // Parse status breakdown: 0 not parsed, 1 parsing, 2 failed, 3 done.
            String reportedNumSql4 = "SELECT count(*) total FROM json_archives/info where receive_date BETWEEN '" + eventDateStart + "' AND '" + eventDateEnd + "' and pack_type=1 ";
            if (!cloud.equals(orgInfo.get("orgCode"))) {
                reportedNumSql4 += " AND org_code='" + orgCode + "'";
            }
            reportedNumSql4 += " group by archive_status";
            String reportedNumFields4 = "archive_status,total";
            List<Map<String, Object>> reportedNumList4 = elasticSearchUtil.findBySql(Arrays.asList(reportedNumFields4.split(",")), reportedNumSql4);
            Map<String, Object> archiveMap = new HashedMap();
            archiveMap.put("archive_status0", 0);// 0 not parsed, 1 parsing, 2 failed, 3 done
            archiveMap.put("archive_status1", 0);
            archiveMap.put("archive_status2", 0);
            archiveMap.put("archive_status3", 0);
            for (Map<String, Object> map : reportedNumList4) {
                double total = Double.valueOf(map.get("total").toString());
                String archiveStatus = map.get("archive_status").toString();
                if ("3".equals(archiveStatus)) {
                    archiveMap.put("archive_status3", total);
                } else if ("2".equals(archiveStatus)) {
                    archiveMap.put("archive_status2", total);
                } else if ("0".equals(archiveStatus)) {
                    archiveMap.put("archive_status0", total);
                } else if ("1".equals(archiveStatus)) {
                    archiveMap.put("archive_status1", total);
                }
            }
            reportData.put("archiveMap", archiveMap);
            // Dataset totals: parse 'details' JSON per qc record and expand via PackQcReportService.
            List<Map<String, Object>> res = new ArrayList<>();
            StringBuilder stringBuilder1 = new StringBuilder();
            stringBuilder1.append("qc_step=1;");
            stringBuilder1.append("receive_date>=" + eventDateStart + ";");
            stringBuilder1.append("receive_date<" + eventDateEnd + ";");
            if (!cloud.equals(orgInfo.get("orgCode"))) {
                stringBuilder1.append("org_code=" + orgCode);
            }
            List<Map<String, Object>> list = elasticSearchUtil.list("json_archives_qc", "qc_dataset_info", stringBuilder1.toString());
            for (Map<String, Object> map : list) {
                // NOTE(review): objectMapper is presumably inherited from BaseJpaService — confirm.
                List<Map<String, Object>> dataSets = objectMapper.readValue(map.get("details").toString(), List.class);
                for (Map<String, Object> dataSet : dataSets) {
                    for (Map.Entry<String, Object> entry : dataSet.entrySet()) {
                        packQcReportService.getDataSets(map.get("version") + "", entry.getKey(), (int) entry.getValue(), res);
                    }
                }
            }
            reportData.put("reportedNumList5", res);
            // Parse-failure analysis: terms aggregation on error_type for failed packages.
            StringBuilder stringBuilder2 = new StringBuilder();
            stringBuilder2.append("archive_status=2;");
            stringBuilder2.append("pack_type=1;");
            stringBuilder2.append("receive_date>=" + eventDateStart + ";");
            stringBuilder2.append("receive_date<" + eventDateEnd + ";");
            if (!cloud.equals(orgInfo.get("orgCode"))) {
                stringBuilder2.append("org_code=" + orgCode);
            }
            TransportClient transportClient = elasticSearchPool.getClient();
            List<Map<String, Object>> resultList = new ArrayList<>();
            SearchRequestBuilder builder = transportClient.prepareSearch("json_archives");
            builder.setTypes("info");
            builder.setSearchType(SearchType.DFS_QUERY_THEN_FETCH);
            builder.setQuery(elasticSearchUtil.getQueryBuilder(stringBuilder2.toString()));
            AggregationBuilder terms = AggregationBuilders.terms("error_type").field("error_type");
            builder.addAggregation(terms);
            // Aggregation-only request: no hits needed.
            builder.setSize(0);
            builder.setExplain(true);
            SearchResponse response = builder.get();
            LongTerms longTerms = response.getAggregations().get("error_type");
            for (Terms.Bucket item : longTerms.getBuckets()) {
                Map<String, Object> temp = new HashMap<>();
                temp.put("error_type", item.getKeyAsString());
                temp.put("error_count", item.getDocCount());
                resultList.add(temp);
            }
            reportData.put("reportedNumList6", resultList);
            reportDataList.add(reportData);
        }
        return reportDataList;
    }
    /**
     * Creates a fresh per-organization statistics map with every counter zeroed.
     * The hospital dataset count comes from the org's own warning settings, falling
     * back to the default org's value when the org has none.
     *
     * @param datasetMap per-organization dataset counts
     * @param orgName    organization display name
     * @param orgCode    organization code
     * @return initialized statistics map
     */
    private Map initDataMap(Map<String, Object> datasetMap, Object orgName, String orgCode) {
        Object hospitalDataset = datasetMap.containsKey(orgCode)
                ? datasetMap.get(orgCode)
                : datasetMap.get(defaultOrgCode);
        Map dataMap = new HashedMap();
        dataMap.put("orgCode", orgCode);                    // organization code
        dataMap.put("orgName", orgName);                    // organization name
        dataMap.put("hospitalArchives", 0);                 // hospital-reported archives
        dataMap.put("hospitalDataset", hospitalDataset);    // hospital datasets
        dataMap.put("receiveArchives", 0);                  // received archives
        dataMap.put("receiveDataset", 0);                   // received datasets
        dataMap.put("receiveException", 0);                 // receive exceptions
        dataMap.put("resourceSuccess", 0);                  // resourcing successes
        dataMap.put("resourceFailure", 0);                  // resourcing failures
        dataMap.put("resourceException", 0);                // resourcing exceptions
        return dataMap;
    }
    /**
     * Builds the per-organization rate map with all timeliness/completeness
     * counters initialized to 0.
     *
     * @param warningMap unused here; kept for signature compatibility with callers
     * @param orgName    organization display name
     * @param orgCode    organization code
     * @return a freshly initialized rate map for one organization
     */
    private Map initRateMap(Map<String, DqPaltformReceiveWarning> warningMap, Object orgName, String orgCode) {
        Map dataMap = new HashedMap();
        dataMap.put("orgCode", orgCode); // organization code
        dataMap.put("orgName", orgName); // organization name
        // Counters: in-time / complete counts per visit type, then totals per visit type.
        String[] counterKeys = {
                "outpatientInTime", "hospitalInTime", "peInTime",
                "outpatientIntegrity", "hospitalIntegrity", "peIntegrity",
                "totalVisit", "totalOutpatient", "totalPe", "totalHospital"
        };
        for (String key : counterKeys) {
            dataMap.put(key, 0);
        }
        return dataMap;
    }
    /**
     * Decides whether an archive arrived in time for its visit type, comparing
     * the actual delay against the organization's configured threshold
     * (falling back to the default org's thresholds).
     *
     * @param warningMap org code -> receive-warning thresholds
     * @param orgCode    organization code
     * @param eventType  visit type: "0" outpatient, "1" inpatient, "2" physical exam
     * @param delay      actual delay in days
     * @return true when the configured threshold is &gt;= the actual delay
     */
    private boolean isInTime(Map<String, DqPaltformReceiveWarning> warningMap, String orgCode, String eventType, long delay) {
        if (StringUtils.isBlank(eventType) || "null".equals(eventType)) {
            // Unknown visit type: cannot classify, treat as not in time.
            return false;
        }
        DqPaltformReceiveWarning warning = warningMap.containsKey(orgCode)
                ? warningMap.get(orgCode)
                : warningMap.get(defaultOrgCode);
        if (warning == null) {
            // FIX: previously a missing default-org threshold caused an NPE below.
            return false;
        }
        switch (eventType) {
            case "0":
                // 0 = outpatient
                return warning.getOutpatientInTime() >= delay;
            case "1":
                // 1 = inpatient
                return warning.getHospitalInTime() >= delay;
            case "2":
                // 2 = physical exam
                return warning.getPeInTime() >= delay;
            default:
                return false;
        }
    }
    /**
     * Daily counts of completely received archive packages (distinct event_no
     * per receive day), fake-paged in memory.
     *
     * @param pageIndex      1-based page number
     * @param pageSize       page size
     * @param orgCode        organization filter; the cloud code means platform-wide
     * @param eventDateStart visit date range start (yyyy-MM-dd)
     * @param eventDateEnd   visit date range end (yyyy-MM-dd)
     * @param eventType      currently unused; kept for API compatibility
     * @return map with "count" (total rows) and "list" (current page rows)
     */
    public Map<String, Object> receivedPacketNumList(Integer pageIndex, Integer pageSize, String orgCode, String eventDateStart, String eventDateEnd, Integer eventType) {
        List<Map<String, Object>> resultList = new ArrayList<>();
        Map<String, Object> re = new HashedMap();
        try {
            String filters;
            if (cloud.equals(orgCode)) {
                // Cloud code = whole platform, no org filter.
                filters = " event_date BETWEEN '" + eventDateStart + " 00:00:00' AND '" + eventDateEnd + " 23:59:59' and pack_type=1 ";
            } else {
                filters = "org_code='" + orgCode
                        + "' AND event_date BETWEEN '" + eventDateStart + " 00:00:00' AND '" + eventDateEnd + " 23:59:59' and pack_type=1 ";
            }
            // Timeliness case: distinct packages per receive day.
            StringBuilder sql = new StringBuilder("SELECT COUNT(DISTINCT event_no) packetCount FROM json_archives/info WHERE ");
            sql.append(filters);
            sql.append(" GROUP BY date_histogram(field='receive_date','interval'='1d',format='yyyy-MM-dd',alias=receiveDate)");
            List<String> fields = new ArrayList<>(2);
            fields.add("packetCount");
            fields.add("receiveDate");
            List<Map<String, Object>> searchList = elasticSearchUtil.findBySql(fields, sql.toString());
            int count = searchList.size();
            // Fake paging: slice the requested page out of the full result.
            int startLine = (pageIndex - 1) * pageSize;
            if (startLine < count) {
                resultList.addAll(searchList.subList(startLine, Math.min(startLine + pageSize, count)));
            }
            re.put("count", count);
        } catch (Exception e) {
            // FIX: the exception used to be silently discarded (bare e.getMessage() call).
            logger.error("receivedPacketNumList query failed: " + e.getMessage(), e);
            re.put("count", 0);
        }
        re.put("list", resultList);
        return re;
    }
    /**
     * Computes per-organization timeliness ("in time") and completeness
     * ("integrity") rates for outpatient / inpatient / physical-exam visits,
     * and prepends a platform-wide total row. Both dates default to today when
     * blank. Each returned map carries the raw counts, "x/y" fraction strings
     * and percentage strings formatted by {@link #calRate(double, double)}.
     *
     * @param start visit date range start (yyyy-MM-dd)
     * @param end   visit date range end (yyyy-MM-dd)
     * @return total row first, then one map per organization
     */
    public List<Map<String, Object>> inTimeAndIntegrityRate(String start, String end) throws Exception {
        List<Map<String, Object>> re = new ArrayList<>();
        String dateStr = DateUtil.toString(new Date());
        if (StringUtils.isBlank(start)) {
            start = dateStr;
        }
        if (StringUtils.isBlank(end)) {
            end = dateStr;
        }
        // Load the per-org timeliness thresholds (receive-warning settings).
        Session session = currentSession();
        List<DqPaltformReceiveWarning> warningList = dqPaltformReceiveWarningDao.findAll();
        Map<String, DqPaltformReceiveWarning> warningMap = new HashedMap(warningList.size());
        warningList.forEach(one -> {
            String orgCode = one.getOrgCode();
            warningMap.put(orgCode, one);
        });
        // Load hospital organizations (org code -> display name).
        Query query1 = session.createSQLQuery("SELECT org_code,full_name from organizations where org_type = 'Hospital' ");
        List<Object[]> orgList = query1.list();
        Map<String, Object> orgMap = new HashedMap(orgList.size());
        orgList.forEach(one -> {
            String orgCode = one[0].toString();
            String name = one[1].toString();
            orgMap.put(orgCode, name);
        });
        double totalVisitNum = 0; // total visits
        double totalOutpatientNum = 0; // total outpatient visits
        double totalPeNum = 0; // total physical exams
        double totalHospitalNum = 0; // total inpatient visits
        double totalOutpatientInTime = 0; // total outpatient in-time count
        double totalPeInTime = 0; // total physical-exam in-time count
        double totalHospitalInTime = 0; // total inpatient in-time count
        double totalOutpatientIntegrity = 0; // total outpatient complete count
        double totalHospitalIntegrity = 0; // total inpatient complete count
        double totalPeIntegrity = 0; // total physical-exam complete count
        // Aggregate daily-report totals per organization.
        // NOTE(review): dates are concatenated into the ES SQL string — assumed to be
        // plain yyyy-MM-dd values from trusted callers; confirm they are not user input.
        String sqlsum = "SELECT sum(HSI07_01_001) s1,sum(HSI07_01_002) s2,sum(HSI07_01_004) s3,sum(HSI07_01_012) s4,org_code FROM qc/daily_report where event_date>= '" + start + "T00:00:00' AND event_date <='" + end + "T23:59:59' group by org_code";
        Map<String, Map<String, Object>> dataMap = new HashMap<>();
        try {
            ResultSet resultSet1 = elasticSearchUtil.findBySql(sqlsum);
            while (resultSet1.next()) {
                Map<String, Object> dataMap1 = null;
                String orgCode = resultSet1.getString("org_code");
                double HSI07_01_001 = resultSet1.getDouble("s1"); // total treatments = outpatient/emergency + discharged + physical exams (admissions and js-prefixed codes currently unused)
                double HSI07_01_002 = resultSet1.getDouble("s2"); // outpatient/emergency
                double HSI07_01_004 = resultSet1.getDouble("s3"); // physical exams
                double HSI07_01_012 = resultSet1.getDouble("s4"); // discharged (inpatient)
                if (dataMap.containsKey(orgCode)) {
                    dataMap1 = dataMap.get(orgCode);
                } else {
                    dataMap1 = initRateMap(warningMap, orgMap.get(orgCode), orgCode);
                }
                dataMap1.put("totalVisit",HSI07_01_001);
                dataMap1.put("totalOutpatient",HSI07_01_002);
                dataMap1.put("totalPe",HSI07_01_004);
                dataMap1.put("totalHospital",HSI07_01_012);
                totalVisitNum += HSI07_01_001;
                totalOutpatientNum += HSI07_01_002;
                totalPeNum += HSI07_01_004;
                totalHospitalNum += HSI07_01_012;
                dataMap.put(orgCode,dataMap1);
            }
        } catch (Exception e) {
            // NOTE(review): "Error" looks like the ES-SQL "no results" message and is
            // deliberately ignored; other failures are only printed, never rethrown.
            if (!"Error".equals(e.getMessage())) {
                e.printStackTrace();
            }
        }
        // Add any org that uploaded archives in the range but had no daily-report row.
        String sqlOrg = "SELECT org_code FROM json_archives/info where event_date>= '" + start + " 00:00:00' AND event_date<='" + end + " 23:59:59' group by org_code ";
        try {
            ResultSet resultSetOrg = elasticSearchUtil.findBySql(sqlOrg);
            while (resultSetOrg.next()) {
                String orgCode = resultSetOrg.getString("org_code");
                Map<String, Object> map = dataMap.get(orgCode);
                if(map == null){
                    dataMap.put(orgCode,initRateMap(warningMap,orgMap.get(orgCode),orgCode));
                }
            }
        } catch (Exception e) {
            if (!"Error".equals(e.getMessage())) {
                e.printStackTrace();
            }
        }
        // Query per org code; per the original author, a direct GROUP BY gave wrong results.
        for (Map<String, Object> map : dataMap.values()) {
            String orgCode = map.get("orgCode").toString();
            // Complete (integrity) counts.
            getPatientCount(start, end, orgCode, map);
            // Timeliness thresholds: this org's settings, falling back to the default org's.
            DqPaltformReceiveWarning warning = null;
            if (warningMap.containsKey(orgCode)) {
                warning = warningMap.get(orgCode);
            } else {
                warning = warningMap.get(defaultOrgCode);
            }
            try {
                long starttime = System.currentTimeMillis();
                // Distinct in-time visits per type: delay must be within the configured threshold.
                String sql0 = "SELECT COUNT(DISTINCT event_no) FROM json_archives WHERE event_type=2 AND pack_type=1 AND org_code='" + orgCode + "' AND event_date " +
                        "BETWEEN '" + start + " 00:00:00' AND '" + end + " 23:59:59' and delay <=" + warning.getPeInTime();
                String sql1 = "SELECT COUNT(DISTINCT event_no) FROM json_archives WHERE event_type=1 AND pack_type=1 AND org_code='" + orgCode + "' AND event_date " +
                        "BETWEEN '" + start + " 00:00:00' AND '" + end + " 23:59:59' and delay <=" + warning.getHospitalInTime();
                String sql2 = "SELECT COUNT(DISTINCT event_no) FROM json_archives WHERE event_type=0 AND pack_type=1 AND org_code='" + orgCode + "' AND event_date " +
                        "BETWEEN '" + start + " 00:00:00' AND '" + end + " 23:59:59' and delay <=" + warning.getOutpatientInTime();
                ResultSet resultSet0 = elasticSearchUtil.findBySql(sql0);
                ResultSet resultSet1 = elasticSearchUtil.findBySql(sql1);
                ResultSet resultSet2 = elasticSearchUtil.findBySql(sql2);
                resultSet0.next();
                resultSet1.next();
                resultSet2.next();
                double outpatientInTime = new Double(resultSet2.getObject("COUNT(DISTINCT event_no)").toString()); // outpatient in-time count
                double hospitalInTime = new Double(resultSet1.getObject("COUNT(DISTINCT event_no)").toString()); // inpatient in-time count
                double peInTime = new Double(resultSet0.getObject("COUNT(DISTINCT event_no)").toString()); // physical-exam in-time count
                totalPeInTime += peInTime;
                totalHospitalInTime += hospitalInTime;
                totalOutpatientInTime += outpatientInTime;
                map.put("outpatientInTime", outpatientInTime); // outpatient in-time count
                map.put("hospitalInTime", hospitalInTime); // inpatient in-time count
                map.put("peInTime", peInTime); // physical-exam in-time count
                map.put("visitIntegrity", outpatientInTime + hospitalInTime + peInTime); // all visits (overwritten below for the total row)
                logger.info("平台就诊及时人数 去重复:" + (System.currentTimeMillis() - starttime) + "ms");
            } catch (Exception e) {
                if (!"Error".equals(e.getMessage())) {
                    e.printStackTrace();
                }
            }
        }
        // Build the platform-wide total row.
        Map<String, Object> totalMap = new HashedMap();
        totalMap.put("orgCode", cloud); // organization code
        totalMap.put("orgName", cloudName); // organization name
        totalMap.put("outpatientInTime", totalOutpatientInTime); // outpatient in-time count
        totalMap.put("hospitalInTime", totalHospitalInTime); // inpatient in-time count
        totalMap.put("peInTime", totalPeInTime); // physical-exam in-time count
        getPatientCount(start, end, null, totalMap);
        totalOutpatientIntegrity = Double.valueOf(totalMap.get("outpatientIntegrity").toString()); // outpatient complete count
        totalHospitalIntegrity = Double.valueOf(totalMap.get("hospitalIntegrity").toString()); // inpatient complete count
        totalPeIntegrity = Double.valueOf(totalMap.get("peIntegrity").toString()); // physical-exam complete count
        double totalVisitIntegrity = Double.valueOf(totalMap.get("visitIntegrity").toString()); // visit complete count
        totalMap.put("totalVisit", totalVisitNum); // total visits
        totalMap.put("totalOutpatient", totalOutpatientNum); // total outpatient visits
        totalMap.put("totalPe", totalPeNum); // total physical exams
        totalMap.put("totalHospital", totalHospitalNum); // total inpatient visits
        double totalVisitIntime = totalOutpatientInTime + totalHospitalInTime + totalPeInTime;
        totalMap.put("visitIntime", totalVisitIntime);
        totalMap.put("outpatientInTimeRate", calRate(totalOutpatientInTime, totalOutpatientNum));
        totalMap.put("outpatientInTimeRate1", totalOutpatientInTime + "/" + totalOutpatientNum);
        totalMap.put("hospitalInTimeRate", calRate(totalHospitalInTime, totalHospitalNum));
        totalMap.put("hospitalInTimeRate1", totalHospitalInTime + "/" + totalHospitalNum);
        totalMap.put("peInTimeRate", calRate(totalPeInTime, totalPeNum));
        totalMap.put("peInTimeRate1", totalPeInTime + "/" + totalPeNum);
        totalMap.put("visitIntimeRate", calRate(totalVisitIntime, totalVisitNum));
        totalMap.put("visitIntimeRate1", totalVisitIntime + "/" + totalVisitNum);
        totalMap.put("outpatientIntegrityRate", calRate(totalOutpatientIntegrity, totalOutpatientNum));
        totalMap.put("outpatientIntegrityRate1", totalOutpatientIntegrity + "/" + totalOutpatientNum);
        totalMap.put("hospitalIntegrityRate", calRate(totalHospitalIntegrity, totalHospitalNum));
        totalMap.put("hospitalIntegrityRate1", totalHospitalIntegrity + "/" + totalHospitalNum);
        totalMap.put("peIntegrityRate", calRate(totalPeIntegrity, totalPeNum));
        totalMap.put("peIntegrityRate1", totalPeIntegrity + "/" + totalPeNum);
        totalMap.put("visitIntegrityRate", calRate(totalVisitIntegrity, totalVisitNum));
        totalMap.put("visitIntegrityRate1", totalVisitIntegrity + "/" + totalVisitNum);
        re.add(totalMap);
        // Compute timeliness and completeness rates per organization.
        for (Map<String, Object> map:dataMap.values()){
            double outpatientInTime = Double.parseDouble(map.get("outpatientInTime").toString()); // outpatient in-time count
            double hospitalInTime = Double.parseDouble(map.get("hospitalInTime").toString()); // inpatient in-time count
            double peInTime = Double.parseDouble(map.get("peInTime").toString()); // physical-exam in-time count
            double outpatientIntegrity = Double.parseDouble(map.get("outpatientIntegrity").toString()); // outpatient complete count
            double hospitalIntegrity = Double.parseDouble(map.get("hospitalIntegrity").toString()); // inpatient complete count
            double peIntegrity = Double.parseDouble(map.get("peIntegrity").toString()); // physical-exam complete count
            double totalVisit = Double.parseDouble(map.get("totalVisit").toString()); // total visits
            double totalOutpatient = Double.parseDouble(map.get("totalOutpatient").toString()); // total outpatient visits
            double totalPe = Double.parseDouble(map.get("totalPe").toString()); // total physical exams
            double totalHospital = Double.parseDouble(map.get("totalHospital").toString()); // total inpatient visits
            double visitIntime = outpatientInTime + hospitalInTime + peInTime;
            double visitIntegrity = outpatientIntegrity + hospitalIntegrity + peIntegrity;
            map.put("visitIntime", visitIntime);
            map.put("visitIntegrity", visitIntegrity);
            map.put("outpatientInTimeRate", calRate(outpatientInTime, totalOutpatient));
            map.put("outpatientInTimeRate1", outpatientInTime + "/" + totalOutpatient);
            map.put("hospitalInTimeRate", calRate(hospitalInTime, totalHospital));
            map.put("hospitalInTimeRate1", hospitalInTime + "/" + totalHospital);
            map.put("peInTimeRate", calRate(peInTime, totalPe));
            map.put("peInTimeRate1", peInTime + "/" + totalPe);
            map.put("visitIntimeRate", calRate(visitIntime, totalVisit));
            map.put("visitIntimeRate1", visitIntime + "/" + totalVisit);
            map.put("outpatientIntegrityRate", calRate(outpatientIntegrity, totalOutpatient));
            map.put("outpatientIntegrityRate1", outpatientIntegrity + "/" + totalOutpatient);
            map.put("hospitalIntegrityRate", calRate(hospitalIntegrity, totalHospital));
            map.put("hospitalIntegrityRate1", hospitalIntegrity + "/" + totalHospital);
            map.put("peIntegrityRate", calRate(peIntegrity, totalPe));
            map.put("peIntegrityRate1", peIntegrity + "/" + totalPe);
            map.put("visitIntegrityRate", calRate(visitIntegrity, totalVisit));
            map.put("visitIntegrityRate1", visitIntegrity + "/" + totalVisit);
            re.add(map);
        }
        return re;
    }
    /**
     * Platform visit counts deduplicated by event_no ("complete" counts).
     * Writes peIntegrity / hospitalIntegrity / outpatientIntegrity /
     * visitIntegrity into the supplied map; on query failure the map is left
     * untouched and the error is logged.
     *
     * @param start   visit date range start (yyyy-MM-dd)
     * @param end     visit date range end (yyyy-MM-dd)
     * @param orgCode organization filter; null/empty means platform-wide
     * @param map     result map the four counts are written into
     */
    public void getPatientCount(String start, String end, String orgCode, Map<String, Object> map) throws Exception {
        try {
            long starttime = System.currentTimeMillis();
            // event_type: 2 physical exam, 1 inpatient, 0 outpatient; null = all visit types.
            String sql0 = buildDistinctEventNoSql(2, orgCode, start, end);
            String sql1 = buildDistinctEventNoSql(1, orgCode, start, end);
            String sql2 = buildDistinctEventNoSql(0, orgCode, start, end);
            String sql3 = buildDistinctEventNoSql(null, orgCode, start, end);
            ResultSet resultSet0 = elasticSearchUtil.findBySql(sql0);
            ResultSet resultSet1 = elasticSearchUtil.findBySql(sql1);
            ResultSet resultSet2 = elasticSearchUtil.findBySql(sql2);
            ResultSet resultSet3 = elasticSearchUtil.findBySql(sql3);
            resultSet0.next();
            resultSet1.next();
            resultSet2.next();
            resultSet3.next();
            map.put("peIntegrity", new Double(resultSet0.getObject("COUNT(DISTINCT event_no)").toString()).intValue()); // physical exam
            map.put("hospitalIntegrity", new Double(resultSet1.getObject("COUNT(DISTINCT event_no)").toString()).intValue()); // inpatient
            map.put("outpatientIntegrity", new Double(resultSet2.getObject("COUNT(DISTINCT event_no)").toString()).intValue()); // outpatient
            map.put("visitIntegrity", new Double(resultSet3.getObject("COUNT(DISTINCT event_no)").toString()).intValue()); // all visits
            logger.info("平台就诊人数 去重复:" + (System.currentTimeMillis() - starttime) + "ms");
        } catch (Exception e) {
            // "Error" is the ES-SQL no-result message and is deliberately ignored.
            if (!"Error".equals(e.getMessage())) {
                // FIX: log through the class logger instead of printStackTrace().
                logger.error("getPatientCount failed: " + e.getMessage(), e);
            }
        }
    }
    /**
     * Builds a COUNT(DISTINCT event_no) query over json_archives for one visit type.
     * Replaces four near-identical hand-concatenated SQL strings.
     * NOTE(review): values are still concatenated into the query — assumed to come
     * from trusted internal callers; confirm orgCode is never user-controlled.
     *
     * @param eventType visit type filter, or null for all types
     * @param orgCode   organization filter, or null/empty for platform-wide
     * @param start     range start date (yyyy-MM-dd)
     * @param end       range end date (yyyy-MM-dd)
     */
    private String buildDistinctEventNoSql(Integer eventType, String orgCode, String start, String end) {
        StringBuilder sql = new StringBuilder("SELECT COUNT(DISTINCT event_no) FROM json_archives WHERE ");
        if (eventType != null) {
            sql.append("event_type=").append(eventType).append(" AND ");
        }
        sql.append("pack_type=1");
        if (StringUtils.isNotEmpty(orgCode)) {
            sql.append(" AND org_code='").append(orgCode).append("'");
        }
        sql.append(" AND event_date BETWEEN '").append(start).append(" 00:00:00' AND '").append(end).append(" 23:59:59'");
        return sql.toString();
    }
    /**
     * Formats a timeliness/completeness rate as a percentage string.
     *
     * @param molecular   numerator
     * @param denominator denominator
     * @return "0.00%" when the numerator is 0; "100.00%" when only the
     *         denominator is 0; otherwise molecular/denominator as "xx.xx%"
     */
    public String calRate(double molecular, double denominator) {
        if (molecular == 0) {
            return "0.00%";
        }
        if (denominator == 0) {
            return "100.00%";
        }
        return new DecimalFormat("0.00%").format(molecular / denominator);
    }
    /**
     * Provincial platform upload — archive statistics grouped by day.
     * Only successfully uploaded records (upload_status=1) are counted; a
     * platform "total" row is inserted at index 0. Dates default to today when
     * blank.
     *
     * @param startDate analyze date range start (yyyy-MM-dd)
     * @param endDate   analyze date range end (yyyy-MM-dd)
     * @param orgCode   organization filter; empty, "null" or the cloud code means all orgs
     * @return one row per day with inpatient/oupatient/physical counts, total row first
     * @throws Exception on elasticsearch failures
     */
    public List<Map<String, Object>> getUploadSuccessList(String startDate, String endDate, String orgCode) throws Exception {
        String dateStr = DateUtil.toString(new Date());
        if (StringUtils.isBlank(startDate)) {
            startDate = dateStr;
        }
        if (StringUtils.isBlank(endDate)) {
            endDate = dateStr;
        }
        // Filter expression consumed by elasticSearchUtil.getQueryBuilder.
        StringBuilder stringBuilder = new StringBuilder();
        stringBuilder.append("upload_status:1;");
        stringBuilder.append("analyze_date>=" + startDate + " 00:00:00;");
        stringBuilder.append("analyze_date<=" + endDate + " 23:59:59;");
        if (StringUtils.isNotEmpty(orgCode) && !"null".equals(orgCode) && !cloud.equals(orgCode)) {
            stringBuilder.append("org_code=" + orgCode);
        }
        // NOTE(review): the pooled client is never explicitly returned/closed in this
        // method — confirm elasticSearchPool manages client lifecycle itself.
        TransportClient transportClient = elasticSearchPool.getClient();
        List<Map<String, Object>> resultList = new ArrayList<>();
        SearchRequestBuilder builder = transportClient.prepareSearch("upload");
        builder.setTypes("record");
        builder.setSearchType(SearchType.DFS_QUERY_THEN_FETCH);
        builder.setQuery(elasticSearchUtil.getQueryBuilder(stringBuilder.toString()));
        // Bucket by day on analyze_date, with a per-day sub-aggregation on event_type.
        DateHistogramBuilder dateHistogramBuilder = AggregationBuilders.dateHistogram("date_count");
        dateHistogramBuilder.field("analyze_date");
        dateHistogramBuilder.interval(DateHistogramInterval.DAY);
        dateHistogramBuilder.format("yyyy-MM-dd");
        dateHistogramBuilder.minDocCount(0);
        AggregationBuilder terms = AggregationBuilders.terms("event_type").field("event_type");
        dateHistogramBuilder.subAggregation(terms);
        builder.addAggregation(dateHistogramBuilder);
        builder.setSize(0); // aggregations only, no hits
        builder.setExplain(true);
        SearchResponse response = builder.get();
        Histogram histogram = response.getAggregations().get("date_count");
        double inpatient_total = 0.0;
        double oupatient_total = 0.0;
        double physical_total = 0.0;
        for (Histogram.Bucket item : histogram.getBuckets()) {
            Map<String, Object> temp = new HashMap<>();
            // Skip empty buckets produced by minDocCount(0).
            if (item.getDocCount() > 0 && !"".equals(item.getKeyAsString())) {
                temp.put("date", item.getKeyAsString());
                StringTerms stringTerms = item.getAggregations().get("event_type");
                double inpatient = 0.0;
                double oupatient = 0.0;
                double physical = 0.0;
                // event_type: 0 outpatient, 1 inpatient, 2 physical exam.
                for (Terms.Bucket item1 : stringTerms.getBuckets()) {
                    if ("0".equals(item1.getKeyAsString())) {
                        oupatient = item1.getDocCount();
                        oupatient_total += item1.getDocCount();
                    } else if ("1".equals(item1.getKeyAsString())) {
                        inpatient = item1.getDocCount();
                        inpatient_total += item1.getDocCount();
                    } else if ("2".equals(item1.getKeyAsString())) {
                        physical = item1.getDocCount();
                        physical_total += item1.getDocCount();
                    }
                }
                temp.put("inpatient", inpatient);
                temp.put("oupatient", oupatient);
                temp.put("physical", physical);
                temp.put("total", inpatient + oupatient + physical);
                resultList.add(temp);
            }
        }
        // Prepend the grand-total row.
        Map<String, Object> total = new HashMap<>();
        total.put("date", "总计");
        total.put("inpatient", inpatient_total);
        total.put("oupatient", oupatient_total);
        total.put("physical", physical_total);
        total.put("total", inpatient_total + oupatient_total + physical_total);
        resultList.add(0, total);
        return resultList;
    }
    /**
     * Paged variant of {@link #getUploadSuccessList(String, String, String)}:
     * fetches the full per-day archive statistics and fake-pages them in memory.
     *
     * @param startDate analyze date range start (yyyy-MM-dd)
     * @param endDate   analyze date range end (yyyy-MM-dd)
     * @param orgCode   organization filter; empty, "null" or the cloud code means all orgs
     * @param size      page size
     * @param page      1-based page number
     * @return an Envelop carrying the requested page and paging metadata
     * @throws Exception on elasticsearch failures
     */
    public Envelop getUploadSuccessListPage(String startDate, String endDate, String orgCode,int size,int page) throws Exception {
        return getPageEnvelop(page, size, getUploadSuccessList(startDate, endDate, orgCode));
    }
    /**
     * Wraps an in-memory list into a paged Envelop (fake paging).
     *
     * @param page      1-based page number
     * @param size      page size (must be &gt; 0)
     * @param totalList full result list to page over
     * @return envelop with total count, total pages, and the requested page slice
     */
    private Envelop getPageEnvelop(int page, int size, List totalList){
        Envelop envelop = new Envelop();
        int totalCount = totalList.size();
        envelop.setTotalCount(totalCount);
        // FIX: total pages was computed with '%' instead of '/', yielding wrong
        // values (e.g. 10 items / size 3 gave 2 pages instead of 4).
        int totalPage = totalCount % size == 0 ? totalCount / size : totalCount / size + 1;
        envelop.setTotalPage(totalPage);
        envelop.setCurrPage(page);
        envelop.setPageSize(size);
        List<Map<String, Object>> pagedList = getPageList(page, size, totalList);
        envelop.setSuccessFlg(true);
        envelop.setDetailModelList(pagedList);
        return envelop;
    }
    /**
     * Returns the slice of {@code data} for the given 1-based page, or an
     * empty list when the page starts beyond the end of the data.
     *
     * @param pageNum  1-based page number
     * @param pageSize page size
     * @param data     full list to slice
     * @return sublist view of the requested page (possibly shorter than pageSize)
     */
    private List getPageList(int pageNum,int pageSize,List data) {
        int fromIndex = (pageNum - 1) * pageSize;
        if (fromIndex >= data.size()) {
            return Collections.emptyList();
        }
        int toIndex = Math.min(pageNum * pageSize, data.size());
        return data.subList(fromIndex, toIndex);
    }
    /**
     * Provincial platform upload — dataset statistics (record counts and row
     * totals) grouped by dataset, with a "total" row inserted at index 0.
     * Dates default to today when blank.
     *
     * @param startDate analyze date range start (yyyy-MM-dd)
     * @param endDate   analyze date range end (yyyy-MM-dd)
     * @param orgCode   organization filter; empty, "null" or the cloud code means all orgs
     * @return per-dataset rows plus a leading total row
     * @throws Exception on elasticsearch failures
     */
    public List<Map<String, Object>> getUploadDataSetList(String startDate, String endDate, String orgCode) throws Exception {
        String dateStr = DateUtil.toString(new Date());
        if (StringUtils.isBlank(startDate)) {
            startDate = dateStr;
        }
        if (StringUtils.isBlank(endDate)) {
            endDate = dateStr;
        }
        StringBuffer sql = new StringBuffer();
        sql.append("SELECT SUM(count) as count ,SUM(row) as row, dataset_name, dataset ");
        sql.append("FROM upload/qc_dataset_detail");
        sql.append(" WHERE analyze_date>='" + startDate + " 00:00:00' and analyze_date<='" + endDate + " 23:59:59'");
        if (StringUtils.isNotEmpty(orgCode) && !"null".equals(orgCode) && !cloud.equals(orgCode)) {
            sql.append(" and org_code='" + orgCode + "'");
        }
        // FIX: the leading space was missing, producing "...23:59:59'GROUP BY ..."
        // (invalid SQL) when the clause was appended directly after the date/org filter.
        sql.append(" GROUP BY dataset_name,dataset");
        List<String> field = new ArrayList<>();
        field.add("count");
        field.add("row");
        field.add("dataset_name");
        field.add("dataset");
        List<Map<String, Object>> list = elasticSearchUtil.findBySql(field, sql.toString());
        // Sum rows/counts across all datasets for the leading total row.
        Map<String, Object> totalMap = new HashMap<>();
        totalMap.put("dataset", "总计");
        totalMap.put("dataset_name", "-");
        double rowTotal = 0;
        double countTotal = 0;
        for (Map<String, Object> map : list) {
            map.put("name", map.get("dataset_name"));
            rowTotal += Double.valueOf(map.get("row").toString());
            countTotal += Double.valueOf(map.get("count").toString());
        }
        totalMap.put("row", rowTotal);
        totalMap.put("count", countTotal);
        list.add(0, totalMap);
        return list;
    }
    /**
     * Paged variant of {@link #getUploadDataSetList(String, String, String)}:
     * fetches the full dataset statistics and fake-pages them in memory.
     *
     * @param startDate analyze date range start (yyyy-MM-dd)
     * @param endDate   analyze date range end (yyyy-MM-dd)
     * @param orgCode   organization filter; empty, "null" or the cloud code means all orgs
     * @param size      page size
     * @param page      1-based page number
     * @return an Envelop carrying the requested page and paging metadata
     * @throws Exception on elasticsearch failures
     */
    public Envelop getUploadDataSetListPage(String startDate, String endDate, String orgCode,int size,int page) throws Exception {
        return getPageEnvelop(page, size, getUploadDataSetList(startDate, endDate, orgCode));
    }
    /**
     * Provincial platform upload — failed-archive statistics grouped by
     * failure status. upload_status codes: 1 success, 0 outpatient dataset
     * missing, -1 inpatient dataset missing, -2 physical-exam dataset missing,
     * -3 invalid event type. A "total" row is inserted at index 0; dates
     * default to today when blank.
     *
     * @param startDate analyze date range start (yyyy-MM-dd)
     * @param endDate   analyze date range end (yyyy-MM-dd)
     * @param orgCode   organization filter; empty, "null" or the cloud code means all orgs
     * @return failure rows (code, display name, count) with the total row first
     * @throws Exception declared for interface compatibility; query errors are logged
     */
    public List<Map<String, Object>> getUploadErrorList(String startDate, String endDate, String orgCode) throws Exception {
        String dateStr = DateUtil.toString(new Date());
        if (StringUtils.isBlank(startDate)) {
            startDate = dateStr;
        }
        if (StringUtils.isBlank(endDate)) {
            endDate = dateStr;
        }
        List<Map<String, Object>> resultList = new ArrayList<>();
        StringBuilder stringBuilder = new StringBuilder("SELECT count(*) c,upload_status FROM upload/record");
        stringBuilder.append(" where analyze_date>= '")
                .append(startDate)
                .append(" 00:00:00' AND analyze_date<='")
                .append(endDate)
                .append(" 23:59:59' ")
                .append(" and upload_status<>1 ");
        if (StringUtils.isNotEmpty(orgCode) && !"null".equals(orgCode) && !cloud.equals(orgCode)) {
            stringBuilder.append(" and org_code='").append(orgCode).append("'");
        }
        stringBuilder.append(" group by upload_status");
        double totalCount = 0;
        try {
            ResultSet resultSet1 = elasticSearchUtil.findBySql(stringBuilder.toString());
            while (resultSet1.next()) {
                Map<String, Object> statusMap = new HashMap<>();
                String status = resultSet1.getString("upload_status");
                double count = resultSet1.getDouble("c");
                // Map the status code to its display name; unknown codes keep their
                // raw code and count but get no display name (previous behavior).
                if (status != null) {
                    switch (status) {
                        case "0":
                            statusMap.put("name", "门诊数据集缺失");
                            break;
                        case "-1":
                            statusMap.put("name", "住院数据集缺失");
                            break;
                        case "-2":
                            statusMap.put("name", "体检数据集缺失");
                            break;
                        case "-3":
                            statusMap.put("name", "事件类型有误");
                            break;
                        default:
                            break;
                    }
                }
                totalCount += count;
                statusMap.put("code", status);
                statusMap.put("count", count);
                resultList.add(statusMap);
            }
        } catch (Exception e) {
            // FIX: the exception used to be silently discarded (bare e.getMessage() call).
            logger.error("getUploadErrorList query failed: " + e.getMessage(), e);
        }
        Map<String, Object> totalMap = new HashMap<>();
        totalMap.put("name", "总计");
        totalMap.put("code", "-");
        totalMap.put("count", totalCount);
        resultList.add(0, totalMap);
        return resultList;
    }
    /**
     * Paged variant of {@link #getUploadErrorList(String, String, String)}:
     * fetches all failure rows and fake-pages them in memory.
     *
     * @param startDate analyze date range start (yyyy-MM-dd)
     * @param endDate   analyze date range end (yyyy-MM-dd)
     * @param orgCode   organization filter; empty, "null" or the cloud code means all orgs
     * @param size      page size
     * @param page      1-based page number
     * @return an Envelop carrying the requested page and paging metadata
     * @throws Exception declared for interface compatibility
     */
    public Envelop getUploadErrorListPage(String startDate, String endDate, String orgCode, int size, int page) throws Exception {
        return getPageEnvelop(page, size, getUploadErrorList(startDate, endDate, orgCode));
    }
    /**
     * Null-safe unboxing for Double values.
     *
     * @param object possibly-null boxed value
     * @return the primitive value, or 0 when the input is null
     */
    public double getDoubleValue(Double object) {
        return object == null ? 0 : object;
    }
    /**
     * Null-safe default for Long values.
     *
     * @param object possibly-null boxed value
     * @return the original value, or 0L when the input is null
     */
    public Long getLongValue(Long object) {
        if (object == null) {
            // FIX: use uppercase 'L' — lowercase 'l' is easily misread as the digit '1'.
            return 0L;
        } else {
            return object;
        }
    }
}

+ 53 - 0
src/main/java/com/yihu/ehr/analyze/service/dataQuality/DqDatasetWarningService.java

@ -0,0 +1,53 @@
package com.yihu.ehr.analyze.service.dataQuality;
import com.yihu.ehr.analyze.dao.DqDatasetWarningDao;
import com.yihu.ehr.entity.quality.DqDatasetWarning;
import com.yihu.ehr.query.BaseJpaService;
import com.yihu.ehr.redis.client.RedisClient;
import org.apache.commons.lang.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import java.util.ArrayList;
import java.util.List;
/**
 * 数据质量-数据集预警值
 * @author yeshijie on 2018/5/28.
 */
@Service
public class DqDatasetWarningService extends BaseJpaService<DqDatasetWarning, DqDatasetWarningDao> {

    /**
     * Redis key prefix for standard data-set metadata; the full key is
     * std_data_set_&lt;version&gt;:&lt;code&gt;:name. Renamed to a proper
     * static-final constant (the original was a mutable field named in UpperCamelCase).
     */
    private static final String DATA_SET_TABLE_PREFIX = "std_data_set_";

    @Value("${quality.version}")
    private String defaultQualityVersion;
    @Autowired
    private RedisClient redisClient;
    @Autowired
    private DqDatasetWarningDao dqDatasetWarningDao;

    /**
     * Looks up the data-set warning settings configured for one organization and warning type.
     *
     * @param orgCode organization code
     * @param type    warning type discriminator (as stored on the entity)
     * @return matching warning rows
     */
    public List<DqDatasetWarning> findByOrgCodeAndType(String orgCode, String type) {
        return dqDatasetWarningDao.findByOrgCodeAndType(orgCode, type);
    }

    /**
     * Builds (but does not persist) warning entities from data-set codes imported via Excel.
     * The display name for each code is resolved from Redis; codes with no resolvable
     * name are silently skipped — deliberate best-effort behavior.
     *
     * @param codeList data-set codes read from the uploaded Excel file
     * @return resolved, unsaved warning entities (never null)
     */
    public List<DqDatasetWarning> importDatasetExcel(List<String> codeList) {
        List<DqDatasetWarning> warningList = new ArrayList<>(codeList.size());
        codeList.forEach(code -> {
            String key = DATA_SET_TABLE_PREFIX + defaultQualityVersion + ":" + code + ":name";
            String value = redisClient.get(key);
            if (StringUtils.isNotBlank(value)) {
                DqDatasetWarning warning = new DqDatasetWarning();
                warning.setCode(code);
                warning.setName(value);
                warningList.add(warning);
            }
        });
        return warningList;
    }
}

+ 98 - 0
src/main/java/com/yihu/ehr/analyze/service/dataQuality/DqPaltformReceiveWarningService.java

@ -0,0 +1,98 @@
package com.yihu.ehr.analyze.service.dataQuality;
import com.yihu.ehr.analyze.dao.DqDatasetWarningDao;
import com.yihu.ehr.analyze.dao.DqPaltformReceiveWarningDao;
import com.yihu.ehr.entity.quality.DqPaltformReceiveWarning;
import com.yihu.ehr.query.BaseJpaService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.util.Date;
/**
 * 平台接收预警值
 * @author yeshijie on 2018/5/28.
 */
@Service
@Transactional
public class DqPaltformReceiveWarningService extends BaseJpaService<DqPaltformReceiveWarning, DqPaltformReceiveWarningDao> {

    /** Data-set warning type discriminator for the "platform receive" dimension. */
    private static final String DATASET_TYPE_RECEIVE = "1";

    @Autowired
    private DqPaltformReceiveWarningDao dqPaltformReceiveWarningDao;
    @Autowired
    private DqDatasetWarningDao dqDatasetWarningDao;

    public DqPaltformReceiveWarning findById(Long id) {
        return dqPaltformReceiveWarningDao.findOne(id);
    }

    public DqPaltformReceiveWarning findByOrgCode(String orgCode) {
        return dqPaltformReceiveWarningDao.findByOrgCode(orgCode);
    }

    /**
     * Creates a platform-receive warning setting together with its attached
     * data-set warnings (tagged with the org code and receive type).
     *
     * @param warning setting to persist; create/update timestamps are set here
     * @return the persisted setting
     */
    public DqPaltformReceiveWarning paltformReceiveWarningAdd(DqPaltformReceiveWarning warning) {
        Date now = new Date();
        warning.setCreateTime(now);
        warning.setUpdateTime(now);
        save(warning);
        saveDatasetWarnings(warning);
        return warning;
    }

    /**
     * Updates a platform-receive warning setting. The existing data-set warnings
     * for the organization are replaced wholesale with the incoming list.
     *
     * @param warning incoming values; must carry the id of an existing row
     * @return the updated, persisted setting
     */
    public DqPaltformReceiveWarning paltformReceiveWarningUpd(DqPaltformReceiveWarning warning) {
        DqPaltformReceiveWarning oldWarning = findById(warning.getId());
        oldWarning.setErrorNum(warning.getErrorNum());
        oldWarning.setArchiveNum(warning.getArchiveNum());
        oldWarning.setHospitalInTime(warning.getHospitalInTime());
        oldWarning.setHospitalInTimeRate(warning.getHospitalInTimeRate());
        oldWarning.setOutpatientInTime(warning.getOutpatientInTime());
        oldWarning.setOutpatientInTimeRate(warning.getOutpatientInTimeRate());
        oldWarning.setPeInTime(warning.getPeInTime());
        oldWarning.setPeInTimeRate(warning.getPeInTimeRate());
        oldWarning.setUpdateTime(new Date());
        oldWarning.setUpdateUserId(warning.getUpdateUserId());
        oldWarning.setUpdateUserName(warning.getUpdateUserName());
        // Replace-all semantics: delete existing rows, then re-save the incoming list.
        dqDatasetWarningDao.deleteByOrgCodeAndType(oldWarning.getOrgCode(), DATASET_TYPE_RECEIVE);
        saveDatasetWarnings(warning);
        save(oldWarning);
        return oldWarning;
    }

    /**
     * Deletes a setting and its data-set warnings; silently ignores unknown ids.
     *
     * @param id primary key of the setting to remove
     */
    public void deleteWarning(Long id) {
        DqPaltformReceiveWarning oldWarning = findById(id);
        if (oldWarning != null) {
            dqDatasetWarningDao.deleteByOrgCodeAndType(oldWarning.getOrgCode(), DATASET_TYPE_RECEIVE);
            delete(oldWarning);
        }
    }

    /**
     * Tags each attached data-set warning with the setting's org code and the
     * receive type, then persists the batch. No-op for a null/empty list.
     * (Extracted: this logic was duplicated in add and update.)
     */
    private void saveDatasetWarnings(DqPaltformReceiveWarning warning) {
        if (warning.getDatasetWarningList() != null && !warning.getDatasetWarningList().isEmpty()) {
            warning.getDatasetWarningList().forEach(dataset -> {
                dataset.setOrgCode(warning.getOrgCode());
                dataset.setType(DATASET_TYPE_RECEIVE);
            });
            dqDatasetWarningDao.save(warning.getDatasetWarningList());
        }
    }
}

+ 90 - 0
src/main/java/com/yihu/ehr/analyze/service/dataQuality/DqPaltformResourceWarningService.java

@ -0,0 +1,90 @@
package com.yihu.ehr.analyze.service.dataQuality;
import com.yihu.ehr.analyze.dao.DqPaltformResourceWarningDao;
import com.yihu.ehr.analyze.service.scheduler.WarningSchedulerService;
import com.yihu.ehr.entity.quality.DqPaltformResourceWarning;
import com.yihu.ehr.query.BaseJpaService;
import org.apache.commons.lang.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.Date;
/**
 * 数据质量-平台资源化预警值
 * @author yeshijie on 2018/5/28.
 */
@Service
public class DqPaltformResourceWarningService extends BaseJpaService<DqPaltformResourceWarning, DqPaltformResourceWarningDao> {

    @Autowired
    private DqPaltformResourceWarningDao dqPaltformResourceWarningDao;
    @Autowired
    private WarningSchedulerService warningSchedulerService;

    public DqPaltformResourceWarning findById(Long id) {
        return dqPaltformResourceWarningDao.findOne(id);
    }

    public DqPaltformResourceWarning findByOrgCode(String orgCode) {
        return dqPaltformResourceWarningDao.findByOrgCode(orgCode);
    }

    /**
     * Creates a platform-resource warning setting.
     *
     * @param warning setting to persist
     * @return the persisted setting
     */
    public DqPaltformResourceWarning paltformResourceWarningAdd(DqPaltformResourceWarning warning) {
        save(warning);
        return warning;
    }

    /**
     * Updates a platform-resource warning setting. When the unparsing period
     * changed, the warning scheduler job is re-registered with the new cron.
     *
     * @param warning incoming values; must carry the id of an existing row
     * @return the updated, persisted setting
     */
    public DqPaltformResourceWarning paltformResourceWarningUpd(DqPaltformResourceWarning warning) {
        DqPaltformResourceWarning oldWarning = findById(warning.getId());
        oldWarning.setErrorNum(warning.getErrorNum());
        oldWarning.setFailureNum(warning.getFailureNum());
        oldWarning.setUnparsingNum(warning.getUnparsingNum());
        oldWarning.setUpdateTime(new Date());
        oldWarning.setUpdateUserId(warning.getUpdateUserId());
        oldWarning.setUpdateUserName(warning.getUpdateUserName());
        // Null-safe change detection: the original called equals() on the stored
        // period directly, which NPEs when that column is null.
        String oldPeriod = oldWarning.getUnparsingPeriod();
        String newPeriod = warning.getUnparsingPeriod();
        boolean periodChanged = (oldPeriod == null) ? newPeriod != null : !oldPeriod.equals(newPeriod);
        if (periodChanged) {
            oldWarning.setUnparsingPeriod(newPeriod);
        }
        save(oldWarning);
        if (periodChanged) {
            String cronExp = getCronExp(newPeriod);
            warningSchedulerService.addJob(cronExp);
        }
        return oldWarning;
    }

    /**
     * Builds a daily Quartz-style cron expression from an "HH:mm" (or "HH")
     * period string. Blank input falls back to the first stored setting's period.
     * NOTE(review): assumes the period segments are numeric — a malformed value
     * throws NumberFormatException; confirm input is validated upstream.
     *
     * @param unparsingPeriod time-of-day string, e.g. "08:30" or "8"
     * @return cron expression firing once per day at that time
     */
    public String getCronExp(String unparsingPeriod) {
        if (StringUtils.isBlank(unparsingPeriod)) {
            DqPaltformResourceWarning warning = dqPaltformResourceWarningDao.findByFirst();
            unparsingPeriod = warning.getUnparsingPeriod();
        }
        String cron = "0 m h * * ?"; // h = hour, m = minute placeholders
        String[] parts = unparsingPeriod.split(":");
        if (parts.length > 1) {
            return cron.replace("m", Integer.valueOf(parts[1]) + "").replace("h", Integer.valueOf(parts[0]) + "");
        } else {
            // Only the hour was given — fire on the hour.
            return cron.replace("m", "0").replace("h", Integer.valueOf(parts[0]) + "");
        }
    }
}

+ 90 - 0
src/main/java/com/yihu/ehr/analyze/service/dataQuality/DqPaltformUploadWarningService.java

@ -0,0 +1,90 @@
package com.yihu.ehr.analyze.service.dataQuality;
import com.yihu.ehr.analyze.dao.DqDatasetWarningDao;
import com.yihu.ehr.analyze.dao.DqPaltformUploadWarningDao;
import com.yihu.ehr.entity.quality.DqPaltformUploadWarning;
import com.yihu.ehr.query.BaseJpaService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.util.Date;
/**
 * 数据质量-平台上传预警值
 * @author yeshijie on 2018/5/28.
 */
@Service
@Transactional
public class DqPaltformUploadWarningService extends BaseJpaService<DqPaltformUploadWarning, DqPaltformUploadWarningDao> {

    /** Data-set warning type discriminator for the "platform upload" dimension. */
    private static final String DATASET_TYPE_UPLOAD = "2";

    @Autowired
    private DqPaltformUploadWarningDao dqPaltformUploadWarningDao;
    @Autowired
    private DqDatasetWarningDao dqDatasetWarningDao;

    public DqPaltformUploadWarning findById(Long id) {
        return dqPaltformUploadWarningDao.findOne(id);
    }

    public DqPaltformUploadWarning findByOrgCode(String orgCode) {
        return dqPaltformUploadWarningDao.findByOrgCode(orgCode);
    }

    /**
     * Creates a platform-upload warning setting together with its attached
     * data-set warnings (tagged with the org code and upload type).
     * NOTE(review): unlike the receive-side service, updateTime is not set on
     * create here — confirm whether that is intentional.
     *
     * @param warning setting to persist; createTime is set here
     * @return the persisted setting
     */
    public DqPaltformUploadWarning paltformUploadWarningAdd(DqPaltformUploadWarning warning) {
        warning.setCreateTime(new Date());
        save(warning);
        saveDatasetWarnings(warning);
        return warning;
    }

    /**
     * Updates a platform-upload warning setting. The existing data-set warnings
     * for the organization are replaced wholesale with the incoming list.
     *
     * @param warning incoming values; must carry the id of an existing row
     * @return the updated, persisted setting
     */
    public DqPaltformUploadWarning paltformUploadWarningUpd(DqPaltformUploadWarning warning) {
        DqPaltformUploadWarning oldWarning = findById(warning.getId());
        oldWarning.setErrorNum(warning.getErrorNum());
        oldWarning.setArchiveNum(warning.getArchiveNum());
        oldWarning.setUpdateTime(new Date());
        oldWarning.setUpdateUserId(warning.getUpdateUserId());
        oldWarning.setUpdateUserName(warning.getUpdateUserName());
        // Replace-all semantics: delete existing rows, then re-save the incoming list.
        dqDatasetWarningDao.deleteByOrgCodeAndType(oldWarning.getOrgCode(), DATASET_TYPE_UPLOAD);
        saveDatasetWarnings(warning);
        save(oldWarning);
        return oldWarning;
    }

    /**
     * Deletes a setting and its data-set warnings; silently ignores unknown ids.
     *
     * @param id primary key of the setting to remove
     */
    public void deleteWarning(Long id) {
        DqPaltformUploadWarning oldWarning = findById(id);
        if (oldWarning != null) {
            dqDatasetWarningDao.deleteByOrgCodeAndType(oldWarning.getOrgCode(), DATASET_TYPE_UPLOAD);
            delete(oldWarning);
        }
    }

    /**
     * Tags each attached data-set warning with the setting's org code and the
     * upload type, then persists the batch. No-op for a null/empty list.
     * (Extracted: this logic was duplicated in add and update.)
     */
    private void saveDatasetWarnings(DqPaltformUploadWarning warning) {
        if (warning.getDatasetWarningList() != null && !warning.getDatasetWarningList().isEmpty()) {
            warning.getDatasetWarningList().forEach(dataset -> {
                dataset.setOrgCode(warning.getOrgCode());
                dataset.setType(DATASET_TYPE_UPLOAD);
            });
            dqDatasetWarningDao.save(warning.getDatasetWarningList());
        }
    }
}

+ 66 - 0
src/main/java/com/yihu/ehr/analyze/service/dataQuality/WarningProblemService.java

@ -0,0 +1,66 @@
package com.yihu.ehr.analyze.service.dataQuality;
import com.yihu.ehr.elasticsearch.ElasticSearchUtil;
import com.yihu.ehr.entity.quality.DqDatasetWarning;
import com.yihu.ehr.query.BaseJpaService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import java.util.*;
/**
 * Created by progr1mmer on 2018/6/12.
 */
@Service
public class WarningProblemService extends BaseJpaService {

    @Value("${quality.version}")
    private String defaultQualityVersion;
    @Autowired
    private ElasticSearchUtil elasticSearchUtil;
    @Autowired
    private DqDatasetWarningService dqDatasetWarningService;

    /**
     * Builds the receive-side data-set status report for one organization on one
     * day: every configured warning data-set that is missing from the day's QC
     * results is listed as "未上传", followed by all data-sets actually received
     * ("已上传"), each row carrying a running sequence number.
     *
     * @param orgCode organization code
     * @param date    day in yyyy-MM-dd form (queried as "date 00:00:00")
     * @return ordered rows with keys sn/version/code/name/status
     * @throws Exception propagated from the Elasticsearch query
     */
    public List<Map<String, Object>> receiveDataset(String orgCode, String date) throws Exception {
        StringBuilder filters = new StringBuilder();
        filters.append("org_code=").append(orgCode).append(";")
                .append("receive_date=").append(date).append(" 00:00:00");
        List<Map<String, Object>> qcResult = elasticSearchUtil.list("json_archives_qc", "qc_dataset_detail", filters.toString());
        // LinkedHashMap keeps insertion order so the sn numbering is deterministic
        // across runs (plain HashMap iteration order is unspecified).
        Map<String, String> upDataset = new LinkedHashMap<>();   // data-sets actually received
        Map<String, String> unUpDataset = new LinkedHashMap<>(); // configured but missing
        qcResult.forEach(item -> upDataset.put((String) item.get("dataset"), (String) item.get("dataset_name")));
        List<DqDatasetWarning> dqDatasetWarnings = dqDatasetWarningService.findByOrgCodeAndType(orgCode, "1");
        dqDatasetWarnings.forEach(item2 -> {
            if (!upDataset.containsKey(item2.getCode())) {
                unUpDataset.put(item2.getCode(), item2.getName());
            }
        });
        List<Map<String, Object>> result = new ArrayList<>();
        int next = appendRows(result, unUpDataset, "未上传", 1);
        appendRows(result, upDataset, "已上传", next);
        return result;
    }

    /**
     * Appends one result row per data-set with the given status, numbering rows
     * from {@code index}; returns the next unused sequence number.
     */
    private int appendRows(List<Map<String, Object>> result, Map<String, String> datasets, String status, int index) {
        for (Map.Entry<String, String> entry : datasets.entrySet()) {
            Map<String, Object> data = new HashMap<>();
            data.put("sn", index);
            data.put("version", defaultQualityVersion);
            data.put("code", entry.getKey());
            data.put("name", entry.getValue());
            data.put("status", status);
            result.add(data);
            index++;
        }
        return index;
    }
}

+ 824 - 0
src/main/java/com/yihu/ehr/analyze/service/dataQuality/WarningQuestionService.java

@ -0,0 +1,824 @@
package com.yihu.ehr.analyze.service.dataQuality;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.yihu.ehr.analyze.dao.*;
import com.yihu.ehr.elasticsearch.ElasticSearchUtil;
import com.yihu.ehr.entity.quality.*;
import com.yihu.ehr.profile.qualilty.DqWarningRecordType;
import com.yihu.ehr.profile.qualilty.DqWarningRecordWarningType;
import com.yihu.ehr.query.BaseJpaService;
import com.yihu.ehr.util.datetime.DateUtil;
import org.apache.commons.collections.map.HashedMap;
import org.apache.commons.lang.StringUtils;
import org.hibernate.Query;
import org.hibernate.Session;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import java.sql.ResultSet;
import java.util.*;
import static com.yihu.ehr.profile.qualilty.DqWarningRecordWarningType.errorNum;
/**
 * @author yeshijie on 2018/6/11.
 */
@Service
public class WarningQuestionService extends BaseJpaService {
    private final static Logger logger = LoggerFactory.getLogger(WarningQuestionService.class);
    @Autowired
    private WarningSettingService warningSettingService;
    @Autowired
    private DqWarningRecordDao dqWarningRecordDao;
    @Autowired
    private DqDatasetWarningDao dqDatasetWarningDao;
    @Autowired
    private DqPaltformReceiveWarningDao dqPaltformReceiveWarningDao;
    @Autowired
    private DqPaltformResourceWarningDao dqPaltformResourceWarningDao;
    @Autowired
    private DqPaltformUploadWarningDao dqPaltformUploadWarningDao;
    @Autowired
    private DataQualityStatisticsService dataQualityStatisticsService;
    @Autowired
    private ElasticSearchUtil elasticSearchUtil;
    @Value("${quality.orgCode}")
    private String defaultOrgCode;
    @Value("${quality.cloud}")
    private String cloud;
    /**
     * Entry point for the daily warning-problem analysis: runs the receive,
     * resource and upload analyses for the given day, defaulting to today
     * when no date is supplied.
     *
     * @param date analysis day in yyyy-MM-dd form, or blank for today
     */
    public void analyze(String date){
        String dateStr = StringUtils.isBlank(date)
                ? DateUtil.formatDate(new Date(), DateUtil.DEFAULT_DATE_YMD_FORMAT)
                : date;
        receive(dateStr);
        resource(dateStr);
        upload(dateStr);
    }
    /**
     * 接收预警
     */
    public void receive(String dateStr){
        Session session = currentSession();
        //0.获取医院数据
        Query query1 = session.createSQLQuery("SELECT org_code,full_name from organizations where org_type = 'Hospital'");
        List<Object[]> orgList = query1.list();
        Map<String, String> orgMap = new HashedMap(orgList.size());
        orgList.forEach(one->{
            String orgCode = one[0].toString();
            String name = one[1].toString();
            orgMap.put(orgCode,name);
        });
        //1.查找预警设置
        List<DqPaltformReceiveWarning> warningList = dqPaltformReceiveWarningDao.findAll();
        Map<String, DqPaltformReceiveWarning> warningMap = new HashedMap(warningList.size());
        warningList.forEach(one->{
            String orgCode = one.getOrgCode();
            warningMap.put(orgCode,one);
        });
        DqPaltformReceiveWarning defaultWarning = warningMap.get(defaultOrgCode);
        List<DqDatasetWarning> datasetList = dqDatasetWarningDao.findByType("1");
        Map<String, List<DqDatasetWarning>> datasetMap = new HashedMap(datasetList.size());
        datasetList.forEach(one->{
            String orgCode = one.getOrgCode();
            if(datasetMap.containsKey(orgCode)){
                datasetMap.get(orgCode).add(one);
            }else {
                List<DqDatasetWarning> list = new ArrayList<DqDatasetWarning>();
                list.add(one);
                datasetMap.put(orgCode,list);
            }
        });
        for (Map.Entry<String, DqPaltformReceiveWarning> entry : warningMap.entrySet()) {
            String orgCode = entry.getKey();
            entry.getValue().setDatasetWarningList(datasetMap.get(orgCode));
        }
        //2.统计实际值
        Map<String, Map<String, Object>> dataMap = new HashMap<>(warningList.size());
        //及时率
        inTimeWarning(warningMap,dataMap);
        //3.预警
        List<DqWarningRecord> list = new ArrayList<>();
        Date recordTime = DateUtil.formatCharDateYMD(dateStr);
        String unqualified = "不合格";
        for (String orgCode : warningMap.keySet()) {
            if(defaultOrgCode.equals(orgCode)){
                continue;
            }
            try {
                String orgName = orgMap.get(orgCode);
                String id = DateUtil.getCurrentString(DateUtil.DEFAULT_CHAR_DATE_YMD_FORMAT)+"_"+ dateStr +"_"+orgCode+"_";
                DqPaltformReceiveWarning warning = warningMap.get(orgCode);
                Map<String, Object> hospitalMap = dataMap.get(orgCode);
                //统计接收档案数据
                String sql1 = "SELECT count(*) c FROM json_archives/info where receive_date>= '"+dateStr+" 00:00:00' AND receive_date<='" +  dateStr + " 23:59:59'  AND pack_type=1 and org_code='"+orgCode+"'";
                try {
                    ResultSet resultSet1 = elasticSearchUtil.findBySql(sql1);
                    resultSet1.next();
                    double total = resultSet1.getDouble("c");//接收 档案数
                    hospitalMap.put("receiveArchives",total);
                }catch (Exception e){
                    e.getMessage();
                }
                //统计质量异常
                String sql2 = "SELECT count(*) c FROM json_archives_qc/qc_metadata_info where receive_date>= '"+dateStr+" 00:00:00' AND receive_date<='" +  dateStr + " 23:59:59' and qc_step=1 and org_code='"+orgCode+"'";
                try {
                    ResultSet resultSet2 = elasticSearchUtil.findBySql(sql2);
                    resultSet2.next();
                    double total = resultSet2.getDouble("c");//接收 质量异常
                    hospitalMap.put("receiveException",total);
                }catch (Exception e){
                    e.getMessage();
                }
                //数据集
                String sql3 = "SELECT distinct details FROM json_archives_qc/qc_dataset_info where receive_date>= '"+dateStr+" 00:00:00' AND receive_date<='" +  dateStr + " 23:59:59' and qc_step=1 and org_code='"+orgCode+"' ";
                try {
                    ResultSet resultSet3 = elasticSearchUtil.findBySql(sql3);
                    Map<String, Object> datasetMap1 = new HashMap<>();
                    while (resultSet3.next()) {
                        String details = resultSet3.getString("details");//接收 数据集
                        JSONArray jsonArray = JSON.parseArray(details);
                        for(int i=0;i<jsonArray.size();i++){
                            String dataset = jsonArray.get(i).toString();
                            if(!datasetMap1.containsKey(dataset)){
                                datasetMap1.put(dataset,dataMap);
                            }
                        }
                    }
                    hospitalMap.put("receiveDataset",datasetMap1.size());//数据集个数
                }catch (Exception e){
                    e.getMessage();
                }
                if(hospitalMap==null){
                    //该医院没有上传数据
                    DqWarningRecord record1 = new DqWarningRecord();
                    record1.setOrgCode(orgCode);
                    record1.setType(DqWarningRecordType.receive.getValue());
                    record1.setActualValue("0");
                    record1.setId(id+DqWarningRecordWarningType.archives.getValue());
                    record1.setOrgName(orgName);
                    record1.setWarningType(DqWarningRecordWarningType.archives.getValue());
                    record1.setQuota(DqWarningRecordWarningType.archives.getName());
                    record1.setRecordTime(recordTime);
                    record1.setWarningTime(new Date());
                    record1.setWarningValue(warning.getArchiveNum()+"");
                    record1.setStatus("1");
                    record1.setProblemDescription(DqWarningRecordWarningType.archives.getName()+unqualified);
                    list.add(record1);
                    DqWarningRecord record2 = new DqWarningRecord();
                    record2.setOrgCode(orgCode);
                    record2.setType(DqWarningRecordType.receive.getValue());
                    record2.setActualValue("0");
                    record2.setId(id+DqWarningRecordWarningType.errorNum.getValue());
                    record2.setOrgName(orgName);
                    record2.setWarningType(DqWarningRecordWarningType.errorNum.getValue());
                    record2.setQuota(DqWarningRecordWarningType.errorNum.getName());
                    record2.setRecordTime(recordTime);
                    record2.setWarningTime(new Date());
                    record2.setWarningValue(warning.getErrorNum()+"");
                    record2.setStatus("1");
                    record2.setProblemDescription(errorNum.getName()+unqualified);
                    list.add(record2);
                    DqWarningRecord record3 = new DqWarningRecord();
                    record3.setOrgCode(orgCode);
                    record3.setType(DqWarningRecordType.receive.getValue());
                    record3.setActualValue("0");
                    record3.setId(id+DqWarningRecordWarningType.datasetWarningNum.getValue());
                    record3.setOrgName(orgName);
                    record3.setWarningType(DqWarningRecordWarningType.datasetWarningNum.getValue());
                    record3.setQuota(DqWarningRecordWarningType.datasetWarningNum.getName());
                    record3.setRecordTime(recordTime);
                    record3.setWarningTime(new Date());
                    record3.setWarningValue(warning.getDatasetWarningNum()+"");
                    record3.setStatus("1");
                    record3.setProblemDescription(DqWarningRecordWarningType.datasetWarningNum.getName()+unqualified);
                    list.add(record3);
                    DqWarningRecord record4 = new DqWarningRecord();
                    record4.setOrgCode(orgCode);
                    record4.setType(DqWarningRecordType.receive.getValue());
                    record4.setActualValue("0");
                    record4.setId(id+DqWarningRecordWarningType.outpatientInTimeRate.getValue());
                    record4.setOrgName(orgName);
                    record4.setWarningType(DqWarningRecordWarningType.outpatientInTimeRate.getValue());
                    record4.setQuota(DqWarningRecordWarningType.outpatientInTimeRate.getName());
                    record4.setRecordTime(recordTime);
                    record4.setWarningTime(new Date());
                    record4.setWarningValue(warning.getOutpatientInTimeRate()+"%");
                    record4.setStatus("1");
                    String description4 = "就诊日期为:"+dateStr+"的"+
                            DqWarningRecordWarningType.outpatientInTimeRate.getName()+unqualified;
                    record4.setProblemDescription(description4);
                    list.add(record4);
                    DqWarningRecord record5 = new DqWarningRecord();
                    record5.setOrgCode(orgCode);
                    record5.setType(DqWarningRecordType.receive.getValue());
                    record5.setActualValue("0");
                    record5.setId(id+DqWarningRecordWarningType.hospitalInTimeRate.getValue());
                    record5.setOrgName(orgName);
                    record5.setWarningType(DqWarningRecordWarningType.hospitalInTimeRate.getValue());
                    record5.setQuota(DqWarningRecordWarningType.hospitalInTimeRate.getName());
                    record5.setRecordTime(recordTime);
                    record5.setWarningTime(new Date());
                    record5.setWarningValue(warning.getHospitalInTimeRate()+"%");
                    record5.setStatus("1");
                    String description5 = "就诊日期为:"+dateStr+"的"+
                            DqWarningRecordWarningType.hospitalInTimeRate.getName()+unqualified;
                    record5.setProblemDescription(description5);
                    list.add(record5);
                    DqWarningRecord record6 = new DqWarningRecord();
                    record6.setOrgCode(orgCode);
                    record6.setType(DqWarningRecordType.receive.getValue());
                    record6.setActualValue("0");
                    record6.setId(id+DqWarningRecordWarningType.peInTimeRate.getValue());
                    record6.setOrgName(orgName);
                    record6.setWarningType(DqWarningRecordWarningType.peInTimeRate.getValue());
                    record6.setQuota(DqWarningRecordWarningType.peInTimeRate.getName());
                    record6.setRecordTime(recordTime);
                    record6.setWarningTime(new Date());
                    record6.setWarningValue(warning.getPeInTimeRate()+"%");
                    record6.setStatus("1");
                    String description6 = "就诊日期为:"+dateStr+"的"+
                            DqWarningRecordWarningType.peInTimeRate.getName()+unqualified;
                    record6.setProblemDescription(description6);
                    list.add(record6);
                    continue;
                }
                //1、档案数
                Long archiveNum = Long.valueOf(hospitalMap.get("receiveArchives").toString());
                if(archiveNum<warning.getArchiveNum()){
                    //接收的档案数小于预警值
                    DqWarningRecord record = new DqWarningRecord();
                    record.setOrgCode(orgCode);
                    record.setType(DqWarningRecordType.receive.getValue());
                    record.setActualValue(archiveNum+"");
                    record.setId(id+DqWarningRecordWarningType.archives.getValue());
                    record.setOrgName(orgName);
                    record.setWarningType(DqWarningRecordWarningType.archives.getValue());
                    record.setQuota(DqWarningRecordWarningType.archives.getName());
                    record.setRecordTime(recordTime);
                    record.setWarningTime(new Date());
                    record.setWarningValue(warning.getArchiveNum()+"");
                    record.setStatus("1");
                    record.setProblemDescription(DqWarningRecordWarningType.archives.getName()+unqualified);
                    list.add(record);
                }
                //2、质量异常问题数
                Long errorNum = Long.valueOf(hospitalMap.get("receiveException").toString());
                if(errorNum>warning.getErrorNum()){
                    //接收的质量异常问题数大于预警值
                    DqWarningRecord record = new DqWarningRecord();
                    record.setOrgCode(orgCode);
                    record.setType(DqWarningRecordType.receive.getValue());
                    record.setActualValue(errorNum+"");
                    record.setId(id+DqWarningRecordWarningType.errorNum.getValue());
                    record.setOrgName(orgName);
                    record.setWarningType(DqWarningRecordWarningType.errorNum.getValue());
                    record.setQuota(DqWarningRecordWarningType.errorNum.getName());
                    record.setRecordTime(recordTime);
                    record.setWarningTime(new Date());
                    record.setWarningValue(warning.getErrorNum()+"");
                    record.setStatus("1");
                    record.setProblemDescription(DqWarningRecordWarningType.errorNum.getName()+unqualified);
                    list.add(record);
                }
                //3、数据集
                Long datasetNum = Long.valueOf(hospitalMap.get("receiveDataset").toString());
                if(warning.getDatasetWarningList()!=null&&datasetNum<warning.getDatasetWarningList().size()){
                    //接收的数据集小于预警值
                    DqWarningRecord record = new DqWarningRecord();
                    record.setOrgCode(orgCode);
                    record.setType(DqWarningRecordType.receive.getValue());
                    record.setActualValue(datasetNum+"");
                    record.setId(id+DqWarningRecordWarningType.datasetWarningNum.getValue());
                    record.setOrgName(orgName);
                    record.setWarningType(DqWarningRecordWarningType.datasetWarningNum.getValue());
                    record.setQuota(DqWarningRecordWarningType.datasetWarningNum.getName());
                    record.setRecordTime(recordTime);
                    record.setWarningTime(new Date());
                    record.setWarningValue(warning.getDatasetWarningList().size()+"");
                    record.setStatus("1");
                    record.setProblemDescription(DqWarningRecordWarningType.datasetWarningNum.getName()+unqualified);
                    list.add(record);
                }
                //4、门诊及时率
                try {
                    double totalOutpatient = Double.valueOf(hospitalMap.get("totalOutpatient").toString());
                    double outpatientIntime = Double.valueOf(hospitalMap.get("outpatientIntime").toString());
                    String outpatientRate = dataQualityStatisticsService.calRate(outpatientIntime,totalOutpatient);
                    double outpatientIntimeRate = Double.valueOf(outpatientRate.replace("%",""));
                    double intimeRate = Double.valueOf(warning.getOutpatientInTimeRate().replace("%",""));
                    if(outpatientIntimeRate<intimeRate){
                        //门诊及时率小于预警值
                        DqWarningRecord record = new DqWarningRecord();
                        record.setOrgCode(orgCode);
                        record.setType(DqWarningRecordType.receive.getValue());
                        record.setActualValue(outpatientRate);
                        record.setId(id+DqWarningRecordWarningType.outpatientInTimeRate.getValue());
                        record.setOrgName(orgName);
                        record.setWarningType(DqWarningRecordWarningType.outpatientInTimeRate.getValue());
                        record.setQuota(DqWarningRecordWarningType.outpatientInTimeRate.getName());
                        record.setRecordTime(recordTime);
                        record.setWarningTime(new Date());
                        record.setWarningValue(warning.getOutpatientInTimeRate()+"%");
                        record.setStatus("1");
                        String description = "就诊日期为:"+hospitalMap.get("outpatientReceiveTime")+"的"+
                                DqWarningRecordWarningType.outpatientInTimeRate.getName()+unqualified;
                        record.setProblemDescription(description);
                        list.add(record);
                    }
                }catch (Exception e){
                    e.printStackTrace();
                }
                //5、住院及时率
                try {
                    double totalHospital = Double.valueOf(hospitalMap.get("totalHospital").toString());
                    double hospitalIntime = Double.valueOf(hospitalMap.get("hospitalIntime").toString());
                    String hospitalRate = dataQualityStatisticsService.calRate(hospitalIntime,totalHospital);
                    double hospitalIntimeRate = Double.valueOf(hospitalRate.replace("%",""));
                    double intimeRate = Double.valueOf(warning.getHospitalInTimeRate().replace("%",""));
                    if(hospitalIntimeRate<intimeRate){
                        //住院及时率小于预警值
                        DqWarningRecord record = new DqWarningRecord();
                        record.setOrgCode(orgCode);
                        record.setType(DqWarningRecordType.receive.getValue());
                        record.setActualValue(hospitalRate);
                        record.setId(id+DqWarningRecordWarningType.hospitalInTimeRate.getValue());
                        record.setOrgName(orgName);
                        record.setWarningType(DqWarningRecordWarningType.hospitalInTimeRate.getValue());
                        record.setQuota(DqWarningRecordWarningType.hospitalInTimeRate.getName());
                        record.setRecordTime(recordTime);
                        record.setWarningTime(new Date());
                        record.setWarningValue(warning.getHospitalInTimeRate()+"%");
                        record.setStatus("1");
                        String description = "就诊日期为:"+hospitalMap.get("hospitalReceiveTime")+"的"+
                                DqWarningRecordWarningType.hospitalInTimeRate.getName()+unqualified;
                        record.setProblemDescription(description);
                        list.add(record);
                    }
                }catch (Exception e){
                    e.printStackTrace();
                }
                //6、体检及时率
                try {
                    double totalPe = Double.valueOf(hospitalMap.get("totalPe").toString());
                    double peIntime = Double.valueOf(hospitalMap.get("peIntime").toString());
                    String peRate = dataQualityStatisticsService.calRate(peIntime,totalPe);
                    double peIntimeRate = Double.valueOf(peRate.replace("%",""));
                    double intimeRate = Double.valueOf(warning.getPeInTimeRate().replace("%",""));
                    if(peIntimeRate<intimeRate){
                        //住院及时率小于预警值
                        DqWarningRecord record = new DqWarningRecord();
                        record.setOrgCode(orgCode);
                        record.setType(DqWarningRecordType.receive.getValue());
                        record.setActualValue(peRate);
                        record.setId(id+DqWarningRecordWarningType.peInTimeRate.getValue());
                        record.setOrgName(orgName);
                        record.setWarningType(DqWarningRecordWarningType.peInTimeRate.getValue());
                        record.setQuota(DqWarningRecordWarningType.peInTimeRate.getName());
                        record.setRecordTime(recordTime);
                        record.setWarningTime(new Date());
                        record.setWarningValue(warning.getPeInTimeRate()+"%");
                        record.setStatus("1");
                        String description = "就诊日期为:"+hospitalMap.get("peReceiveTime")+"的"+
                                DqWarningRecordWarningType.peInTimeRate.getName()+unqualified;
                        record.setProblemDescription(description);
                        list.add(record);
                    }
                }catch (Exception e){
                    e.printStackTrace();
                }
            }catch (Exception e){
                e.printStackTrace();
            }
        }
        if(list.size()>0){
            dqWarningRecordDao.save(list);
        }
    }
    /**
     * 及时率预警:按机构统计体检/门诊/住院的档案总数与及时上传数。
     *
     * 对 warningMap 中每个机构(平台默认机构除外),取其配置的及时时限之外一天
     * (now - (inTime+1) 天)为统计日,分别查询日报表中的总数与 json_archives 中
     * delay 不超时的及时数,结果以机构编码为键写入 dataMap,供后续预警判断使用。
     *
     * @param warningMap 各机构的接收预警设置,键为机构编码
     * @param dataMap    出参:各机构的统计数据(键为机构编码,值为统计项 map)
     */
    public void inTimeWarning(Map<String, DqPaltformReceiveWarning> warningMap,Map<String, Map<String, Object>> dataMap){
        //统计
        for (String orgCode : warningMap.keySet()) {
            try {
                if(defaultOrgCode.equals(orgCode)){
                    continue;
                }
                DqPaltformReceiveWarning warning = warningMap.get(orgCode);
                //原实现只生成 map 却从未写入 dataMap,统计结果被丢弃;此处补上注册逻辑,
                //若 dataMap 已有该机构的统计项则在其上累加覆盖
                Map<String, Object> map = dataMap.get(orgCode);
                if(map == null){
                    map = initDataMap(warning);
                    dataMap.put(orgCode, map);
                }
                String peDateStr = DateUtil.formatDate(DateUtil.addDate(-(warning.getPeInTime()+1), new Date()),DateUtil.DEFAULT_DATE_YMD_FORMAT);
                String hospitalDateStr = DateUtil.formatDate(DateUtil.addDate(-(warning.getHospitalInTime()+1), new Date()),DateUtil.DEFAULT_DATE_YMD_FORMAT);
                String outpatientDateStr = DateUtil.formatDate(DateUtil.addDate(-(warning.getOutpatientInTime()+1), new Date()),DateUtil.DEFAULT_DATE_YMD_FORMAT);
                //体检
                try {
                    String sql1 = "SELECT sum(HSI07_01_004) s3 FROM qc/daily_report where event_date>= '"+peDateStr+"T00:00:00' AND event_date <='" +  peDateStr + "T23:59:59' and org_code = '"+orgCode+"' ";
                    ResultSet resultSet1 = elasticSearchUtil.findBySql(sql1);
                    resultSet1.next();
                    double HSI07_01_004 = resultSet1.getDouble("s3");//体检
                    map.put("totalPe",HSI07_01_004);
                    map.put("peReceiveTime",peDateStr);
                }catch (Exception e){
                    if(!"Error".equals(e.getMessage())){
                        e.printStackTrace();
                    }
                }
                try {
                    String sql2 = "SELECT count(distinct event_no) c FROM json_archives/info where event_date>= '"+peDateStr+" 00:00:00' AND event_date<='" +  peDateStr + " 23:59:59' and org_code = '"+orgCode+"' AND pack_type=1 and event_type=2 and delay <="+warning.getPeInTime();
                    ResultSet resultSet2 = elasticSearchUtil.findBySql(sql2);
                    resultSet2.next();
                    double total = resultSet2.getDouble("c");//及时数
                    //键名与 initDataMap 及消费方保持一致("peIntime",原 "peInTime" 永远不会被读取)
                    map.put("peIntime",total);
                    map.put("peReceiveTime",peDateStr);
                }catch (Exception e){
                    if(!"Error".equals(e.getMessage())){
                        e.printStackTrace();
                    }
                }
                //门诊
                try {
                    String sql1 = "SELECT sum(HSI07_01_002) s2 FROM qc/daily_report where event_date>= '"+outpatientDateStr+"T00:00:00' AND event_date <='"+outpatientDateStr+"T23:59:59' and org_code = '"+orgCode+"' ";
                    ResultSet resultSet1 = elasticSearchUtil.findBySql(sql1);
                    resultSet1.next();
                    //SQL 中列别名为 s2,原代码误取 "s4" 导致门急诊总数永远取不到
                    double HSI07_01_002 = resultSet1.getDouble("s2");//门急诊
                    map.put("totalOutpatient",HSI07_01_002);
                    map.put("outpatientReceiveTime",outpatientDateStr);
                }catch (Exception e){
                    if(!"Error".equals(e.getMessage())){
                        e.printStackTrace();
                    }
                }
                try {
                    String sql2 = "SELECT count(distinct event_no) c FROM json_archives/info where event_date>= '"+outpatientDateStr+" 00:00:00' AND event_date<='" +  outpatientDateStr + " 23:59:59' and org_code = '"+orgCode+"' AND pack_type=1 and event_type=0 and delay <="+warning.getOutpatientInTime();
                    ResultSet resultSet2 = elasticSearchUtil.findBySql(sql2);
                    resultSet2.next();
                    double total = resultSet2.getDouble("c");//及时数
                    //键名与 initDataMap 及消费方保持一致("outpatientIntime")
                    map.put("outpatientIntime",total);
                    map.put("outpatientReceiveTime",outpatientDateStr);
                }catch (Exception e){
                    if(!"Error".equals(e.getMessage())){
                        e.printStackTrace();
                    }
                }
                //住院
                String sql1 = "SELECT sum(HSI07_01_012) s4 FROM qc/daily_report where event_date>= '"+hospitalDateStr+"T00:00:00' AND event_date <='"+hospitalDateStr+"T23:59:59' and org_code = '"+orgCode+"' ";
                try {
                    ResultSet resultSet1 = elasticSearchUtil.findBySql(sql1);
                    resultSet1.next();
                    double HSI07_01_012 = resultSet1.getDouble("s4");//住院
                    map.put("totalHospital",HSI07_01_012);
                    map.put("hospitalReceiveTime",hospitalDateStr);
                }catch (Exception e){
                    if(!"Error".equals(e.getMessage())){
                        e.printStackTrace();
                    }
                }
                String sql2 = "SELECT count(distinct event_no) c FROM json_archives/info where event_date>= '"+hospitalDateStr+" 00:00:00' AND event_date<='" +  hospitalDateStr + " 23:59:59' and org_code = '"+orgCode+"' AND pack_type=1 and event_type=1 and delay <="+warning.getHospitalInTime();
                try {
                    ResultSet resultSet2 = elasticSearchUtil.findBySql(sql2);
                    resultSet2.next();
                    double total = resultSet2.getDouble("c");//及时数
                    //键名与 initDataMap 及消费方保持一致("hospitalIntime")
                    map.put("hospitalIntime",total);
                    map.put("hospitalReceiveTime",hospitalDateStr);
                }catch (Exception e){
                    if(!"Error".equals(e.getMessage())){
                        e.printStackTrace();
                    }
                }
            }catch (Exception e){
                e.printStackTrace();
            }
        }
    }
    /**
     * 资源化预警:按日统计资源化解析的失败数、质量问题数与未解析量,超过
     * 平台级预警设置(取 dqPaltformResourceWarningDao 中第一条)时生成预警记录。
     *
     * @param dateStr 统计日期,格式 yyyy-MM-dd
     */
    public void resource(String dateStr){
        //1.查找预警设置
        List<DqPaltformResourceWarning> warningList = dqPaltformResourceWarningDao.findAll();
        if(warningList.size()==0){
            return;
        }
        DqPaltformResourceWarning warning = warningList.get(0);
        double resourceFailure = 0;
        double resourceUnArchive = 0;
        double resourceException = 0;
        //资源化数据
        String sql1 = "SELECT count(*) c,archive_status FROM json_archives/info where receive_date>= '"+dateStr+" 00:00:00' AND receive_date<='" +  dateStr + " 23:59:59' AND pack_type=1 and (archive_status=2 or archive_status=0) group by archive_status";
        try {
            ResultSet resultSet1 = elasticSearchUtil.findBySql(sql1);
            while (resultSet1.next()) {
                String archiveStatus = resultSet1.getString("archive_status");// 2失败,0未解析
                double total = resultSet1.getDouble("c");
                if("2".equals(archiveStatus)){
                    resourceFailure = total;
                }else {
                    resourceUnArchive = total;
                }
            }
        }catch (Exception e){
            e.printStackTrace();//原 e.getMessage() 为空操作,异常被静默吞掉
        }
        String sql2 = "SELECT count(*) c FROM json_archives_qc/qc_metadata_info where receive_date>= '"+dateStr+" 00:00:00' AND receive_date<='" +  dateStr + " 23:59:59' AND qc_step=2 ";
        try {
            ResultSet resultSet2 = elasticSearchUtil.findBySql(sql2);
            resultSet2.next();
            //解析异常数应写入 resourceException;原代码误写入 resourceUnArchive,
            //既覆盖了未解析量,又使质量问题预警永远不触发
            resourceException = resultSet2.getDouble("c");//资源化 解析异常
        }catch (Exception e){
            e.printStackTrace();//原 e.getMessage() 为空操作,异常被静默吞掉
        }
        List<DqWarningRecord> list = new ArrayList<>(3);
        Date recordTime = DateUtil.formatCharDateYMD(dateStr);
        String unqualified = "不合格";
        String orgName = "医疗云平台";
        String id = DateUtil.getCurrentString(DateUtil.DEFAULT_CHAR_DATE_YMD_FORMAT)+"_"+ dateStr +"_"+cloud+"_";
        //1、失败数
        if(resourceFailure>warning.getFailureNum()){
            //失败数>预警值
            DqWarningRecord record = new DqWarningRecord();
            record.setType(DqWarningRecordType.resource.getValue());
            record.setActualValue(resourceFailure+"");
            record.setId(id+DqWarningRecordWarningType.resourceFailureNum.getValue());
            record.setWarningType(DqWarningRecordWarningType.resourceFailureNum.getValue());
            record.setQuota(DqWarningRecordWarningType.resourceFailureNum.getName());
            record.setRecordTime(recordTime);
            record.setWarningTime(new Date());
            record.setWarningValue(warning.getFailureNum()+"");
            record.setStatus("1");
            record.setOrgName(orgName);
            record.setProblemDescription(DqWarningRecordWarningType.resourceFailureNum.getName()+unqualified);
            list.add(record);
        }
        //2、质量问题数
        if(resourceException>warning.getErrorNum()){
            //质量问题数>预警值
            DqWarningRecord record = new DqWarningRecord();
            record.setType(DqWarningRecordType.resource.getValue());
            record.setActualValue(resourceException+"");
            record.setId(id+DqWarningRecordWarningType.resourceErrorNum.getValue());
            record.setWarningType(DqWarningRecordWarningType.resourceErrorNum.getValue());
            record.setQuota(DqWarningRecordWarningType.resourceErrorNum.getName());
            record.setRecordTime(recordTime);
            record.setWarningTime(new Date());
            record.setWarningValue(warning.getErrorNum()+"");
            record.setStatus("1");
            record.setOrgName(orgName);
            record.setProblemDescription(DqWarningRecordWarningType.resourceErrorNum.getName()+unqualified);
            list.add(record);
        }
        //3、未解析量
        if(resourceUnArchive>warning.getUnparsingNum()){
            //未解析量>预警值
            DqWarningRecord record = new DqWarningRecord();
            record.setType(DqWarningRecordType.resource.getValue());
            record.setActualValue(resourceUnArchive+"");
            record.setId(id+DqWarningRecordWarningType.unArchiveNum.getValue());
            record.setWarningType(DqWarningRecordWarningType.unArchiveNum.getValue());
            record.setQuota(DqWarningRecordWarningType.unArchiveNum.getName());
            record.setRecordTime(recordTime);
            record.setWarningTime(new Date());
            record.setWarningValue(warning.getUnparsingNum()+"");
            record.setStatus("1");
            record.setOrgName(orgName);
            record.setProblemDescription(DqWarningRecordWarningType.unArchiveNum.getName()+unqualified);
            list.add(record);
        }
        if(list.size()>0){
            dqWarningRecordDao.save(list);
        }
    }
    /**
     * 上传预警:按日统计向上级平台上传的档案数、错误数与数据集数,与上传预警
     * 设置(取 dqPaltformUploadWarningDao 中第一条)比较后生成预警记录。
     *
     * @param dateStr 统计日期,格式 yyyy-MM-dd
     */
    public void upload(String dateStr){
        //1.查找预警设置
        List<DqPaltformUploadWarning> warningList = dqPaltformUploadWarningDao.findAll();
        if(warningList.size()==0){
            return;
        }
        DqPaltformUploadWarning warning = warningList.get(0);
        List<DqDatasetWarning> dtList =  dqDatasetWarningDao.findByType("2");
        double datasetNum = dtList.size();
        double uploadSuccessNum = 0;
        double uploadErrorNum = 0;
        double uploadDatasetNum = 0;
        double uploadArchiveNum = 0;
        //统计数据量和错误数
        String sql1 = "SELECT count(*) c,upload_status FROM upload/record where event_date>= '"+dateStr+" 00:00:00' AND event_date<='" +  dateStr + " 23:59:59' group by upload_status";
        try {
            ResultSet resultSet1 = elasticSearchUtil.findBySql(sql1);
            while (resultSet1.next()) {
                String status = resultSet1.getString("upload_status");// 0失败,1成功
                double total = resultSet1.getDouble("c");
                if("1".equals(status)){
                    uploadSuccessNum = total;
                }else {
                    uploadErrorNum = total;
                }
            }
            uploadArchiveNum = uploadErrorNum + uploadSuccessNum;
        }catch (Exception e){
            e.printStackTrace();//原 e.getMessage() 为空操作,异常被静默吞掉
        }
        //数据集(去重统计当日上传记录中出现过的数据集)
        String sql2 = "SELECT datasets FROM upload/record where event_date>= '"+dateStr+" 00:00:00' AND event_date<='" +  dateStr + " 23:59:59' ";
        try {
            Map<String, String> datasetMap = new HashMap<>();
            ResultSet resultSet2 = elasticSearchUtil.findBySql(sql2);
            while (resultSet2.next()) {
                String datasets = resultSet2.getString("datasets");//接收 数据集
                JSONArray jsonArray = JSON.parseArray(datasets);
                for(int i=0;i<jsonArray.size();i++){
                    //Map 作集合用,put 幂等,无需先 containsKey
                    String dataset = jsonArray.get(i).toString();
                    datasetMap.put(dataset,dataset);
                }
            }
            uploadDatasetNum = datasetMap.size();
        }catch (Exception e){
            e.printStackTrace();//原 e.getMessage() 为空操作,异常被静默吞掉
        }
        List<DqWarningRecord> list = new ArrayList<>(3);
        Date recordTime = DateUtil.formatCharDateYMD(dateStr);
        String unqualified = "不合格";
        String orgName = warning.getOrgName();
        String orgCode = warning.getOrgCode();
        String id = DateUtil.getCurrentString(DateUtil.DEFAULT_CHAR_DATE_YMD_FORMAT)+"_"+ dateStr +"_"+orgCode+"_";
        //1、档案数
        if(uploadArchiveNum<warning.getArchiveNum()){
            //档案数<预警值
            DqWarningRecord record = new DqWarningRecord();
            record.setType(DqWarningRecordType.upload.getValue());
            record.setActualValue(uploadArchiveNum+"");
            record.setId(id+DqWarningRecordWarningType.archiveNum.getValue());
            record.setWarningType(DqWarningRecordWarningType.archiveNum.getValue());
            record.setQuota(DqWarningRecordWarningType.archiveNum.getName());
            record.setRecordTime(recordTime);
            record.setWarningTime(new Date());
            record.setWarningValue(warning.getArchiveNum()+"");
            record.setStatus("1");
            record.setOrgName(orgName);
            record.setOrgCode(orgCode);
            record.setProblemDescription(DqWarningRecordWarningType.archiveNum.getName()+unqualified);
            list.add(record);
        }
        //2、数据错误问题数
        if(uploadErrorNum>warning.getErrorNum()){
            //数据错误问题数>预警值
            DqWarningRecord record = new DqWarningRecord();
            record.setType(DqWarningRecordType.upload.getValue());
            record.setActualValue(uploadErrorNum+"");
            record.setId(id+DqWarningRecordWarningType.dataErrorNum.getValue());
            record.setWarningType(DqWarningRecordWarningType.dataErrorNum.getValue());
            record.setQuota(DqWarningRecordWarningType.dataErrorNum.getName());
            record.setRecordTime(recordTime);
            record.setWarningTime(new Date());
            record.setWarningValue(warning.getErrorNum()+"");
            record.setStatus("1");
            record.setOrgName(orgName);
            record.setOrgCode(orgCode);
            record.setProblemDescription(DqWarningRecordWarningType.dataErrorNum.getName()+unqualified);
            list.add(record);
        }
        //3、数据集
        if(uploadDatasetNum > datasetNum){
            //上传数据集数>配置的数据集数(原注释为复制粘贴错误)
            DqWarningRecord record = new DqWarningRecord();
            record.setType(DqWarningRecordType.upload.getValue());
            record.setActualValue(uploadDatasetNum+"");
            record.setId(id+DqWarningRecordWarningType.uploadDatasetNum.getValue());
            record.setWarningType(DqWarningRecordWarningType.uploadDatasetNum.getValue());
            record.setQuota(DqWarningRecordWarningType.uploadDatasetNum.getName());
            record.setRecordTime(recordTime);
            record.setWarningTime(new Date());
            record.setWarningValue(datasetNum+"");
            record.setStatus("1");
            record.setOrgName(orgName);
            record.setOrgCode(orgCode);
            record.setProblemDescription(DqWarningRecordWarningType.uploadDatasetNum.getName()+unqualified);
            list.add(record);
        }
        if(list.size()>0){
            dqWarningRecordDao.save(list);
        }
    }
    /**
     * 初始化 datamap 数据:各统计项默认为 0,接收日期默认为对应及时时限之外一天。
     *
     * @param defaultWarning 该机构的接收预警设置(提供门诊/住院/体检的及时时限)
     * @return 可变的统计项 Map,键见下方注释
     */
    private Map<String, Object> initDataMap(DqPaltformReceiveWarning defaultWarning){
        //原返回裸类型 Map,这里补上泛型;HashedMap 保持与文件既有实现一致
        Map<String, Object> dataMap = new HashedMap();
        Integer defaultPe = defaultWarning.getPeInTime();
        Integer defaultOutpatient = defaultWarning.getOutpatientInTime();
        Integer defaultHospital = defaultWarning.getHospitalInTime();
        String defaultPeDateStr = DateUtil.formatDate(DateUtil.addDate(-(defaultPe+1), new Date()),DateUtil.DEFAULT_DATE_YMD_FORMAT);
        String defaultOutpatientDateStr = DateUtil.formatDate(DateUtil.addDate(-(defaultOutpatient+1), new Date()),DateUtil.DEFAULT_DATE_YMD_FORMAT);
        String defaultHospitalDateStr = DateUtil.formatDate(DateUtil.addDate(-(defaultHospital+1), new Date()),DateUtil.DEFAULT_DATE_YMD_FORMAT);
        dataMap.put("receiveArchives",0);//接收档案数
        dataMap.put("receiveDataset",0); //接收数据集
        dataMap.put("receiveException",0);//接收异常
        dataMap.put("totalOutpatient",0);//门诊总数
        dataMap.put("totalPe",0);//体检总数
        dataMap.put("totalHospital",0);//住院总数
        dataMap.put("outpatientIntime",0);//门诊及时数
        dataMap.put("peIntime",0);//体检及时数
        dataMap.put("hospitalIntime",0);//住院及时数
        dataMap.put("outpatientReceiveTime",defaultOutpatientDateStr);//门诊接收时间
        dataMap.put("peReceiveTime",defaultPeDateStr);//体检接收时间
        dataMap.put("hospitalReceiveTime",defaultHospitalDateStr);//住院接收时间
        return dataMap;
    }
    /**
     * 判断某份档案是否在及时时限内上传。
     *
     * 机构无单独配置时回退到默认机构的配置;就诊类型为空(或字面量 "null")
     * 以及未知的就诊类型一律视为不及时。
     *
     * @param warningMap 各机构的接收预警设置,键为机构编码
     * @param orgCode    机构编码
     * @param eventType  就诊类型:0 门诊,1 住院,2 体检
     * @param delay      延时时间(天)
     * @return 延时不超过对应时限时返回 true
     */
    private boolean isInTime(Map<String, DqPaltformReceiveWarning> warningMap,String orgCode,String eventType,long delay){
        //就诊类型为空 直接返回false
        if(StringUtils.isBlank(eventType)||"null".equals(eventType)){
            return false;
        }
        DqPaltformReceiveWarning setting = warningMap.containsKey(orgCode)
                ? warningMap.get(orgCode)
                : warningMap.get(defaultOrgCode);
        if("0".equals(eventType)){
            //0门诊
            return setting.getOutpatientInTime() >= delay;
        }
        if("1".equals(eventType)){
            //1住院
            return setting.getHospitalInTime() >= delay;
        }
        if("2".equals(eventType)){
            //2体检
            return setting.getPeInTime() >= delay;
        }
        return false;
    }
}

+ 258 - 0
src/main/java/com/yihu/ehr/analyze/service/dataQuality/WarningRecordService.java

@ -0,0 +1,258 @@
package com.yihu.ehr.analyze.service.dataQuality;
import com.yihu.ehr.analyze.dao.DqWarningRecordDao;
import com.yihu.ehr.entity.quality.DqWarningRecord;
import com.yihu.ehr.profile.qualilty.DqWarningRecordSolveType;
import com.yihu.ehr.query.BaseJpaService;
import com.yihu.ehr.util.datetime.DateUtil;
import jxl.format.CellFormat;
import jxl.write.Label;
import jxl.write.WritableCellFormat;
import jxl.write.WritableSheet;
import org.apache.commons.lang.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
/**
 * 预警问题查询
 * @author yeshijie on 2018/6/11.
 */
@Service
public class WarningRecordService extends BaseJpaService<DqWarningRecord, DqWarningRecordDao> {
    @Autowired
    private WarningSettingService warningSettingService;
    @Autowired
    private DqWarningRecordDao dqWarningRecordDao;
    public DqWarningRecord findById(String id) {
        return dqWarningRecordDao.findOne(id);
    }
    /**
     * 处理问题
     * @param solveTime
     * @param solveId
     * @param solveName
     * @param solveType
     * @param id
     */
    public int warningRecordUpd(String solveTime,String solveId,String solveName,String solveType,String id){
        DqWarningRecord record = dqWarningRecordDao.findOne(id);
        if(record == null){
            return -1;
        }
        record.setStatus("2");
        record.setSolveId(solveId);
        record.setSolveName(solveName);
        record.setSolveTime(DateUtil.formatCharDateYMD(solveTime));
        record.setSolveType(solveType);
        dqWarningRecordDao.save(record);
        return 0;
    }
    /**
     * 添加行
     * @param type
     * @param wc
     * @param ws
     * @param record
     * @param j
     */
    public void addRow(String type,WritableCellFormat wc,WritableSheet ws,DqWarningRecord record,int j){
        try {
            if("1".equals(type)||"3".equals(type)){
                addCell(ws,0,j,DateUtil.toString(record.getWarningTime(),DateUtil.DEFAULT_DATE_YMD_FORMAT),wc);//预警时间
                addCell(ws,1,j,DateUtil.toString(record.getRecordTime(),DateUtil.DEFAULT_DATE_YMD_FORMAT),wc);//接收日期/上传时间
                addCell(ws,2,j,record.getOrgName(),wc);//医疗机构
                addCell(ws,3,j,record.getProblemDescription(),wc);//问题类型
                addCell(ws,4,j,getStatusName(record.getStatus()),wc);//状态
                addCell(ws,5,j,record.getQuota(),wc);//指标
                addCell(ws,6,j,record.getActualValue(),wc);//值
                addCell(ws,7,j,record.getWarningValue(),wc);//预警值
                addCell(ws,8,j,DateUtil.toString(record.getSolveTime(),DateUtil.DEFAULT_DATE_YMD_FORMAT),wc);//处理时间
                addCell(ws,9,j,getSolveTypeName(record.getSolveType()),wc);//处理结果
                addCell(ws,10,j,record.getSolveName(),wc);//操作人
            }else if("2".equals(type)){
                addCell(ws,0,j,DateUtil.toString(record.getWarningTime(),DateUtil.DEFAULT_DATE_YMD_FORMAT),wc);//预警时间
                addCell(ws,1,j,DateUtil.toString(record.getRecordTime(),DateUtil.DEFAULT_DATE_YMD_FORMAT),wc);//资源化时间
                addCell(ws,2,j,record.getProblemDescription(),wc);//问题类型
                addCell(ws,3,j,getStatusName(record.getStatus()),wc);//状态
                addCell(ws,4,j,record.getQuota(),wc);//指标
                addCell(ws,5,j,record.getActualValue(),wc);//值
                addCell(ws,6,j,record.getWarningValue(),wc);//预警值
                addCell(ws,7,j,DateUtil.toString(record.getSolveTime(),DateUtil.DEFAULT_DATE_YMD_FORMAT),wc);//处理时间
                addCell(ws,8,j,getSolveTypeName(record.getSolveType()),wc);//处理结果
                addCell(ws,9,j,record.getSolveName(),wc);//操作人
            }
        }catch (Exception e){
            e.printStackTrace();
        }
    }
    /**
     * 状态翻译
     * @param status
     * @return
     */
    public String getStatusName(String status){
        String re = "";
        switch (status){
            case "1":
                re = "未解决";
                break;
            case "2":
                re = "已解决";
                break;
            default:
                break;
        }
        return re;
    }
    /**
     * 解决方式翻译
     * @param solveType
     * @return
     */
    public String getSolveTypeName(String solveType){
        if(StringUtils.isBlank(solveType)){
            return "";
        }
        String re = "";
        switch (solveType){
            case "1":
                re = DqWarningRecordSolveType.solved.getName();
                break;
            case "2":
                re = DqWarningRecordSolveType.ignore.getName();
                break;
            case "3":
                re = DqWarningRecordSolveType.unSolve.getName();
                break;
            case "4":
                re = DqWarningRecordSolveType.notProblem.getName();
                break;
            default:
                break;
        }
        return re;
    }
    /**
     * 接收
     * excel中添加固定内容
     * @param ws
     */
    public void addReceiveStaticCell(WritableSheet ws){
        try {
            addCell(ws,0,0,"预警时间");
            addCell(ws,1,0,"接收日期");
            addCell(ws,2,0,"医疗机构");
            addCell(ws,3,0,"问题类型");
            addCell(ws,4,0,"状态");
            addCell(ws,5,0,"指标");
            addCell(ws,6,0,"值");
            addCell(ws,7,0,"预警值");
            addCell(ws,8,0,"处理时间");
            addCell(ws,9,0,"处理结果");
            addCell(ws,10,0,"操作人");
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
    /**
     * 资源化
     * excel中添加固定内容
     * @param ws
     */
    public void addResourceStaticCell(WritableSheet ws){
        try {
            addCell(ws,0,0,"预警时间");
            addCell(ws,1,0,"资源化时间");
            addCell(ws,2,0,"问题类型");
            addCell(ws,3,0,"状态");
            addCell(ws,4,0,"指标");
            addCell(ws,5,0,"值");
            addCell(ws,6,0,"预警值");
            addCell(ws,7,0,"处理时间");
            addCell(ws,8,0,"处理结果");
            addCell(ws,9,0,"操作人");
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
    /**
     * 平台上传
     * excel中添加固定内容
     * @param ws
     */
    public void addUploadStaticCell(WritableSheet ws){
        try {
            addCell(ws,0,0,"预警时间");
            addCell(ws,1,0,"上传时间");
            addCell(ws,2,0,"机构");
            addCell(ws,3,0,"问题类型");
            addCell(ws,4,0,"状态");
            addCell(ws,5,0,"指标");
            addCell(ws,6,0,"值");
            addCell(ws,7,0,"预警值");
            addCell(ws,8,0,"处理时间");
            addCell(ws,9,0,"处理结果");
            addCell(ws,10,0,"操作人");
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
    public void addQualityMonitoringCell(WritableSheet ws){
        try {
            addCell(ws,0,0,"机构");
            addCell(ws,1,0,"医院档案数");
            addCell(ws,2,0,"医院数据集");
            addCell(ws,3,0,"接收档案数");
            addCell(ws,4,0,"接收数据集");
            addCell(ws,5,0,"接收质量异常数");
            addCell(ws,6,0,"资源化解析成功");
            addCell(ws,7,0,"资源化解析失败");
            addCell(ws,8,0,"资源化解析异常");
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
    /**
     * 添加单元格内容
     * @param ws
     * @param column
     * @param row
     * @param data
     */
    public void addCell(WritableSheet ws,int column,int row,String data){
        try {
            Label label = new Label(column,row,data);
            ws.addCell(label);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
    /**
     * 添加单元格内容带样式
     * @param ws
     * @param column
     * @param row
     * @param data
     * @param cellFormat
     */
    public void addCell(WritableSheet ws,int column,int row,String data,CellFormat cellFormat){
        try {
            Label label = new Label(column,row,data,cellFormat);
            ws.addCell(label);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}

+ 80 - 0
src/main/java/com/yihu/ehr/analyze/service/dataQuality/WarningSettingService.java

@@ -0,0 +1,80 @@
package com.yihu.ehr.analyze.service.dataQuality;
import com.yihu.ehr.entity.quality.DqDatasetWarning;
import com.yihu.ehr.entity.quality.DqPaltformReceiveWarning;
import com.yihu.ehr.entity.quality.DqPaltformResourceWarning;
import com.yihu.ehr.entity.quality.DqPaltformUploadWarning;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import java.util.List;
/**
 * Warning-setting lookup service. Each lookup falls back to the settings of
 * the configured default organization when the requested organization has no
 * settings of its own.
 *
 * @author yeshijie on 2018/5/28.
 */
@Service
public class WarningSettingService {
    @Autowired
    private DqDatasetWarningService dqDatasetWarningService;
    @Autowired
    private DqPaltformReceiveWarningService dqPaltformReceiveWarningService;
    @Autowired
    private DqPaltformResourceWarningService dqPaltformResourceWarningService;
    @Autowired
    private DqPaltformUploadWarningService dqPaltformUploadWarningService;
    // FIX: was "&{quality.orgCode}", which Spring does not treat as a property
    // placeholder (the field would receive the literal text). "${...}" is the
    // correct placeholder syntax, matching "${quality.cloud}" used elsewhere.
    @Value("${quality.orgCode}")
    private String defaultOrgCode;
    /**
     * Finds the "receive" warning setting for an organization, falling back to
     * the default organization. Attaches the type "1" dataset warning list of
     * whichever org the setting came from.
     *
     * @param orgCode organization code to look up
     * @return the setting with its dataset warning list, or null when neither
     *         the org nor the default org has one
     */
    public DqPaltformReceiveWarning getReceiveWarning(String orgCode){
        DqPaltformReceiveWarning warning = dqPaltformReceiveWarningService.findByOrgCode(orgCode);
        if (warning == null) {
            warning = dqPaltformReceiveWarningService.findByOrgCode(defaultOrgCode);
            // Keep the dataset lookup consistent with the org whose setting we use.
            orgCode = defaultOrgCode;
        }
        if (warning != null) {
            List<DqDatasetWarning> list = dqDatasetWarningService.findByOrgCodeAndType(orgCode, "1");
            warning.setDatasetWarningList(list);
        }
        return warning;
    }
    /**
     * Finds the "resource transformation" warning setting for an organization,
     * falling back to the default organization.
     *
     * @param orgCode organization code to look up
     * @return the setting, or null when neither org has one
     */
    public DqPaltformResourceWarning getResourceWarning(String orgCode){
        DqPaltformResourceWarning warning = dqPaltformResourceWarningService.findByOrgCode(orgCode);
        if (warning == null) {
            warning = dqPaltformResourceWarningService.findByOrgCode(defaultOrgCode);
        }
        return warning;
    }
    /**
     * Finds the "upload" warning setting for an organization, falling back to
     * the default organization. Attaches the type "2" dataset warning list of
     * whichever org the setting came from.
     *
     * @param orgCode organization code to look up
     * @return the setting with its dataset warning list, or null when neither
     *         the org nor the default org has one
     */
    public DqPaltformUploadWarning getUploadWarning(String orgCode){
        DqPaltformUploadWarning warning = dqPaltformUploadWarningService.findByOrgCode(orgCode);
        if (warning == null) {
            warning = dqPaltformUploadWarningService.findByOrgCode(defaultOrgCode);
            orgCode = defaultOrgCode;
        }
        if (warning != null) {
            List<DqDatasetWarning> list = dqDatasetWarningService.findByOrgCodeAndType(orgCode, "2");
            warning.setDatasetWarningList(list);
        }
        return warning;
    }
}

+ 859 - 0
src/main/java/com/yihu/ehr/analyze/service/pack/PackQcReportService.java

@@ -0,0 +1,859 @@
package com.yihu.ehr.analyze.service.pack;
import com.yihu.ehr.elasticsearch.ElasticSearchPool;
import com.yihu.ehr.elasticsearch.ElasticSearchUtil;
import com.yihu.ehr.model.quality.MProfileInfo;
import com.yihu.ehr.query.BaseJpaService;
import com.yihu.ehr.redis.client.RedisClient;
import com.yihu.ehr.redis.schema.OrgKeySchema;
import com.yihu.ehr.redis.schema.RsAdapterMetaKeySchema;
import com.yihu.ehr.solr.SolrUtil;
import com.yihu.ehr.util.datetime.DateUtil;
import com.yihu.ehr.util.rest.Envelop;
import org.apache.commons.lang.ObjectUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.solr.client.solrj.response.RangeFacet;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.MatchQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.RangeQueryBuilder;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.bucket.terms.LongTerms;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.data.domain.Page;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils;
import java.util.*;
/**
 * @Author: zhengwei
 * @Date: 2018/5/31 16:21
 * @Description:质控报表
 */
@Service
public class PackQcReportService extends BaseJpaService {
    @Autowired
    private ElasticSearchUtil elasticSearchUtil;
    @Autowired
    private ElasticSearchPool elasticSearchPool;
    @Autowired
    private JdbcTemplate jdbcTemplate;
    @Autowired
    private RedisClient redisClient;
    @Autowired
    private SolrUtil solrUtil;
    @Value("${quality.cloud}")
    private String cloud;
    @Autowired
    private OrgKeySchema orgKeySchema;
    @Autowired
    private RsAdapterMetaKeySchema rsAdapterMetaKeySchema;
    protected final Log logger = LogFactory.getLog(this.getClass());
    /**
     * Hospital daily-report totals: sums the HSI07 indicator counters over all
     * daily-report documents in [startDate, endDate]. The end date is inclusive
     * because the upper bound is endDate + 1 day with an exclusive "<" check.
     *
     * @param startDate begin date, yyyy-MM-dd
     * @param endDate   end date, yyyy-MM-dd (inclusive)
     * @param orgCode   organization filter; skipped when empty or equal to the
     *                  configured "cloud" code (platform-wide query)
     * @return envelop whose detail list holds one map with keys
     *         total / inpatient / oupatient / physical
     * @throws Exception on ES query failure
     */
    public Envelop dailyReport(String startDate, String endDate, String orgCode) throws Exception{
        Envelop envelop = new Envelop();
        // Upper bound = endDate + 1 day so that "< end" keeps endDate itself in range.
        Date end = DateUtil.addDate(1, DateUtil.formatCharDateYMD(endDate));
        Map<String,Object> resMap = new HashMap<String,Object>();
        List<Map<String,Object>> list = new ArrayList<>();
        int total=0;
        int inpatient=0;
        int oupatient=0;
        int physical=0;
        BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery();
        RangeQueryBuilder startRange = QueryBuilders.rangeQuery("event_date");
        startRange.gte(startDate);
        boolQueryBuilder.must(startRange);
        RangeQueryBuilder endRange = QueryBuilders.rangeQuery("event_date");
        endRange.lt(DateUtil.toString(end));
        boolQueryBuilder.must(endRange);
        // Filter by org only when a real org is requested, not the "cloud" pseudo-code.
        if (StringUtils.isNotEmpty(orgCode)&&!cloud.equals(orgCode)) {
            MatchQueryBuilder matchQueryBuilder = QueryBuilders.matchQuery("org_code", orgCode);
            boolQueryBuilder.must(matchQueryBuilder);
        }
        List<Map<String, Object>> res = elasticSearchUtil.list("qc","daily_report", boolQueryBuilder);
        if(res!=null && res.size()>0){
            for(Map<String,Object> report : res){
                // NOTE(review): assumes every daily_report doc carries all four
                // HSI07_* counters — a missing key would NPE here. Confirm the
                // index mapping guarantees them.
                total+=Integer.parseInt(report.get("HSI07_01_001").toString());
                inpatient+=Integer.parseInt(report.get("HSI07_01_012").toString());
                oupatient+=Integer.parseInt(report.get("HSI07_01_002").toString());
                physical+=Integer.parseInt(report.get("HSI07_01_004").toString());
            }
        }
        resMap.put("total",total);
        resMap.put("inpatient",inpatient);
        resMap.put("oupatient",oupatient);
        resMap.put("physical",physical);
        list.add(resMap);
        envelop.setDetailModelList(list);
        envelop.setSuccessFlg(true);
        return envelop;
    }
    /**
     * Per-day counts of successfully resourced archives (pack_type=1,
     * archive_status=3), bucketed by event_date with a per-event-type breakdown,
     * plus a grand-total row inserted at index 0.
     *
     * @param startDate begin date, yyyy-MM-dd (inclusive, from 00:00:00)
     * @param endDate   end date, yyyy-MM-dd (NOTE(review): upper bound is
     *                  "< endDate 23:59:59", which excludes the final second —
     *                  confirm intended)
     * @param orgCode   organization filter; ignored when empty, "null" or the cloud code
     * @return rows with keys date / inpatient / oupatient / physical / total;
     *         row 0 is the "总计" total row
     * @throws Exception on ES query failure
     */
    public List<Map<String, Object>> getResourceSuccessList(String startDate, String endDate, String orgCode) throws Exception {
        // Build the semicolon-separated filter string understood by ElasticSearchUtil.
        StringBuilder stringBuilder = new StringBuilder();
        stringBuilder.append("pack_type=1;archive_status=3;");
        stringBuilder.append("receive_date>=" + startDate + " 00:00:00;");
        stringBuilder.append("receive_date<" + endDate + " 23:59:59;");
        if (StringUtils.isNotEmpty(orgCode) && !"null".equals(orgCode)&&!cloud.equals(orgCode)){
            stringBuilder.append("org_code=" + orgCode);
        }
        TransportClient transportClient = elasticSearchPool.getClient();
        List<Map<String, Object>> resultList = new ArrayList<>();
        SearchRequestBuilder builder = transportClient.prepareSearch("json_archives");
        builder.setTypes("info");
        builder.setSearchType(SearchType.DFS_QUERY_THEN_FETCH);
        builder.setQuery(elasticSearchUtil.getQueryBuilder(stringBuilder.toString()));
        // Daily buckets on event_date, each with a terms sub-aggregation on event_type.
        DateHistogramBuilder dateHistogramBuilder = new DateHistogramBuilder("date");
        dateHistogramBuilder.field("event_date");
        dateHistogramBuilder.interval(DateHistogramInterval.DAY);
        dateHistogramBuilder.format("yyyy-MM-dd");
        dateHistogramBuilder.minDocCount(0);
        AggregationBuilder terms = AggregationBuilders.terms("event_type").field("event_type");
        dateHistogramBuilder.subAggregation(terms);
        builder.addAggregation(dateHistogramBuilder);
        // Aggregation-only query: no hits needed.
        builder.setSize(0);
        builder.setExplain(true);
        SearchResponse response = builder.get();
        Histogram histogram = response.getAggregations().get("date");
        double inpatient_total = 0.0;
        double oupatient_total = 0.0;
        double physical_total = 0.0;
        for(Histogram.Bucket item: histogram.getBuckets()){
            Map<String, Object> temp = new HashMap<>();
            // Skip empty buckets produced by minDocCount(0).
            if(item.getDocCount()>0 && !"".equals(item.getKeyAsString())) {
                temp.put("date", item.getKeyAsString());
                LongTerms longTerms = item.getAggregations().get("event_type");
                double inpatient = 0.0;
                double oupatient = 0.0;
                double physical = 0.0;
                // event_type codes: 0 = outpatient, 1 = inpatient, 2 = physical exam.
                for(Terms.Bucket item1 : longTerms.getBuckets()){
                    if("0".equals(item1.getKeyAsString())) {
                        oupatient=item1.getDocCount();
                        oupatient_total+=item1.getDocCount();
                    }else if("1".equals(item1.getKeyAsString())) {
                        inpatient=item1.getDocCount();
                        inpatient_total+=item1.getDocCount();
                    }else if("2".equals(item1.getKeyAsString())) {
                        physical=item1.getDocCount();
                        physical_total+=item1.getDocCount();
                    }
                }
                temp.put("inpatient", inpatient);
                temp.put("oupatient", oupatient);
                temp.put("physical", physical);
                temp.put("total", inpatient+oupatient+physical);
                resultList.add(temp);
            }
        }
        // Grand-total row goes first so the UI can show it at the top.
        Map<String, Object> total = new HashMap<>();
        total.put("date", "总计");
        total.put("inpatient", inpatient_total);
        total.put("oupatient", oupatient_total);
        total.put("physical", physical_total);
        total.put("total", inpatient_total + oupatient_total + physical_total);
        resultList.add(0,total);
        return resultList;
    }
    /**
     * Resource-transformation success report (unpaged).
     *
     * @param startDate begin date, yyyy-MM-dd
     * @param endDate   end date, yyyy-MM-dd
     * @param orgCode   organization filter; cloud code means platform-wide
     * @return envelop whose detail list is one row per day plus a leading total row
     * @throws Exception on ES query failure
     */
    public Envelop resourceSuccess(String startDate, String endDate, String orgCode) throws Exception {
        List<Map<String, Object>> rows = getResourceSuccessList(startDate, endDate, orgCode);
        Envelop result = new Envelop();
        result.setDetailModelList(rows);
        result.setSuccessFlg(true);
        return result;
    }
    /**
     * Resource-transformation success report, paged in memory.
     *
     * @param startDate begin date, yyyy-MM-dd
     * @param endDate   end date, yyyy-MM-dd
     * @param orgCode   organization filter; cloud code means platform-wide
     * @param size      page size
     * @param page      1-based page number
     * @return one page of the full result list (fake pagination)
     * @throws Exception on ES query failure
     */
    public Envelop resourceSuccess(String startDate, String endDate, String orgCode,int size,int page) throws Exception {
        // Fetch everything, then slice the requested page in memory.
        return getPageEnvelop(page, size, getResourceSuccessList(startDate, endDate, orgCode));
    }
    /**
     * Builds a paged Envelop over an in-memory result list (fake pagination).
     *
     * @param page      1-based page number
     * @param size      page size; must be > 0
     * @param totalList complete result list to page over
     * @return envelop carrying the requested page plus paging metadata
     */
    private Envelop getPageEnvelop(int page,int size,List totalList){
        Envelop envelop = new Envelop();
        int totalCount = totalList.size();
        envelop.setTotalCount(totalCount);
        // FIX: total page count is totalCount / size rounded up; the previous code
        // used totalCount % size (the remainder), which reported a wrong page count
        // whenever totalCount was not an exact multiple of size.
        int totalPage = totalCount % size == 0 ? totalCount / size : totalCount / size + 1;
        envelop.setTotalPage(totalPage);
        envelop.setCurrPage(page);
        envelop.setPageSize(size);
        List<Map<String, Object>> pagedList = getPageList(page, size, totalList);
        envelop.setSuccessFlg(true);
        envelop.setDetailModelList(pagedList);
        return envelop;
    }
    /**
     * Returns the sub-list that makes up one page of {@code data}; an empty
     * list when the page starts past the end.
     *
     * @param pageNum  1-based page number
     * @param pageSize page size
     * @param data     complete result list
     */
    private List getPageList(int pageNum,int pageSize,List data) {
        int start = (pageNum - 1) * pageSize;
        if (start >= data.size()) {
            return Collections.emptyList();
        }
        int end = Math.min(pageNum * pageSize, data.size());
        return data.subList(start, end);
    }
    /**
     * Counts all received archive packages (pack_type=1) in the period,
     * optionally restricted to one organization.
     *
     * @param startDate begin date, yyyy-MM-dd (from 00:00:00, inclusive)
     * @param endDate   end date, yyyy-MM-dd (upper bound "< 23:59:59", exclusive)
     * @param orgCode   organization filter; ignored when empty, "null" or the cloud code
     * @return envelop whose detail list holds one map with key "total"
     * @throws Exception on ES query failure
     */
    public Envelop archiveReport(String startDate, String endDate, String orgCode) throws Exception {
        // Semicolon-separated filter string understood by ElasticSearchUtil.
        String filters = "pack_type=1;"
                + "receive_date>=" + startDate + " 00:00:00;"
                + "receive_date<" + endDate + " 23:59:59;";
        if (StringUtils.isNotEmpty(orgCode) && !"null".equals(orgCode) && !cloud.equals(orgCode)) {
            filters += "org_code=" + orgCode + ";";
        }
        Long total = elasticSearchUtil.count("json_archives", "info", filters);
        Map<String, Object> row = new HashMap<>();
        row.put("total", total);
        List<Map<String, Object>> rows = new ArrayList<>();
        rows.add(row);
        Envelop envelop = new Envelop();
        envelop.setDetailModelList(rows);
        envelop.setSuccessFlg(true);
        return envelop;
    }
    /**
     * Dataset-level totals (package count and row count) aggregated from
     * json_archives_qc/qc_dataset_detail for the period/org, with a leading
     * "总计" grand-total row at index 0.
     *
     * @param startDate begin date, yyyy-MM-dd (inclusive)
     * @param endDate   end date, yyyy-MM-dd (inclusive)
     * @param orgCode   organization filter; ignored when empty, "null" or the cloud code
     * @return rows keyed count / row / dataset_name / dataset (each data row also
     *         gets "name" mirroring dataset_name)
     * @throws Exception on ES SQL failure
     */
    public List<Map<String,Object>> getDataList(String startDate, String endDate, String orgCode) throws Exception {
        StringBuilder sql = new StringBuilder();
        sql.append("SELECT SUM(count) as count ,SUM(row) as row, dataset_name, dataset ");
        sql.append("FROM json_archives_qc/qc_dataset_detail");
        sql.append(" WHERE receive_date>='" + startDate + " 00:00:00' and receive_date<='" + endDate + " 23:59:59'");
        if (StringUtils.isNotEmpty(orgCode) && !"null".equals(orgCode)&&!cloud.equals(orgCode)){
            sql.append(" and org_code='" + orgCode +"'");
        }
        // FIX: leading space added — previously "GROUP BY" was appended directly
        // after the closing quote of the preceding clause, yielding SQL like
        // "...23:59:59'GROUP BY dataset_name,dataset".
        sql.append(" GROUP BY dataset_name,dataset");
        List<String> field = new ArrayList<>();
        field.add("count");
        field.add("row");
        field.add("dataset_name");
        field.add("dataset");
        List<Map<String,Object>> list = elasticSearchUtil.findBySql(field, sql.toString());
        Map<String, Object> totalMap = new HashMap<>();
        totalMap.put("dataset","总计");
        totalMap.put("dataset_name","-");
        double rowTotal = 0;
        double countTotal = 0;
        for(Map<String,Object> map :list){
            // "name" mirrors dataset_name for the front end.
            map.put("name" ,map.get("dataset_name"));
            rowTotal += Double.valueOf(map.get("row").toString());
            countTotal += Double.valueOf(map.get("count").toString());
        }
        totalMap.put("row",rowTotal);
        totalMap.put("count",countTotal);
        // Grand-total row goes first.
        list.add(0,totalMap);
        return list;
    }
    /**
     * Dataset totals report (unpaged).
     *
     * @param startDate begin date, yyyy-MM-dd
     * @param endDate   end date, yyyy-MM-dd
     * @param orgCode   organization filter; cloud code means platform-wide
     * @return envelop whose detail list is the full dataset total list
     * @throws Exception on ES failure
     */
    public Envelop dataSetList(String startDate, String endDate, String orgCode) throws Exception {
        Envelop out = new Envelop();
        out.setDetailModelList(getDataList(startDate, endDate, orgCode));
        out.setSuccessFlg(true);
        return out;
    }
    /**
     * Dataset totals report, paged in memory.
     *
     * @param startDate begin date, yyyy-MM-dd
     * @param endDate   end date, yyyy-MM-dd
     * @param orgCode   organization filter; cloud code means platform-wide
     * @param size      page size
     * @param page      1-based page number
     * @return one page of the full dataset total list (fake pagination)
     * @throws Exception on ES failure
     */
    public Envelop getDataSetListPage(String startDate, String endDate, String orgCode,int size,int page) throws Exception {
        return getPageEnvelop(page, size, getDataList(startDate, endDate, orgCode));
    }
    /**
     * Accumulates one dataset occurrence into {@code res}: bumps row/count on
     * an existing entry for the same dataset, otherwise appends a new entry
     * whose display name is looked up from redis.
     *
     * @param version standard version used to build the redis key
     * @param dataSet dataset code
     * @param row     number of rows contributed by this occurrence
     * @param res     accumulator list, mutated in place
     */
    public void getDataSets(String version, String dataSet, int row, List<Map<String, Object>> res){
        for (Map<String, Object> entry : res) {
            if (dataSet.equals(entry.get("dataset"))) {
                entry.put("row", (int) entry.get("row") + row);
                entry.put("count", (int) entry.get("count") + 1);
                return;
            }
        }
        Map<String, Object> created = new HashMap<>();
        created.put("dataset", dataSet);
        created.put("name", redisClient.get("std_data_set_" + version + ":" + dataSet + ":name"));
        created.put("row", row);
        created.put("count", 1);
        res.add(created);
    }
    /**
     * Builds per-org/per-dataset daily statistics for one day: for every org
     * that received data on {@code date}, scans its qc_dataset_info records
     * (qc_step=1) in pages of 1000, parses each record's "details" JSON, and
     * bulk-indexes the aggregated rows into json_archives_qc/qc_dataset_detail.
     *
     * @param date the day to aggregate, yyyy-MM-dd
     * @return envelop with successFlg=true (no payload)
     * @throws Exception on ES failure or JSON parse failure
     */
    public Envelop datasetDetail(String date) throws Exception{
        Envelop envelop = new Envelop();
        List<String> field = new ArrayList<>();
        field.add("org_code");
        // Distinct org codes that received anything on this day.
        String sqlOrg = "SELECT org_code FROM json_archives/info where receive_date>= '"+date+" 00:00:00' AND receive_date<='" +  date + " 23:59:59' group by org_code";
        List<Map<String, Object>> orgList = elasticSearchUtil.findBySql(field,sqlOrg);
        for(Map<String,Object> orgMap : orgList) {
            String orgCode = orgMap.get("org_code")+"";
            List<Map<String, Object>> res = new ArrayList<>();
            // NOTE(review): the upper bound here uses '<' ... 23:59:59 (excludes the
            // final second) while the org query above uses '<=' — confirm intended.
            StringBuilder stringBuilder = new StringBuilder();
            stringBuilder.append("qc_step=1;");
            stringBuilder.append("receive_date>=" + date + " 00:00:00;");
            stringBuilder.append("receive_date<" + date + " 23:59:59;");
            stringBuilder.append("org_code=" + orgCode);
            long starttime = System.currentTimeMillis();
            int count = (int) elasticSearchUtil.count("json_archives_qc", "qc_dataset_info", stringBuilder.toString());
            // Ceiling division: number of 1000-document pages to walk.
            double pageNum = count % 1000 > 0 ? count / 1000 + 1 : count / 1000;
            for (int i = 0; i < pageNum; i++) {
                Page<Map<String, Object>> result = elasticSearchUtil.page("json_archives_qc", "qc_dataset_info", stringBuilder.toString(), i + 1, 1000);
                logger.info("查询耗时:" + (System.currentTimeMillis() - starttime) + "ms");
                for (Map<String, Object> map : result) {
                    String eventType = map.get("event_type").toString();
                    // "details" holds a JSON list of {datasetCode: rowCount} maps.
                    // NOTE(review): objectMapper is presumably inherited from
                    // BaseJpaService — confirm.
                    List<Map<String, Object>> dataSets = objectMapper.readValue(map.get("details").toString(), List.class);
                    for (Map<String, Object> dataSet : dataSets) {
                        for (Map.Entry<String, Object> entry : dataSet.entrySet()) {
                            getDataSetsDetail(map.get("version") + "", entry.getKey(), (int) entry.getValue(), res, date, orgCode, eventType);
                        }
                    }
                }
            }
            elasticSearchUtil.bulkIndex("json_archives_qc","qc_dataset_detail",res);
            logger.info("统计耗时:" + (System.currentTimeMillis() - starttime) + "ms");
        }
        envelop.setSuccessFlg(true);
        return envelop;
    }
    /**
     * Accumulates one dataset occurrence into {@code res}, keyed by
     * (dataset, event_type): bumps row/count on an existing entry, otherwise
     * appends a new entry tagged with org, event type, day and the dataset's
     * display name from redis.
     *
     * @param version   standard version used to build the redis key
     * @param dataSet   dataset code
     * @param row       number of rows contributed by this occurrence
     * @param res       accumulator list, mutated in place
     * @param date      day being aggregated, yyyy-MM-dd
     * @param orgCode   organization the record belongs to
     * @param eventType event type code of the record
     */
    public void getDataSetsDetail(String version, String dataSet, int row, List<Map<String, Object>> res,String date,String orgCode,String eventType){
        for (Map<String, Object> entry : res) {
            if (dataSet.equals(entry.get("dataset")) && eventType.equals(entry.get("event_type"))) {
                entry.put("row", (int) entry.get("row") + row);
                entry.put("count", (int) entry.get("count") + 1);
                return;
            }
        }
        Map<String, Object> created = new HashMap<>();
        created.put("org_code", orgCode);
        created.put("event_type", eventType);
        created.put("receive_date", date + " 00:00:00");
        created.put("dataset", dataSet);
        created.put("dataset_name", redisClient.get("std_data_set_" + version + ":" + dataSet + ":name"));
        created.put("row", row);
        created.put("count", 1);
        res.add(created);
    }
    /**
     * Counts resource-transformation failures (archive_status=2, pack_type=1)
     * in the period, grouped by error_type, with a "total" row at index 0.
     *
     * @param startDate begin date, yyyy-MM-dd (from 00:00:00, inclusive)
     * @param endDate   end date, yyyy-MM-dd (upper bound "< 23:59:59", exclusive)
     * @param orgCode   organization filter; ignored when empty, "null" or the cloud code
     * @return rows with keys error_type / error_count; row 0 carries the total
     * @throws Exception on ES query failure
     */
    public List<Map<String, Object>> getArchiveFailedList(String startDate, String endDate, String orgCode) throws Exception {
        // Semicolon-separated filter string understood by ElasticSearchUtil.
        StringBuilder stringBuilder = new StringBuilder();
        stringBuilder.append("archive_status=2;pack_type=1;");
        stringBuilder.append("receive_date>=" + startDate + " 00:00:00;");
        stringBuilder.append("receive_date<" + endDate + " 23:59:59;");
        if (StringUtils.isNotEmpty(orgCode) && !"null".equals(orgCode)&&!cloud.equals(orgCode)){
            stringBuilder.append("org_code=" + orgCode);
        }
        TransportClient transportClient = elasticSearchPool.getClient();
        List<Map<String, Object>> resultList = new ArrayList<>();
        SearchRequestBuilder builder = transportClient.prepareSearch("json_archives");
        builder.setTypes("info");
        builder.setSearchType(SearchType.DFS_QUERY_THEN_FETCH);
        builder.setQuery(elasticSearchUtil.getQueryBuilder(stringBuilder.toString()));
        // Terms aggregation over error_type; aggregation-only query (size 0).
        AggregationBuilder terms = AggregationBuilders.terms("error_type").field("error_type");
        builder.addAggregation(terms);
        builder.setSize(0);
        builder.setExplain(true);
        SearchResponse response = builder.get();
        LongTerms longTerms = response.getAggregations().get("error_type");
        Map<String, Object> totalMap = new HashMap<>();
        double totalCount = 0.0;
        for(Terms.Bucket item: longTerms.getBuckets()){
            Map<String, Object> temp = new HashMap<>();
            temp.put("error_type", item.getKeyAsString());
            temp.put("error_count", item.getDocCount());
            totalCount += item.getDocCount();
            resultList.add(temp);
        }
        // Total row goes first.
        totalMap.put("error_type","total");
        totalMap.put("error_count",totalCount);
        resultList.add(0,totalMap);
        return resultList;
    }
    /**
     * Resource-transformation failure report (unpaged).
     *
     * @param startDate begin date, yyyy-MM-dd
     * @param endDate   end date, yyyy-MM-dd
     * @param orgCode   organization filter; cloud code means platform-wide
     * @return envelop whose detail list is one row per error_type plus a leading total row
     * @throws Exception on ES query failure
     */
    public Envelop archiveFailed(String startDate, String endDate, String orgCode) throws Exception {
        Envelop out = new Envelop();
        out.setSuccessFlg(true);
        out.setDetailModelList(getArchiveFailedList(startDate, endDate, orgCode));
        return out;
    }
    /**
     * Resource-transformation failure report, paged in memory.
     *
     * @param startDate begin date, yyyy-MM-dd
     * @param endDate   end date, yyyy-MM-dd
     * @param orgCode   organization filter; cloud code means platform-wide
     * @param size      page size
     * @param page      1-based page number
     * @return one page of the full failure list (fake pagination)
     * @throws Exception on ES query failure
     */
    public Envelop archiveFailed(String startDate, String endDate, String orgCode,int size,int page) throws Exception {
        return getPageEnvelop(page, size, getArchiveFailedList(startDate, endDate, orgCode));
    }
    /**
     * Counts abnormal metadata records for one QC step in the period, grouped
     * by qc_error_type, with a "total" row at index 0.
     *
     * @param step      QC step to filter on (qc_step field)
     * @param startDate begin date, yyyy-MM-dd (from 00:00:00, inclusive)
     * @param endDate   end date, yyyy-MM-dd (upper bound "< 23:59:59", exclusive)
     * @param orgCode   organization filter; ignored when empty, "null" or the cloud code
     * @return rows with keys error_type / error_count; row 0 carries the total
     * @throws Exception on ES query failure
     */
    public List<Map<String, Object>> getMetadaErrorList(String step, String startDate, String endDate, String orgCode) throws Exception {
        // Semicolon-separated filter string understood by ElasticSearchUtil.
        StringBuilder stringBuilder = new StringBuilder();
        stringBuilder.append("qc_step="+step+";");
        stringBuilder.append("receive_date>=" + startDate + " 00:00:00;");
        stringBuilder.append("receive_date<" + endDate + " 23:59:59;");
        if (StringUtils.isNotEmpty(orgCode) && !"null".equals(orgCode)&&!cloud.equals(orgCode)){
            stringBuilder.append("org_code=" + orgCode);
        }
        TransportClient transportClient = elasticSearchPool.getClient();
        List<Map<String, Object>> resultList = new ArrayList<>();
        SearchRequestBuilder builder = transportClient.prepareSearch("json_archives_qc");
        builder.setTypes("qc_metadata_info");
        builder.setSearchType(SearchType.DFS_QUERY_THEN_FETCH);
        builder.setQuery(elasticSearchUtil.getQueryBuilder(stringBuilder.toString()));
        // Terms aggregation over qc_error_type; aggregation-only query (size 0).
        AggregationBuilder terms = AggregationBuilders.terms("qc_error_type").field("qc_error_type");
        builder.addAggregation(terms);
        builder.setSize(0);
        builder.setExplain(true);
        SearchResponse response = builder.get();
        LongTerms longTerms = response.getAggregations().get("qc_error_type");
        Map<String, Object> total = new HashMap<>();
        double totalNum = 0.0;
        for(Terms.Bucket item: longTerms.getBuckets()){
            Map<String, Object> temp = new HashMap<>();
            temp.put("error_type", item.getKeyAsString());
            long docCount = item.getDocCount();
            temp.put("error_count",docCount );
            totalNum += docCount;
            resultList.add(temp);
        }
        // Total row goes first.
        total.put("error_type","total");
        total.put("error_count",totalNum);
        resultList.add(0,total);
        return resultList;
    }
    /**
     * Abnormal-metadata report for one QC step (unpaged).
     *
     * @param step      QC step to filter on
     * @param startDate begin date, yyyy-MM-dd
     * @param endDate   end date, yyyy-MM-dd
     * @param orgCode   organization filter; cloud code means platform-wide
     * @return envelop whose detail list is one row per qc_error_type plus a leading total row
     * @throws Exception on ES query failure
     */
    public Envelop metadataError(String step, String startDate, String endDate, String orgCode) throws Exception {
        Envelop out = new Envelop();
        out.setSuccessFlg(true);
        out.setDetailModelList(getMetadaErrorList(step, startDate, endDate, orgCode));
        return out;
    }
    /**
     * Abnormal-metadata report for one QC step, paged in memory.
     *
     * @param step      QC step to filter on
     * @param startDate begin date, yyyy-MM-dd
     * @param endDate   end date, yyyy-MM-dd
     * @param orgCode   organization filter; cloud code means platform-wide
     * @param size      page size
     * @param page      1-based page number
     * @return one page of the full error list (fake pagination)
     * @throws Exception on ES query failure
     */
    public Envelop metadataError(String step, String startDate, String endDate, String orgCode,int size,int page) throws Exception {
        return getPageEnvelop(page, size, getMetadaErrorList(step, startDate, endDate, orgCode));
    }
    /**
     * Pages over parsing-failure archive packages in json_archives/info.
     *
     * @param filters semicolon-separated filter string
     * @param sorts   sort specification
     * @param page    1-based page number
     * @param size    page size
     * @return one page of matching documents
     * @throws Exception on ES query failure
     */
    public Page<Map<String, Object>> analyzeErrorList(String filters, String sorts, int page, int size) throws Exception {
        // Straight pass-through to the ES paging helper.
        Page<Map<String, Object>> result = elasticSearchUtil.page("json_archives", "info", filters, sorts, page, size);
        return result;
    }
    /**
     * Pages over abnormal metadata records, enriching each hit with the org
     * name (when missing) and the dataset/metadata display names from redis.
     *
     * @param filters semicolon-separated filter string
     * @param sorts   sort specification
     * @param page    1-based page number
     * @param size    page size
     * @return one page of enriched documents
     * @throws Exception on ES query failure
     */
    public Page<Map<String, Object>> metadataErrorList(String filters, String sorts, int page, int size) throws Exception {
        Page<Map<String, Object>> hits = elasticSearchUtil.page("json_archives_qc","qc_metadata_info", filters, sorts, page, size);
        for (Map<String, Object> hit : hits) {
            // Backfill the org name from redis when the record lacks one.
            if (hit.get("org_name") == null && hit.get("org_code") != null) {
                hit.put("org_name", orgKeySchema.get(hit.get("org_code") + ""));
            }
            Object version = hit.get("version");
            Object dataset = hit.get("dataset");
            hit.put("dataset_name", redisClient.get("std_data_set_" + version + ":" + dataset + ":name"));
            hit.put("metadata_name", redisClient.get("std_meta_data_" + version + ":" + dataset + "." + hit.get("metadata") + ":name"));
        }
        return hits;
    }
    /** Lists all organizations (org_code + full_name) from the relational DB. */
    public List<Map<String, Object>> getOrgs(){
        final String sql = "SELECT org_code,full_name from organizations";
        return jdbcTemplate.queryForList(sql);
    }
    /**
     * Resolves an organization's full name from a pre-fetched org list.
     *
     * @param orgs    rows as returned by {@link #getOrgs()} (keys ORG_CODE / FULL_NAME)
     * @param orgCode code to look up
     * @return the full name, or "" when the code is not present
     */
    public String getOrgName(List<Map<String, Object>> orgs, String orgCode){
        for (Map<String, Object> org : orgs) {
            if (orgCode.equals(org.get("ORG_CODE"))) {
                return ObjectUtils.toString(org.get("FULL_NAME"));
            }
        }
        return "";
    }
    /**
     * Detail view for one abnormal metadata record, enriched with the package
     * password, the adapter scheme and origin/target resource mapping (for
     * qc_step=2), the adapter version name (for qc_step=3), dataset/metadata
     * display names, and the matching archive_relation document.
     *
     * @param id ES document id in json_archives_qc/qc_metadata_info
     * @return envelop whose obj map holds "metedata", "relation" and, for
     *         qc_step=2, "resourceInfo"
     * @throws Exception on ES/DB access failure
     */
    public Envelop metadataErrorDetail(String id) throws Exception {
        Envelop envelop = new Envelop();
        Map<String, Object> res = new HashMap<>();
        Map<String, Object> metedata = elasticSearchUtil.findById("json_archives_qc","qc_metadata_info",id);
        // Backfill the org name from redis when the record lacks one.
        if(metedata.get("org_name") == null && metedata.get("org_code") != null){
            metedata.put("org_name",orgKeySchema.get(metedata.get("org_code")+""));
        }
        // Look up the package password from the raw archive document.
        Map<String, Object> jsonArchives = elasticSearchUtil.findById("json_archives","info",metedata.get("pack_id")+"");
        if(jsonArchives != null){
            metedata.put("pwd",jsonArchives.get("pwd"));
        }
        if("2".equals(metedata.get("qc_step")+"")){ // qc_step 2: resource-transformation error
            String sql = "SELECT * FROM rs_adapter_scheme WHERE adapter_version='"+metedata.get("version")+"'";
            List<Map<String, Object>> schemeList = jdbcTemplate.queryForList(sql);
            if(schemeList!=null&&schemeList.size()>0){
                metedata.put("scheme", schemeList.get(0).get("NAME"));
            }
            // Build the origin -> target resource mapping details.
            Map<String,Object> resourceInfo = new HashMap<>();
            String version = metedata.get("version")+"";
            String datasetCode = metedata.get("dataset")+"";
            String metadaCode = metedata.get("metadata")+"";
            resourceInfo.put("originDatasetCode",datasetCode);
            String originDatasetName = redisClient.get("std_data_set_" + metedata.get("version") + ":" + metedata.get("dataset") + ":name");
            resourceInfo.put("originDatasetName",originDatasetName);
            resourceInfo.put("originMetadataCode",metadaCode);
            String originMetadataName = redisClient.get("std_meta_data_" + metedata.get("version") + ":" + metedata.get("dataset")+"."+ metedata.get("metadata")+ ":name");
            resourceInfo.put("originMetadataName",originMetadataName);
            // Resolve the resourced (target) metadata code and name.
            String targetMetadataCode = rsAdapterMetaKeySchema.getMetaData(version, datasetCode, metadaCode);
            if(StringUtils.isNotBlank(targetMetadataCode)){
                resourceInfo.put("targetMetadataCode",targetMetadataCode);
                String querySql = "SELECT * FROM rs_metadata WHERE id='"+targetMetadataCode+"'";
                List<Map<String, Object>> resourMetadata = jdbcTemplate.queryForList(querySql);
                if(!CollectionUtils.isEmpty(resourMetadata)){
                    resourceInfo.put("targetMetadataName",resourMetadata.get(0).get("name"));
                    // dict_id != 0 means the target field is dictionary-coded ("编码"),
                    // otherwise it holds a plain value ("值").
                    Object dict_id = resourMetadata.get(0).get("dict_id");
                    if(dict_id != null && !dict_id.toString().equals("0")){
                        resourceInfo.put("targetDataType","编码");
                    } else {
                        resourceInfo.put("targetDataType","值");
                    }
                }
            }
            String dictId = redisClient.get(String.format("%s:%s:%s", "std_meta_data_"+version, datasetCode+"."+metadaCode, "dict_id"));
            if(StringUtils.isNotBlank(dictId)){
                resourceInfo.put("originDataType","编码");
            } else {
                resourceInfo.put("originDataType","值");
            }
            resourceInfo.put("originValue",metedata.get("value"));
            res.put("resourceInfo",resourceInfo);
        }else if("3".equals(metedata.get("qc_step")+"")){ // qc_step 3: upload-to-provincial-platform error
            // Fetch the adapter version display name.
            String adapterName = redisClient.get(metedata.get("adapter_version") + "");
            metedata.put("adapterName", adapterName);
            // NOTE(review): uploadInfo is built but never used or attached to the
            // response — looks like unfinished work; confirm before removing.
            Map<String,Object> uploadInfo = new HashMap<>();
        }
        metedata.put("dataset_name", redisClient.get("std_data_set_" + metedata.get("version") + ":" + metedata.get("dataset") + ":name"));
        metedata.put("metadata_name", redisClient.get("std_meta_data_" + metedata.get("version") + ":" + metedata.get("dataset")+"."+ metedata.get("metadata")+ ":name"));
        // archive_relation id is orgCode_eventNo_eventDateMillis.
        String relationId = metedata.get("org_code")+"_"+metedata.get("event_no")+"_"+ DateUtil.strToDate(metedata.get("event_date")+"").getTime();
        res.put("metedata",metedata);
        Map<String, Object> relation = elasticSearchUtil.findById("archive_relation", "info", relationId);
        if(relation == null){
            relation = new HashMap<>();
        }
        res.put("relation",relation);
        envelop.setObj(res);
        envelop.setSuccessFlg(true);
        return envelop;
    }
    /**
     * Pages through archive packages stored in the "json_archives"/"info" ES index.
     *
     * @param filters ES filter expression understood by ElasticSearchUtil.page
     * @param sorts   sort expression
     * @param page    page number
     * @param size    page size
     * @return one page of raw archive documents
     * @throws Exception on search failure
     */
    public Page<Map<String, Object>> archiveList(String filters, String sorts, int page, int size) throws Exception {
        long begin = System.currentTimeMillis();
        Page<Map<String, Object>> archives = elasticSearchUtil.page("json_archives", "info", filters, sorts, page, size);
        // Timing log kept from the original implementation (message text unchanged).
        logger.info("查询耗时:" + (System.currentTimeMillis() - begin) + "ms");
        return archives;
    }
    /**
     * Archive package detail: loads one archive document by id and the matching
     * patient/visit relation document (looked up by the archive's profile_id).
     *
     * @param id document id in the "json_archives"/"info" ES index
     * @return envelop whose obj holds {"archive": <doc or null>, "relation": <doc or empty map>}
     * @throws Exception on search failure
     */
    public Envelop archiveDetail(String id) throws Exception {
        Envelop envelop = new Envelop();
        Map<String, Object> res = new HashMap<>();
        Map<String, Object> archive = elasticSearchUtil.findById("json_archives", "info", id);
        res.put("archive", archive);
        Map<String, Object> relation = null;
        // Bug fix: findById may return null for an unknown id; the original dereferenced
        // archive.get("profile_id") unconditionally and threw an NPE.
        if (archive != null) {
            relation = elasticSearchUtil.findById("archive_relation", "info", archive.get("profile_id") + "");
        }
        if (relation == null) {
            relation = new HashMap<>();
        }
        res.put("relation", relation);
        envelop.setObj(res);
        envelop.setSuccessFlg(true);
        return envelop;
    }
    /**
     * Pages through upload records stored in the "upload"/"record" ES index.
     *
     * @param filters ES filter expression
     * @param sorts   sort expression
     * @param page    page number
     * @param size    page size
     * @return one page of raw upload-record documents
     * @throws Exception on search failure
     */
    public Page<Map<String, Object>> uploadRecordList(String filters, String sorts, int page, int size) throws Exception {
        Page<Map<String, Object>> records = elasticSearchUtil.page("upload", "record", filters, sorts, page, size);
        return records;
    }
    /**
     * Upload record detail: loads one upload record and derives, per data set, whether
     * it was uploaded. Data-set display names are resolved from redis via the record's
     * standard version.
     *
     * @param id upload record id in the "upload"/"record" ES index
     * @return envelop whose obj holds {"uploadRecord": <raw record>,
     *         "datasets": [{code, name, status}, ...]}
     * @throws Exception on search or JSON-parse failure
     */
    public Envelop uploadRecordDetail(String id) throws Exception {
        Map<String, Object> record = elasticSearchUtil.findById("upload", "record", id);
        List<Map<String, Object>> datasetRows = new ArrayList<>();
        Object missing = record.get("missing");
        if (missing != null) {
            // Data sets the source failed to upload.
            for (String code : (List<String>) missing) {
                Map<String, Object> row = new HashMap<>();
                row.put("code", code);
                row.put("name", redisClient.get("std_data_set_" + record.get("version") + ":" + code + ":name"));
                row.put("status", "未上传");
                datasetRows.add(row);
            }
        }
        Object uploaded = record.get("datasets");
        if (uploaded != null) {
            // "datasets" is stored as a JSON string: a list of {code -> payload} maps.
            List<Map<String, Object>> details = objectMapper.readValue(uploaded.toString(), List.class);
            for (Map<String, Object> detail : details) {
                for (String code : detail.keySet()) {
                    Map<String, Object> row = new HashMap<>();
                    row.put("code", code);
                    row.put("name", redisClient.get("std_data_set_" + record.get("version") + ":" + code + ":name"));
                    row.put("status", "已上传");
                    datasetRows.add(row);
                }
            }
        }
        Map<String, Object> res = new HashMap<>();
        res.put("uploadRecord", record);
        res.put("datasets", datasetRows);
        Envelop envelop = new Envelop();
        envelop.setSuccessFlg(true);
        envelop.setObj(res);
        return envelop;
    }
    /**
     * Queries solr for per-day visit counts (outpatient / inpatient / physical exam)
     * between startDate and endDate, optionally restricted to one organization.
     * Returns one MProfileInfo per day, with a grand-total row ("总计") prepended.
     *
     * @param startDate inclusive start date, yyyy-MM-dd
     * @param endDate   inclusive end date, yyyy-MM-dd
     * @param orgCode   organization code filter; blank means all organizations
     * @return day rows (total row first), each with outpatient/inpatient/exam/total counts
     * @throws Exception on solr query failure
     */
    public List<MProfileInfo> getProfileInfo(String startDate, String endDate, String orgCode) throws Exception {
        String q = "event_date:["+startDate+"T00:00:00Z TO "+ endDate +"T23:59:59Z]";
        if (StringUtils.isNotBlank(orgCode)) {
            // NOTE(review): value is interpolated unquoted into the solr query —
            // confirm org codes never contain solr-reserved characters.
            q += " AND org_code="+orgCode;
        }
        // NOTE(review): the facet range below starts at startDate T23:59:59Z while the
        // filter above starts at T00:00:00Z — confirm this asymmetry is intentional.
        // Outpatient (event_type:0)
        List<RangeFacet> outRangeFacet = solrUtil.getFacetDateRange("HealthProfile", "event_date", startDate+"T23:59:59Z", endDate+"T23:59:59Z", "+1DAY", "event_type:0", q);
        List<RangeFacet.Count> outCounts = outRangeFacet.get(0).getCounts();
        // Inpatient (event_type:1)
        List<RangeFacet> inRangeFacet = solrUtil.getFacetDateRange("HealthProfile", "event_date", startDate+"T23:59:59Z", endDate+"T23:59:59Z", "+1DAY", "event_type:1", q);
        List<RangeFacet.Count> inCounts = inRangeFacet.get(0).getCounts();
        // Physical examination (event_type:2)
        List<RangeFacet> examRangeFacet = solrUtil.getFacetDateRange("HealthProfile", "event_date", startDate+"T23:59:59Z", endDate+"T23:59:59Z", "+1DAY", "event_type:2", q);
        List<RangeFacet.Count> healCounts = examRangeFacet.get(0).getCounts();
        int healExamTotal = 0;
        int outTotal = 0;
        int inTotal = 0;
        int hosTotal = 0;
        List<MProfileInfo> list = new ArrayList<>();
        for (int i = 0; i < inCounts.size(); i++) {
            MProfileInfo profileInfo = new MProfileInfo();
            // Facet values start with the date; keep the yyyy-MM-dd prefix only.
            profileInfo.setData(outCounts.get(i).getValue().substring(0, 10));
            int healCount = healCounts.get(i).getCount();
            healExamTotal += healCount;
            profileInfo.setHealExam(healCount);
            int inCount = inCounts.get(i).getCount();
            inTotal += inCount;
            profileInfo.setInpatient(inCount);
            int outCount = outCounts.get(i).getCount();
            outTotal += outCount;
            profileInfo.setOutpatient(outCount);
            int total = healCount + inCount + outCount;
            profileInfo.setTotal(total);
            hosTotal += total;
            list.add(profileInfo);
        }
        // Grand-total row, prepended so the front end renders it first.
        MProfileInfo totalRow = new MProfileInfo();
        totalRow.setData("总计");
        totalRow.setHealExam(healExamTotal);
        totalRow.setInpatient(inTotal);
        totalRow.setOutpatient(outTotal);
        totalRow.setTotal(hosTotal);
        list.add(0, totalRow);
        return list;
    }
}

+ 988 - 0
src/main/java/com/yihu/ehr/analyze/service/pack/PackStatisticsService.java

@ -0,0 +1,988 @@
package com.yihu.ehr.analyze.service.pack;
import com.yihu.ehr.elasticsearch.ElasticSearchPool;
import com.yihu.ehr.elasticsearch.ElasticSearchUtil;
import com.yihu.ehr.query.BaseJpaService;
import com.yihu.ehr.util.datetime.DateUtil;
import com.yihu.ehr.util.rest.Envelop;
import org.apache.commons.lang.ObjectUtils;
import org.apache.commons.lang.StringUtils;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.bucket.terms.LongTerms;
import org.elasticsearch.search.aggregations.bucket.terms.StringTerms;
import org.elasticsearch.search.aggregations.metrics.valuecount.InternalValueCount;
import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCountBuilder;
import org.hibernate.Session;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.stereotype.Service;
import java.sql.ResultSet;
import java.util.*;
/**
 * 质控报表
 *
 * @author zhengwei
 * @created 2018.04.24
 */
@Service
public class PackStatisticsService extends BaseJpaService {
    @Autowired
    private ElasticSearchUtil elasticSearchUtil; // high-level ES access (SQL + filter-string queries)
    @Autowired
    private ElasticSearchPool elasticSearchPool; // pooled native TransportClient access
    @Autowired
    private JdbcTemplate jdbcTemplate; // relational access — unused in the visible portion, confirm before removing
    /**
     * getRecieveOrgCount: per-hospital parse statistics for packages received on the
     * given day. For each organization, reports waiting (received but not yet fully
     * parsed), successful (archive_status = 3), total received, and the success rate (%).
     *
     * @param dateStr receive date, yyyy-MM-dd
     * @return rows keyed org_code/org_name/waiting/successful/total/rate
     * @throws Exception on ES query failure
     */
    public List<Map<String, Object>> getRecieveOrgCount(String dateStr) throws Exception {
        long startTime = System.currentTimeMillis();
        Session session = currentSession();
        String range = " receive_date BETWEEN '" + dateStr + " 00:00:00' AND '" + dateStr + " 23:59:59' ";
        // Waiting: received (archive_status 0/1) and not yet analyzed (analyze_status 0/1).
        Map<String, Object[]> waitingMap = countByOrg(session,
                "SELECT org_code, COUNT(org_code) FROM json_archives WHERE " +
                " (archive_status = 0 OR archive_status = 1) AND (analyze_status = 0 OR analyze_status = 1) AND" +
                range + "GROUP BY org_code");
        // Successful: fully parsed archives.
        Map<String, Object[]> successMap = countByOrg(session,
                "SELECT org_code, COUNT(org_code) FROM json_archives WHERE archive_status = 3 AND" +
                range + "GROUP BY org_code");
        // Total received that day.
        Map<String, Object[]> totalMap = countByOrg(session,
                "SELECT org_code, COUNT(org_code) FROM json_archives WHERE" + range + "GROUP BY org_code");
        List<Map<String, Object>> dataList = new ArrayList<>();
        for (String key : totalMap.keySet()) {
            Object[] row = totalMap.get(key);
            Map<String, Object> dataArr = new HashMap<>();
            dataArr.put("org_code", row[0]);
            dataArr.put("org_name", row[1]);
            dataArr.put("waiting", waitingMap.containsKey(key) ? waitingMap.get(key)[2] : 0);
            dataArr.put("successful", successMap.containsKey(key) ? successMap.get(key)[2] : 0);
            dataArr.put("total", row[2]);
            double total = (double) row[2];
            if (total != 0) { // success rate in percent; 0 when nothing was received
                dataArr.put("rate", Double.parseDouble(dataArr.get("successful").toString()) / total * 100);
            } else {
                dataArr.put("rate", 0);
            }
            dataList.add(dataArr);
        }
        System.out.println("各个医院的数据解析情况查询耗时:" + (System.currentTimeMillis() - startTime) + "ms");
        return dataList;
    }

    /**
     * Runs one "GROUP BY org_code" count query against ES and resolves each org's full
     * name from the organizations table. Returns org_code -> [code, name, count].
     * A failure while iterating yields an empty map (original behavior); a failure in
     * findBySql itself still propagates to the caller, as before.
     * Note: the original code read some results with getObject(0); JDBC columns are
     * 1-based, so this consistently uses the "org_code" label instead.
     */
    private Map<String, Object[]> countByOrg(Session session, String sql) throws Exception {
        ResultSet resultSet = elasticSearchUtil.findBySql(sql);
        Map<String, Object[]> dataMap = new HashMap<>();
        try {
            while (resultSet.next()) {
                String code = ObjectUtils.toString(resultSet.getObject("org_code"));
                String name = (String) session.createSQLQuery(
                        "SELECT full_name FROM organizations WHERE org_code ='" + code + "'").uniqueResult();
                double count = resultSet.getDouble("COUNT(org_code)");
                dataMap.put(code, new Object[]{code, name, count});
            }
        } catch (Exception e) {
            // Original behavior: discard all rows for this query on any iteration error.
            dataMap = new HashMap<>();
        }
        return dataMap;
    }
    /**
     * Parse statistics for packages received on one day, optionally limited to one
     * organization: waiting (not yet fully parsed), successful (parsed), total counts.
     *
     * @param dateStr receive date, yyyy-MM-dd
     * @param orgCode optional organization code; empty means all organizations
     * @return single-element list holding {"waiting", "successful", "total"}
     * @throws Exception on ES query failure
     */
    public List<Map<String,Object>> getArchivesCount(String dateStr, String orgCode) throws Exception {
        String sql1 = "";
        String sql2 = "";
        String sql3 = "";
        // NOTE(review): SQL is assembled by string concatenation; dateStr/orgCode are
        // assumed to come from trusted callers — confirm, or sanitize before interpolation.
        if(StringUtils.isNotEmpty(orgCode)){
            // sql1: waiting — received (archive_status 0/1) and not yet analyzed (analyze_status 0/1)
            sql1 = "SELECT COUNT(*) FROM json_archives WHERE (archive_status = 0 OR archive_status = 1) AND " +
                    " (analyze_status = 0 OR analyze_status = 1) AND org_code='"+orgCode+"'" +
                    " AND receive_date BETWEEN '" + dateStr + " 00:00:00' AND '" +  dateStr + " 23:59:59'";
            // sql2: successfully parsed (archive_status = 3)
            sql2 = "SELECT COUNT(*) FROM json_archives WHERE archive_status = 3 AND org_code='"+orgCode+"'" +
                    " AND receive_date BETWEEN '" + dateStr + " 00:00:00' AND '" +  dateStr + " 23:59:59'";
            // sql3: everything received that day
            sql3 = "SELECT COUNT(*) FROM json_archives WHERE  org_code='"+orgCode+"'" +
                    " AND receive_date BETWEEN '" + dateStr + " 00:00:00' AND '" +  dateStr + " 23:59:59'";
        }else{
            sql1 = "SELECT COUNT(*) FROM json_archives WHERE (archive_status = 0 OR archive_status = 1) " +
                    " AND (analyze_status = 0 OR analyze_status = 1) AND receive_date BETWEEN '" + dateStr + " 00:00:00' " +
                    " AND '" +  dateStr + " 23:59:59'";
            sql2 = "SELECT COUNT(*) FROM json_archives WHERE archive_status = 3 " +
                    " AND receive_date BETWEEN '" + dateStr + " 00:00:00' AND '" +  dateStr + " 23:59:59'";
            sql3 = "SELECT COUNT(*) FROM json_archives WHERE " +
                    " receive_date BETWEEN '" + dateStr + " 00:00:00' AND '" +  dateStr + " 23:59:59'";
        }
        List<Map<String,Object>> dataList = new ArrayList<Map<String,Object>>();
        ResultSet resultSet1 = elasticSearchUtil.findBySql(sql1);
        ResultSet resultSet2 = elasticSearchUtil.findBySql(sql2);
        ResultSet resultSet3 = elasticSearchUtil.findBySql(sql3);
        // Each COUNT(*) query yields exactly one row; advance each cursor onto it.
        resultSet1.next();
        resultSet2.next();
        resultSet3.next();
        Map<String,Object> map = new HashMap<String,Object>();
        map.put("waiting",resultSet1.getObject("COUNT(*)"));
        map.put("successful",resultSet2.getObject("COUNT(*)"));
        map.put("total",resultSet3.getObject("COUNT(*)"));
        dataList.add(map);
        return dataList;
    }
    /**
     * Business analysis: for each event day found in packages received on {@code date},
     * merges total patient counts (getPatientCount) and newly received counts
     * (getPatientCountInc) into one row per event day.
     *
     * @param date    receive date, yyyy-MM-dd
     * @param orgCode optional organization filter
     * @return one map per non-empty event-day bucket
     * @throws Exception on ES query failure
     */
    public List<Map<String,Object>> getArchivesInc(String date,String orgCode) throws Exception{
        long begin = System.currentTimeMillis();
        List<Map<String, Object>> buckets = getIncCount(date, orgCode);
        List<Map<String, Object>> res = new ArrayList<>();
        System.out.println("业务分析日期:" + buckets.size());
        for (Map<String, Object> bucket : buckets) {
            Object eventDay = bucket.get("ed");
            if (eventDay == null) {
                continue; // empty histogram bucket — nothing to enrich
            }
            bucket.putAll(getPatientCount(eventDay.toString(), orgCode));
            bucket.putAll(getPatientCountInc(date, eventDay.toString(), orgCode));
            res.add(bucket);
        }
        System.out.println("业务分析查询耗时:" + (System.currentTimeMillis() - begin) + "ms");
        return res;
    }
    /**
     * Buckets json_archives received on {@code date} by event day, using an ES date
     * histogram on event_date. Each returned map carries key "ed" (yyyy-MM-dd) when
     * the bucket is non-empty; empty buckets yield empty maps.
     *
     * @param date    receive date, yyyy-MM-dd
     * @param orgCode optional organization filter ("null"/empty means all)
     * @return one map per histogram bucket
     * @throws Exception on ES query failure
     */
    public List<Map<String, Object>> getIncCount(String date, String orgCode) throws Exception{
        long begin = System.currentTimeMillis();
        StringBuilder filter = new StringBuilder();
        filter.append("receive_date>=" + date + " 00:00:00;");
        filter.append("receive_date<" + date + " 23:59:59;");
        if (StringUtils.isNotEmpty(orgCode) && !"null".equals(orgCode)){
            filter.append("org_code=" + orgCode);
        }
        // NOTE(review): the pooled TransportClient is never handed back to the pool —
        // confirm whether ElasticSearchPool expects an explicit release.
        TransportClient client = elasticSearchPool.getClient();
        SearchRequestBuilder search = client.prepareSearch("json_archives")
                .setTypes("info")
                .setSearchType(SearchType.DFS_QUERY_THEN_FETCH)
                .setQuery(elasticSearchUtil.getQueryBuilder(filter.toString()));
        DateHistogramBuilder byDay = new DateHistogramBuilder("date");
        byDay.field("event_date");
        byDay.interval(DateHistogramInterval.DAY);
        byDay.format("yyyy-MM-dd");
        byDay.minDocCount(0);
        search.addAggregation(byDay);
        search.setSize(0); // aggregation only — no hits needed
        search.setExplain(true);
        SearchResponse response = search.get();
        Histogram dateAgg = response.getAggregations().get("date");
        List<Map<String, Object>> resultList = new ArrayList<>();
        for (Histogram.Bucket bucket : dateAgg.getBuckets()) {
            Map<String, Object> row = new HashMap<>();
            if (bucket.getDocCount() > 0 && !"".equals(bucket.getKeyAsString())) {
                row.put("ed", bucket.getKeyAsString());
            }
            resultList.add(row);
        }
        System.out.println("业务分析获取数据查询耗时:" + (System.currentTimeMillis() - begin) + "ms");
        return resultList;
    }
    /**
     * Completeness analysis over [startDate, endDate] (yyyy-MM-dd, inclusive).
     * For each day, compares platform visit counts (getPatientCount) against hospital
     * QC-report counts (getPatientCountEs) and uploaded package counts (getPatientNum),
     * producing completion percentages per visit type; overall totals and rates are
     * aggregated across the whole range.
     *
     * Envelop.obj holds the overall totals/rates; Envelop.detailModelList holds one
     * {date -> rate-map} entry per day.
     *
     * @param startDate inclusive start date, yyyy-MM-dd
     * @param endDate   inclusive end date, yyyy-MM-dd
     * @param orgCode   optional organization filter
     * @throws Exception on ES query failure
     */
    public Envelop getArchivesFull(String startDate, String endDate, String orgCode) throws Exception{
        long begin = System.currentTimeMillis();
        Envelop envelop = new Envelop();
        Date start = DateUtil.formatCharDateYMD(startDate);
        Date end = DateUtil.formatCharDateYMD(endDate);
        int day = (int) ((end.getTime() - start.getTime()) / (1000 * 3600 * 24)) + 1;
        Map<String, Object> resMap = new HashMap<>();
        List<Map<String, Map<String, Object>>> res = new ArrayList<>();
        int total = 0, inpatientTotal = 0, oupatientTotal = 0;       // platform
        int totalEs = 0, inpatientTotalEs = 0, oupatientTotalEs = 0; // hospital (QC report)
        int totalSc = 0, inpatientTotalSc = 0, oupatientTotalSc = 0; // uploaded
        for (int i = 0; i < day; i++) {
            Date date = DateUtil.addDate(i, start);
            String dateStr = DateUtil.toString(date);
            // Platform data
            Map<String, Object> platform = getPatientCount(dateStr, orgCode);
            int pTotal = Integer.parseInt(platform.get("total").toString());
            int pIn = Integer.parseInt(platform.get("inpatient_total").toString());
            int pOut = Integer.parseInt(platform.get("oupatient_total").toString());
            total += pTotal;
            inpatientTotal += pIn;
            oupatientTotal += pOut;
            // Hospital data (QC daily report)
            Map<String, Object> hospital = getPatientCountEs(dateStr, orgCode);
            int hTotal = Integer.parseInt(hospital.get("total").toString());
            int hIn = Integer.parseInt(hospital.get("inpatient_total").toString());
            int hOut = Integer.parseInt(hospital.get("oupatient_total").toString());
            totalEs += hTotal;
            inpatientTotalEs += hIn;
            oupatientTotalEs += hOut;
            // Upload data
            Map<String, Object> upload = getPatientNum(dateStr, orgCode);
            int uTotal = Integer.parseInt(upload.get("total").toString());
            int uIn = Integer.parseInt(upload.get("inpatient_total").toString());
            int uOut = Integer.parseInt(upload.get("oupatient_total").toString());
            totalSc += uTotal;
            inpatientTotalSc += uIn;
            oupatientTotalSc += uOut;
            Map<String, Object> rate = new HashMap<>();
            // platform vs hospital
            rate.put("total_rate", percentage(pTotal, hTotal));
            rate.put("inpatient_rate", percentage(pIn, hIn));
            rate.put("oupatient_rate", percentage(pOut, hOut));
            // platform vs upload
            rate.put("total_rate_sc", percentage(pTotal, uTotal));
            rate.put("inpatient_rate_sc", percentage(pIn, uIn));
            rate.put("oupatient_rate_sc", percentage(pOut, uOut));
            Map<String, Map<String, Object>> dayMap = new HashMap<>();
            dayMap.put(dateStr, rate);
            res.add(dayMap);
        }
        // Platform totals
        resMap.put("total", total);
        resMap.put("inpatient_total", inpatientTotal);
        resMap.put("oupatient_total", oupatientTotal);
        // Hospital totals
        resMap.put("total_es", totalEs);
        resMap.put("inpatient_total_es", inpatientTotalEs);
        resMap.put("oupatient_total_es", oupatientTotalEs);
        // Upload totals
        resMap.put("total_sc", totalSc);
        resMap.put("inpatient_total_sc", inpatientTotalSc);
        resMap.put("oupatient_total_sc", oupatientTotalSc);
        // Overall platform-vs-hospital rates
        resMap.put("total_rate", percentage(total, totalEs));
        resMap.put("inpatient_rate", percentage(inpatientTotal, inpatientTotalEs));
        resMap.put("oupatient_rate", percentage(oupatientTotal, oupatientTotalEs));
        // Overall platform-vs-upload rates
        resMap.put("total_rate_sc", percentage(total, totalSc));
        resMap.put("inpatient_rate_sc", percentage(inpatientTotal, inpatientTotalSc));
        resMap.put("oupatient_rate_sc", percentage(oupatientTotal, oupatientTotalSc));
        envelop.setObj(resMap);
        envelop.setDetailModelList(res);
        envelop.setSuccessFlg(true);
        System.out.println("完整性查询耗时:" + (System.currentTimeMillis() - begin) + "ms");
        return envelop;
    }

    /**
     * numerator/denominator as a percentage. Returns a boxed Double when the
     * denominator is non-zero, and the String "0" when it is zero — exactly the
     * value typing the original inline code produced (kept for front-end compatibility).
     */
    private static Object percentage(double numerator, double denominator) {
        if (denominator == 0) {
            return "0";
        }
        return numerator / denominator * 100;
    }
    /**
     * Newly received platform visit counts: distinct visits (event_no) whose visit
     * date is {@code eventDate} AND whose package was received on {@code receiveDate}.
     * pack_type=1 restricts the package type — NOTE(review): assumed to mean clinical
     * archive packages, confirm against the pack_type constants.
     *
     * @param receiveDate receive day, yyyy-MM-dd
     * @param eventDate   visit (event) day, yyyy-MM-dd
     * @param orgCode     optional organization code; empty means all organizations
     * @return map with "inpatient_inc" (event_type=1) and "oupatient_inc" (event_type=0)
     * @throws Exception on ES query failure
     */
    public Map<String,Object> getPatientCountInc(String receiveDate, String eventDate, String orgCode) throws Exception{
        long starttime = System.currentTimeMillis();
        String sql1 ="";
        String sql2 ="";
        // NOTE(review): SQL assembled by string concatenation — inputs assumed trusted.
        if(StringUtils.isNotEmpty(orgCode)){
            sql1 = "SELECT COUNT(DISTINCT event_no) FROM json_archives WHERE event_type=1 AND pack_type=1 AND org_code='"+orgCode+"' AND event_date BETWEEN" +
                    " '" + eventDate + " 00:00:00' AND '" +  eventDate + " 23:59:59' AND receive_date BETWEEN '" + receiveDate + " 00:00:00' AND '" +  receiveDate + " 23:59:59'";
            sql2 = "SELECT COUNT(DISTINCT event_no) FROM json_archives WHERE event_type=0 AND pack_type=1 AND org_code='"+orgCode+"' AND event_date BETWEEN " +
                    "'" + eventDate + " 00:00:00' AND '" +  eventDate + " 23:59:59' AND receive_date BETWEEN '" + receiveDate + " 00:00:00' AND '" +  receiveDate + " 23:59:59'";
        }else{
            sql1 = "SELECT COUNT(DISTINCT event_no) FROM json_archives WHERE event_type=1 AND pack_type=1 AND event_date " +
                    "BETWEEN '" + eventDate + " 00:00:00' AND '" +  eventDate + " 23:59:59' AND receive_date BETWEEN '" + receiveDate + " 00:00:00' AND '" +  receiveDate + " 23:59:59'";
            sql2 = "SELECT COUNT(DISTINCT event_no) FROM json_archives WHERE event_type=0 AND pack_type=1 AND event_date " +
                    "BETWEEN '" + eventDate + " 00:00:00' AND '" +  eventDate + " 23:59:59' AND receive_date BETWEEN '" + receiveDate + " 00:00:00' AND '" +  receiveDate + " 23:59:59'";
        }
        ResultSet resultSet1 = elasticSearchUtil.findBySql(sql1);
        ResultSet resultSet2 = elasticSearchUtil.findBySql(sql2);
        // Single-row aggregate results; position each cursor on its only row.
        resultSet1.next();
        resultSet2.next();
        Map<String,Object> map = new HashMap<>();
        // The count is parsed via Double then truncated to int — presumably because the
        // ES SQL layer returns the aggregate as a floating-point string; verify.
        map.put("inpatient_inc",new Double(resultSet1.getObject("COUNT(DISTINCT event_no)").toString()).intValue());
        map.put("oupatient_inc",new Double(resultSet2.getObject("COUNT(DISTINCT event_no)").toString()).intValue());
        long endtime = System.currentTimeMillis();
        System.out.println("平台就诊人数 新增:" + (endtime - starttime) + "ms");
        return map;
    }
    /**
     * Platform visit counts for one day, de-duplicated: distinct visit numbers
     * (event_no) per visit type, restricted to pack_type=1 packages.
     *
     * @param dateStr visit (event) day, yyyy-MM-dd
     * @param orgCode optional organization code; empty means all organizations
     * @return map with "inpatient_total" (event_type=1), "oupatient_total"
     *         (event_type=0) and "total" (all event types)
     * @throws Exception on ES query failure
     */
    public Map<String,Object> getPatientCount(String dateStr, String orgCode) throws Exception{
        long starttime = System.currentTimeMillis();
        String sql1 ="";
        String sql2 ="";
        String sql3 ="";
        // NOTE(review): SQL assembled by string concatenation — inputs assumed trusted.
        if(StringUtils.isNotEmpty(orgCode)){
            sql1 = "SELECT COUNT(DISTINCT event_no) FROM json_archives WHERE event_type=1 AND pack_type=1 AND org_code='"+orgCode+"' AND event_date BETWEEN" +
                    " '" + dateStr + " 00:00:00' AND '" +  dateStr + " 23:59:59'";
            sql2 = "SELECT COUNT(DISTINCT event_no) FROM json_archives WHERE event_type=0 AND pack_type=1 AND org_code='"+orgCode+"' AND event_date BETWEEN " +
                    "'" + dateStr + " 00:00:00' AND '" +  dateStr + " 23:59:59'";
            sql3 = "SELECT COUNT(DISTINCT event_no) FROM json_archives WHERE pack_type=1 AND org_code='"+orgCode+"' AND event_date BETWEEN " +
                    "'" + dateStr + " 00:00:00' AND '" +  dateStr + " 23:59:59'";
        }else{
            sql1 = "SELECT COUNT(DISTINCT event_no) FROM json_archives WHERE event_type=1 AND pack_type=1 AND event_date " +
                    "BETWEEN '" + dateStr + " 00:00:00' AND '" +  dateStr + " 23:59:59'";
            sql2 = "SELECT COUNT(DISTINCT event_no) FROM json_archives WHERE event_type=0 AND pack_type=1 AND event_date " +
                    "BETWEEN '" + dateStr + " 00:00:00' AND '" +  dateStr + " 23:59:59'";
            sql3 = "SELECT COUNT(DISTINCT event_no) FROM json_archives WHERE pack_type=1 AND event_date " +
                    "BETWEEN '" + dateStr + " 00:00:00' AND '" +  dateStr + " 23:59:59'";
        }
        ResultSet resultSet1 = elasticSearchUtil.findBySql(sql1);
        ResultSet resultSet2 = elasticSearchUtil.findBySql(sql2);
        ResultSet resultSet3 = elasticSearchUtil.findBySql(sql3);
        // Single-row aggregate results; position each cursor on its only row.
        resultSet1.next();
        resultSet2.next();
        resultSet3.next();
        Map<String,Object> map = new HashMap<>();
        // Counts parsed via Double then truncated to int (ES SQL aggregate formatting).
        map.put("inpatient_total",new Double(resultSet1.getObject("COUNT(DISTINCT event_no)").toString()).intValue());
        map.put("oupatient_total",new Double(resultSet2.getObject("COUNT(DISTINCT event_no)").toString()).intValue());
        map.put("total",new Double(resultSet3.getObject("COUNT(DISTINCT event_no)").toString()).intValue());
        long endtime = System.currentTimeMillis();
        System.out.println("平台就诊人数 去重复:" + (endtime - starttime) + "ms");
        return map;
    }
    /**
     * Platform visit package counts for one day (NOT de-duplicated — plain COUNT(*),
     * unlike getPatientCount which counts distinct event_no).
     *
     * @param dateStr visit (event) day, yyyy-MM-dd
     * @param orgCode optional organization code; empty means all organizations
     * @return map with "inpatient_total" (event_type=1), "oupatient_total"
     *         (event_type=0) and "total" (all event types)
     * @throws Exception on ES query failure
     */
    public Map<String,Object> getPatientNum(String dateStr, String orgCode) throws Exception{
        long starttime = System.currentTimeMillis();
        String sql1 ="";
        String sql2 ="";
        String sql3 ="";
        // NOTE(review): SQL assembled by string concatenation — inputs assumed trusted.
        if(StringUtils.isNotEmpty(orgCode)){
            sql1 = "SELECT COUNT(*) FROM json_archives WHERE event_type=1 AND pack_type=1  AND org_code='"+orgCode+"' AND event_date " +
                    "BETWEEN '" + dateStr + " 00:00:00' AND '" +  dateStr + " 23:59:59'";
            sql2 = "SELECT COUNT(*) FROM json_archives WHERE event_type=0 AND pack_type=1 AND org_code='"+orgCode+"' AND event_date " +
                    "BETWEEN '" + dateStr + " 00:00:00' AND '" +  dateStr + " 23:59:59'";
            sql3 = "SELECT COUNT(*) FROM json_archives WHERE pack_type=1 AND org_code='"+orgCode+"' AND event_date " +
                    "BETWEEN '" + dateStr + " 00:00:00' AND '" +  dateStr + " 23:59:59'";
        }else{
            sql1 = "SELECT COUNT(*) FROM json_archives WHERE event_type=1 AND pack_type=1 AND event_date " +
                    "BETWEEN '" + dateStr + " 00:00:00' AND '" +  dateStr + " 23:59:59'";
            sql2 = "SELECT COUNT(*) FROM json_archives WHERE event_type=0 AND pack_type=1 AND event_date " +
                    "BETWEEN '" + dateStr + " 00:00:00' AND '" +  dateStr + " 23:59:59'";
            sql3 = "SELECT COUNT(*) FROM json_archives WHERE pack_type=1 AND event_date " +
                    "BETWEEN '" + dateStr + " 00:00:00' AND '" +  dateStr + " 23:59:59'";
        }
        ResultSet resultSet1 = elasticSearchUtil.findBySql(sql1);
        ResultSet resultSet2 = elasticSearchUtil.findBySql(sql2);
        ResultSet resultSet3 = elasticSearchUtil.findBySql(sql3);
        // Single-row aggregate results; position each cursor on its only row.
        resultSet1.next();
        resultSet2.next();
        resultSet3.next();
        Map<String,Object> map = new HashMap<String,Object>();
        // Counts parsed via Double then truncated to int (ES SQL aggregate formatting).
        map.put("inpatient_total",new Double(resultSet1.getObject("COUNT(*)").toString()).intValue());
        map.put("oupatient_total", new Double(resultSet2.getObject("COUNT(*)").toString()).intValue());
        map.put("total",new Double(resultSet3.getObject("COUNT(*)").toString()).intValue());
        long endtime = System.currentTimeMillis();
        System.out.println("平台就诊人数查询耗时:" + (endtime - starttime) + "ms");
        return map;
    }
    /**
     * 从质控包获取数据 — hospital-reported visit totals for one day, read from the
     * qc/daily_report index (the figures hospitals upload in their QC packages).
     *
     * @param date    day in yyyy-MM-dd format
     * @param orgCode optional organization filter; blank/empty means all organizations
     * @return map with "total" (HSI07_01_001), "inpatient_total" (HSI07_01_012)
     *         and "oupatient_total" (HSI07_01_002); zeros on query failure
     */
    public Map<String,Object> getPatientCountEs(String date, String orgCode) {
        long starttime = System.currentTimeMillis();
        Map<String,Object> resMap = new HashMap<String,Object>();
        int total = 0;
        int inpatientTotal = 0;
        int oupatientTotal = 0;
        try {
            StringBuilder filter = new StringBuilder();
            filter.append("event_date=" + date + ";");
            if (StringUtils.isNotEmpty(orgCode)) {
                filter.append("org_code?" + orgCode);
            }
            List<Map<String, Object>> res = elasticSearchUtil.list("qc","daily_report", filter.toString());
            if (res != null && res.size() > 0) {
                for (Map<String, Object> report : res) {
                    // Fix: the original called report.get(key).toString() directly, so a
                    // single report missing one metric threw NPE and zeroed the whole
                    // day via the catch block. Absent/unparseable values now count as 0.
                    total += safeInt(report.get("HSI07_01_001"));
                    inpatientTotal += safeInt(report.get("HSI07_01_012"));
                    oupatientTotal += safeInt(report.get("HSI07_01_002"));
                }
            }
            resMap.put("total", total);
            resMap.put("inpatient_total", inpatientTotal);
            resMap.put("oupatient_total", oupatientTotal);
            long endtime = System.currentTimeMillis();
            System.out.println("从质控包获取数据查询耗时:" + (endtime - starttime) + "ms");
            return resMap;
        } catch (Exception e) {
            e.printStackTrace();
            resMap.put("total", 0);
            resMap.put("inpatient_total", 0);
            resMap.put("oupatient_total", 0);
            return resMap;
        }
    }
    /** Parses an ES field value as an int; null or unparseable values count as 0. */
    private static int safeInt(Object value) {
        if (value == null) {
            return 0;
        }
        try {
            // Double.parseDouble tolerates ES decimal renderings such as "12.0".
            return (int) Double.parseDouble(value.toString());
        } catch (NumberFormatException e) {
            return 0;
        }
    }
    /**
     * 及时性分析 — timeliness analysis over a date range.
     *
     * For each day in the range, compares the number of visits whose packages
     * reached the platform inside the timeliness window (getPatientCountTime)
     * against the totals hospitals reported in their daily QC reports
     * (getPatientCountEs), and emits per-day and whole-range percentages.
     *
     * @param startDate range start, yyyy-MM-dd (inclusive)
     * @param endDate   range end, yyyy-MM-dd (inclusive)
     * @param orgCode   optional organization filter; blank/empty means all organizations
     * @return Envelop whose obj holds the range totals and rates, and whose
     *         detailModelList holds one {date -> rates} map per day
     * @throws Exception propagated from the underlying ElasticSearch queries
     */
    public Envelop getArchivesTime(String startDate, String endDate, String orgCode) throws Exception{
        long starttime = System.currentTimeMillis();
        Envelop envelop = new Envelop();
        Date start = DateUtil.formatCharDateYMD(startDate);
        Date end = DateUtil.formatCharDateYMD(endDate);
        // Number of days in the inclusive range.
        int day = (int) ((end.getTime() - start.getTime()) / (1000*3600*24))+1;
        Map<String,Object> resMap = new HashMap<String,Object>();
        List<Map<String,Map<String,Object>>> res = new ArrayList<Map<String,Map<String,Object>>>();
        // Platform-side accumulators (packages received in time).
        int total=0;
        int inpatient_total=0;
        int oupatient_total=0;
        // Hospital-reported accumulators (from the qc/daily_report index).
        int total_es=0;
        int inpatient_total_es=0;
        int oupatient_total_es=0;
        for(int i =0;i<day;i++){
            Date date = DateUtil.addDate(i,start);
            Map<String,Map<String,Object>> map = new HashMap<String,Map<String,Object>>();
            Map<String,Object> rate = new HashMap<String,Object>();
            //平台数据 — platform counts received inside the timeliness window for this day
            Map<String,Object> list = getPatientCountTime(DateUtil.toString(date), orgCode);
            total+=Integer.parseInt(list.get("total").toString());
            inpatient_total+=Integer.parseInt(list.get("inpatient_total").toString());
            oupatient_total+=Integer.parseInt(list.get("oupatient_total").toString());
            //医院数据 — hospital-reported counts for this day
            Map<String,Object> map2 = getPatientCountEs(DateUtil.toString(date), orgCode);
            total_es+=Integer.parseInt(map2.get("total").toString());
            inpatient_total_es+=Integer.parseInt(map2.get("inpatient_total").toString());
            oupatient_total_es+=Integer.parseInt(map2.get("oupatient_total").toString());
            //平台与医院 — per-day platform/hospital percentages; the String "0" marks an empty denominator
            // NOTE(review): the daily total_rate numerator is inpatient+oupatient rather
            // than the "total" figure used in the range summary below — confirm this
            // asymmetry is intentional.
            if(Integer.parseInt(map2.get("total").toString())!=0){
                rate.put("total_rate", ((double)(Integer.parseInt(list.get("inpatient_total").toString())+Integer.parseInt(list.get("oupatient_total").toString())) / Double.parseDouble(map2.get("total").toString()))*100);
            }else{
                rate.put("total_rate", "0");
            }
            if(Integer.parseInt(map2.get("inpatient_total").toString())!=0){
                rate.put("inpatient_rate", (Double.parseDouble(list.get("inpatient_total").toString()) / Double.parseDouble(map2.get("inpatient_total").toString()))*100);
            }else{
                rate.put("inpatient_rate", "0");
            }
            if(Integer.parseInt(map2.get("oupatient_total").toString())!=0){
                rate.put("oupatient_rate", (Double.parseDouble(list.get("oupatient_total").toString()) / Double.parseDouble(map2.get("oupatient_total").toString()))*100);
            }else{
                rate.put("oupatient_rate", "0");
            }
            map.put(DateUtil.toString(date),rate);
            res.add(map);
        }
        //平台总数 — platform totals over the whole range
        resMap.put("total",total);
        resMap.put("inpatient_total",inpatient_total);
        resMap.put("oupatient_total",oupatient_total);
        //医院总数 — hospital-reported totals over the whole range
        resMap.put("total_es",total_es);
        resMap.put("inpatient_total_es",inpatient_total_es);
        resMap.put("oupatient_total_es",oupatient_total_es);
        //平台与医院比例 — range-level percentages; the String "0" marks an empty denominator
        if(total_es!=0){
            resMap.put("total_rate", ((double)total / (double)total_es)*100);
        }else{
            resMap.put("total_rate", "0");
        }
        if(inpatient_total_es!=0){
            resMap.put("inpatient_rate", ((double)inpatient_total / (double)inpatient_total_es)*100);
        }else{
            resMap.put("inpatient_rate", "0");
        }
        if(oupatient_total_es!=0){
            resMap.put("oupatient_rate", ((double)oupatient_total / (double)oupatient_total_es)*100);
        }else{
            resMap.put("oupatient_rate", "0");
        }
        envelop.setObj(resMap);
        envelop.setDetailModelList(res);
        envelop.setSuccessFlg(true);
        long endtime = System.currentTimeMillis();
        System.out.println("及时性查询耗时:" + (endtime - starttime) + "ms");
        return envelop;
    }
    /**
     * 及时性获取数据 — distinct visit counts whose packages arrived inside the
     * timeliness window.
     *
     * Window sizes: inpatient (event_type=1) and the overall total must be
     * received within 7 days of the visit date (end2); outpatient (event_type=0)
     * within 2 days (end1).
     *
     * @param date    visit day, yyyy-MM-dd
     * @param orgCode optional organization filter; blank/empty means all organizations
     * @return map with "inpatient_total", "oupatient_total" and "total"
     * @throws Exception propagated from the ElasticSearch SQL query
     */
    public Map<String,Object> getPatientCountTime(String date, String orgCode) throws Exception{
        long starttime = System.currentTimeMillis();
        Date begin = DateUtil.parseDate(date, DateUtil.DEFAULT_DATE_YMD_FORMAT);
        // Receive-window upper bounds: 2 days for outpatient, 7 days for inpatient/total.
        Date end1 = DateUtil.addDate(2, begin);
        Date end2 = DateUtil.addDate(7, begin);
        String sql1 ="";
        String sql2 ="";
        String sql3 ="";
        if(StringUtils.isNotEmpty(orgCode)){
            sql1 = "SELECT COUNT(DISTINCT event_no) FROM json_archives WHERE event_type=1 AND pack_type=1 AND org_code='"+orgCode+"' AND event_date BETWEEN" +
                    " '" + date + " 00:00:00' AND '" +  date + " 23:59:59' AND receive_date BETWEEN"+
                    " '" + date + " 00:00:00' AND '" +  DateUtil.toString(end2) + " 23:59:59'";
            sql2 = "SELECT COUNT(DISTINCT event_no) FROM json_archives WHERE event_type=0 AND pack_type=1 AND org_code='"+orgCode+"' AND event_date BETWEEN " +
                    " '" + date + " 00:00:00' AND '" +  date + " 23:59:59' AND receive_date BETWEEN"+
                    " '" + date + " 00:00:00' AND '" +  DateUtil.toString(end1) + " 23:59:59'";
            sql3 = "SELECT COUNT(DISTINCT event_no) FROM json_archives WHERE pack_type=1 AND org_code='"+orgCode+"' AND event_date BETWEEN " +
                    " '" + date + " 00:00:00' AND '" +  date + " 23:59:59' AND receive_date BETWEEN"+
                    " '" + date + " 00:00:00' AND '" +  DateUtil.toString(end2) + " 23:59:59'";
        }else{
            sql1 = "SELECT COUNT(DISTINCT event_no) FROM json_archives WHERE event_type=1 AND pack_type=1 AND event_date BETWEEN" +
                    " '" + date + " 00:00:00' AND '" +  date + " 23:59:59' AND receive_date BETWEEN"+
                    " '" + date + " 00:00:00' AND '" +  DateUtil.toString(end2) + " 23:59:59' ";
            sql2 = "SELECT COUNT(DISTINCT event_no) FROM json_archives WHERE event_type=0 AND pack_type=1 AND event_date BETWEEN " +
                    " '" + date + " 00:00:00' AND '" +  date + " 23:59:59' AND receive_date BETWEEN"+
                    " '" + date + " 00:00:00' AND '" +  DateUtil.toString(end1) + " 23:59:59' ";
            sql3 = "SELECT COUNT(DISTINCT event_no) FROM json_archives WHERE  pack_type=1 AND event_date BETWEEN " +
                    " '" + date + " 00:00:00' AND '" +  date + " 23:59:59' AND receive_date BETWEEN"+
                    " '" + date + " 00:00:00' AND '" +  DateUtil.toString(end2) + " 23:59:59' ";
        }
        ResultSet resultSet1 = elasticSearchUtil.findBySql(sql1);
        ResultSet resultSet2 = elasticSearchUtil.findBySql(sql2);
        ResultSet resultSet3 = elasticSearchUtil.findBySql(sql3);
        Map<String,Object> map = new HashMap<>();
        resultSet1.next();
        resultSet2.next();
        resultSet3.next();
        // ES renders counts as decimals (e.g. "12.0"); truncate to int.
        map.put("inpatient_total",new Double(resultSet1.getObject("COUNT(DISTINCT event_no)").toString()).intValue());
        map.put("oupatient_total",new Double(resultSet2.getObject("COUNT(DISTINCT event_no)").toString()).intValue());
        map.put("total",new Double(resultSet3.getObject("COUNT(DISTINCT event_no)").toString()).intValue());
        long endtime = System.currentTimeMillis();
        System.out.println("及时性数据查询耗时:" + (endtime - starttime) + "ms");
        return map;
    }
    /**
     * Accuracy report over a date range: per-day faulty data-element counts plus
     * error breakdowns by QC rule and by metadata element.
     *
     * @param startDate range start, yyyy-MM-dd (inclusive)
     * @param endDate   range end, yyyy-MM-dd (inclusive)
     * @param orgCode   optional organization filter; blank/empty means all organizations
     * @return Envelop whose obj holds "dataSet" (per-day counts), "errorCode"
     *         (by rule) and "code" (top metadata elements); failure flag on error
     */
    public Envelop getArchivesRight(String startDate, String endDate, String orgCode) {
        Envelop envelop = new Envelop();
        try {
            Date rangeStart = DateUtil.formatCharDateYMD(startDate);
            Date rangeEnd = DateUtil.formatCharDateYMD(endDate);
            // Days in the inclusive range.
            int dayCount = (int) ((rangeEnd.getTime() - rangeStart.getTime()) / (1000 * 3600 * 24)) + 1;
            List<Map<String, Object>> dailyCounts = new ArrayList<Map<String, Object>>();
            for (int offset = 0; offset < dayCount; offset++) {
                Date day = DateUtil.addDate(offset, rangeStart);
                Map<String, Object> entry = new HashMap<String, Object>();
                entry.put(DateUtil.toString(day), getErrorMetadata(DateUtil.toString(day), orgCode));
                dailyCounts.add(entry);
            }
            Map<String, Object> result = new HashMap<String, Object>();
            result.put("dataSet", dailyCounts);
            result.put("errorCode", getErrorCode(startDate, endDate, orgCode));
            result.put("code", getCode(startDate, endDate, orgCode));
            envelop.setObj(result);
            envelop.setSuccessFlg(true);
        } catch (Exception e) {
            e.printStackTrace();
            envelop.setSuccessFlg(false);
            envelop.setErrorMsg(e.getMessage());
        }
        return envelop;
    }
    /**
     * 获取数据集数量和环比 — faulty metadata count for one day plus the
     * day-over-day change rate.
     *
     * @param date    day in yyyy-MM-dd format
     * @param orgCode optional organization filter; blank/empty means all organizations
     * @return map with "count" (today's faulty metadata count) and "rate"
     *         (percentage change versus the previous day; 0 when yesterday had none
     *         or on failure)
     */
    public Map<String,Object> getErrorMetadata(String date, String orgCode) {
        Map<String,Object> map = new HashMap<String,Object>();
        try{
            Date today = DateUtil.parseDate(date, DateUtil.DEFAULT_DATE_YMD_FORMAT);
            // Fix: the org-filtered branch previously concatenated the raw
            // java.util.Date ("Wed Jan 02 ..."); format it like the other branch.
            String yesterday = DateUtil.toString(DateUtil.addDate(-1, today));
            List<String> fields = new ArrayList<String>();
            fields.add("count");
            // qc_step 1/2 marks records flagged by the quality-control checks.
            // Fix: the original org-filtered queries were missing the "and" between
            // the org_code and receive_date predicates.
            String orgFilter = StringUtils.isNotEmpty(orgCode) ? " org_code='" + orgCode + "' and" : "";
            String sql1 = "select count(metadata) as count from json_archives_qc/qc_metadata_info where" + orgFilter +
                    " receive_date>='" + date + " 00:00:00' and receive_date<'" + date + " 23:59:59' and (qc_step=1 or qc_step=2)";
            String sql2 = "select count(metadata) as count from json_archives_qc/qc_metadata_info where" + orgFilter +
                    " receive_date>='" + yesterday + " 00:00:00' and receive_date<'" + yesterday + " 23:59:59' and (qc_step=1 or qc_step=2)";
            double num1 = 0;
            double num2 = 0;
            List<Map<String, Object>> list1 = elasticSearchUtil.findBySql(fields, sql1);
            if (list1 != null && list1.size() > 0) {
                // Parse via toString: a blind (double) cast fails when ES returns
                // the count as an Integer/Long.
                num1 = Double.parseDouble(list1.get(0).get("count").toString());
            }
            List<Map<String, Object>> list2 = elasticSearchUtil.findBySql(fields, sql2);
            if (list2 != null && list2.size() > 0) {
                num2 = Double.parseDouble(list2.get(0).get("count").toString());
            }
            map.put("count", num1);
            if (num2 != 0) {
                map.put("rate", (num1 - num2) / num2 * 100);
            } else {
                map.put("rate", 0);
            }
        } catch (Exception e) {
            e.printStackTrace();
            map.put("count", 0);
            map.put("rate", 0);
        }
        return map;
    }
    /**
     * 错误数据按规则分类占比 — faulty metadata counts grouped by QC error type
     * over the given date range.
     *
     * @param startDate range start, yyyy-MM-dd (inclusive)
     * @param endDate   range end, yyyy-MM-dd (inclusive)
     * @param orgCode   optional organization filter; blank/empty means all organizations
     * @return rows with "qc_error_type" and "count"; empty list on failure
     */
    public List<Map<String,Object>> getErrorCode(String startDate, String endDate, String orgCode){
        List<Map<String, Object>> list = new ArrayList<Map<String, Object>>();
        try {
            List<String> fields = new ArrayList<String>();
            fields.add("qc_error_type");
            fields.add("count");
            // Fix: the org-filtered query was missing the "and" between the
            // org_code and receive_date predicates. The unused today/end locals
            // of the original were dropped.
            String orgFilter = StringUtils.isNotEmpty(orgCode) ? " org_code='" + orgCode + "' and" : "";
            String sql = "select qc_error_type, count(qc_error_type) as count from json_archives_qc/qc_metadata_info where" + orgFilter +
                    " receive_date>='" + startDate + " 00:00:00' and receive_date<'" + endDate + " 23:59:59' and (qc_step=1 or qc_step=2) group by qc_error_type";
            list = elasticSearchUtil.findBySql(fields, sql);
        } catch (Exception e){
            e.printStackTrace();
        }
        return list;
    }
    /**
     * 错误数据按数据元分类占比 — the ten metadata elements with the most QC
     * errors over the given date range.
     *
     * @param startDate range start, yyyy-MM-dd (inclusive)
     * @param endDate   range end, yyyy-MM-dd (inclusive)
     * @param orgCode   optional organization filter; blank/empty means all organizations
     * @return up to 10 rows with "metadata" and "count", ordered by count
     *         descending; empty list on failure
     */
    public List<Map<String,Object>> getCode(String startDate, String endDate, String orgCode){
        List<Map<String, Object>> list = new ArrayList<Map<String, Object>>();
        try {
            List<String> fields = new ArrayList<String>();
            fields.add("metadata");
            fields.add("count");
            // Unused today/end locals of the original were dropped; the two SQL
            // branches are unified via an optional org filter.
            String orgFilter = StringUtils.isNotEmpty(orgCode) ? " and org_code='" + orgCode + "'" : "";
            String sql = "select metadata , count(metadata) as count from json_archives_qc/qc_metadata_info " +
                    " where receive_date>='" + startDate + " 00:00:00' and receive_date<'" + endDate + " 23:59:59'" +
                    " and (qc_step=1 or qc_step=2)" + orgFilter + " group by metadata order by count desc ";
            list = elasticSearchUtil.findBySql(fields, sql);
            // Keep only the 10 most frequent faulty elements.
            if (list.size() > 10) {
                list = list.subList(0, 10);
            }
        } catch (Exception e){
            e.printStackTrace();
        }
        return list;
    }
    /**
     * app接口 — aggregate daily statistics for the app dashboard: timeliness and
     * completeness rates, archive/error counts and all-time patient totals.
     *
     * @param date day in yyyy-MM-dd format
     * @return Envelop whose obj holds time_rate, full_rate, archive_count,
     *         error_count plus the totals from getPatientTotal; rates are
     *         "0.00%" on failure or when the hospital-reported total is 0
     */
    public Envelop getStasticByDay(String date){
        Envelop envelop = new Envelop();
        Map<String,Object> resMap = new HashMap<>();
        Date begin = DateUtil.parseDate(date, DateUtil.DEFAULT_DATE_YMD_FORMAT);
        // Timeliness window: received within 7 days of the visit date.
        Date end = DateUtil.addDate(7, begin);
        try {
            // sql1: distinct visits of the day on the platform (completeness numerator).
            String sql1 = "SELECT COUNT(DISTINCT event_no) FROM json_archives WHERE  pack_type=1 AND event_date " +
                    "BETWEEN '" + date + " 00:00:00' AND '" + date + " 23:59:59'";
            // sql2: distinct visits received within the 7-day window (timeliness numerator).
            String sql2 = "SELECT COUNT(DISTINCT event_no) FROM json_archives WHERE pack_type=1 AND event_date BETWEEN " +
                    " '" + date + " 00:00:00' AND '" +  date + " 23:59:59' AND receive_date BETWEEN"+
                    " '" + date + " 00:00:00' AND '" +  DateUtil.toString(end) + " 23:59:59' ";
            // sql3: raw package rows received on the day.
            String sql3 = "SELECT COUNT(*) FROM json_archives WHERE pack_type=1 AND" +
                    " receive_date BETWEEN '" + date + " 00:00:00' AND '" +  date + " 23:59:59'";
            // sql4: metadata records flagged by QC (qc_step 1 or 2) on the day.
            String sql4 = "select count(metadata) as count from json_archives_qc/qc_metadata_info where " +
                    " receive_date>='"+date+" 00:00:00' and receive_date<'" + date + " 23:59:59' and (qc_step=1 or qc_step=2)";
            ResultSet resultSet1 = elasticSearchUtil.findBySql(sql1);
            resultSet1.next();
            ResultSet resultSet2 = elasticSearchUtil.findBySql(sql2);
            resultSet2.next();
            ResultSet resultSet3= elasticSearchUtil.findBySql(sql3);
            resultSet3.next();
            ResultSet resultSet4= elasticSearchUtil.findBySql(sql4);
            resultSet4.next();
            Map<String,Object> map = getPatientCountEs(date,"");
            //平台档案数据 — platform archive count (distinct visits)
            int count_pt = new Double(resultSet1.getObject("COUNT(DISTINCT event_no)").toString()).intValue();
            //质控包上传数据 — hospital-reported total from the QC daily reports
            int count_zk = Integer.parseInt(map.get("total").toString());
            //及时性数据 — visits received within the timeliness window
            int count_js =  new Double(resultSet2.getObject("COUNT(DISTINCT event_no)").toString()).intValue();
            if(count_zk!=0){
                resMap.put("time_rate", String.format("%.2f", ((double)count_js / (double)count_zk)*100)+"%");
                resMap.put("full_rate", String.format("%.2f", ((double)count_pt / (double)count_zk)*100)+"%");
            }else{
                resMap.put("time_rate", "0.00%");
                resMap.put("full_rate", "0.00%");
            }
            resMap.put("archive_count",resultSet3.getObject("COUNT(*)"));
            resMap.put("error_count",resultSet4.getObject("count"));
            // All-time totals across every organization.
            resMap.putAll(getPatientTotal(""));
            envelop.setSuccessFlg(true);
        }catch (Exception e){
            resMap.put("time_rate", "0.00%");
            resMap.put("full_rate", "0.00%");
            envelop.setSuccessFlg(false);
            e.printStackTrace();
        }
        envelop.setObj(resMap);
        return envelop;
    }
    /**
     * 平台就诊人数 总数 — all-time distinct visit counts on the platform.
     *
     * @param orgCode optional organization filter; blank/empty means all organizations
     * @return map with "inpatient_total" (event_type=1), "oupatient_total"
     *         (event_type=0) and "archive_total" (event_type 0/1/2)
     * @throws Exception propagated from the ElasticSearch SQL query
     */
    public Map<String,Object> getPatientTotal(String orgCode) throws Exception{
        String sql1;
        String sql2;
        String sql3;
        if (StringUtils.isNotEmpty(orgCode)) {
            sql1 = "SELECT COUNT(DISTINCT event_no) FROM json_archives WHERE event_type=1 AND pack_type=1 AND org_code='"+orgCode+"'";
            sql2 = "SELECT COUNT(DISTINCT event_no) FROM json_archives WHERE event_type=0 AND pack_type=1 AND org_code='"+orgCode+"'";
            // Fix: the original sql3 produced invalid SQL — "pack_type=1 AND  AND
            // (event_type=...) org_code='...'" (doubled AND, missing AND before org_code).
            sql3 = "SELECT COUNT(DISTINCT event_no) FROM json_archives WHERE pack_type=1" +
                    " AND (event_type=0 or event_type=1 or event_type=2) AND org_code='"+orgCode+"'";
        } else {
            sql1 = "SELECT COUNT(DISTINCT event_no) FROM json_archives WHERE event_type=1 AND pack_type=1";
            sql2 = "SELECT COUNT(DISTINCT event_no) FROM json_archives WHERE event_type=0 AND pack_type=1";
            sql3 = "SELECT COUNT(DISTINCT event_no) FROM json_archives WHERE pack_type=1 AND (event_type=0 or event_type=1 or event_type=2)";
        }
        ResultSet resultSet1 = elasticSearchUtil.findBySql(sql1);
        ResultSet resultSet2 = elasticSearchUtil.findBySql(sql2);
        ResultSet resultSet3 = elasticSearchUtil.findBySql(sql3);
        resultSet1.next();
        resultSet2.next();
        resultSet3.next();
        Map<String,Object> map = new HashMap<>();
        // ES renders counts as decimals (e.g. "12.0"); parse and truncate.
        // Double.parseDouble replaces the deprecated new Double(...) boxing.
        map.put("inpatient_total", (int) Double.parseDouble(resultSet1.getObject("COUNT(DISTINCT event_no)").toString()));
        map.put("oupatient_total", (int) Double.parseDouble(resultSet2.getObject("COUNT(DISTINCT event_no)").toString()));
        map.put("archive_total", (int) Double.parseDouble(resultSet3.getObject("COUNT(DISTINCT event_no)").toString()));
        return map;
    }
    /**
     * 获取采集数据 — per-area receive/analyze/upload package counts for the
     * current city over the given date range.
     *
     * @param startDate range start, yyyy-MM-dd (inclusive)
     * @param endDate   range end, yyyy-MM-dd (inclusive)
     * @return Envelop whose obj is the area list, each entry enriched with
     *         receiveNum, analyzeNum and uploadNum
     * @throws Exception propagated from the underlying queries
     */
    public Envelop getReceiveNum(String startDate, String endDate) throws Exception {
        // Child areas of the configured city; counts are attached to each row in place.
        List<Map<String, Object>> areas = getAreaList(getCurrentCity());
        for (Map<String, Object> area : areas) {
            String areaId = area.get("ID") + "";
            area.put("receiveNum", getReceive(startDate, endDate, areaId));
            area.put("analyzeNum", getAnalyzer(startDate, endDate, areaId));
            area.put("uploadNum", getUpload(startDate, endDate, areaId));
        }
        Envelop envelop = new Envelop();
        envelop.setObj(areas);
        envelop.setSuccessFlg(true);
        return envelop;
    }
    /**
     * Looks up the configured city code from the CITY entry of the system dict.
     * Falls back to "361100" when no entry exists.
     */
    private String getCurrentCity(){
        String sql = "SELECT entry.value FROM system_dict_entries entry " +
                "INNER JOIN system_dicts dict ON dict.id = entry.dict_id " +
                "WHERE entry.code = 'CITY'";
        List<Map<String, Object>> rows = jdbcTemplate.queryForList(sql);
        if (rows == null || rows.isEmpty()) {
            return "361100";
        }
        return rows.get(0).get("VALUE") + "";
    }
    /**
     * Lists the child administrative areas (id, name) of the given city.
     * Fix: uses a bind parameter instead of string concatenation, closing the
     * SQL-injection hole and quoting issues of the original.
     */
    private List<Map<String,Object>> getAreaList(String city){
        return jdbcTemplate.queryForList("select id,name from address_dict where pid = ?", city);
    }
    /** Counts packages received in the range for one area (json_archives/info). */
    private long getReceive(String startDate, String endDate, String area) {
        String filters = "pack_type=1;"
                + "receive_date>=" + startDate + " 00:00:00;"
                + "receive_date<" + endDate + " 23:59:59;"
                + "org_area=" + area + ";";
        return elasticSearchUtil.count("json_archives", "info", filters);
    }
    /** Counts fully parsed packages (archive_status=3) in the range for one area. */
    private long getAnalyzer(String startDate, String endDate, String area) {
        String filters = "archive_status=3;pack_type=1;"
                + "parse_date>=" + startDate + " 00:00:00;"
                + "parse_date<" + endDate + " 23:59:59;"
                + "org_area=" + area + ";";
        return elasticSearchUtil.count("json_archives", "info", filters);
    }
    /** Counts uploaded packages (upload/info index) in the range for one area. */
    private long getUpload(String startDate, String endDate, String area) {
        String filters = "archive_status=3;pack_type=1;"
                + "create_date>=" + startDate + " 00:00:00;"
                + "create_date<" + endDate + " 23:59:59;"
                + "org_area=" + area + ";";
        return elasticSearchUtil.count("upload", "info", filters);
    }
}

+ 182 - 0
src/main/java/com/yihu/ehr/analyze/service/pack/PackageAnalyzeService.java

@ -0,0 +1,182 @@
package com.yihu.ehr.analyze.service.pack;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.yihu.ehr.analyze.feign.PackageMgrClient;
import com.yihu.ehr.analyze.model.ZipPackage;
import com.yihu.ehr.analyze.service.qc.PackageQcService;
import com.yihu.ehr.analyze.service.qc.StatusReportService;
import com.yihu.ehr.elasticsearch.ElasticSearchUtil;
import com.yihu.ehr.model.packs.EsSimplePackage;
import com.yihu.ehr.profile.AnalyzeStatus;
import com.yihu.ehr.profile.ProfileType;
import com.yihu.ehr.profile.exception.*;
import com.yihu.ehr.profile.queue.RedisCollection;
import net.lingala.zip4j.exception.ZipException;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.stereotype.Service;
import java.io.Serializable;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * 档案质控引擎 (archive quality-control engine).
 *
 * <p>Pops uploaded archive packages off the Redis analyze queues, quality-checks
 * standard packages, persists the QC results to ElasticSearch and hands the
 * package on to the resolve (parsing) queue.
 *
 * @author Sand
 * @version 1.0
 * @created 2015.09.09 15:04
 */
@Service
public class PackageAnalyzeService {
    private final static Logger logger = LoggerFactory.getLogger(PackageAnalyzeService.class);
    // ES index and types that hold the QC results.
    private static final String INDEX = "json_archives_qc";
    private static final String QC_DATASET_INFO = "qc_dataset_info";
    private static final String QC_METADATA_INFO = "qc_metadata_info";
    @Autowired
    protected ObjectMapper objectMapper;
    @Autowired
    private PackageQcService packageQcService;
    @Autowired
    private ElasticSearchUtil elasticSearchUtil;
    @Autowired
    private StatusReportService statusReportService;
    @Autowired
    private RedisTemplate<String, Serializable> redisTemplate;
    /**
     * analyze 档案分析服务:
     * 1. pop an archive package from the queue
     * 2. quality-check the package
     * 3. persist the QC results
     *
     * @author Airhead
     * @created 2018.01.15
     */
    public void analyze() {
        // Prefer the main queue; fall back to the vice (secondary) set.
        boolean main = true;
        Serializable serializable = redisTemplate.opsForList().rightPop(RedisCollection.AnalyzeQueue);
        if (null == serializable) {
            serializable = redisTemplate.opsForSet().pop(RedisCollection.AnalyzeQueueVice);
            main = false;
        }
        EsSimplePackage esSimplePackage = null;
        ZipPackage zipPackage = null;
        try {
            if (serializable != null) {
                String packStr = serializable.toString();
                esSimplePackage = objectMapper.readValue(packStr, EsSimplePackage.class);
            }
            if (esSimplePackage != null) {
                // Skip packages already resolved/resolving or QC-ed/QC-ing: multiple
                // service instances may enqueue the same package more than once, which
                // would otherwise trigger duplicate QC and parsing.
                Map<String, Object> map = statusReportService.getJsonArchiveById(esSimplePackage.get_id());
                if (map != null) {
                    if ("3".equals(map.get("analyze_status") + "") || "1".equals(map.get("analyze_status") + "")) {
                        // QC already finished or in progress — nothing to do.
                        return;
                    }
                    // A package that reached the resolve stage must already have passed QC.
                    if ("3".equals(map.get("archive_status") + "") || "1".equals(map.get("archive_status") + "")) {
                        // Already resolved or resolving — mark QC as finished.
                        statusReportService.reportStatus(esSimplePackage.get_id(), AnalyzeStatus.Finished, 0, null);
                        return;
                    }
                }
                statusReportService.reportStatus(esSimplePackage.get_id(), AnalyzeStatus.Acquired, 0, "正在质控中");
                zipPackage = new ZipPackage(esSimplePackage);
                zipPackage.download();
                zipPackage.unZip();
                ProfileType profileType = zipPackage.resolve();
                if (ProfileType.Standard == profileType) {
                    packageQcService.qcHandle(zipPackage);
                    // 保存数据集质控数据 — data-set level QC records.
                    elasticSearchUtil.index(INDEX, QC_DATASET_INFO, zipPackage.getQcDataSetRecord());
                    // 保存数据元质控数据 — metadata level QC records.
                    elasticSearchUtil.bulkIndex(INDEX, QC_METADATA_INFO, zipPackage.getQcMetadataRecords());
                    // 报告质控状态 — report QC success.
                    statusReportService.reportStatus(esSimplePackage.get_id(), AnalyzeStatus.Finished, 0, "Qc success");
                } else {
                    // 报告非结构化档案包质控状态 — non-standard packages are not QC-ed.
                    statusReportService.reportStatus(esSimplePackage.get_id(), AnalyzeStatus.Finished, 0, "Ignore non-standard package file or re-upload package file");
                }
                // 发送解析消息 — hand the package over to the resolve stage.
                if (main) {
                    redisTemplate.opsForList().leftPush(RedisCollection.ResolveQueue, objectMapper.writeValueAsString(esSimplePackage));
                } else {
                    redisTemplate.opsForSet().add(RedisCollection.ResolveQueueVice, objectMapper.writeValueAsString(esSimplePackage));
                }
            }
        } catch (Throwable e) {
            // Map the failure to a coarse error type for the status report.
            int errorType = -1;
            if (e instanceof ZipException) {
                errorType = 1;
            } else if (e instanceof IllegalJsonFileException) {
                errorType = 2;
            } else if (e instanceof IllegalJsonDataException) {
                errorType = 3;
            } else if (e instanceof IllegalEmptyCheckException) { // 非空 — empty-check violation
                errorType = 4;
            } else if (e instanceof IllegalValueCheckException) { // 值域超出 — value out of range
                errorType = 5;
            } else if (e instanceof IllegalTypeCheckException) { // 类型 — type mismatch
                errorType = 6;
            } else if (e instanceof IllegalFormatCheckException) { // 格式 — format violation
                errorType = 7;
            } else if (e instanceof AnalyzerException) {
                errorType = 21;
            }
            if (esSimplePackage != null) {
                if (StringUtils.isNotBlank(e.getMessage())) {
                    statusReportService.reportStatus(esSimplePackage.get_id(), AnalyzeStatus.Failed, errorType, e.getMessage());
                    logger.error(e.getMessage(), e);
                } else {
                    statusReportService.reportStatus(esSimplePackage.get_id(), AnalyzeStatus.Failed, errorType, "Internal server error, please see task log for detail message.");
                    logger.error("Empty exception message, please see the following detail info.", e);
                }
            } else {
                logger.error("Empty pack cause by:" + e.getMessage());
            }
        } finally {
            if (zipPackage != null) {
                zipPackage.houseKeep();
            }
        }
    }
    /**
     * Synchronous QC of a single package; throws instead of reporting status.
     *
     * @param esSimplePackage package descriptor; may be null (then a no-op)
     * @return the analyzed (and house-kept) package, or null when the input was null
     * @throws Throwable any download/unzip/QC failure; non-standard packages raise ZipException
     */
    public ZipPackage analyze (EsSimplePackage esSimplePackage) throws Throwable {
        long starttime = System.currentTimeMillis();
        ZipPackage zipPackage = null;
        try {
            if (esSimplePackage != null) {
                zipPackage = new ZipPackage(esSimplePackage);
                zipPackage.download();
                zipPackage.unZip();
                ProfileType profileType = zipPackage.resolve();
                if (ProfileType.Standard != profileType) {
                    throw new ZipException("Ignore non-standard package file or re-upload package file");
                }
                packageQcService.qcHandle(zipPackage);
            }
        } finally {
            if (zipPackage != null) {
                zipPackage.houseKeep();
            }
        }
        long endtime = System.currentTimeMillis();
        // Fix: route the timing through the class logger instead of System.out.
        logger.info("耗时:{}ms", endtime - starttime);
        return zipPackage;
    }
    /**
     * Bulk-indexes a JSON array string into ElasticSearch.
     *
     * @param index    target index
     * @param type     target type
     * @param dataList JSON array of documents
     * @throws Exception on JSON parse or indexing failure
     */
    public void esSaveData(String index, String type, String dataList) throws Exception {
        List<Map<String, Object>> list = objectMapper.readValue(dataList, List.class);
        elasticSearchUtil.bulkIndex(index, type, list);
    }
}

+ 25 - 0
src/main/java/com/yihu/ehr/analyze/service/pack/PackageAnalyzer.java

@ -0,0 +1,25 @@
package com.yihu.ehr.analyze.service.pack;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.yihu.ehr.analyze.model.ZipPackage;
import com.yihu.ehr.profile.extractor.ExtractorChain;
import com.yihu.ehr.profile.util.DataSetParserUtil;
import org.springframework.beans.factory.annotation.Autowired;
/**
 * Package analyzer (包解析器): base class for archive package parsers.
 *
 * Subclasses implement {@link #analyze} to turn the files of an unzipped
 * package into data sets. All collaborators are injected by Spring.
 *
 * @author Sand
 * @version 1.0
 * @created 2016.04.13 16:14
 */
public abstract class PackageAnalyzer {
    // Shared JSON mapper used to read data-set files.
    @Autowired
    protected ObjectMapper objectMapper;
    // Extractor chain that pulls key data (event info, diagnosis, dept, ...) out of data sets.
    @Autowired
    protected ExtractorChain extractorChain;
    // Parser that converts structured JSON into PackageDataSet instances.
    @Autowired
    protected DataSetParserUtil dataSetParser;
    /**
     * Parses the given package and populates it with data sets and key data.
     *
     * @param profile the unzipped package to analyze
     * @throws Exception if the package content is invalid
     */
    public abstract void analyze(ZipPackage profile) throws Exception;
}

+ 149 - 0
src/main/java/com/yihu/ehr/analyze/service/pack/StdPackageAnalyzer.java

@ -0,0 +1,149 @@
package com.yihu.ehr.analyze.service.pack;
import com.fasterxml.jackson.databind.JsonNode;
import com.yihu.ehr.analyze.model.ZipPackage;
import com.yihu.ehr.analyze.service.RedisService;
import com.yihu.ehr.profile.EventType;
import com.yihu.ehr.profile.exception.AnalyzerException;
import com.yihu.ehr.profile.exception.IllegalJsonFileException;
import com.yihu.ehr.profile.extractor.KeyDataExtractor;
import com.yihu.ehr.profile.family.ResourceCells;
import com.yihu.ehr.profile.model.PackageDataSet;
import com.yihu.ehr.solr.SolrUtil;
import org.apache.commons.lang3.StringUtils;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import java.io.File;
import java.io.IOException;
import java.util.Map;
import java.util.Set;
/**
 * 标准档案分析
 *
 * @author Airhead
 * @created 2018.01.16
 */
@Component
public class StdPackageAnalyzer extends PackageAnalyzer {
    public final static String StandardFolder = "standard";
    public final static String OriginFolder = "origin";
    @Autowired
    private RedisService redisService;
    @Autowired
    private SolrUtil solrUtil;

    /**
     * Parses every JSON data-set file in the package's "standard" folder and
     * fills the {@link ZipPackage} with the extracted key data.
     *
     * @param zipPackage the unzipped package to analyze
     * @throws Exception if the standard folder is missing, a data set is invalid,
     *                   or required org data is not cached
     */
    @Override
    public void analyze(ZipPackage zipPackage) throws Exception {
        File root = zipPackage.getPackFile();
        // Parse the standard (structured) data files.
        String standardPath = root.getAbsolutePath() + File.separator + StandardFolder;
        File standardFolder = new File(standardPath);
        File[] files = standardFolder.listFiles();
        // listFiles() returns null when the folder is missing or unreadable;
        // fail fast with a clear message instead of an NPE in the loop below.
        if (files == null) {
            throw new AnalyzerException("Standard folder not found or unreadable: " + standardPath);
        }
        parseFiles(zipPackage, files, false);
    }

    /**
     * Converts the JSON files of the standard or origin folder into data sets
     * and stores them, together with extracted key data, in the package.
     *
     * @param zipPackage target package
     * @param files      JSON files to parse
     * @param origin     true when the files come from the origin folder
     * @throws Exception if a file is invalid or org name/area are not cached
     */
    private void parseFiles(ZipPackage zipPackage, File[] files, boolean origin) throws Exception {
        for (File file : files) {
            PackageDataSet dataSet = analyzeDataSet(file, origin);
            // Re-upload (supplementary) flag: the complete package must already exist.
            if (dataSet.isReUploadFlg()) {
                zipPackage.setReUploadFlg(dataSet.isReUploadFlg());
                if (zipPackage.getEventType() == null && StringUtils.isNotBlank(dataSet.getOrgCode())
                        && StringUtils.isNotBlank(dataSet.getEventNo()) && dataSet.getEventTime() != null) {
                    // Rebuild the row key of the original profile.
                    String rowkey = dataSet.getOrgCode() + "_" + dataSet.getEventNo() + "_" + dataSet.getEventTime().getTime();
                    String q = "rowkey:" + rowkey;
                    // Only structured profiles are quality-controlled for now; if unstructured
                    // profiles get QC later, the queried collection must be chosen accordingly.
                    SolrDocumentList healthProfile = solrUtil.query("HealthProfile", q, null, 0, 1);
                    if (healthProfile != null && healthProfile.getNumFound() > 0) {
                        for (SolrDocument doc : healthProfile) {
                            String eventType = (String) doc.getFieldValue("event_type");
                            zipPackage.setEventType(EventType.create(eventType));
                        }
                    } else {
                        throw new IllegalJsonFileException("Please upload the complete package(" + rowkey + ") first !");
                    }
                }
            }
            // Event (visit) information.
            if (zipPackage.getEventDate() == null || zipPackage.getEventType() == null) {
                Map<String, Object> properties = extractorChain.doExtract(dataSet, KeyDataExtractor.Filter.EventInfo);
                EventType eventType = (EventType) properties.get(ResourceCells.EVENT_TYPE);
                if (eventType != null) {
                    zipPackage.setEventType(eventType);
                }
            }
            // Outpatient / inpatient diagnosis.
            if (zipPackage.getDiagnosisCode().size() <= 0 || zipPackage.getDiagnosisName().size() <= 0) {
                Map<String, Object> properties = extractorChain.doExtract(dataSet, KeyDataExtractor.Filter.Diagnosis);
                Set<String> diagnosisCode = (Set<String>) properties.get(ResourceCells.DIAGNOSIS);
                Set<String> diagnosisName = (Set<String>) properties.get(ResourceCells.DIAGNOSIS_NAME);
                if (diagnosisCode.size() > 0) {
                    zipPackage.setDiagnosisCode(diagnosisCode);
                }
                if (diagnosisName.size() > 0) {
                    zipPackage.setDiagnosisName(diagnosisName);
                }
            }
            // Department information.
            if (zipPackage.getDeptCode() == null) {
                Map<String, Object> properties = extractorChain.doExtract(dataSet, KeyDataExtractor.Filter.Dept);
                String deptCode = (String) properties.get(ResourceCells.DEPT_CODE);
                if (StringUtils.isNotEmpty(deptCode)) {
                    zipPackage.setDeptCode(deptCode);
                }
            }
            zipPackage.insertDataSet(dataSet.getCode(), dataSet);
            zipPackage.setPatientId(dataSet.getPatientId());
            zipPackage.setEventNo(dataSet.getEventNo());
            zipPackage.setOrgCode(dataSet.getOrgCode());
            // Org name and area must be pre-cached in Redis; a miss is a setup error.
            String orgName = redisService.getOrgName(dataSet.getOrgCode());
            if (StringUtils.isEmpty(orgName)) {
                throw new AnalyzerException("Can not get org name for " + dataSet.getOrgCode() + ", forget to cache?");
            }
            zipPackage.setOrgName(orgName);
            String orgArea = redisService.getOrgArea(dataSet.getOrgCode());
            if (StringUtils.isEmpty(orgArea)) {
                throw new AnalyzerException("Can not get org area for " + dataSet.getOrgCode() + ", forget to cache?");
            }
            zipPackage.setOrgArea(orgArea);
            zipPackage.setCdaVersion(dataSet.getCdaVersion());
            zipPackage.setEventDate(dataSet.getEventTime());
        }
    }

    /**
     * Reads a JSON file and produces a data set.
     *
     * @param jsonFile JSON file to parse
     * @param isOrigin whether the file comes from the origin folder
     * @return the parsed data set
     * @throws IOException if the file cannot be read or is not valid JSON
     */
    private PackageDataSet analyzeDataSet(File jsonFile, boolean isOrigin) throws IOException {
        JsonNode jsonNode = objectMapper.readTree(jsonFile);
        if (jsonNode.isNull()) {
            throw new IllegalJsonFileException("Invalid json file when generate data set");
        }
        return dataSetParser.parseStructuredJsonDataSet(jsonNode, isOrigin);
    }
}

+ 107 - 0
src/main/java/com/yihu/ehr/analyze/service/qc/DataElementValue.java

@ -0,0 +1,107 @@
package com.yihu.ehr.analyze.service.qc;
/**
 * Value object describing one data element cell: its storage location
 * (table / column family / row key — presumably an HBase layout, TODO confirm),
 * its identity (code, standard version) and the event context it belongs to.
 * All properties are plain strings with standard bean accessors.
 *
 * @author Airhead
 * @created 2018-01-21
 */
public class DataElementValue {

    private String rowKey;       // storage row key
    private String table;        // storage table name
    private String columnFamily; // storage column family
    private String version;      // standard (CDA) version
    private String code;         // meta data code
    private String value;        // raw cell value
    private String orgCode;      // organization code
    private String patientId;    // patient identifier
    private String eventNo;      // visit / event number
    private String eventTime;    // event timestamp (string form)
    private String receiveTime;  // package receive timestamp (string form)

    public String getRowKey() { return rowKey; }

    public void setRowKey(String rowKey) { this.rowKey = rowKey; }

    public String getTable() { return table; }

    public void setTable(String table) { this.table = table; }

    public String getColumnFamily() { return columnFamily; }

    public void setColumnFamily(String columnFamily) { this.columnFamily = columnFamily; }

    public String getVersion() { return version; }

    public void setVersion(String version) { this.version = version; }

    public String getCode() { return code; }

    public void setCode(String code) { this.code = code; }

    public String getValue() { return value; }

    public void setValue(String value) { this.value = value; }

    public String getOrgCode() { return orgCode; }

    public void setOrgCode(String orgCode) { this.orgCode = orgCode; }

    public String getPatientId() { return patientId; }

    public void setPatientId(String patientId) { this.patientId = patientId; }

    public String getEventNo() { return eventNo; }

    public void setEventNo(String eventNo) { this.eventNo = eventNo; }

    public String getEventTime() { return eventTime; }

    public void setEventTime(String eventTime) { this.eventTime = eventTime; }

    public String getReceiveTime() { return receiveTime; }

    public void setReceiveTime(String receiveTime) { this.receiveTime = receiveTime; }
}

+ 239 - 0
src/main/java/com/yihu/ehr/analyze/service/qc/PackageQcService.java

@ -0,0 +1,239 @@
package com.yihu.ehr.analyze.service.qc;
import com.yihu.ehr.analyze.config.RequireDatasetsConfig;
import com.yihu.ehr.analyze.model.ZipPackage;
import com.yihu.ehr.model.packs.EsSimplePackage;
import com.yihu.ehr.profile.ErrorType;
import com.yihu.ehr.profile.exception.IllegalJsonDataException;
import com.yihu.ehr.profile.model.MetaDataRecord;
import com.yihu.ehr.profile.model.PackageDataSet;
import com.yihu.ehr.redis.client.RedisClient;
import com.yihu.ehr.util.datetime.DateUtil;
import com.yihu.ehr.util.string.StringBuilderEx;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.config.ConfigurableBeanFactory;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Service;
import java.lang.reflect.Method;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
/**
 * @author Airhead
 * @created 2018-01-19
 */
@Service
@Scope(ConfigurableBeanFactory.SCOPE_SINGLETON)
public class PackageQcService {

    private static final Logger logger = LoggerFactory.getLogger(PackageQcService.class);
    // SimpleDateFormat is NOT thread-safe and this singleton service may be invoked from
    // several consumer threads, so each thread gets its own formatter instance.
    private static final ThreadLocal<DateFormat> DATE_FORMAT =
            ThreadLocal.withInitial(() -> new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"));
    // In-memory accumulator of per-dataset receive counters, drained by getDatasetDetails().
    // Key: "orgCode;date;version;eventType;dataset", value: "count;rows".
    private static final Map<String, String> DATASET_RECORDS = new ConcurrentHashMap<>();
    @Autowired
    private QcRuleCheckService qcRuleCheckService;
    @Autowired
    private RedisClient redisClient;
    @Autowired
    private RequireDatasetsConfig requireDatasetsConfig;

    /**
     * Unified quality-control entry point (single entry keeps the number of async messages low):
     * 1. runs the QC rules resolved from Redis against every meta data element;
     * 2. records the per-package data-set QC summary for ES statistics.
     *
     * @param zipPackage the archive package to check
     * @throws Throwable any exception raised by a reflected QC check method
     */
    public void qcHandle(ZipPackage zipPackage) throws Throwable {
        if (zipPackage.getEventType() == null) {
            throw new IllegalJsonDataException("Cannot extract event type");
        }
        List<String> details = new ArrayList<>();
        Map<String, PackageDataSet> dataSets = zipPackage.getDataSets();
        dataSets.keySet().forEach(details::add);
        // Required-data-set check for this event type.
        List<String> required = requireDatasetsConfig.getRequireDataset(zipPackage.getEventType());
        List<String> missing = new ArrayList<>();
        required.forEach(item -> {
            if (!details.contains(item)) {
                missing.add(item);
            }
        });
        EsSimplePackage esSimplePackage = zipPackage.getEsSimplePackage();
        Map<String, Object> qcDataSetRecord = zipPackage.getQcDataSetRecord();
        qcDataSetRecord.put("details", details);
        qcDataSetRecord.put("missing", missing);
        qcDataSetRecord.put("is_defect", missing.isEmpty() ? 0 : 1);
        qcDataSetRecord.put("_id", esSimplePackage.get_id());
        qcDataSetRecord.put("patient_id", zipPackage.getPatientId());
        qcDataSetRecord.put("pack_id", esSimplePackage.get_id());
        qcDataSetRecord.put("org_code", zipPackage.getOrgCode());
        qcDataSetRecord.put("org_name", zipPackage.getOrgName());
        qcDataSetRecord.put("org_area", zipPackage.getOrgArea());
        qcDataSetRecord.put("dept", zipPackage.getDeptCode());
        qcDataSetRecord.put("diagnosis_name", StringUtils.join(zipPackage.getDiagnosisName().toArray(), ";"));
        qcDataSetRecord.put("receive_date", DATE_FORMAT.get().format(esSimplePackage.getReceive_date()));
        qcDataSetRecord.put("event_date", DateUtil.toStringLong(zipPackage.getEventDate()));
        qcDataSetRecord.put("event_type", zipPackage.getEventType() == null ? -1 : zipPackage.getEventType().getType());
        qcDataSetRecord.put("event_no", zipPackage.getEventNo());
        qcDataSetRecord.put("version", zipPackage.getCdaVersion());
        qcDataSetRecord.put("count", zipPackage.getDataSets().size());
        qcDataSetRecord.put("qc_step", 1);
        qcDataSetRecord.put("create_date", DATE_FORMAT.get().format(new Date()));
        for (String dataSetCode : dataSets.keySet()) {
            Map<String, MetaDataRecord> records = dataSets.get(dataSetCode).getRecords();
            // Meta data elements that already produced a QC record - report each at most once.
            Set<String> existSet = new HashSet<>();
            List<String> listDataElement = getDataElementList(dataSets.get(dataSetCode).getCdaVersion(), dataSetCode);
            for (String recordKey : records.keySet()) {
                Map<String, String> dataGroup = records.get(recordKey).getDataGroup();
                for (String metadata : listDataElement) {
                    if (existSet.contains(dataSetCode + "$" + metadata)) {
                        continue;
                    }
                    String method = redisClient.get("qc_" + zipPackage.getCdaVersion() + ":" + dataSetCode + ":" + metadata);
                    if (method != null) {
                        // Resolve the public check method by name; all checkers share the
                        // same (version, dataSetCode, metadata, value) String signature.
                        Method checker = QcRuleCheckService.class.getMethod(
                                method, String.class, String.class, String.class, String.class);
                        ErrorType errorType;
                        try {
                            errorType = (ErrorType) checker.invoke(qcRuleCheckService,
                                    zipPackage.getCdaVersion(), dataSetCode, metadata, dataGroup.get(metadata));
                        } catch (java.lang.reflect.InvocationTargetException e) {
                            // Unwrap and rethrow the real exception raised inside the checker.
                            // (The previous blanket catch could `throw e.getCause()` == null for
                            // non-invocation failures, producing a misleading NPE.)
                            throw e.getCause();
                        }
                        if (errorType != ErrorType.Normal) {
                            Map<String, Object> qcMetadataRecord = new HashMap<>();
                            String _id = esSimplePackage.get_id() + "$" + dataSetCode + "$" + metadata;
                            qcMetadataRecord.put("_id", _id);
                            qcMetadataRecord.put("patient_id", zipPackage.getPatientId());
                            qcMetadataRecord.put("pack_id", esSimplePackage.get_id());
                            qcMetadataRecord.put("org_code", zipPackage.getOrgCode());
                            qcMetadataRecord.put("org_name", zipPackage.getOrgName());
                            qcMetadataRecord.put("org_area", zipPackage.getOrgArea());
                            qcMetadataRecord.put("dept", zipPackage.getDeptCode());
                            qcMetadataRecord.put("diagnosis_name", StringUtils.join(zipPackage.getDiagnosisName().toArray(), ";"));
                            qcMetadataRecord.put("receive_date", DATE_FORMAT.get().format(esSimplePackage.getReceive_date()));
                            qcMetadataRecord.put("event_date", DateUtil.toStringLong(zipPackage.getEventDate()));
                            qcMetadataRecord.put("event_type", zipPackage.getEventType() == null ? -1 : zipPackage.getEventType().getType());
                            qcMetadataRecord.put("event_no", zipPackage.getEventNo());
                            qcMetadataRecord.put("version", zipPackage.getCdaVersion());
                            qcMetadataRecord.put("dataset", dataSetCode);
                            qcMetadataRecord.put("metadata", metadata);
                            qcMetadataRecord.put("value", dataGroup.get(metadata));
                            qcMetadataRecord.put("qc_step", 1); // standard QC stage
                            qcMetadataRecord.put("qc_error_type", errorType.getType()); // QC error type
                            qcMetadataRecord.put("qc_error_name", errorType.getName()); // QC error name
                            qcMetadataRecord.put("qc_error_message", String.format("%s failure for meta data %s of %s in %s", method, metadata, dataSetCode, zipPackage.getCdaVersion()));
                            qcMetadataRecord.put("create_date", DATE_FORMAT.get().format(new Date()));
                            qcMetadataRecord.put("pack_pwd", esSimplePackage.getPwd());
                            zipPackage.getQcMetadataRecords().add(qcMetadataRecord);
                            existSet.add(dataSetCode + "$" + metadata);
                        }
                    }
                }
            }
        }
        details.forEach(item -> this.updateDatasetDetails(zipPackage.getOrgCode(),
                DATE_FORMAT.get().format(esSimplePackage.getReceive_date()), zipPackage.getCdaVersion(),
                item, zipPackage.getEventType().getType(), dataSets.get(item).getRecords().size()));
    }

    /**
     * Returns the meta data codes of a data set from the Redis standard cache,
     * or an empty list (with an error log) when the cache entry is missing.
     */
    private List<String> getDataElementList(String version, String dataSetCode) {
        // NOTE: "metada_code" is the key actually written to Redis - do not "fix" the spelling.
        String metadataCodes = redisClient.get(makeKey("std_data_set_" + version, dataSetCode, "metada_code"));
        String[] metadataList = StringUtils.split(metadataCodes, ",");
        if (metadataList == null) {
            logger.error("version:" + version + ",dataSetCode:" + dataSetCode);
            return new ArrayList<>();
        }
        return Arrays.asList(metadataList);
    }

    /**
     * Builds a "table:key:column" Redis key.
     */
    private String makeKey(String table, String key, String column) {
        return new StringBuilderEx("%1:%2:%3")
                .arg(table)
                .arg(key)
                .arg(column)
                .toString();
    }

    /**
     * Accumulates the per-day, per-org, per-dataset counters in memory.
     * The value format is "count;rows".
     */
    private void updateDatasetDetails(String orgCode, String receiveDate, String version,
                                      String dataset, int eventType, int row) {
        String date = receiveDate.substring(0, 10);
        String key = orgCode + ";" + date + ";" + version + ";" + eventType + ";" + dataset;
        // merge() is atomic on ConcurrentHashMap, which removes the lost-update race the
        // previous read-then-put "double check" could only narrow, never eliminate.
        DATASET_RECORDS.merge(key, 1 + ";" + row, (oldVal, ignored) -> {
            String[] parts = oldVal.split(";");
            int count = Integer.parseInt(parts[0]) + 1;
            int rows = Integer.parseInt(parts[1]) + row;
            return count + ";" + rows;
        });
    }

    /**
     * Snapshots and clears the accumulated counters; synchronized so the scheduled
     * persistence job obtains a consistent drain.
     */
    public static synchronized Map<String, String> getDatasetDetails() {
        Map<String, String> temp = new HashMap<>();
        temp.putAll(DATASET_RECORDS);
        DATASET_RECORDS.clear();
        return temp;
    }
}

+ 255 - 0
src/main/java/com/yihu/ehr/analyze/service/qc/QcRuleCheckService.java

@ -0,0 +1,255 @@
package com.yihu.ehr.analyze.service.qc;
import com.yihu.ehr.analyze.service.RedisService;
import com.yihu.ehr.profile.ErrorType;
import com.yihu.ehr.profile.exception.*;
import com.yihu.ehr.util.datetime.DateUtil;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
/**
 * @author Airhead
 * @created 2018-01-21
 */
@Service
public class QcRuleCheckService {

    @Autowired
    private RedisService redisService;

    /**
     * Checks that a value is not blank.
     *
     * @param version     standard version
     * @param dataSetCode data set code
     * @param metadata    meta data code
     * @param value       value to check
     * @return {@link ErrorType#EmptyError} when blank, otherwise {@link ErrorType#Normal}
     */
    public ErrorType emptyCheck (String version, String dataSetCode, String metadata, String value) {
        if (StringUtils.isBlank(value)) {
            return ErrorType.EmptyError;
        }
        return ErrorType.Normal;
    }

    /**
     * Same as {@link #emptyCheck} but throws instead of returning an error type.
     *
     * @throws IllegalEmptyCheckException when the value is blank
     */
    public ErrorType emptyCheckThrowable (String version, String dataSetCode, String metadata, String value) throws Exception {
        ErrorType type = this.emptyCheck(version, dataSetCode, metadata, value);
        if (ErrorType.Normal != type) {
            throw new IllegalEmptyCheckException(String.format("Meta data %s of %s in %s is empty", metadata, dataSetCode, version));
        }
        return type;
    }

    /**
     * Checks that a value matches the meta data type declared in the standard cache.
     * Supported types: D/DT (date-time, must be ISO with 'T' or 'Z'), L (boolean:
     * F/T/0/1), N (numeric), T (parseable date), BY and S1/S2/S3 (always pass).
     *
     * @throws AnalyzerException        when the meta data type cannot be resolved from cache
     * @throws IllegalJsonDataException when a date-time value is not ISO formatted
     */
    public ErrorType typeCheck (String version, String dataSetCode, String metadata, String value) throws Exception {
        if (StringUtils.isBlank(value)) {
            return ErrorType.TypeError;
        }
        String type = redisService.getMetaDataType(version, dataSetCode, metadata);
        if (StringUtils.isBlank(type)) {
            throw new AnalyzerException(String.format("Unable to get meta data type for %s of %s in %s", metadata, dataSetCode, version));
        }
        switch (type) {
            case "D":
                if (!value.contains("T") && !value.contains("Z")) {
                    throw invalidDateTime(version, dataSetCode, metadata, value);
                }
                return ErrorType.Normal;
            case "DT":
                if (!value.contains("T") && !value.contains("Z")) {
                    throw invalidDateTime(version, dataSetCode, metadata, value);
                }
                return ErrorType.Normal;
            case "L":
                // BUGFIX: the original used && between the equals() calls, which made the
                // negated condition always true (a value cannot equal all four literals),
                // so every boolean value was flagged as a TypeError. The value is valid
                // when it equals ANY of F/T/0/1.
                if (!("F".equals(value) || "T".equals(value) || "0".equals(value) || "1".equals(value))) {
                    return ErrorType.TypeError;
                }
                return ErrorType.Normal;
            case "N":
                if (!StringUtils.isNumeric(value)) {
                    return ErrorType.TypeError;
                }
                return ErrorType.Normal;
            case "T":
                if (DateUtil.strToDate(value) == null) {
                    return ErrorType.TypeError;
                }
                return ErrorType.Normal;
            case "BY":
                return ErrorType.Normal;
            // S1,S2,S3,BY
            default:
                return ErrorType.Normal;
        }
    }

    /**
     * Same as {@link #typeCheck} but throws instead of returning an error type.
     *
     * @throws IllegalTypeCheckException when the type check fails
     */
    public ErrorType typeCheckThrowable (String version, String dataSetCode, String metadata, String value) throws Exception {
        ErrorType type = typeCheck(version, dataSetCode, metadata, value);
        if (ErrorType.Normal != type) {
            throw new IllegalTypeCheckException(String.format("Data type for %s of %s in %s is error", metadata, dataSetCode, version));
        }
        return type;
    }

    /**
     * Checks that a value matches the meta data format declared in the standard cache.
     * Only DT15 / D8 (date-time) formats are enforced so far; others always pass.
     *
     * @throws AnalyzerException        when the meta data format cannot be resolved from cache
     * @throws IllegalJsonDataException when a date-time value is not ISO formatted
     */
    public ErrorType formatCheck (String version, String dataSetCode, String metadata, String value) {
        if (StringUtils.isBlank(value)) {
            return ErrorType.FormatError;
        }
        String format = redisService.getMetaDataFormat(version, dataSetCode, metadata);
        if (StringUtils.isBlank(format)) {
            throw new AnalyzerException(String.format("Unable to get meta data format for %s of %s in %s", metadata, dataSetCode, version));
        }
        switch (format) {
            case "DT15":
                if (!value.contains("T") && !value.contains("Z")) {
                    throw invalidDateTime(version, dataSetCode, metadata, value);
                }
                return ErrorType.Normal;
            case "D8":
                if (!value.contains("T") && !value.contains("Z")) {
                    throw invalidDateTime(version, dataSetCode, metadata, value);
                }
                return ErrorType.Normal;
            default:
                return ErrorType.Normal;
        }
    }

    /**
     * Same as {@link #formatCheck} but throws instead of returning an error type.
     *
     * @throws IllegalTypeCheckException when the format check fails
     */
    public ErrorType formatCheckThrowable (String version, String dataSetCode, String metadata, String value) throws Exception {
        ErrorType type = formatCheck(version, dataSetCode, metadata, value);
        if (ErrorType.Normal != type) {
            throw new IllegalTypeCheckException(String.format("Data format for %s of %s in %s is error", metadata, dataSetCode, version));
        }
        return type;
    }

    /**
     * Checks that a coded value belongs to its dictionary (value domain).
     *
     * @throws AnalyzerException when the dictionary id cannot be resolved from cache
     */
    public ErrorType valueCheck (String version, String dataSetCode, String metadata, String value) {
        if (StringUtils.isBlank(value)) {
            return ErrorType.ValueError;
        }
        String dictId = redisService.getMetaDataDict(version, dataSetCode, metadata);
        if (StringUtils.isBlank(dictId)) {
            throw new AnalyzerException(String.format("Unable to get dict id for %s of %s in %s", metadata, dataSetCode, version));
        }
        String _value = redisService.getDictEntryValue(version, dictId, value);
        if (StringUtils.isBlank(_value)) {
            return ErrorType.ValueError;
        }
        return ErrorType.Normal;
    }

    /**
     * Same as {@link #valueCheck} but throws instead of returning an error type.
     *
     * @throws IllegalValueCheckException when the value is out of the dictionary range
     */
    public ErrorType valueCheckThrowable (String version, String dataSetCode, String metadata, String value) throws Exception {
        ErrorType type = this.valueCheck(version, dataSetCode, metadata, value);
        if (type != ErrorType.Normal) {
            throw new IllegalValueCheckException(String.format("Value %s for meta data %s of %s in %s out of range", value, metadata, dataSetCode, version));
        }
        return type;
    }

    /**
     * Builds the exception thrown when a date/time value is not ISO formatted
     * (contains neither 'T' nor 'Z'). Extracted from four identical inline copies;
     * the message is byte-identical to the original.
     */
    private IllegalJsonDataException invalidDateTime(String version, String dataSetCode, String metadata, String value) {
        StringBuilder error = new StringBuilder();
        error.append("Invalid date time format ")
                .append(dataSetCode)
                .append(" ")
                .append(metadata)
                .append(" ")
                .append(value)
                .append(" for std version ")
                .append(version)
                .append(".");
        return new IllegalJsonDataException(error.toString());
    }
}

+ 57 - 0
src/main/java/com/yihu/ehr/analyze/service/qc/StatusReportService.java

@ -0,0 +1,57 @@
package com.yihu.ehr.analyze.service.qc;
import com.yihu.ehr.elasticsearch.ElasticSearchUtil;
import com.yihu.ehr.profile.AnalyzeStatus;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
/**
 * Created by progr1mmer on 2018/6/19.
 */
@Service
public class StatusReportService {

    // SimpleDateFormat is NOT thread-safe; keep one instance per thread.
    private static final ThreadLocal<DateFormat> DATE_FORMAT =
            ThreadLocal.withInitial(() -> new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"));
    private static final String MAIN_INDEX = "json_archives";
    private static final String MAIN_INFO = "info";
    @Autowired
    private ElasticSearchUtil elasticSearchUtil;

    /**
     * Writes the analyze status of a package back to its "json_archives/info" document.
     * On failure: error types 3..7 force the fail counter to its cap of 3 (no more retries);
     * other failures increment the counter up to 3. On Acquired the analyze date is stamped.
     *
     * @param _id           document id of the package
     * @param analyzeStatus new analyze status
     * @param errorType     error type code to record
     * @param message       optional message; stored only when non-blank
     */
    public void reportStatus(String _id, AnalyzeStatus analyzeStatus, int errorType, String message) {
        Map<String, Object> updateSource = new HashMap<>();
        if (analyzeStatus == AnalyzeStatus.Failed) {
            if (3 <= errorType && errorType <= 7) {
                updateSource.put("analyze_fail_count", 3);
            } else {
                Map<String, Object> sourceMap = elasticSearchUtil.findById(MAIN_INDEX, MAIN_INFO, _id);
                if (null == sourceMap) {
                    return;
                }
                // Defensive: the field may be absent or non-integer on old documents;
                // the original unguarded (int) cast NPE'd / ClassCastException'd there.
                Object rawCount = sourceMap.get("analyze_fail_count");
                int failCount = rawCount instanceof Number ? ((Number) rawCount).intValue() : 0;
                if (failCount < 3) {
                    updateSource.put("analyze_fail_count", failCount + 1);
                }
            }
        } else if (analyzeStatus == AnalyzeStatus.Acquired) {
            updateSource.put("analyze_date", DATE_FORMAT.get().format(new Date()));
        }
        if (StringUtils.isNoneBlank(message)) {
            updateSource.put("message", message);
        }
        updateSource.put("error_type", errorType);
        updateSource.put("analyze_status", analyzeStatus.ordinal());
        elasticSearchUtil.voidUpdate(MAIN_INDEX, MAIN_INFO, _id, updateSource);
    }

    /**
     * Fetches the raw archive document by id, or null when it does not exist.
     */
    public Map<String, Object> getJsonArchiveById(String id) {
        return elasticSearchUtil.findById(MAIN_INDEX, MAIN_INFO, id);
    }
}

+ 126 - 0
src/main/java/com/yihu/ehr/analyze/service/scheduler/SchedulerService.java

@ -0,0 +1,126 @@
package com.yihu.ehr.analyze.service.scheduler;
import com.yihu.ehr.analyze.config.SchedulerConfig;
import com.yihu.ehr.analyze.job.PackageAnalyzeJob;
import io.swagger.annotations.ApiParam;
import org.quartz.*;
import org.quartz.impl.matchers.GroupMatcher;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Service;
import org.springframework.web.bind.annotation.RequestParam;
import java.util.Set;
import java.util.UUID;
import static org.quartz.JobBuilder.newJob;
import static org.quartz.TriggerBuilder.newTrigger;
/**
 * @author Airhead
 * @created 2018-01-23
 */
@Service
public class SchedulerService {

    private static final String PACK_ANALYZER = "PackAnalyzer";
    private static final String PACK_ANALYZER_JOB = "PackAnalyzerJob-";
    private static final String PACK_ANALYZER_TRIGGER = "PackAnalyzerTrigger-";

    @Autowired
    private Scheduler scheduler;
    @Autowired
    private SchedulerConfig config;

    /**
     * Pauses or resumes every scheduled job.
     *
     * @param pause true to pause all triggers, false to resume them
     * @return 200 with empty body on success, 500 with the scheduler error message on failure
     */
    public ResponseEntity<String> updateScheduler(
            @ApiParam(name = "pause", value = "true:暂停 , false:执行", required = true, defaultValue = "true")
            @RequestParam(value = "pause") boolean pause) {
        try {
            if (pause) {
                scheduler.pauseAll();
            } else {
                scheduler.resumeAll();
            }
            return new ResponseEntity<>((String) null, HttpStatus.OK);
        } catch (SchedulerException e) {
            return new ResponseEntity<>(e.getMessage(), HttpStatus.INTERNAL_SERVER_ERROR);
        }
    }

    /**
     * Tops the analyzer job group up to {@code count} jobs (capped at the configured
     * maximum). Jobs already in the group count towards the target, so requesting
     * fewer jobs than are active adds nothing.
     *
     * @param count   desired total number of analyzer jobs
     * @param cronExp cron expression used for each new job's trigger
     * @return 200 with the configured maximum on success, 500 with -1 on failure
     */
    public ResponseEntity<Integer> addJob(
            @ApiParam(name = "count", value = "任务数量(不要超过系统设定值)", required = true, defaultValue = "4")
            @RequestParam(value = "count") int count,
            @ApiParam(name = "cronExp", value = "触发器CRON表达式", required = true, defaultValue = "0/4 * * * * ?")
            @RequestParam(value = "cronExp") String cronExp) {
        try {
            if (count > config.getJobMaxSize()) {
                count = config.getJobMaxSize();
            }
            // The original duplicated the whole scheduling body for the null case;
            // a null key set is simply "zero active jobs".
            Set<JobKey> jobKeys = scheduler.getJobKeys(GroupMatcher.jobGroupEquals(PACK_ANALYZER));
            int activeJobs = (jobKeys == null) ? 0 : jobKeys.size();
            for (int i = 0; i < count - activeJobs; i++) {
                scheduleAnalyzeJob(cronExp);
            }
            return new ResponseEntity<>(config.getJobMaxSize(), HttpStatus.OK);
        } catch (Exception e) {
            // Surface the failure instead of silently swallowing it (matches the
            // error-reporting style of the other methods in this class).
            e.printStackTrace();
            return new ResponseEntity<>(-1, HttpStatus.INTERNAL_SERVER_ERROR);
        }
    }

    /**
     * Deletes up to {@code count} jobs from the analyzer job group.
     *
     * @param count number of jobs to remove
     * @return 200 with empty body on success, 500 with the scheduler error message on failure
     */
    public ResponseEntity<String> removeJob(
            @ApiParam(name = "count", value = "任务数量", required = true, defaultValue = "4")
            @RequestParam(value = "count") int count) {
        try {
            Set<JobKey> jobKeySet = scheduler.getJobKeys(GroupMatcher.jobGroupEquals(PACK_ANALYZER));
            if (jobKeySet != null) {
                for (JobKey jobKey : jobKeySet) {
                    scheduler.deleteJob(jobKey);
                    if (--count == 0) break;
                }
            }
            return new ResponseEntity<>((String) null, HttpStatus.OK);
        } catch (SchedulerException e) {
            return new ResponseEntity<>(e.getMessage(), HttpStatus.INTERNAL_SERVER_ERROR);
        }
    }

    /**
     * @return 200 with the number of jobs currently in the analyzer group,
     *         500 with -1 on scheduler failure
     */
    public ResponseEntity<Integer> count() {
        try {
            Set<JobKey> jobKeySet = scheduler.getJobKeys(GroupMatcher.jobGroupEquals(PACK_ANALYZER));
            int count = (jobKeySet == null) ? 0 : jobKeySet.size();
            return new ResponseEntity<>(count, HttpStatus.OK);
        } catch (SchedulerException e) {
            return new ResponseEntity<>(-1, HttpStatus.INTERNAL_SERVER_ERROR);
        }
    }

    // Builds one analyzer job with a random-suffixed identity and schedules it.
    private void scheduleAnalyzeJob(String cronExp) throws SchedulerException {
        String suffix = UUID.randomUUID().toString().substring(0, 8);
        JobDetail jobDetail = newJob(PackageAnalyzeJob.class)
                .withIdentity(PACK_ANALYZER_JOB + suffix, PACK_ANALYZER)
                .build();
        CronTrigger trigger = newTrigger()
                .withIdentity(PACK_ANALYZER_TRIGGER + suffix, PACK_ANALYZER)
                .withSchedule(CronScheduleBuilder.cronSchedule(cronExp))
                .startNow()
                .build();
        scheduler.scheduleJob(jobDetail, trigger);
    }
}

+ 100 - 0
src/main/java/com/yihu/ehr/analyze/service/scheduler/WarningSchedulerService.java

@ -0,0 +1,100 @@
package com.yihu.ehr.analyze.service.scheduler;
import com.yihu.ehr.analyze.job.WarningQuestionJob;
import com.yihu.ehr.analyze.service.dataQuality.DqPaltformResourceWarningService;
import org.quartz.*;
import org.quartz.impl.matchers.GroupMatcher;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.Set;
import static org.quartz.JobBuilder.newJob;
import static org.quartz.TriggerBuilder.newTrigger;
/**
 * 预警问题生成
 * @author yeshijie on 2018/6/13.
 */
@Service
public class WarningSchedulerService {

    private static final String WARNING_QUESTION = "WarningQuestion";
    private static final String WARNING_QUESTION_JOB = "WarningQuestionJob";
    private static final String WARNING_QUESTION_TRIGGER = "WarningQuestionTrigger";

    @Autowired
    private Scheduler scheduler;
    @Autowired
    private DqPaltformResourceWarningService dqPaltformResourceWarningService;

    /**
     * Initializes the warning-question job using the cron expression stored in the
     * resource-warning settings. Does nothing when a job is already scheduled.
     */
    public void init() {
        try {
            String cronExp = dqPaltformResourceWarningService.getCronExp(null);
            Set<JobKey> jobKeys = scheduler.getJobKeys(GroupMatcher.jobGroupEquals(WARNING_QUESTION));
            // Skip when the job is already running.
            if (jobKeys == null || jobKeys.isEmpty()) {
                scheduleWarningJob(cronExp);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * (Re)schedules the warning-question job, replacing any existing one.
     *
     * @param cronExp cron expression, e.g. (0 0 9 * * ?) fires daily at 09:00
     */
    public void addJob(String cronExp) {
        try {
            // Stop the old job first so only one instance is ever scheduled.
            deleteExistingJobs();
            scheduleWarningJob(cronExp);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Removes every scheduled warning-question job.
     */
    public void removeJob() {
        try {
            deleteExistingJobs();
        } catch (SchedulerException e) {
            e.printStackTrace();
        }
    }

    // Deletes every job in the WarningQuestion group (no-op when none exist).
    private void deleteExistingJobs() throws SchedulerException {
        Set<JobKey> jobKeys = scheduler.getJobKeys(GroupMatcher.jobGroupEquals(WARNING_QUESTION));
        if (jobKeys != null) {
            for (JobKey jobKey : jobKeys) {
                scheduler.deleteJob(jobKey);
            }
        }
    }

    // Builds the WarningQuestionJob with its fixed identity and schedules it
    // on the given cron expression.
    private void scheduleWarningJob(String cronExp) throws SchedulerException {
        JobDetail jobDetail = newJob(WarningQuestionJob.class)
                .withIdentity(WARNING_QUESTION_JOB, WARNING_QUESTION)
                .build();
        CronTrigger trigger = newTrigger()
                .withIdentity(WARNING_QUESTION_TRIGGER, WARNING_QUESTION)
                .withSchedule(CronScheduleBuilder.cronSchedule(cronExp))
                .startNow()
                .build();
        scheduler.scheduleJob(jobDetail, trigger);
    }
}

+ 159 - 0
src/main/resources/application.yml

@ -0,0 +1,159 @@
server:
  port: ${svr-pack-analyzer.server.port}
info:
  app:
    name: SVR-PACK-ANALYZER
    description: EHR Platform Microservice.
    version: 1.0.0
spring:
  datasource:
    driver-class-name: com.mysql.jdbc.Driver
    max-active: 20
    max-idle: 8
    min-idle: 8
    validation-query: SELECT 1
    test-on-borrow: true
  # REDIS
  redis:
    database: 0 # Database index used by the connection factory.
    timeout: 0 # Connection timeout in milliseconds.
    #sentinel:
    #  master: # Name of Redis server.
    #  nodes: # Comma-separated list of host:port pairs.
    pool:
      max-active: 8 # Max number of connections that can be allocated by the pool at a given time. Use a negative value for no limit.
      max-idle: 8 # Max number of "idle" connections in the pool. Use a negative value to indicate an unlimited number of idle connections.
      max-wait: -1 # Maximum amount of time (in milliseconds) a connection allocation should block before throwing an exception when the pool is exhausted. Use a negative value to block indefinitely.
      min-idle: 1 # Target for the minimum number of idle connections to maintain in the pool. This setting only has an effect if it is positive.
fast-dfs:
  connect-timeout: 10
  network-timeout: 60
  charset: ISO8859-1
  pool:
    init-size: 5
    max-size: 20
    wait-time: 500
  http:
    tracker-http-port: 80
    anti-steal-token: no
    secret-key: FastDFS1234567890
eip:
  tenant: jkzl
analyze:
  job:
    minSize: 10 #质控初始任务数
    maxSize: 10 #质控最大任务数
    cronExp: 0/1 * * * * ? #质控任务触发间隔表达式
quality:
  orgCode : 1 #默认机构
  version: 59083976eebd #默认版本号
  cloud: medicalCloud #默认云平台code
  cloudName: 上饶医疗云 #默认云平台name
ehr:
  # 档案包数据提取器参数,从数据集中提取摘要、事件时间与身份标识
  require-data-sets:
    clinic:
      - HDSA00_01 #人口学信息
      - HDSD00_85 #门诊-挂号
      - HDSD00_73 #门诊-诊断记录
      - HDSD00_71 #门诊-费用汇总
      - HDSD00_70 #门诊-费用清单
    resident:
      - HDSA00_01 #人口学信息
      - HDSD00_13 #住院-入院记录
      - HDSD00_69 #住院-诊断记录
      - HDSD00_15 #住院-医嘱信息
      - HDSD00_68 #住院-费用汇总
      - HDSD00_67 #住院-费用清单
      - HDSD00_16 #住院-出院小结
    medicalExam:
      - HDSA00_01 #人口学信息
      - HDSB05_03 #体检-登记信息
      - HDSB05_81 #健康体检-项目子表
  pack-extractor:
    # 事件提取参数,用于生成事件摘要
    # 门诊从“门诊摘要”与“挂号”数据集提取事件
    # 住院从“病人摘要”,“入院记录”与“病案首页”提取事件 Clinic 0门诊 Resident 1住院  MedicalExam 2体检
    event:
      data-sets:
        - HDSC01_02: Clinic #门诊-挂号 v1.0
        - HDSD00_85: Clinic #门诊-挂号 v1.3
        - HDSC02_09: Resident #住院-入院记录 v1.0
        - HDSD00_13: Resident #住院-入院记录 v1.3
        - HDSD00_16: Resident #住院-出院小结 v1.3
        - HDSD00_11: Resident #住院-病案首页 v1.5
        - HDSB05_03: MedicalExam #体检-登记信息 v1.3
    #诊断信息
    diagnosis:
      data-sets:
        - HDSC01_03 #门诊-诊断记录 v1.0
        - HDSD00_73 #门诊-诊断记录 v1.3
        - HDSC02_17 #住院-诊断记录 v1.0
        - HDSD00_69 #住院-诊断记录 v1.3
        - HDSB05_84 #体检-诊断记录 v1.3
      code-meta-data:
        - HDSD00_01_550 #门诊-疾病临床诊断在特定分类体系中的代码 v1.0 & v1.3
        - HDSD00_69_002 #住院-疾病临床诊断在特定分类体系中的代码 v1.3
        - JDSB05_84_003 #体检-疾病临床诊断在特定分类体系中的代码 v1.3
      name-meta-data:
        - HDSD00_01_549 #门诊-疾病临床诊断在特定分类体系中的名称 v1.0 & v1.3
        - HDSD00_69_001 #住院-疾病临床诊断在特定分类体系中的名称 v1.3
        - JDSB05_84_004 #体检-疾病临床诊断在特定分类体系中的名称 v1.3
    #身份提取参数,从人口学提取
    identity:
      data-sets:
        - HDSA00_01 #人口学信息 v1.0 & v1.3
      meta-data:
        id-card-no: HDSA00_01_017 #身份证号码 v1.0 & v1.3
        id-card-type: HDSA00_01_016 #身份证类型 v1.0 & v1.3
        patient-name: HDSA00_01_009 #本人姓名 v1.3 (v1.0:HDSD00_01_002)
    #卡提取参数,从就诊摘要提取
    card:
      data-sets:
        - HDSC01_02 #门诊-挂号 v1.0
        - HDSD00_85 #门诊-挂号 v1.3
        - HDSC02_09 #住院-入院记录 v1.0
        - HDSD00_13 #住院-入院记录 v1.3
      card-num:
        - JDSD00_85_005 #门诊-就诊卡号 CARD_NUM
        - JDSD00_13_006 #住院-就诊卡号 CARD_NUM
      card-type:
        - JDSD00_85_006 #门诊-就诊卡类型 CARD_TYPE
        - JDSD00_13_007 #住院-就诊卡类型 CARD_TYPE
    #科室信息
    dept:
      data-sets:
        - HDSD00_85 #门诊-挂号 v1.3
        - HDSD00_13 #住院-入院记录 v1.3
      meta-data:
        - JDSD00_85_001 #门诊-就诊科室代码
        - JDSD00_13_004 #住院-入院科室编码
---
spring:
  profiles: dev
  datasource:
    url: jdbc:mysql://172.19.103.50:3306/healtharchive?useUnicode=true&characterEncoding=UTF-8&useSSL=false
    username: chenweishan
    password: chenweishan
  redis:
    host: 172.19.103.47 # Redis server host.
    port: 6379
    password: redis!@456
  data:
    solr:
      zk-host: node1.hde.h3c.com,node2.hde.h3c.com,node3.hde.h3c.com:2181/solr
fast-dfs:
  tracker-server: 172.19.103.13:22122
  public-server: http://172.19.103.52
elasticsearch:
  cluster-name: elasticsearch
  cluster-nodes: 172.19.103.9:9300

+ 29 - 0
src/main/resources/banner.txt

@ -0,0 +1,29 @@
                                   _oo8oo_
                                  o8888888o
                                  88" . "88
                                  (| -_- |)
                                  0\  =  /0
                                ___/'==='\___
                              .' \\|     |// '.
                             / \\|||  :  |||// \
                            / _||||| -:- |||||_ \
                           |   | \\\  -  /// |   |
                           | \_|  ''\---/''  |_/ |
                           \  .-\__  '-'  __/-.  /
                         ___'. .'  /--.--\  '. .'___
                      ."" '<  '.___\_<|>_/___.'  >' "".
                     | | :  `- \`.:`\ _ /`:.`/ -`  : | |
                     \  \ `-.   \_ __\ /__ _/   .-` /  /
                 =====`-.____`.___ \_____/ ___.`____.-`=====
                                   `=---=`
                ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
                      佛祖保佑     永不宕机     永无Bug
 __   _      ___       ___    __    __    _          __    _       __    _     _    ____  ____  ___
( (` \ \  / | |_)     | |_)  / /\  / /`  | |_/      / /\  | |\ |  / /\  | |   \ \_/  / / | |_  | |_)
_)_)  \_\/  |_| \     |_|   /_/--\ \_\_, |_| \     /_/--\ |_| \| /_/--\ |_|__  |_|  /_/_ |_|__ |_| \

+ 23 - 0
src/main/resources/bootstrap.yml

@ -0,0 +1,23 @@
spring:
  application:
    name: svr-pack-analyzer
  cloud:
    config:
      username: user
      password: configuration
---
spring:
  profiles: dev
  cloud:
    config:
      uri: ${spring.config.uri:http://172.19.103.73:1221}
      label: ${spring.config.label:dev}
---
spring:
  profiles: prod
  cloud:
    config:
      uri: ${spring.config.uri}
      label: ${spring.config.label}