
first commit

Commit a735bd464a by wangweiqun, 6 years ago
63 changed files with 7495 additions and 0 deletions
  1. ehr-cloud/pom.xml (+1159, -0)
  2. ehr-ms-parent-pom/pom.xml (+606, -0)
  3. pom.xml (+129, -0)
  4. readme.md (+10, -0)
  5. src/main/java/com/yihu/ehr/SvrPackResolve.java (+33, -0)
  6. src/main/java/com/yihu/ehr/resolve/FilePackageResolver.java (+222, -0)
  7. src/main/java/com/yihu/ehr/resolve/ImmediateDataResolver.java (+224, -0)
  8. src/main/java/com/yihu/ehr/resolve/LinkPackageResolver.java (+262, -0)
  9. src/main/java/com/yihu/ehr/resolve/PackageResolver.java (+24, -0)
  10. src/main/java/com/yihu/ehr/resolve/SimplePackageResolver.java (+266, -0)
  11. src/main/java/com/yihu/ehr/resolve/StdPackageResolver.java (+177, -0)
  12. src/main/java/com/yihu/ehr/resolve/config/EventIndexConfig.java (+35, -0)
  13. src/main/java/com/yihu/ehr/resolve/config/SchedulerConfig.java (+55, -0)
  14. src/main/java/com/yihu/ehr/resolve/controller/ArchiveRelationEndPoint.java (+77, -0)
  15. src/main/java/com/yihu/ehr/resolve/controller/ResolveEndPoint.java (+241, -0)
  16. src/main/java/com/yihu/ehr/resolve/controller/SchedulerEndPoint.java (+84, -0)
  17. src/main/java/com/yihu/ehr/resolve/dao/DataSetPackageDao.java (+34, -0)
  18. src/main/java/com/yihu/ehr/resolve/dao/FileResourceDao.java (+87, -0)
  19. src/main/java/com/yihu/ehr/resolve/dao/MasterResourceDao.java (+72, -0)
  20. src/main/java/com/yihu/ehr/resolve/dao/PatientDao.java (+16, -0)
  21. src/main/java/com/yihu/ehr/resolve/dao/RsDictionaryEntryDao.java (+17, -0)
  22. src/main/java/com/yihu/ehr/resolve/dao/SubResourceDao.java (+78, -0)
  23. src/main/java/com/yihu/ehr/resolve/feign/DataSetPackageMgrClient.java (+40, -0)
  24. src/main/java/com/yihu/ehr/resolve/feign/PackageMgrClient.java (+36, -0)
  25. src/main/java/com/yihu/ehr/resolve/job/HealthCheckTask.java (+117, -0)
  26. src/main/java/com/yihu/ehr/resolve/job/PackageResolveJob.java (+160, -0)
  27. src/main/java/com/yihu/ehr/resolve/job/SchedulerManager.java (+102, -0)
  28. src/main/java/com/yihu/ehr/resolve/log/PackResolveLogger.java (+63, -0)
  29. src/main/java/com/yihu/ehr/resolve/model/stage1/FilePackage.java (+120, -0)
  30. src/main/java/com/yihu/ehr/resolve/model/stage1/LinkPackage.java (+138, -0)
  31. src/main/java/com/yihu/ehr/resolve/model/stage1/OriginalPackage.java (+155, -0)
  32. src/main/java/com/yihu/ehr/resolve/model/stage1/RsDictionaryEntry.java (+81, -0)
  33. src/main/java/com/yihu/ehr/resolve/model/stage1/SimplePackage.java (+82, -0)
  34. src/main/java/com/yihu/ehr/resolve/model/stage1/StandardPackage.java (+161, -0)
  35. src/main/java/com/yihu/ehr/resolve/model/stage1/details/CdaDocument.java (+68, -0)
  36. src/main/java/com/yihu/ehr/resolve/model/stage1/details/LinkFile.java (+88, -0)
  37. src/main/java/com/yihu/ehr/resolve/model/stage1/details/OriginFile.java (+96, -0)
  38. src/main/java/com/yihu/ehr/resolve/model/stage2/MasterRecord.java (+11, -0)
  39. src/main/java/com/yihu/ehr/resolve/model/stage2/QcMetadataRecords.java (+21, -0)
  40. src/main/java/com/yihu/ehr/resolve/model/stage2/ResourceBucket.java (+139, -0)
  41. src/main/java/com/yihu/ehr/resolve/model/stage2/ResourceRecord.java (+31, -0)
  42. src/main/java/com/yihu/ehr/resolve/model/stage2/SubRecord.java (+56, -0)
  43. src/main/java/com/yihu/ehr/resolve/service/profile/ArchiveRelationService.java (+104, -0)
  44. src/main/java/com/yihu/ehr/resolve/service/profile/PrescriptionService.java (+81, -0)
  45. src/main/java/com/yihu/ehr/resolve/service/resource/stage1/PackModelFactory.java (+57, -0)
  46. src/main/java/com/yihu/ehr/resolve/service/resource/stage1/ResolveService.java (+122, -0)
  47. src/main/java/com/yihu/ehr/resolve/service/resource/stage2/FtpFileService.java (+42, -0)
  48. src/main/java/com/yihu/ehr/resolve/service/resource/stage2/IdentifyService.java (+103, -0)
  49. src/main/java/com/yihu/ehr/resolve/service/resource/stage2/PackMillService.java (+420, -0)
  50. src/main/java/com/yihu/ehr/resolve/service/resource/stage2/PatientService.java (+92, -0)
  51. src/main/java/com/yihu/ehr/resolve/service/resource/stage2/QcRecordService.java (+22, -0)
  52. src/main/java/com/yihu/ehr/resolve/service/resource/stage2/RedisService.java (+100, -0)
  53. src/main/java/com/yihu/ehr/resolve/service/resource/stage2/ResourceService.java (+66, -0)
  54. src/main/java/com/yihu/ehr/resolve/service/resource/stage2/RsDictionaryEntryService.java (+34, -0)
  55. src/main/java/com/yihu/ehr/resolve/service/resource/stage2/StatusReportService.java (+74, -0)
  56. src/main/java/com/yihu/ehr/resolve/util/FileTableUtil.java (+77, -0)
  57. src/main/java/com/yihu/ehr/resolve/util/LocalTempPathUtil.java (+22, -0)
  58. src/main/resources/application.yml (+168, -0)
  59. src/main/resources/banner.txt (+29, -0)
  60. src/main/resources/bootstrap.yml (+23, -0)
  61. src/main/resources/logback-spring.xml (+138, -0)
  62. src/test/java/com/yihu/ehr/SvrPackResolveApplicationTests.java (+15, -0)
  63. src/test/java/com/yihu/ehr/Test.java (+33, -0)
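
The three POM files in this list form a Maven parent chain: the service's root pom.xml (129 lines, listed above but not shown in this excerpt) presumably inherits from ehr-ms-parent-pom, which in turn inherits from ehr-cloud. A minimal sketch of the parent declaration the root POM would carry to join that chain; the artifactId "svr-pack-resolve" is an assumption inferred from SvrPackResolve.java and is not confirmed by this diff:

<!-- Hypothetical sketch, not part of this commit. -->
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <groupId>com.yihu.ehr</groupId>
        <artifactId>ehr-ms-parent-pom</artifactId>
        <version>1.2.0</version>
        <!-- relativePath assumed; depends on where the parent POMs sit on disk -->
        <relativePath>ehr-ms-parent-pom/pom.xml</relativePath>
    </parent>
    <artifactId>svr-pack-resolve</artifactId>
</project>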

ehr-cloud/pom.xml (+1159, -0)

@@ -0,0 +1,1159 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <groupId>com.yihu.ehr</groupId>
    <artifactId>ehr-cloud</artifactId>
    <version>1.2.0</version>
    <packaging>pom</packaging>
    <name>ehr-cloud</name>
    <description>EHR parent pom for all</description>
    <url>http://ehr.yihu.com</url>
    <organization>
        <name>JKZL Software, Inc.</name>
        <url>http://www.yihu.com</url>
    </organization>
    <licenses>
        <license>
            <name>Apache License, Version 2.0</name>
            <url>http://www.apache.org/licenses/LICENSE-2.0</url>
        </license>
    </licenses>
    <developers>
        <developer>
            <id>sand</id>
            <name>Sand Wen</name>
            <email>sand.fj.wen@gmail.com</email>
            <organization>JKZL Software, Inc.</organization>
            <organizationUrl>http://www.yihu.com</organizationUrl>
            <roles>
                <role>Project lead</role>
                <role>Project designer</role>
                <role>Project programmer</role>
            </roles>
        </developer>
        <developer>
            <id>yzh</id>
            <name>叶泽华</name>
            <email>yzh@qq.com</email>
            <organization>JKZL Software, Inc.</organization>
            <organizationUrl>http://www.yihu.com</organizationUrl>
            <roles>
                <role>Project programmer</role>
            </roles>
        </developer>
        <developer>
            <id>cws</id>
            <name>陈维山</name>
            <email>hill9868@qq.com</email>
            <organization>JKZL Software, Inc.</organization>
            <organizationUrl>http://www.yihu.com</organizationUrl>
            <roles>
                <role>Project programmer</role>
            </roles>
        </developer>
        <developer>
            <id>hzy</id>
            <name>黄志勇</name>
            <email>hzy@qq.com</email>
            <organization>JKZL Software, Inc.</organization>
            <organizationUrl>http://www.yihu.com</organizationUrl>
            <roles>
                <role>Project programmer</role>
            </roles>
        </developer>
    </developers>
    <properties>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <version.spring-framework>4.3.13.RELEASE</version.spring-framework>
        <version.spring-boot>1.5.9.RELEASE</version.spring-boot>
        <version.spring-cloud-starter>1.3.2.RELEASE</version.spring-cloud-starter>
        <version.spring-cloud>1.4.2.RELEASE</version.spring-cloud>
        <version.spring-security>4.2.3.RELEASE</version.spring-security>
        <version.spring-oauth2>2.0.14.RELEASE</version.spring-oauth2>
        <version.spring-session>1.3.1.RELEASE</version.spring-session>
        <version.spring-session-hazelcast>2.0.1.RELEASE</version.spring-session-hazelcast>
        <version.spring-data-commons>1.13.9.RELEASE</version.spring-data-commons>
        <version.spring-data-hadoop>2.2.0.RELEASE</version.spring-data-hadoop>
        <version.spring-data-solr>2.1.3.RELEASE</version.spring-data-solr>
        <version.spring-data-redis>1.7.1.RELEASE</version.spring-data-redis>
        <version.spring-data-jpa>1.11.10.RELEASE</version.spring-data-jpa>
        <version.spring-kafka>1.0.5.RELEASE</version.spring-kafka>
        <version.commons-bean-utils>1.9.2</version.commons-bean-utils>
        <version.commons-codec>1.9</version.commons-codec>
        <version.commons-collections>3.2.1</version.commons-collections>
        <version.commons-compress>1.9</version.commons-compress>
        <version.commons-dbcp2>2.1.1</version.commons-dbcp2>
        <version.commons-dbutils>1.6</version.commons-dbutils>
        <version.commons-io>2.4</version.commons-io>
        <version.commons-lang3>3.2.1</version.commons-lang3>
        <version.commons-pool2>2.4.2</version.commons-pool2>
        <version.zookeeper>3.4.6</version.zookeeper>
        <version.hadoop-client>2.6.5</version.hadoop-client>
        <version.hbase-client>1.1.1</version.hbase-client>
        <version.solr>5.5.4</version.solr>
        <version.hibernate>4.3.11.Final</version.hibernate>
        <version.hibernate-validator>6.0.10.Final</version.hibernate-validator>
        <version.hibernate-jpa-api>1.0.0.Final</version.hibernate-jpa-api>
        <version.http-core>4.4.3</version.http-core>
        <version.http-client>4.5.1</version.http-client>
        <version.http-mime>4.5.1</version.http-mime>
        <version.io-dropwizard-metrics>3.1.2</version.io-dropwizard-metrics>
        <version.java>1.8</version.java>
        <version.jackson>2.6.6</version.jackson>
        <version.jedis>2.9.0</version.jedis>
        <version.jcl-over-slf4j>1.7.19</version.jcl-over-slf4j>
        <version.jul-over-slf4j>1.7.21</version.jul-over-slf4j>
        <version.joda-time>2.8.2</version.joda-time>
        <version.junit>4.12</version.junit>
        <version.logging>1.2</version.logging>
        <version.log4j>1.2.17</version.log4j>
        <version.log4j2>2.4.1</version.log4j2>
        <version.logback>1.1.7</version.logback>
        <version.mysql>5.1.45</version.mysql>
        <version.pinyin4j>2.5.0</version.pinyin4j>
        <version.quartz>2.2.3</version.quartz>
        <version.servlet-api>3.1.0</version.servlet-api>
        <version.slf4j>1.7.21</version.slf4j>
        <version.statsd-client>3.1.0</version.statsd-client>
        <version.swagger>2.7.0</version.swagger>
        <version.swagger-ui>2.7.0</version.swagger-ui>
        <version.thrift>0.9.1</version.thrift>
        <version.tomcat-embed>8.5.27</version.tomcat-embed>
        <version.websocket-api>1.1</version.websocket-api>
        <version.zip4j>1.3.2</version.zip4j>
        <version.poi>3.12</version.poi>
        <version.scala>2.10.6</version.scala>
        <version.elasticsearch>2.1.0</version.elasticsearch>
        <version.elasticsearch-sql>2.4.1.0</version.elasticsearch-sql>
        <version.jest>2.4.0</version.jest>
        <version.alibaba-druid>1.0.15</version.alibaba-druid>
        <version.feign>9.5.0</version.feign>
        <version.hystrix>1.5.10</version.hystrix>
        <version.archaius>0.7.5</version.archaius>
        <version.ehr>1.2.0</version.ehr>
        <version.eip>1.3.1</version.eip>
        <version.json>20160212</version.json>
        <version.json-lib>2.4</version.json-lib>
        <version.fastjson>1.2.17</version.fastjson>
        <version.commons-net>3.3</version.commons-net>
        <version.jxl>2.6</version.jxl>
        <version.fastdfs>1.27</version.fastdfs>
        <version.spring.boot.admin>1.5.7</version.spring.boot.admin>
        <version.jettison>1.3.7</version.jettison>
    </properties>
    <dependencyManagement>
        <dependencies>
            <!--<dependency>-->
                <!--<groupId>org.springframework.boot</groupId>-->
                <!--<artifactId>spring-boot-dependencies</artifactId>-->
                <!--<version>1.5.9.RELEASE</version>-->
                <!--<type>pom</type>-->
                <!--<scope>import</scope>-->
            <!--</dependency>-->
            <!--<dependency>-->
                <!--<groupId>org.springframework.cloud</groupId>-->
                <!--<artifactId>spring-cloud-dependencies</artifactId>-->
                <!--<version>Finchley.M5</version>-->
                <!--<type>pom</type>-->
                <!--<scope>import</scope>-->
            <!--</dependency>-->
            <!-- Base library-->
            <dependency>
                <groupId>javax.servlet</groupId>
                <artifactId>javax.servlet-api</artifactId>
                <version>${version.servlet-api}</version>
                <scope>provided</scope>
            </dependency>
            <dependency>
                <groupId>junit</groupId>
                <artifactId>junit</artifactId>
                <version>${version.junit}</version>
                <scope>test</scope>
            </dependency>
            <!-- Spring framework family -->
            <dependency>
                <groupId>org.springframework</groupId>
                <artifactId>spring-aop</artifactId>
                <version>${version.spring-framework}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework</groupId>
                <artifactId>spring-aspects</artifactId>
                <version>${version.spring-framework}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework</groupId>
                <artifactId>spring-beans</artifactId>
                <version>${version.spring-framework}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework</groupId>
                <artifactId>spring-context</artifactId>
                <version>${version.spring-framework}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework</groupId>
                <artifactId>spring-context-support</artifactId>
                <version>${version.spring-framework}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework</groupId>
                <artifactId>spring-core</artifactId>
                <version>${version.spring-framework}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework</groupId>
                <artifactId>spring-expression</artifactId>
                <version>${version.spring-framework}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework</groupId>
                <artifactId>spring-jdbc</artifactId>
                <version>${version.spring-framework}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework</groupId>
                <artifactId>spring-messaging</artifactId>
                <version>${version.spring-framework}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework</groupId>
                <artifactId>spring-orm</artifactId>
                <version>${version.spring-framework}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework</groupId>
                <artifactId>spring-oxm</artifactId>
                <version>${version.spring-framework}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework</groupId>
                <artifactId>spring-test</artifactId>
                <version>${version.spring-framework}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework</groupId>
                <artifactId>spring-tx</artifactId>
                <version>${version.spring-framework}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework</groupId>
                <artifactId>spring-web</artifactId>
                <version>${version.spring-framework}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework</groupId>
                <artifactId>spring-webmvc</artifactId>
                <version>${version.spring-framework}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <!-- https://mvnrepository.com/artifact/org.springframework.kafka/spring-kafka -->
            <dependency>
                <groupId>org.springframework.kafka</groupId>
                <artifactId>spring-kafka</artifactId>
                <version>${version.spring-kafka}</version>
            </dependency>
            <!-- Spring boot family -->
            <dependency>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot</artifactId>
                <version>${version.spring-boot}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-actuator</artifactId>
                <version>${version.spring-boot}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-autoconfigure</artifactId>
                <version>${version.spring-boot}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-devtools</artifactId>
                <version>${version.spring-boot}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-starter</artifactId>
                <version>${version.spring-boot}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-starter-actuator</artifactId>
                <version>${version.spring-boot}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-starter-aop</artifactId>
                <version>${version.spring-boot}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-starter-data-jpa</artifactId>
                <version>${version.spring-boot}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-starter-data-redis</artifactId>
                <version>${version.spring-boot}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-starter-batch</artifactId>
                <version>${version.spring-boot}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-starter-jdbc</artifactId>
                <version>${version.spring-boot}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-starter-security</artifactId>
                <version>${version.spring-boot}</version>
                <scope>${dependency.scope}</scope>
                <optional>true</optional>
            </dependency>
            <dependency>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-starter-thymeleaf</artifactId>
                <version>${version.spring-boot}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-starter-test</artifactId>
                <version>${version.spring-boot}</version>
                <scope>test</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-starter-web</artifactId>
                <version>${version.spring-boot}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-starter-tomcat</artifactId>
                <version>${version.spring-boot}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-starter-data-mongodb</artifactId>
                <version>${version.spring-boot}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <!-- Spring cloud family -->
            <dependency>
                <groupId>org.springframework.cloud</groupId>
                <artifactId>spring-cloud-starter</artifactId>
                <version>${version.spring-cloud-starter}</version>
            </dependency>
            <dependency>
                <groupId>org.springframework.cloud</groupId>
                <artifactId>spring-cloud-config-server</artifactId>
                <version>${version.spring-cloud}</version>
            </dependency>
            <dependency>
                <groupId>org.springframework.cloud</groupId>
                <artifactId>spring-cloud-starter-archaius</artifactId>
                <version>${version.spring-cloud}</version>
            </dependency>
            <dependency>
                <groupId>org.springframework.cloud</groupId>
                <artifactId>spring-cloud-starter-config</artifactId>
                <version>${version.spring-cloud}</version>
            </dependency>
            <dependency>
                <groupId>org.springframework.cloud</groupId>
                <artifactId>spring-cloud-starter-eureka</artifactId>
                <version>${version.spring-cloud}</version>
            </dependency>
            <dependency>
                <groupId>org.springframework.cloud</groupId>
                <artifactId>spring-cloud-starter-eureka-server</artifactId>
                <version>${version.spring-cloud}</version>
            </dependency>
            <dependency>
                <groupId>org.springframework.cloud</groupId>
                <artifactId>spring-cloud-netflix-eureka</artifactId>
                <version>${version.spring-cloud}</version>
            </dependency>
            <dependency>
                <groupId>org.springframework.cloud</groupId>
                <artifactId>spring-cloud-starter-feign</artifactId>
                <version>${version.spring-cloud}</version>
            </dependency>
            <dependency>
                <groupId>org.springframework.cloud</groupId>
                <artifactId>spring-cloud-starter-ribbon</artifactId>
                <version>${version.spring-cloud}</version>
            </dependency>
            <dependency>
                <groupId>org.springframework.cloud</groupId>
                <artifactId>spring-cloud-starter-zuul</artifactId>
                <version>${version.spring-cloud}</version>
            </dependency>
            <dependency>
                <groupId>org.springframework.cloud</groupId>
                <artifactId>spring-cloud-starter-hystrix</artifactId>
                <version>${version.spring-cloud}</version>
            </dependency>
            <dependency>
                <groupId>org.springframework.cloud</groupId>
                <artifactId>spring-cloud-starter-hystrix-dashboard</artifactId>
                <version>${version.spring-cloud}</version>
            </dependency>
            <!-- Feign -->
            <dependency>
                <groupId>io.github.openfeign</groupId>
                <artifactId>feign-core</artifactId>
                <version>${version.feign}</version>
            </dependency>
            <!-- Hystrix -->
            <dependency>
                <groupId>com.netflix.hystrix</groupId>
                <artifactId>hystrix-core</artifactId>
                <version>${version.hystrix}</version>
            </dependency>
            <!-- Archaius -->
            <dependency>
                <groupId>com.netflix.archaius</groupId>
                <artifactId>archaius-core</artifactId>
                <version>${version.archaius}</version>
            </dependency>
            <!-- Spring data family -->
            <dependency>
                <groupId>org.springframework.data</groupId>
                <artifactId>spring-data-commons</artifactId>
                <version>${version.spring-data-commons}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework.data</groupId>
                <artifactId>spring-data-hadoop-hbase</artifactId>
                <version>${version.spring-data-hadoop}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework.data</groupId>
                <artifactId>spring-data-solr</artifactId>
                <version>${version.spring-data-solr}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <!-- Spring session family-->
            <dependency>
                <groupId>org.springframework.session</groupId>
                <artifactId>spring-session-hazelcast</artifactId>
                <version>${version.spring-session-hazelcast}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework.session</groupId>
                <artifactId>spring-session</artifactId>
                <version>${version.spring-session}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework.session</groupId>
                <artifactId>spring-session-data-redis</artifactId>
                <version>${version.spring-session}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <!-- Spring Security -->
            <!--<dependency>
                <groupId>org.springframework.security</groupId>
                <artifactId>spring-security-config</artifactId>
                <version>${version.spring-security}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework.security</groupId>
                <artifactId>spring-security-core</artifactId>
                <version>${version.spring-security}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework.security</groupId>
                <artifactId>spring-security-crypto</artifactId>
                <version>${version.spring-security}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework.security</groupId>
                <artifactId>spring-security-web</artifactId>
                <version>${version.spring-security}</version>
                <scope>${dependency.scope}</scope>
            </dependency>-->
            <!-- Oauth2 -->
            <dependency>
                <groupId>org.springframework.security.oauth</groupId>
                <artifactId>spring-security-oauth2</artifactId>
                <version>${version.spring-oauth2}</version>
                <scope>${dependency.scope}</scope>
                <exclusions>
                    <exclusion>
                        <groupId>org.springframework.security</groupId>
                        <artifactId>*</artifactId>
                    </exclusion>
                </exclusions>
            </dependency>
            <!--Jackson library -->
            <dependency>
                <groupId>com.fasterxml.jackson.core</groupId>
                <artifactId>jackson-annotations</artifactId>
                <version>${version.jackson}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>com.fasterxml.jackson.core</groupId>
                <artifactId>jackson-core</artifactId>
                <version>${version.jackson}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>com.fasterxml.jackson.core</groupId>
                <artifactId>jackson-databind</artifactId>
                <version>${version.jackson}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <!-- Http library -->
            <dependency>
                <groupId>org.apache.httpcomponents</groupId>
                <artifactId>httpcore</artifactId>
                <version>${version.http-core}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.apache.httpcomponents</groupId>
                <artifactId>httpclient</artifactId>
                <version>${version.http-client}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.apache.httpcomponents</groupId>
                <artifactId>httpmime</artifactId>
                <version>${version.http-mime}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <!--Apache commons library -->
            <dependency>
                <groupId>org.codehaus.woodstox</groupId>
                <artifactId>stax2-api</artifactId>
                <version>3.1.4</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.codehaus.woodstox</groupId>
                <artifactId>woodstox-core-asl</artifactId>
                <version>4.4.1</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.noggit</groupId>
                <artifactId>noggit</artifactId>
                <version>0.6</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.apache.commons</groupId>
                <artifactId>commons-lang3</artifactId>
                <version>${version.commons-lang3}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.apache.poi</groupId>
                <artifactId>poi</artifactId>
                <version>${version.poi}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>commons-beanutils</groupId>
                <artifactId>commons-beanutils</artifactId>
                <version>${version.commons-bean-utils}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.apache.commons</groupId>
                <artifactId>commons-pool2</artifactId>
                <version>${version.commons-pool2}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>commons-dbutils</groupId>
                <artifactId>commons-dbutils</artifactId>
                <version>${version.commons-dbutils}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.apache.commons</groupId>
                <artifactId>commons-dbcp2</artifactId>
                <version>${version.commons-dbcp2}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>commons-io</groupId>
                <artifactId>commons-io</artifactId>
                <version>${version.commons-io}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>commons-collections</groupId>
                <artifactId>commons-collections</artifactId>
                <version>${version.commons-collections}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>commons-codec</groupId>
                <artifactId>commons-codec</artifactId>
                <version>${version.commons-codec}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.apache.commons</groupId>
                <artifactId>commons-compress</artifactId>
                <version>${version.commons-compress}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <!-- Mysql library -->
            <dependency>
                <groupId>mysql</groupId>
                <artifactId>mysql-connector-java</artifactId>
                <version>${version.mysql}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <!-- Hibernate framework library -->
            <dependency>
                <groupId>org.hibernate</groupId>
                <artifactId>hibernate-core</artifactId>
                <version>${version.hibernate}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.hibernate</groupId>
                <artifactId>hibernate-validator</artifactId>
                <version>${version.hibernate-validator}</version>
            </dependency>
            <dependency>
                <groupId>org.hibernate</groupId>
                <artifactId>hibernate-entitymanager</artifactId>
                <version>${version.hibernate}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.hibernate.javax.persistence</groupId>
                <artifactId>hibernate-jpa-2.1-api</artifactId>
                <version>${version.hibernate-jpa-api}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <!-- Zookeeper library -->
            <dependency>
                <groupId>org.apache.zookeeper</groupId>
                <artifactId>zookeeper</artifactId>
                <version>${version.zookeeper}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <!-- Hadoop library -->
            <dependency>
                <groupId>org.apache.hadoop</groupId>
                <artifactId>hadoop-annotations</artifactId>
                <version>${version.hadoop-client}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.apache.hadoop</groupId>
                <artifactId>hadoop-auth</artifactId>
                <version>${version.hadoop-client}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.apache.hadoop</groupId>
                <artifactId>hadoop-common</artifactId>
                <version>${version.hadoop-client}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.apache.hadoop</groupId>
                <artifactId>hadoop-distcp</artifactId>
                <version>${version.hadoop-client}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.apache.hadoop</groupId>
                <artifactId>hadoop-hdfs</artifactId>
                <version>${version.hadoop-client}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.apache.hadoop</groupId>
                <artifactId>hadoop-mapreduce-client-common</artifactId>
                <version>${version.hadoop-client}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.apache.hadoop</groupId>
                <artifactId>hadoop-mapreduce-client-core</artifactId>
                <version>${version.hadoop-client}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.apache.hadoop</groupId>
                <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
                <version>${version.hadoop-client}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.apache.hadoop</groupId>
                <artifactId>hadoop-mapreduce-client-shuffle</artifactId>
                <version>${version.hadoop-client}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.apache.hadoop</groupId>
                <artifactId>hadoop-streaming</artifactId>
                <version>${version.hadoop-client}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.apache.hadoop</groupId>
                <artifactId>hadoop-yarn-api</artifactId>
                <version>${version.hadoop-client}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.apache.hadoop</groupId>
                <artifactId>hadoop-yarn-client</artifactId>
                <version>${version.hadoop-client}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.apache.hadoop</groupId>
                <artifactId>hadoop-yarn-common</artifactId>
                <version>${version.hadoop-client}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.apache.hadoop</groupId>
                <artifactId>hadoop-yarn-server-common</artifactId>
                <version>${version.hadoop-client}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.apache.hadoop</groupId>
                <artifactId>hadoop-yarn-server-nodemanager</artifactId>
                <version>${version.hadoop-client}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.apache.hbase</groupId>
                <artifactId>hbase-client</artifactId>
                <version>${version.hbase-client}</version>
                <scope>${dependency.scope}</scope>
                <exclusions>
                    <exclusion>
                        <groupId>org.apache.hadoop</groupId>
                        <artifactId>*</artifactId>
                    </exclusion>
                    <exclusion>
                        <groupId>org.slf4j</groupId>
                        <artifactId>slf4j-log4j12</artifactId>
                    </exclusion>
                </exclusions>
            </dependency>
            <dependency>
                <groupId>org.apache.hbase</groupId>
                <artifactId>hbase-common</artifactId>
                <version>${version.hbase-client}</version>
                <exclusions>
                    <exclusion>
                        <groupId>org.slf4j</groupId>
                        <artifactId>slf4j-log4j12</artifactId>
                    </exclusion>
                </exclusions>
            </dependency>
            <dependency>
                <groupId>org.apache.hbase</groupId>
                <artifactId>hbase-protocol</artifactId>
                <version>${version.hbase-client}</version>
                <exclusions>
                    <exclusion>
                        <groupId>org.slf4j</groupId>
                        <artifactId>slf4j-log4j12</artifactId>
                    </exclusion>
                </exclusions>
            </dependency>
            <!-- export library -->
            <dependency>
                <groupId>net.sourceforge.jexcelapi</groupId>
                <artifactId>jxl</artifactId>
                <version>${version.jxl}</version>
            </dependency>
            <!-- Google library -->
            <dependency>
                <groupId>com.google.guava</groupId>
                <artifactId>guava</artifactId>
                <version>18.0</version>
            </dependency>
            <dependency>
                <groupId>com.google.code.findbugs</groupId>
                <artifactId>jsr305</artifactId>
                <version>3.0.1</version>
            </dependency>
            <dependency>
                <groupId>com.google.code.gson</groupId>
                <artifactId>gson</artifactId>
                <version>2.6.2</version>
            </dependency>
            <dependency>
                <groupId>com.google.protobuf</groupId>
                <artifactId>protobuf-java</artifactId>
                <version>2.5.0</version>
            </dependency>
            <dependency>
                <groupId>com.google.inject</groupId>
                <artifactId>guice</artifactId>
                <version>4.1.0</version>
            </dependency>
            <!-- Solr library -->
            <dependency>
                <groupId>org.apache.solr</groupId>
                <artifactId>solr-core</artifactId>
                <version>${version.solr}</version>
                <scope>${dependency.scope}</scope>
                <exclusions>
                    <exclusion>
                        <groupId>commons-lang</groupId>
                        <artifactId>commons-lang</artifactId>
                    </exclusion>
                </exclusions>
            </dependency>
            <dependency>
                <groupId>org.apache.solr</groupId>
                <artifactId>solr-solrj</artifactId>
                <version>${version.solr}</version>
                <exclusions>
                    <exclusion>
                        <groupId>commons-lang</groupId>
                        <artifactId>commons-lang</artifactId>
                    </exclusion>
                </exclusions>
            </dependency>
            <!-- Elasticsearch -->
            <dependency>
                <groupId>org.elasticsearch</groupId>
                <artifactId>elasticsearch</artifactId>
                <version>${version.elasticsearch}</version>
            </dependency>
            <dependency>
                <groupId>org.nlpcn</groupId>
                <artifactId>elasticsearch-sql</artifactId>
                <version>${version.elasticsearch-sql}</version>
            </dependency>
            <!-- Jest -->
            <dependency>
                <groupId>io.searchbox</groupId>
                <artifactId>jest</artifactId>
                <version>${version.jest}</version>
            </dependency>
            <!-- Redis library -->
            <dependency>
                <groupId>redis.clients</groupId>
                <artifactId>jedis</artifactId>
                <version>${version.jedis}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <!-- FastDFS library -->
            <dependency>
                <groupId>org.csource</groupId>
                <artifactId>fastdfs-client-java</artifactId>
                <version>${version.fastdfs}</version>
            </dependency>
            <!-- Quartz library -->
            <dependency>
                <groupId>org.quartz-scheduler</groupId>
                <artifactId>quartz</artifactId>
                <version>${version.quartz}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.quartz-scheduler</groupId>
                <artifactId>quartz-jobs</artifactId>
                <version>${version.quartz}</version>
            </dependency>
            <!-- Zip library -->
            <dependency>
                <groupId>net.lingala.zip4j</groupId>
                <artifactId>zip4j</artifactId>
                <version>${version.zip4j}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <!-- Pinyin library -->
            <dependency>
                <groupId>com.belerweb</groupId>
                <artifactId>pinyin4j</artifactId>
                <version>${version.pinyin4j}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <!-- Swagger-ui library -->
            <dependency>
                <groupId>io.springfox</groupId>
                <artifactId>springfox-swagger2</artifactId>
                <version>${version.swagger}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>io.springfox</groupId>
                <artifactId>springfox-swagger-ui</artifactId>
                <version>${version.swagger-ui}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <!-- Date/time utility library -->
            <dependency>
                <groupId>joda-time</groupId>
                <artifactId>joda-time</artifactId>
                <version>${version.joda-time}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.apache.thrift</groupId>
                <artifactId>libthrift</artifactId>
                <version>${version.thrift}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>com.timgroup</groupId>
                <artifactId>java-statsd-client</artifactId>
                <version>${version.statsd-client}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>io.dropwizard.metrics</groupId>
                <artifactId>metrics-core</artifactId>
                <version>${version.io-dropwizard-metrics}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <!-- Log framework library -->
            <dependency>
                <groupId>commons-logging</groupId>
                <artifactId>commons-logging</artifactId>
                <version>${version.logging}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>log4j</groupId>
                <artifactId>log4j</artifactId>
                <version>${version.log4j}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>org.slf4j</groupId>
                <artifactId>slf4j-api</artifactId>
                <version>${version.slf4j}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <dependency>
                <groupId>ch.qos.logback</groupId>
                <artifactId>logback-classic</artifactId>
                <version>${version.logback}</version>
                <scope>${dependency.scope}</scope>
            </dependency>
            <!-- Alibaba -->
            <dependency>
                <groupId>com.alibaba</groupId>
                <artifactId>druid</artifactId>
                <version>${version.alibaba-druid}</version>
            </dependency>
            <dependency>
                <groupId>com.alibaba</groupId>
                <artifactId>fastjson</artifactId>
                <version>${version.fastjson}</version>
            </dependency>
            <!-- Spring Boot Admin -->
            <dependency>
                <groupId>de.codecentric</groupId>
                <artifactId>spring-boot-admin-starter-server</artifactId>
                <version>${version.spring.boot.admin}</version>
            </dependency>
            <dependency>
                <groupId>de.codecentric</groupId>
                <artifactId>spring-boot-admin-server-ui</artifactId>
                <version>${version.spring.boot.admin}</version>
            </dependency>
            <!-- Extend library-->
            <dependency>
                <groupId>org.codehaus.jettison</groupId>
                <artifactId>jettison</artifactId>
                <version>${version.jettison}</version>
            </dependency>
            <dependency>
                <groupId>org.json</groupId>
                <artifactId>json</artifactId>
                <version>${version.json}</version>
            </dependency>
            <dependency>
                <groupId>net.sf.json-lib</groupId>
                <artifactId>json-lib</artifactId>
                <version>${version.json-lib}</version>
            </dependency>
            <dependency>
                <groupId>commons-net</groupId>
                <artifactId>commons-net</artifactId>
                <version>${version.commons-net}</version>
            </dependency>
            <!-- Getui (push notification) SDK -->
            <dependency>
                <groupId>com.gexin.platform</groupId>
                <artifactId>gexin-rp-sdk-http</artifactId>
                <version>4.0.1.17</version>
            </dependency>
            <dependency>
                <groupId>com.gexin.platform</groupId>
                <artifactId>gexin-rp-fastjson</artifactId>
                <version>1.0.0.1</version>
            </dependency>
            <dependency>
                <groupId>com.gexin.platform</groupId>
                <artifactId>gexin-rp-sdk-base</artifactId>
                <version>4.0.0.22</version>
            </dependency>
            <dependency>
                <groupId>com.gexin.platform</groupId>
                <artifactId>gexin-rp-sdk-template</artifactId>
                <version>4.0.0.16</version>
            </dependency>
            <dependency>
                <groupId>com.gexin.platform</groupId>
                <artifactId>protobuf-java</artifactId>
                <version>2.5.0</version>
            </dependency>
        </dependencies>
    </dependencyManagement>
    <repositories>
        <repository>
            <id>public</id>
            <name>public</name>
            <url>http://172.19.103.43:8081/nexus/content/groups/public/</url>
            <releases>
                <enabled>true</enabled>
            </releases>
            <snapshots>
                <enabled>false</enabled>
            </snapshots>
        </repository>
    </repositories>
    <pluginRepositories>
        <pluginRepository>
            <id>public</id>
            <name>public</name>
            <url>http://172.19.103.43:8081/nexus/content/groups/public/</url>
            <releases>
                <enabled>true</enabled>
            </releases>
            <snapshots>
                <enabled>false</enabled>
            </snapshots>
        </pluginRepository>
    </pluginRepositories>
    <build>
        <extensions>
            <extension>
                <groupId>org.apache.maven.wagon</groupId>
                <artifactId>wagon-ssh</artifactId>
                <version>2.10</version>
            </extension>
            <extension>
                <groupId>org.apache.maven.wagon</groupId>
                <artifactId>wagon-http-lightweight</artifactId>
                <version>2.10</version>
            </extension>
        </extensions>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <configuration>
                    <source>1.8</source>
                    <target>1.8</target>
                    <compilerArguments>
                        <verbose/>
                        <!-- Use ${path.separator} so the build works on both Windows and Unix -->
                        <bootclasspath>${java.home}/lib/rt.jar${path.separator}${java.home}/lib/jce.jar</bootclasspath>
                    </compilerArguments>
                </configuration>
                <version>3.1</version>
            </plugin>
        </plugins>
    </build>
</project>
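
Before the next file, note that the <dependencyManagement> block above only pins versions and scopes; no artifact is actually pulled in until a child module declares it. The managed entries also reference ${dependency.scope}, which ehr-cloud itself never defines, so a child POM (or an activated profile) is expected to supply it. A minimal sketch of a consuming child, assuming compile scope:

<!-- Hypothetical child-module snippet, not part of this commit. -->
<properties>
    <!-- Referenced by ehr-cloud's managed dependencies but never defined
         there; a child (or profile) must set it. -->
    <dependency.scope>compile</dependency.scope>
</properties>
<dependencies>
    <dependency>
        <!-- No <version>: 1.5.9.RELEASE is inherited from ehr-cloud's
             dependencyManagement section above. -->
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-web</artifactId>
    </dependency>
</dependencies>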

ehr-ms-parent-pom/pom.xml (+606, -0)

@@ -0,0 +1,606 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <groupId>com.yihu.ehr</groupId>
        <artifactId>ehr-cloud</artifactId>
        <version>1.2.0</version>
        <relativePath>../ehr-cloud/pom.xml</relativePath>
    </parent>
    <artifactId>ehr-ms-parent-pom</artifactId>
    <packaging>pom</packaging>
    <description>EHR micro service parent pom</description>
    <build>
        <pluginManagement>
            <plugins>
                <plugin>
                    <groupId>org.jooq</groupId>
                    <artifactId>jooq-codegen-maven</artifactId>
                    <version>${jooq.version}</version>
                </plugin>
                <plugin>
                    <groupId>org.springframework.boot</groupId>
                    <artifactId>spring-boot-maven-plugin</artifactId>
                    <version>${spring-boot.version}</version>
                </plugin>
                <plugin>
                    <groupId>org.apache.maven.plugins</groupId>
                    <artifactId>maven-antrun-plugin</artifactId>
                    <version>1.7</version>
                </plugin>
                <plugin>
                    <groupId>org.apache.maven.plugins</groupId>
                    <artifactId>maven-assembly-plugin</artifactId>
                    <version>2.5.1</version>
                    <configuration>
                        <recompressZippedFiles>false</recompressZippedFiles>
                    </configuration>
                </plugin>
                <plugin>
                    <groupId>org.apache.maven.plugins</groupId>
                    <artifactId>maven-clean-plugin</artifactId>
                    <version>2.5</version>
                </plugin>
                <plugin>
                    <groupId>org.apache.maven.plugins</groupId>
                    <artifactId>maven-deploy-plugin</artifactId>
                    <version>2.8.2</version>
                </plugin>
                <plugin>
                    <groupId>org.apache.maven.plugins</groupId>
                    <artifactId>maven-dependency-plugin</artifactId>
                    <version>2.10</version>
                </plugin>
                <plugin>
                    <groupId>org.apache.maven.plugins</groupId>
                    <artifactId>maven-eclipse-plugin</artifactId>
                    <version>2.9</version>
                </plugin>
                <plugin>
                    <groupId>org.apache.maven.plugins</groupId>
                    <artifactId>maven-failsafe-plugin</artifactId>
                    <version>2.18</version>
                </plugin>
                <plugin>
                    <groupId>org.apache.maven.plugins</groupId>
                    <artifactId>maven-install-plugin</artifactId>
                    <version>2.5.2</version>
                </plugin>
                <plugin>
                    <groupId>org.apache.maven.plugins</groupId>
                    <artifactId>maven-help-plugin</artifactId>
                    <version>2.2</version>
                </plugin>
                <plugin>
                    <groupId>org.apache.maven.plugins</groupId>
                    <artifactId>maven-jar-plugin</artifactId>
                    <version>2.5</version>
                </plugin>
                <plugin>
                    <groupId>org.apache.maven.plugins</groupId>
                    <artifactId>maven-javadoc-plugin</artifactId>
                    <version>2.10.1</version>
                </plugin>
                <plugin>
                    <groupId>org.apache.maven.plugins</groupId>
                    <artifactId>maven-resources-plugin</artifactId>
                    <version>2.7</version>
                </plugin>
                <plugin>
                    <groupId>org.apache.maven.plugins</groupId>
                    <artifactId>maven-shade-plugin</artifactId>
                    <version>2.3</version>
                </plugin>
                <plugin>
                    <groupId>org.apache.maven.plugins</groupId>
                    <artifactId>maven-site-plugin</artifactId>
                    <version>3.3</version>
                    <dependencies>
                        <dependency>
                            <groupId>org.apache.maven.doxia</groupId>
                            <artifactId>doxia-module-markdown</artifactId>
                            <version>1.5</version>
                        </dependency>
                    </dependencies>
                </plugin>
                <plugin>
                    <groupId>org.apache.maven.plugins</groupId>
                    <artifactId>maven-source-plugin</artifactId>
                    <version>2.4</version>
                </plugin>
                <plugin>
                    <groupId>org.apache.maven.plugins</groupId>
                    <artifactId>maven-surefire-plugin</artifactId>
                    <version>2.18.1</version>
                </plugin>
                <plugin>
                    <groupId>org.apache.maven.plugins</groupId>
                    <artifactId>maven-war-plugin</artifactId>
                    <version>2.5</version>
                </plugin>
                <plugin>
                    <groupId>org.codehaus.mojo</groupId>
                    <artifactId>build-helper-maven-plugin</artifactId>
                    <version>1.9.1</version>
                </plugin>
                <plugin>
                    <groupId>org.codehaus.mojo</groupId>
                    <artifactId>exec-maven-plugin</artifactId>
                    <version>1.3.2</version>
                </plugin>
                <plugin>
                    <groupId>org.codehaus.mojo</groupId>
                    <artifactId>versions-maven-plugin</artifactId>
                    <version>2.2</version>
                </plugin>
                <plugin>
                    <groupId>pl.project13.maven</groupId>
                    <artifactId>git-commit-id-plugin</artifactId>
                    <version>2.1.11</version>
                </plugin>
            </plugins>
        </pluginManagement>
        <plugins>
            <plugin>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-maven-plugin</artifactId>
                <executions>
                    <execution>
                        <goals>
                            <goal>repackage</goal>
                        </goals>
                    </execution>
                </executions>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-checkstyle-plugin</artifactId>
                <version>2.17</version>
                <executions>
                    <execution>
                        <id>checkstyle-validation</id>
                        <phase>validate</phase>
                        <configuration>
                            <skip>true</skip>
                            <configLocation>src/checkstyle/checkstyle.xml</configLocation>
                            <suppressionsLocation>src/checkstyle/checkstyle-suppressions.xml</suppressionsLocation>
                            <headerLocation>src/checkstyle/checkstyle-header.txt</headerLocation>
                            <propertyExpansion>checkstyle.build.directory=${project.build.directory}</propertyExpansion>
                            <encoding>UTF-8</encoding>
                            <consoleOutput>true</consoleOutput>
                            <failsOnError>true</failsOnError>
                            <includeTestSourceDirectory>true</includeTestSourceDirectory>
                        </configuration>
                        <goals>
                            <goal>check</goal>
                        </goals>
                    </execution>
                </executions>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <version>3.1</version>
                <configuration>
                    <source>${version.java}</source>
                    <target>${version.java}</target>
                </configuration>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-enforcer-plugin</artifactId>
                <version>1.4</version>
                <executions>
                    <execution>
                        <id>enforce-rules</id>
                        <goals>
                            <goal>enforce</goal>
                        </goals>
                        <configuration>
                            <rules>
                                <requireJavaVersion>
                                    <version>[1.8,)</version>
                                </requireJavaVersion>
                                <!--<requireProperty>
                                    <property>main.basedir</property>
                                </requireProperty>-->
                                <!--<requireProperty>
                                    <property>project.organization.name</property>
                                </requireProperty>-->
                                <!--<requireProperty>
                                    <property>project.name</property>
                                </requireProperty>-->
                                <!--<requireProperty>
                                    <property>project.description</property>
                                </requireProperty>-->
                            </rules>
                            <fail>true</fail>
                        </configuration>
                    </execution>
                </executions>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-jar-plugin</artifactId>
                <configuration>
                    <archive>
                        <manifest>
                            <addDefaultImplementationEntries>true</addDefaultImplementationEntries>
                            <addDefaultSpecificationEntries>true</addDefaultSpecificationEntries>
                        </manifest>
                    </archive>
                </configuration>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-surefire-plugin</artifactId>
                <configuration>
                    <includes>
                        <include>**/*Tests.java</include>
                    </includes>
                    <excludes>
                        <exclude>**/Abstract*.java</exclude>
                    </excludes>
                    <!--<systemPropertyVariables>-->
                    <!--<java.security.egd>file:/dev/./urandom</java.security.egd>-->
                    <!--<java.awt.headless>true</java.awt.headless>-->
                    <!--</systemPropertyVariables>-->
                    <argLine>-Xmx1024m</argLine>
                </configuration>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-war-plugin</artifactId>
                <configuration>
                    <failOnMissingWebXml>false</failOnMissingWebXml>
                </configuration>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-source-plugin</artifactId>
                <executions>
                    <execution>
                        <id>attach-sources</id>
                        <goals>
                            <goal>jar-no-fork</goal>
                        </goals>
                    </execution>
                </executions>
            </plugin>
            <!--<plugin>
                <groupId>org.codehaus.mojo</groupId>
                <artifactId>animal-sniffer-maven-plugin</artifactId>
                <configuration>
                    <skip>${disable.checks}</skip>
                    <signature>
                        <groupId>org.codehaus.mojo.signature</groupId>
                        <artifactId>java16</artifactId>
                        <version>1.0</version>
                    </signature>
                    <annotations>
                        <annotation>org.springframework.lang.UsesJava8</annotation>
                        <annotation>org.springframework.lang.UsesJava7</annotation>
                        <annotation>org.springframework.boot.lang.UsesUnsafeJava</annotation>
                    </annotations>
                </configuration>
                <executions>
                    <execution>
                        <id>enforce-java-6</id>
                        <phase>test</phase>
                        <goals>
                            <goal>check</goal>
                        </goals>
                    </execution>
                </executions>
            </plugin>-->
        </plugins>
    </build>
    <dependencyManagement>
        <dependencies>
            <dependency>
                <groupId>com.yihu.ehr</groupId>
                <artifactId>commons-admin-gateway-model</artifactId>
                <version>${version.ehr}</version>
            </dependency>
            <dependency>
                <groupId>com.yihu.ehr</groupId>
                <artifactId>commons-data-elasticsearch</artifactId>
                <version>${version.ehr}</version>
            </dependency>
            <dependency>
                <groupId>com.yihu.ehr</groupId>
                <artifactId>commons-data-fastdfs</artifactId>
                <version>${version.ehr}</version>
            </dependency>
            <dependency>
                <groupId>com.yihu.ehr</groupId>
                <artifactId>commons-data-hbase</artifactId>
                <version>${version.ehr}</version>
            </dependency>
            <dependency>
                <groupId>com.yihu.ehr</groupId>
                <artifactId>commons-data-mysql</artifactId>
                <version>${version.ehr}</version>
            </dependency>
            <dependency>
                <groupId>com.yihu.ehr</groupId>
                <artifactId>commons-data-query</artifactId>
                <version>${version.ehr}</version>
            </dependency>
            <dependency>
                <groupId>com.yihu.ehr</groupId>
                <artifactId>commons-data-redis</artifactId>
                <version>${version.ehr}</version>
            </dependency>
            <dependency>
                <groupId>com.yihu.ehr</groupId>
                <artifactId>commons-data-solr</artifactId>
                <version>${version.ehr}</version>
            </dependency>
            <dependency>
                <groupId>com.yihu.ehr</groupId>
                <artifactId>commons-ehr-constants</artifactId>
                <version>${version.ehr}</version>
            </dependency>
            <dependency>
                <groupId>com.yihu.ehr</groupId>
                <artifactId>commons-entity</artifactId>
                <version>${version.ehr}</version>
            </dependency>
            <!--<dependency>-->
            <!--<groupId>com.yihu.ehr</groupId>-->
            <!--<artifactId>commons-metrics</artifactId>-->
            <!--<version>${version.ehr}</version>-->
            <!--</dependency>-->
            <dependency>
                <groupId>com.yihu.ehr</groupId>
                <artifactId>commons-profile-core</artifactId>
                <version>${version.ehr}</version>
            </dependency>
            <dependency>
                <groupId>com.yihu.ehr</groupId>
                <artifactId>commons-redis-mq</artifactId>
                <version>${version.ehr}</version>
            </dependency>
            <dependency>
                <groupId>com.yihu.ehr</groupId>
                <artifactId>commons-rest-model</artifactId>
                <version>${version.ehr}</version>
            </dependency>
            <dependency>
                <groupId>com.yihu.hos</groupId>
                <artifactId>common-rest-model</artifactId>
                <version>${version.eip}</version>
            </dependency>
            <dependency>
                <groupId>com.yihu.ehr</groupId>
                <artifactId>commons-ui-swagger</artifactId>
                <version>${version.ehr}</version>
            </dependency>
            <dependency>
                <groupId>com.yihu.ehr</groupId>
                <artifactId>commons-util</artifactId>
                <version>${version.ehr}</version>
            </dependency>
            <dependency>
                <groupId>com.yihu.ehr</groupId>
                <artifactId>commons-web</artifactId>
                <version>${version.ehr}</version>
            </dependency>
        </dependencies>
    </dependencyManagement>
    <!--
    The profiles cover three environments (dev, test, prod) and three deployment modes (jar, war, docker),
    nine combinations in principle, but currently only the dev/test/prod jar builds and the test war build
    are used. Further combinations can be configured as needed; see the example invocations below.
    - dev executable jar: for local debugging; no wagon parameters are required.
    - test/prod executable jars: pushed to a server after the build, so the wagon parameters must be
      configured to suit the target service.
    - dev/test/prod war: deployed through the Tomcat manager API after the build, so the Tomcat parameters
      must be configured to suit the target service.
    -->
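    <!--
    Example invocations (a sketch: -P selects a profile by id and -D overrides the wagon properties;
    the server id and URL values below are illustrative only):
        mvn clean package -P test-jar -DwagonServerId=my-server -DwagonUrl=scp://user@host/home/user/ehr-release
        mvn clean package -P test-war
    -->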
    <profiles>
        <profile>
            <id>dev-jar</id>
            <activation>
                <activeByDefault>true</activeByDefault>
            </activation>
            <properties>
                <skipTests>true</skipTests>
                <packaging.type>jar</packaging.type>
                <dependency.scope>compile</dependency.scope>
            </properties>
            <dependencies>
                <dependency>
                    <groupId>org.springframework.boot</groupId>
                    <artifactId>spring-boot-starter-web</artifactId>
                </dependency>
                <dependency>
                    <groupId>org.springframework.boot</groupId>
                    <artifactId>spring-boot-starter-tomcat</artifactId>
                </dependency>
                <!--<dependency>
                    <groupId>org.springframework.boot</groupId>
                    <artifactId>spring-boot-devtools</artifactId>
                    <scope>${dependency.scope}</scope>
                </dependency>-->
            </dependencies>
        </profile>
        <profile>
            <id>test-jar</id>
            <properties>
                <skipTests>true</skipTests>
                <packaging.type>jar</packaging.type>
                <dependency.scope>compile</dependency.scope>
                <wagonServerId/>
                <wagonUrl/>
            </properties>
            <build>
                <plugins>
                    <plugin>
                        <groupId>org.codehaus.mojo</groupId>
                        <artifactId>wagon-maven-plugin</artifactId>
                        <version>1.0</version>
                        <configuration>
                            <serverId>${wagonServerId}</serverId>
                            <fromFile>${project.build.directory}/${project.build.finalName}.jar</fromFile>
                            <url>${wagonUrl}</url>
                            <commands>
                                <command>pkill -f ${project.build.finalName}.jar</command>
                                <command>nohup java -Djava.security.egd=file:/dev/./urandom -jar
                                    ${project.build.finalName}.jar &amp;
                                </command>
                            </commands>
                            <displayCommandOutputs>true</displayCommandOutputs>
                        </configuration>
                    </plugin>
                </plugins>
            </build>
            <dependencies>
                <dependency>
                    <groupId>org.springframework.boot</groupId>
                    <artifactId>spring-boot-starter-web</artifactId>
                    <exclusions>
                        <exclusion>
                            <groupId>org.springframework.boot</groupId>
                            <artifactId>spring-boot-starter-logging</artifactId>
                        </exclusion>
                    </exclusions>
                </dependency>
                <dependency>
                    <groupId>org.springframework.boot</groupId>
                    <artifactId>spring-boot-devtools</artifactId>
                    <scope>${dependency.scope}</scope>
                </dependency>
            </dependencies>
        </profile>
        <profile>
            <id>test-war</id>
            <activation>
                <property>
                    <name>spring.profiles.active</name>
                    <value>default,test</value>
                </property>
            </activation>
            <properties>
                <skipTests>true</skipTests>
                <packaging.type>war</packaging.type>
                <dependency.scope>provided</dependency.scope>
            </properties>
            <dependencies>
                <dependency>
                    <groupId>org.springframework.boot</groupId>
                    <artifactId>spring-boot-starter-web</artifactId>
                    <scope>${dependency.scope}</scope>
                    <exclusions>
                        <exclusion>
                            <groupId>org.springframework.boot</groupId>
                            <artifactId>spring-boot-starter-logging</artifactId>
                        </exclusion>
                    </exclusions>
                </dependency>
                <dependency>
                    <groupId>org.springframework.boot</groupId>
                    <artifactId>spring-boot-starter-tomcat</artifactId>
                    <scope>${dependency.scope}</scope>
                </dependency>
                <dependency>
                    <groupId>org.springframework.boot</groupId>
                    <artifactId>spring-boot-devtools</artifactId>
                    <scope>${dependency.scope}</scope>
                </dependency>
            </dependencies>
            <build>
                <plugins>
                    <plugin>
                        <groupId>org.apache.tomcat.maven</groupId>
                        <artifactId>tomcat7-maven-plugin</artifactId>
                        <version>2.2</version>
                        <configuration>
                            <url>http://localhost:8080/manager/text</url>
                            <server>tomcat8</server>
                            <username>deployer</username>
                            <password>jkzldeployer</password>
                            <path>/${project.artifactId}</path>
                            <update>true</update>
                        </configuration>
                    </plugin>
                </plugins>
            </build>
        </profile>
        <profile>
            <id>prod-jar</id>
            <activation>
                <property>
                    <name>spring.profiles.active</name>
                    <value>default,prod</value>
                </property>
            </activation>
            <properties>
                <skipTests>true</skipTests>
                <packaging.type>jar</packaging.type>
                <dependency.scope>compile</dependency.scope>
                <wagonServerId>11.1.2.21</wagonServerId>
                <wagonUrl>scp://user:password@11.1.2.21/home/root/ehr-release</wagonUrl>
            </properties>
            <dependencies>
                <dependency>
                    <groupId>org.springframework.boot</groupId>
                    <artifactId>spring-boot-starter-web</artifactId>
                    <scope>${dependency.scope}</scope>
                    <exclusions>
                        <exclusion>
                            <groupId>org.springframework.boot</groupId>
                            <artifactId>spring-boot-starter-logging</artifactId>
                        </exclusion>
                    </exclusions>
                </dependency>
            </dependencies>
        </profile>
        <profile>
            <id>prod-war</id>
            <properties>
                <skipTests>true</skipTests>
                <packaging.type>war</packaging.type>
                <dependency.scope>provided</dependency.scope>
            </properties>
            <dependencies>
                <dependency>
                    <groupId>org.springframework.boot</groupId>
                    <artifactId>spring-boot-starter-web</artifactId>
                    <scope>${dependency.scope}</scope>
                    <exclusions>
                        <exclusion>
                            <groupId>org.springframework.boot</groupId>
                            <artifactId>spring-boot-starter-logging</artifactId>
                        </exclusion>
                    </exclusions>
                </dependency>
                <dependency>
                    <groupId>org.springframework.boot</groupId>
                    <artifactId>spring-boot-starter-tomcat</artifactId>
                    <scope>${dependency.scope}</scope>
                </dependency>
                <dependency>
                    <groupId>org.springframework.boot</groupId>
                    <artifactId>spring-boot-devtools</artifactId>
                    <scope>${dependency.scope}</scope>
                </dependency>
            </dependencies>
        </profile>
    </profiles>
</project>

+ 129 - 0
pom.xml

@ -0,0 +1,129 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <groupId>com.yihu.ehr</groupId>
        <artifactId>ehr-ms-parent-pom</artifactId>
        <version>1.2.0</version>
        <relativePath>ehr-ms-parent-pom/pom.xml</relativePath>
    </parent>
    <artifactId>svr-pack-resolve</artifactId>
    <!--<packaging>${packaging.type}</packaging>-->
    <packaging>war</packaging>
    <properties>
        <wagonServerId>192.168.1.220</wagonServerId>
        <wagonUrl>scp://sand:timeneverstop@192.168.1.221/home/sand/ehr-release</wagonUrl>
    </properties>
    <dependencies>
        <!-- test -->
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-test</artifactId>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>org.springframework.cloud</groupId>
            <artifactId>spring-cloud-starter-config</artifactId>
            <scope>${dependency.scope}</scope>
        </dependency>
        <dependency>
            <groupId>org.springframework.cloud</groupId>
            <artifactId>spring-cloud-starter-eureka</artifactId>
            <scope>${dependency.scope}</scope>
        </dependency>
        <dependency>
            <groupId>org.springframework.cloud</groupId>
            <artifactId>spring-cloud-starter-feign</artifactId>
            <scope>${dependency.scope}</scope>
        </dependency>
        <!-- actuator -->
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-actuator</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.kafka</groupId>
            <artifactId>spring-kafka</artifactId>
        </dependency>
        <dependency>
            <groupId>com.timgroup</groupId>
            <artifactId>java-statsd-client</artifactId>
            <scope>${dependency.scope}</scope>
        </dependency>
        <dependency>
            <groupId>javax.servlet</groupId>
            <artifactId>javax.servlet-api</artifactId>
            <scope>${dependency.scope}</scope>
        </dependency>
        <dependency>
            <groupId>org.quartz-scheduler</groupId>
            <artifactId>quartz</artifactId>
            <scope>${dependency.scope}</scope>
        </dependency>
        <dependency>
            <groupId>com.yihu.ehr</groupId>
            <artifactId>commons-data-hbase</artifactId>
            <scope>compile</scope>
        </dependency>
        <dependency>
            <groupId>com.yihu.ehr</groupId>
            <artifactId>commons-data-fastdfs</artifactId>
            <scope>compile</scope>
        </dependency>
        <dependency>
            <groupId>com.yihu.ehr</groupId>
            <artifactId>commons-ehr-constants</artifactId>
            <scope>compile</scope>
        </dependency>
        <dependency>
            <groupId>com.yihu.ehr</groupId>
            <artifactId>commons-util</artifactId>
            <scope>compile</scope>
        </dependency>
        <dependency>
            <groupId>com.yihu.ehr</groupId>
            <artifactId>commons-profile-core</artifactId>
            <scope>compile</scope>
        </dependency>
        <dependency>
            <groupId>com.yihu.ehr</groupId>
            <artifactId>commons-ui-swagger</artifactId>
            <scope>compile</scope>
        </dependency>
        <dependency>
            <groupId>com.yihu.ehr</groupId>
            <artifactId>commons-rest-model</artifactId>
            <scope>compile</scope>
        </dependency>
        <dependency>
            <groupId>com.yihu.ehr</groupId>
            <artifactId>commons-data-redis</artifactId>
            <scope>compile</scope>
        </dependency>
        <dependency>
            <groupId>com.yihu.ehr</groupId>
            <artifactId>commons-web</artifactId>
            <scope>compile</scope>
        </dependency>
        <dependency>
            <groupId>com.yihu.ehr</groupId>
            <artifactId>commons-data-mysql</artifactId>
            <scope>compile</scope>
        </dependency>
        <dependency>
            <groupId>com.yihu.ehr</groupId>
            <artifactId>commons-entity</artifactId>
            <scope>compile</scope>
        </dependency>
        <dependency>
            <groupId>com.yihu.ehr</groupId>
            <artifactId>commons-data-elasticsearch</artifactId>
        </dependency>
    </dependencies>
</project>

+ 10 - 0
readme.md

@ -0,0 +1,10 @@
# Version changes:
#### 1. Create the HealthFile table with column families basic,d
    curl -X POST --header 'Content-Type: application/json' --header 'Accept: text/plain' 'http://172.17.110.227:10220/api/v1.0/createTable?tableName=HealthFile&columnFamilies=basic%2Cd'
#### 2. Create the HealthFileSub table with column families basic,d
    curl -X POST --header 'Content-Type: application/json' --header 'Accept: text/plain' 'http://172.17.110.227:10220/api/v1.0/createTable?tableName=HealthFileSub&columnFamilies=basic%2Cd'
#### 3. When configuring HBase cluster replication, set REPLICATION_SCOPE to 1 on each column family:
    1. alter 'HealthFile', NAME => 'basic', REPLICATION_SCOPE => '1'
    2. alter 'HealthFile', NAME => 'd', REPLICATION_SCOPE => '1'
    3. alter 'HealthFileSub', NAME => 'basic', REPLICATION_SCOPE => '1'
    4. alter 'HealthFileSub', NAME => 'd', REPLICATION_SCOPE => '1'
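#### 4. Equivalent setup through the HBase Java client
A minimal sketch of the same table setup (steps 1-3 combined) via the HBase 1.x Admin API. It assumes the HBase client jar and an hbase-site.xml with the ZooKeeper quorum are on the classpath; the table and column family names are taken from steps 1-3 above.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.HColumnDescriptor;
    import org.apache.hadoop.hbase.HTableDescriptor;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Admin;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;

    public class CreateHealthFileTables {
        public static void main(String[] args) throws Exception {
            Configuration conf = HBaseConfiguration.create(); // reads hbase-site.xml from the classpath
            try (Connection connection = ConnectionFactory.createConnection(conf);
                 Admin admin = connection.getAdmin()) {
                for (String name : new String[]{"HealthFile", "HealthFileSub"}) {
                    HTableDescriptor table = new HTableDescriptor(TableName.valueOf(name));
                    for (String family : new String[]{"basic", "d"}) {
                        HColumnDescriptor cf = new HColumnDescriptor(family);
                        cf.setScope(1); // REPLICATION_SCOPE => '1', as in step 3
                        table.addFamily(cf);
                    }
                    if (!admin.tableExists(table.getTableName())) {
                        admin.createTable(table);
                    }
                }
            }
        }
    }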

+ 33 - 0
src/main/java/com/yihu/ehr/SvrPackResolve.java

@ -0,0 +1,33 @@
package com.yihu.ehr;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.autoconfigure.security.SecurityAutoConfiguration;
import org.springframework.boot.builder.SpringApplicationBuilder;
import org.springframework.boot.web.support.SpringBootServletInitializer;
import org.springframework.cloud.client.discovery.EnableDiscoveryClient;
import org.springframework.cloud.netflix.feign.EnableFeignClients;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.EnableAspectJAutoProxy;
import org.springframework.scheduling.annotation.EnableScheduling;
@Configuration
@EnableAutoConfiguration(exclude = {
        SecurityAutoConfiguration.class})
@ComponentScan
@EnableDiscoveryClient
@EnableAspectJAutoProxy(proxyTargetClass = true)
@EnableFeignClients
@EnableScheduling
public class SvrPackResolve extends SpringBootServletInitializer {
    public static void main(String[] args) {
        SpringApplication.run(SvrPackResolve.class, args);
    }
    @Override
    protected SpringApplicationBuilder configure(SpringApplicationBuilder application) {
        return application.sources(SvrPackResolve.class);
    }
}

+ 222 - 0
src/main/java/com/yihu/ehr/resolve/FilePackageResolver.java

@ -0,0 +1,222 @@
package com.yihu.ehr.resolve;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.yihu.ehr.constants.UrlScope;
import com.yihu.ehr.fastdfs.FastDFSUtil;
import com.yihu.ehr.profile.EventType;
import com.yihu.ehr.profile.exception.IllegalJsonDataException;
import com.yihu.ehr.profile.exception.IllegalJsonFileException;
import com.yihu.ehr.profile.extractor.KeyDataExtractor;
import com.yihu.ehr.profile.family.ResourceCells;
import com.yihu.ehr.profile.model.MetaDataRecord;
import com.yihu.ehr.profile.model.PackageDataSet;
import com.yihu.ehr.resolve.model.stage1.FilePackage;
import com.yihu.ehr.resolve.model.stage1.OriginalPackage;
import com.yihu.ehr.resolve.model.stage1.details.CdaDocument;
import com.yihu.ehr.resolve.model.stage1.details.OriginFile;
import com.yihu.ehr.util.datetime.DateTimeUtil;
import com.yihu.ehr.util.datetime.DateUtil;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import java.io.File;
import java.util.Date;
import java.util.Iterator;
import java.util.Map;
/**
 * Resolver for unstructured (file) archive packages.
 *
 * @author Sand
 * @version 1.0
 * @created 2015.09.09 15:04
 */
@Component
public class FilePackageResolver extends PackageResolver {
    @Autowired
    private FastDFSUtil fastDFSUtil;
    @Override
    public void resolve(OriginalPackage originalPackage, File root) throws Exception {
        File documents = new File(root.getAbsolutePath() + File.separator + "documents.json");
        parseFile((FilePackage) originalPackage, documents);
    }
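    /*
     * Expected layout of documents.json, as read by parseFile/parseDataSets/parseFiles
     * below (field names are taken from the parsing code; values are illustrative only):
     *
     * {
     *   "patient_id": "...", "event_no": "...", "org_code": "...",
     *   "event_type": 0, "event_time": "...", "create_date": "...",
     *   "inner_version": "...", "demographic_id": "...",
     *   "data_sets": { "<data set code>": [ { "<meta data code>": "<value>" } ] },
     *   "files": [ {
     *     "cda_doc_id": "...", "expire_date": "...",
     *     "content": [ { "mime_type": "...", "url_scope": "public|private", "url": "...",
     *                    "emr_id": "...", "emr_name": "...", "note": "...",
     *                    "name": "a.jpg;b.jpg" } ]
     *   } ]
     * }
     */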
    private void parseFile(FilePackage filePackage, File documents) throws Exception {
        JsonNode root = objectMapper.readTree(documents);
        if (root.isNull()) {
            throw new IllegalJsonFileException("Invalid json file when generate data set");
        }
        String demographicId = root.get("demographic_id") == null ? "" : root.get("demographic_id").asText();
        String patientId = root.get("patient_id") == null ? "" : root.get("patient_id").asText();
        String orgCode = root.get("org_code") == null ? "" : root.get("org_code").asText();
        String eventNo = root.get("event_no") == null ? "" : root.get("event_no").asText();
        int eventType = root.get("event_type") == null ? -1 : root.get("event_type").asInt();
        String eventDate = root.get("event_time") == null ? "" : root.get("event_time").asText();
        String createDate = root.get("create_date") == null ? "" : root.get("create_date").asText();
        String cdaVersion = root.get("inner_version") == null ? "" : root.get("inner_version").asText();
        // Validate the basic archive package data; if any required field is empty, report the missing package information directly.
        StringBuilder errorMsg = new StringBuilder();
        if (StringUtils.isEmpty(patientId)){
            errorMsg.append("patientId is null;");
        }
        if (StringUtils.isEmpty(eventNo)){
            errorMsg.append("eventNo is null;");
        }
        if (StringUtils.isEmpty(orgCode)){
            errorMsg.append("orgCode is null;");
        }
        if (StringUtils.isEmpty(cdaVersion)) {
            errorMsg.append("innerVersion is null;");
        }
        if (StringUtils.isEmpty(eventDate)) {
            errorMsg.append("eventTime is null;");
        }
        if (!StringUtils.isEmpty(errorMsg.toString())){
            throw new IllegalJsonDataException(errorMsg.toString());
        }
        filePackage.setPatientId(patientId);
        filePackage.setEventNo(eventNo);
        if (eventType != -1) {
            filePackage.setEventType(EventType.create(eventType));
        }
        filePackage.setOrgCode(orgCode);
        filePackage.setCdaVersion(cdaVersion);
        filePackage.setCreateDate(DateUtil.strToDate(createDate));
        filePackage.setEventTime(DateUtil.strToDate(eventDate));
        filePackage.setDemographicId(demographicId);
        parseDataSets(filePackage, (ObjectNode) root.get("data_sets"));
        parseFiles(filePackage, (ArrayNode) root.get("files"), documents.getParent() + File.separator + "documents");
    }
    private void parseDataSets(FilePackage filePackage, ObjectNode dataSets) throws Exception {
        if (dataSets == null) {
            return;
        }
        Iterator<Map.Entry<String, JsonNode>> iterator = dataSets.fields();
        while (iterator.hasNext()) {
            Map.Entry<String, JsonNode> item = iterator.next();
            String dataSetCode = item.getKey();
            PackageDataSet dataSet = new PackageDataSet();
            dataSet.setCode(dataSetCode);
            dataSet.setPatientId(filePackage.getPatientId());
            dataSet.setEventNo(filePackage.getEventNo());
            dataSet.setOrgCode(filePackage.getOrgCode());
            dataSet.setCdaVersion(filePackage.getCdaVersion());
            dataSet.setCreateTime(filePackage.getCreateDate());
            dataSet.setEventTime(filePackage.getEventTime());
            ArrayNode records = (ArrayNode) item.getValue();
            for (int i = 0; i < records.size(); ++i) {
                MetaDataRecord record = new MetaDataRecord();
                ObjectNode jsonRecord = (ObjectNode) records.get(i);
                Iterator<Map.Entry<String, JsonNode>> filedIterator = jsonRecord.fields();
                while (filedIterator.hasNext()) {
                    Map.Entry<String, JsonNode> field = filedIterator.next();
                    //String metaData = translateMetaDataCode(profile.getCdaVersion(), dataSetCode, field.getKey());
                    String value = field.getValue().asText().equals("null") ? "" : field.getValue().asText();
                    if (field.getKey() != null) {
                        record.putMetaData(field.getKey(), value);
                    }
                }
                dataSet.addRecord(Integer.toString(i), record);
            }
            // Extract identity information
            if (StringUtils.isEmpty(filePackage.getDemographicId()) || StringUtils.isEmpty(filePackage.getPatientName())) {
                Map<String, Object> properties = extractorChain.doExtract(dataSet, KeyDataExtractor.Filter.Identity);
                String demographicId = (String) properties.get(ResourceCells.DEMOGRAPHIC_ID);
                String patientName = (String) properties.get(ResourceCells.PATIENT_NAME);
                if (!StringUtils.isEmpty(demographicId)) {
                    filePackage.setDemographicId(demographicId.trim());
                }
                if (!StringUtils.isEmpty(patientName)) {
                    filePackage.setPatientName(patientName);
                }
            }
            filePackage.insertDataSet(dataSetCode, dataSet);
        }
    }
    private void parseFiles(FilePackage profile, ArrayNode files, String documentsPath) throws Exception {
        for (int i = 0; i < files.size(); ++i) {
            ObjectNode objectNode = (ObjectNode) files.get(i);
            String cdaDocumentId = objectNode.get("cda_doc_id").asText();
            Date expireDate = null;
            if (objectNode.get("expire_date") != null) {
                expireDate = DateTimeUtil.simpleDateParse(objectNode.get("expire_date").asText());
            }
            // During resolution, the CDA document id serves as the key of the document list; after resolution completes it is uniformly replaced with the rowkey
            CdaDocument cdaDocument = profile.getCdaDocuments().get(cdaDocumentId);
            if (cdaDocument == null) {
                cdaDocument = new CdaDocument();
                cdaDocument.setId(cdaDocumentId);
                profile.getCdaDocuments().put(cdaDocumentId, cdaDocument);
            }
            ArrayNode content = (ArrayNode) objectNode.get("content");
            for (int j = 0; j < content.size(); ++j) {
                ObjectNode file = (ObjectNode) content.get(j);
                JsonNode mimeNode = file.get("mime_type");
                if (mimeNode == null) {
                    throw new IllegalJsonFileException("mime_type is null");
                }
                String mimeType = mimeNode.asText(); // required
                String urlScope = file.get("url_scope") == null ? "" : file.get("url_scope").asText(); // optional
                String url = file.get("url") == null ? "" : file.get("url").asText(); // optional
                JsonNode emrIdNode = file.get("emr_id");
                if (emrIdNode == null) {
                    throw new IllegalJsonFileException("emr_id is null");
                }
                String emrId = emrIdNode.asText();
                JsonNode emrNameNode = file.get("emr_name");
                if (emrNameNode == null) {
                    throw new IllegalJsonFileException("emr_name is null");
                }
                String emrName = emrNameNode.asText();
                String note = file.has("note") ? file.get("note").asText() : ""; // optional
                OriginFile originFile = new OriginFile();
                originFile.setMime(mimeType);
                originFile.setExpireDate(expireDate);
                if ("public".equalsIgnoreCase(urlScope)) {
                    originFile.setUrlScope(UrlScope.valueOf(0));
                } else if ("private".equalsIgnoreCase(urlScope)) {
                    originFile.setUrlScope(UrlScope.valueOf(1));
                }
                originFile.setEmrId(emrId);
                originFile.setEmrName(emrName);
                if (!StringUtils.isBlank(note)) {
                    originFile.setNote(note);
                }
                if (file.get("name") != null) {
                    String fileList[] = file.get("name").asText().split(";");
                    for (String fileName : fileList) {
                        File f = new File(documentsPath + File.separator + fileName);
                        if (f.exists()) {
                            String storageUrl = saveFile(documentsPath + File.separator + fileName);
                            originFile.addUrl(fileName.substring(0, fileName.indexOf('.')), storageUrl);
                        }
                    }
                }
                if (!StringUtils.isEmpty(url)) {
                    for (String fileUrl : url.split(",")) {
                        originFile.addUrl(fileUrl, fileUrl);
                    }
                }
                cdaDocument.getOriginFiles().add(originFile);
            }
        }
    }
    private String saveFile(String fileName) throws Exception {
        ObjectNode objectNode = fastDFSUtil.upload(fileName, "File from unstructured profile package.");
        return objectNode.get(FastDFSUtil.GROUP_NAME).asText() + "/" + objectNode.get(FastDFSUtil.REMOTE_FILE_NAME).asText();
    }
}

+ 224 - 0
src/main/java/com/yihu/ehr/resolve/ImmediateDataResolver.java

@ -0,0 +1,224 @@
package com.yihu.ehr.resolve;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.yihu.ehr.profile.EventType;
import com.yihu.ehr.profile.extractor.ExtractorChain;
import com.yihu.ehr.profile.extractor.KeyDataExtractor;
import com.yihu.ehr.profile.family.ResourceCells;
import com.yihu.ehr.profile.model.MetaDataRecord;
import com.yihu.ehr.profile.model.PackageDataSet;
import com.yihu.ehr.profile.exception.IllegalJsonDataException;
import com.yihu.ehr.profile.exception.IllegalJsonFileException;
import com.yihu.ehr.resolve.model.stage1.StandardPackage;
import com.yihu.ehr.util.datetime.DateTimeUtil;
import com.yihu.ehr.util.datetime.DateUtil;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import java.text.ParseException;
import java.util.*;
/**
 * Resolver for immediate-interaction archive data.
 *
 * @author HZY
 * @created 2018.01.04 14:40
 */
@Component
public class ImmediateDataResolver {
    @Autowired
    protected ObjectMapper objectMapper;
    @Autowired
    protected ExtractorChain extractorChain;
    public void resolve(StandardPackage standardPackage, String data) throws Exception {
        // Parse the standard data
        parseData(standardPackage, data);
    }
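    /*
     * Expected layout of the immediate-interaction payload, as read by
     * parseStructuredImmediateJson below (field names are taken from the parsing
     * code; values are illustrative only):
     *
     * {
     *   "patient_id": "...", "event_no": "...", "org_code": "...",
     *   "inner_version": "...", "create_date": "...", "event_time": "...",
     *   "event_type": 0, "reUploadFlg": false,
     *   "data": { "<data set code>": [ { "<meta data code>": "<value>" } ] }
     * }
     */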
    /**
     * Parse the JSON data of an immediate-interaction archive.
     *
     * @param standardPackage the standard archive package
     * @param data            the immediate-interaction JSON data
     * @throws Exception
     */
    private void parseData(StandardPackage standardPackage, String data) throws Exception {
        // Parse the data set data
        JsonNode dataNode = objectMapper.readValue(data, JsonNode.class);
        if (dataNode.isNull()) {
            throw new IllegalJsonFileException("Invalid json file when generate data set");
        }
        JsonNode eventTypeNode = dataNode.get("event_type");
        if (eventTypeNode == null){
            throw new IllegalJsonDataException("Not event_type in json data when generate data set");
        }
        int eventType = dataNode.get("event_type").asInt();
        List<PackageDataSet> packageDataSetList = parseStructuredImmediateJson(dataNode);
        if (packageDataSetList != null) {
            for (PackageDataSet dataSet : packageDataSetList) {
                if (dataSet.isReUploadFlg()) {
                    standardPackage.setReUploadFlg(true);
                }
            }
        }
        if (standardPackage.isReUploadFlg()) {
            for (PackageDataSet dataSet : packageDataSetList) {
                standardPackage.setEventTime(dataSet.getEventTime());
                standardPackage.setPatientId(dataSet.getPatientId());
                standardPackage.setEventNo(dataSet.getEventNo());
                standardPackage.setOrgCode(dataSet.getOrgCode());
                standardPackage.setCdaVersion(dataSet.getCdaVersion());
                standardPackage.setCreateDate(dataSet.getCreateTime());
                standardPackage.insertDataSet(dataSet.getCode(), dataSet);
            }
            return;
        }
        for (PackageDataSet dataSet : packageDataSetList) {
            // Each JSON entry becomes a single data set
            String dataSetCode = dataSet.getCode();
            // Extract key data from data set if exists
            // Visit card information
            if (StringUtils.isEmpty(standardPackage.getCardId()) || StringUtils.isEmpty(standardPackage.getCardType())) {
                Map<String, Object> properties = extractorChain.doExtract(dataSet, KeyDataExtractor.Filter.CardInfo);
                String cardId = (String) properties.get(ResourceCells.CARD_ID);
                String cardType = (String) properties.get(ResourceCells.CARD_TYPE);
                if (!StringUtils.isEmpty(cardId)) {
                    standardPackage.setCardId(cardId);
                }
                if (!StringUtils.isEmpty(cardType)) {
                    standardPackage.setCardType(cardType);
                }
            }
            // Identity information
            if (StringUtils.isEmpty(standardPackage.getDemographicId()) || StringUtils.isEmpty(standardPackage.getPatientName())) {
                Map<String, Object> properties = extractorChain.doExtract(dataSet, KeyDataExtractor.Filter.Identity);
                String demographicId = (String) properties.get(ResourceCells.DEMOGRAPHIC_ID);
                String patientName = (String) properties.get(ResourceCells.PATIENT_NAME);
                if (!StringUtils.isEmpty(demographicId)) {
                    standardPackage.setDemographicId(demographicId);
                }
                if (!StringUtils.isEmpty(patientName)) {
                    standardPackage.setPatientName(patientName);
                }
            }
            // Visit event information
            if (standardPackage.getEventTime() == null || standardPackage.getEventType() == null) {
                Date eventDate = dataSet.getEventTime();
                EventType mEventType = EventType.create(eventType);
                if (eventDate != null) {
                    standardPackage.setEventTime(eventDate);
                }
                if (mEventType != null) {
                    standardPackage.setEventType(mEventType);
                }
            }
            // Outpatient or inpatient diagnosis
            if (standardPackage.getDiagnosisCode() == null || standardPackage.getDiagnosisCode().size() <= 0) {
                Map<String, Object> properties = extractorChain.doExtract(dataSet, KeyDataExtractor.Filter.Diagnosis);
                Set<String> diagnosisList = (Set<String>) properties.get(ResourceCells.DIAGNOSIS);
                if (diagnosisList != null && diagnosisList.size() > 0) {
                    standardPackage.setDiagnosisCode(diagnosisList);
                }
            }
            standardPackage.setPatientId(dataSet.getPatientId());
            standardPackage.setEventNo(dataSet.getEventNo());
            standardPackage.setOrgCode(dataSet.getOrgCode());
            standardPackage.setCdaVersion(dataSet.getCdaVersion());
            standardPackage.setCreateDate(dataSet.getCreateTime());
            standardPackage.insertDataSet(dataSetCode, dataSet);
        }
    }
    /**
     * Structured data set processing for immediate-interaction archives.
     * @param jsonNode
     * @return
     */
    private List<PackageDataSet> parseStructuredImmediateJson(JsonNode jsonNode) {
        List<PackageDataSet> packageDataSetList = new ArrayList<>();
        // Obtain the visit event index information
        String patientId = jsonNode.get("patient_id") == null ? "" : jsonNode.get("patient_id").asText();
        String eventNo = jsonNode.get("event_no") == null ? "" : jsonNode.get("event_no").asText();
        String orgCode = jsonNode.get("org_code") == null ? "" : jsonNode.get("org_code").asText();
        String version = jsonNode.get("inner_version") == null ? "" : jsonNode.get("inner_version").asText();
        String createTime = jsonNode.get("create_date") == null ? "" : jsonNode.get("create_date").asText();
        String eventTime = jsonNode.get("event_time") == null ? "" : jsonNode.get("event_time").asText();
        boolean reUploadFlg = jsonNode.get("reUploadFlg") == null ? false : jsonNode.get("reUploadFlg").asBoolean();
        // Validate the basic archive data; if any required field is empty, report the missing archive information directly.
        StringBuilder errorMsg = new StringBuilder();
        if (StringUtils.isEmpty(patientId)){
            errorMsg.append("patientId is null;");
        }
        if (StringUtils.isEmpty(eventNo)){
            errorMsg.append("eventNo is null;");
        }
        if (StringUtils.isEmpty(orgCode)){
            errorMsg.append("orgCode is null;");
        }
        if (null == jsonNode.get("data")){
            errorMsg.append("dataSets is null;");
        }
        if (!StringUtils.isEmpty(errorMsg.toString())){
            throw new IllegalJsonDataException(errorMsg.toString());
        }
        // Obtain the data set JSON data from the archive
        JsonNode dataNode = jsonNode.get("data");
        Iterator<Map.Entry<String, JsonNode>> fields = dataNode.fields();
        while (fields.hasNext()){
            // Iterate over the standard data set entries
            Map.Entry<String, JsonNode> next = fields.next();
            String dataSetCode = next.getKey();
            ArrayNode dataSets = (ArrayNode) next.getValue();
            PackageDataSet dataSet = new PackageDataSet();
            dataSet.setPatientId(patientId);
            dataSet.setEventNo(eventNo);
            dataSet.setCdaVersion(version);
            dataSet.setCode(dataSetCode);
            dataSet.setOrgCode(orgCode);
            try {
                dataSet.setEventTime(DateUtil.strToDate(eventTime));
                dataSet.setCreateTime(DateTimeUtil.simpleDateParse(createTime));
            } catch (ParseException e) {
                throw new IllegalJsonDataException("Invalid date time format.");
            }
            dataSet.setReUploadFlg(reUploadFlg);
            for (int i = 0; i < dataSets.size(); i++) {
                MetaDataRecord record = new MetaDataRecord();
                JsonNode recordNode = dataSets.get(i);
                Iterator<Map.Entry<String, JsonNode>> iterator = recordNode.fields();
                while (iterator.hasNext()) {
                    Map.Entry<String, JsonNode> item = iterator.next();
                    String metaDataKey = item.getKey();
                    String metaDataValue = item.getValue().asText().equals("null") ? "" : item.getValue().asText();
                    record.putMetaData(metaDataKey, metaDataValue);
                }
                dataSet.addRecord(Integer.toString(i), record);
            }
            packageDataSetList.add(dataSet);
        }
        return packageDataSetList;
    }
}

+ 262 - 0
src/main/java/com/yihu/ehr/resolve/LinkPackageResolver.java

@ -0,0 +1,262 @@
package com.yihu.ehr.resolve;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.yihu.ehr.fastdfs.FastDFSUtil;
import com.yihu.ehr.profile.EventType;
import com.yihu.ehr.profile.exception.IllegalJsonDataException;
import com.yihu.ehr.profile.exception.IllegalJsonFileException;
import com.yihu.ehr.profile.exception.ResolveException;
import com.yihu.ehr.profile.extractor.KeyDataExtractor;
import com.yihu.ehr.profile.family.ResourceCells;
import com.yihu.ehr.profile.model.LinkPackageDataSet;
import com.yihu.ehr.profile.model.MetaDataRecord;
import com.yihu.ehr.resolve.model.stage1.LinkPackage;
import com.yihu.ehr.resolve.model.stage1.OriginalPackage;
import com.yihu.ehr.resolve.model.stage1.details.LinkFile;
import com.yihu.ehr.util.datetime.DateUtil;
import com.yihu.ehr.util.encrypt.MD5;
import com.yihu.ehr.util.ftp.FtpUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.net.ftp.FTPClient;
import org.apache.commons.net.ftp.FTPFile;
import org.csource.common.NameValuePair;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import java.io.File;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
/**
 * Resolver for lightweight (link) archive packages.
 *
 * @author Sand
 * @version 1.0
 * @created 2015.09.09 15:04
 */
@Component
public class LinkPackageResolver extends PackageResolver {
    @Value("${ftp.address}")
    private String address;
    @Value("${ftp.username}")
    private String username;
    @Value("${ftp.password}")
    private String password;
    @Value("${ftp.port}")
    private int port;
    @Value("${fast-dfs.pacs-group-name:group1}")
    private String groupName;
    @Autowired
    private FastDFSUtil fastDFSUtil;
    @Override
    public void resolve(OriginalPackage originalPackage, File root) throws Exception {
        File indexFile = new File(root.getAbsolutePath() + File.separator  + "index" + File.separator + "patient_index.json");
        parseFile((LinkPackage) originalPackage, indexFile);
    }
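    /*
     * Expected layout of index/patient_index.json, as read by parseFile below
     * (field names are taken from the parsing code; values are illustrative only):
     *
     * {
     *   "patient_id": "...", "event_no": "...", "org_code": "...",
     *   "inner_version": "...", "visit_type": "...", "event_time": "...",
     *   "expire_date": "...",
     *   "dataset": { "<data set code>": "<data set URL>" },
     *   "files": [ { "file": "a.dcm", "url": "ftp:/path/to/a.dcm", "md5": "...",
     *                "report_form_no": "...", "serial_no": "..." } ]
     * }
     */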
    private void parseFile(LinkPackage linkPackage, File indexFile) throws Exception {
        JsonNode jsonNode = objectMapper.readTree(indexFile);
        if (jsonNode.isNull()) {
            throw new IllegalJsonFileException("Invalid json file when generate data set");
        }
        String patientId = jsonNode.get("patient_id") == null ? "" : jsonNode.get("patient_id").asText();
        String eventNo = jsonNode.get("event_no") == null ? "" : jsonNode.get("event_no").asText();
        String orgCode = jsonNode.get("org_code") == null ? "" : jsonNode.get("org_code").asText();
        String version = jsonNode.get("inner_version") == null ? "" : jsonNode.get("inner_version").asText();
        String visitType = jsonNode.get("visit_type") == null? "" : jsonNode.get("visit_type").asText();
        String eventDate = jsonNode.get("event_time") == null ? "" : jsonNode.get("event_time").asText();
        String expireDate = jsonNode.get("expire_date") == null? "" : jsonNode.get("expire_date").asText();
        // Validate the basic archive package data; if any required field is empty, report the missing package information directly.
        StringBuilder errorMsg = new StringBuilder();
        if (StringUtils.isEmpty(patientId)){
            errorMsg.append("patientId is null;");
        }
        if (StringUtils.isEmpty(eventNo)){
            errorMsg.append("eventNo is null;");
        }
        if (StringUtils.isEmpty(orgCode)){
            errorMsg.append("orgCode is null;");
        }
        if (StringUtils.isEmpty(version)) {
            errorMsg.append("innerVersion is null;");
        }
        if (StringUtils.isEmpty(eventDate)) {
            errorMsg.append("eventTime is null;");
        }
        if (StringUtils.isEmpty(visitType)) {
            errorMsg.append("visitType is null;");
        }
        if (!StringUtils.isEmpty(errorMsg.toString())){
            throw new IllegalJsonDataException(errorMsg.toString());
        }
        linkPackage.setPatientId(patientId);
        linkPackage.setEventNo(eventNo);
        linkPackage.setEventType(EventType.create(visitType));
        linkPackage.setOrgCode(orgCode);
        linkPackage.setCdaVersion(version);
        linkPackage.setEventTime(DateUtil.strToDate(eventDate));
        linkPackage.setVisitType(visitType);
        linkPackage.setExpireDate(DateUtil.strToDate(expireDate));
        // The dataset node stores the data set URLs
        JsonNode dataSetNode = jsonNode.get("dataset");
        Iterator<String> fieldNames = dataSetNode.fieldNames();
        while (fieldNames.hasNext()) {
            String dataSetCode = fieldNames.next();
            String url = dataSetNode.get(dataSetCode).asText();
            LinkPackageDataSet dataSet = new LinkPackageDataSet();
            dataSet.setCode(dataSetCode);
            dataSet.setPatientId(patientId);
            dataSet.setEventNo(eventNo);
            dataSet.setOrgCode(orgCode);
            dataSet.setCdaVersion(version);
            dataSet.setEventTime(DateUtil.strToDate(eventDate));
            dataSet.setUrl(url);
            linkPackage.insertDataSet(dataSetCode, dataSet);
        }
        //-------------- FTP imaging file resolution --------------
        JsonNode filesNode = jsonNode.get("files");
        if (filesNode != null){
            List<LinkFile> linkFiles = linkPackage.getLinkFiles();
            Map<String,List<String>> needDeleteFiles = linkPackage.getFiles();
            ArrayNode arrayNode = (ArrayNode) filesNode;
            FtpUtils ftpUtils = null;
            try {
                ftpUtils = new FtpUtils(username, password, address, port);
                ftpUtils.connect();
                FTPClient ftpClient = ftpUtils.getFtpClient();
                for (int i = 0; i < arrayNode.size(); ++i){
                    JsonNode fileNode = arrayNode.get(i);
                    JsonNode file = fileNode.get("file");
                    if (null == file){
                        throw new IllegalJsonFileException("fileName is null.");
                    }
                    String fileName = file.asText();
                    String fileExtension;
                    if (fileName.contains(".")) {
                        fileExtension = fileName.substring(fileName.lastIndexOf(".") + 1);
                    } else {
                        throw new IllegalJsonFileException("上传影像档案文件失败, 文件缺失扩展名.");
                    }
                    JsonNode urlNode = fileNode.get("url");
                    if (urlNode == null) {
                        throw new IllegalJsonFileException("Missing FTP path address");
                    }
                    String url = urlNode.asText();
                    if (!url.startsWith("ftp:/")) {
                        throw new IllegalJsonFileException("Malformed FTP path address");
                    }
                    String path = url.substring(5); // Strip the leading "ftp:/"; what remains is the full file path, including the file name.
                    FTPFile[] ftpFiles = ftpClient.listFiles(path);
                    if (ftpFiles == null || ftpFiles.length == 0){
                        throw new ResolveException("ftp上找不到该文件:" + path);
                    }
                    JsonNode md5Node = fileNode.get("md5");
                    if (md5Node == null) {
                        throw new IllegalJsonFileException("md5 value is null");
                    }
                    JsonNode reportFormNoNode = fileNode.get("report_form_no");
                    if (reportFormNoNode == null) {
                        throw new ResolveException("report_form_no is null");
                    }
                    String reportFormNo = reportFormNoNode.asText();
                    JsonNode serialNoNode = fileNode.get("serial_no");
                    if (serialNoNode == null) {
                        throw new ResolveException("serial_no is null");
                    }
                    String serialNo = serialNoNode.asText();
                    InputStream inputStream = ftpUtils.getInputStream(path);
                    // Compute the MD5 of the stream and check it against the md5 value declared in the JSON.
                    boolean md5Matches = MD5.md5CheckSum(inputStream, md5Node.asText());
                    if (!md5Matches) {
                        throw new ResolveException("FTP file is corrupt, file name: " + path);
                    }
                    long fileSize = ftpFiles[0].getSize();
                    NameValuePair[] fileMetaData = new NameValuePair[1];
                    fileMetaData[0] = new NameValuePair("description", "File from link profile package.");
                    // md5CheckSum reads the stream to its end, so reopen it to avoid uploading an empty stream.
                    inputStream = ftpUtils.getInputStream(path);
                    // Imaging files are stored in the dedicated PACS group (fast-dfs.pacs-group-name).
                    ObjectNode msg = fastDFSUtil.upload(groupName, inputStream, fileExtension, fileMetaData);
                    LinkFile linkFile = new LinkFile();
                    linkFile.setFileSize(fileSize);
                    String md5 = md5Node.asText();
                    // Keep the MD5 so file integrity can be re-verified downstream.
                    linkFile.setMd5(md5);
                    linkFile.setFileExtension(fileExtension);
                    linkFile.setOriginName(fileName);
                    linkFile.setReportFormNo(reportFormNo);
                    linkFile.setSerialNo(serialNo);
                    String fastdfsUrl = msg.get(FastDFSUtil.GROUP_NAME).asText() + "/" + msg.get(FastDFSUtil.REMOTE_FILE_NAME).asText();
                    linkFile.setUrl(fastdfsUrl);
                    linkFiles.add(linkFile);
                    path = path.substring(0, path.length() - fileName.length()); // Directory path, without the file name.
                    List<String> _fileNames = needDeleteFiles.get(path);
                    if (_fileNames == null) {
                        _fileNames = new ArrayList<>();
                    }
                    _fileNames.add(fileName);
                    // Record the FTP file so it can also be cleaned up after the data has been persisted.
                    needDeleteFiles.put(path, _fileNames);
                    ftpUtils.deleteFile(path, fileName);
                }
            } finally {
                if (ftpUtils != null){
                    ftpUtils.closeConnect();
                }
            }
        }
        //---------------------- FTP imaging file resolution end ----
        // The "summary" node may be absent.
        JsonNode summaryNode = jsonNode.get("summary");
        if (summaryNode == null) {
            return;
        }
        fieldNames = summaryNode.fieldNames();
        while (fieldNames.hasNext()) {
            String dataSetCode = fieldNames.next();
            LinkPackageDataSet linkPackageDataSet = (LinkPackageDataSet)linkPackage.getDataSet(dataSetCode);
            if (linkPackageDataSet == null) {
                linkPackageDataSet = new LinkPackageDataSet();
            }
            linkPackageDataSet.setCode(dataSetCode);
            linkPackageDataSet.setPatientId(patientId);
            linkPackageDataSet.setEventNo(eventNo);
            linkPackageDataSet.setOrgCode(orgCode);
            linkPackageDataSet.setCdaVersion(version);
            linkPackageDataSet.setEventTime(DateUtil.strToDate(eventDate));
            ArrayNode arrayNode = (ArrayNode) summaryNode.get(dataSetCode);
            for (int i = 0; i < arrayNode.size(); ++i){
                MetaDataRecord record = new MetaDataRecord();
                Iterator<String> metaDataCodes = arrayNode.get(i).fieldNames();
                while (metaDataCodes.hasNext()){
                    String metaDataCode = metaDataCodes.next();
                    record.putMetaData(metaDataCode, arrayNode.get(i).get(metaDataCode).asText());
                }
                linkPackageDataSet.addRecord(Integer.toString(linkPackageDataSet.getRecordCount()), record);
            }
            // Extract identity information (demographic ID and patient name).
            if (StringUtils.isEmpty(linkPackage.getDemographicId()) || StringUtils.isEmpty(linkPackage.getPatientName())) {
                Map<String, Object> properties = extractorChain.doExtract(linkPackageDataSet, KeyDataExtractor.Filter.Identity);
                String demographicId = (String) properties.get(ResourceCells.DEMOGRAPHIC_ID);
                String patientName = (String) properties.get(ResourceCells.PATIENT_NAME);
                if (!StringUtils.isEmpty(demographicId)) {
                    linkPackage.setDemographicId(demographicId.trim());
                }
                if (!StringUtils.isEmpty(patientName)) {
                    linkPackage.setPatientName(patientName);
                }
            }
            linkPackage.insertDataSet(dataSetCode, linkPackageDataSet);
        }
    }
}
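
For reference, a minimal sketch of the index/patient_index.json layout that parseFile expects, reconstructed from the fields read above; every value shown is hypothetical:

{
  "patient_id": "P0001",
  "event_no": "E20180601001",
  "org_code": "ORG001",
  "inner_version": "v1",
  "visit_type": "1",
  "event_time": "2018-06-01 10:30:00",
  "expire_date": "2019-06-01 10:30:00",
  "dataset": {
    "HDSC01_01": "http://example.org/datasets/HDSC01_01"
  },
  "files": [
    {
      "file": "ct_001.dcm",
      "url": "ftp:/pacs/2018/ct_001.dcm",
      "md5": "0123456789abcdef0123456789abcdef",
      "report_form_no": "RF0001",
      "serial_no": "SN0001"
    }
  ],
  "summary": {
    "HDSC01_01": [
      { "HDSD00_01_012": "some value" }
    ]
  }
}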

+ 24 - 0
src/main/java/com/yihu/ehr/resolve/PackageResolver.java

@ -0,0 +1,24 @@
package com.yihu.ehr.resolve;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.yihu.ehr.profile.extractor.ExtractorChain;
import com.yihu.ehr.profile.util.DataSetParserUtil;
import com.yihu.ehr.resolve.model.stage1.OriginalPackage;
import org.springframework.beans.factory.annotation.Autowired;
import java.io.File;
/**
 * Created by progr1mmer on 2018/6/8.
 */
public abstract class PackageResolver {
    @Autowired
    protected ObjectMapper objectMapper;
    @Autowired
    protected DataSetParserUtil dataSetParser;
    @Autowired
    protected ExtractorChain extractorChain;
    public abstract void resolve(OriginalPackage originalPackage, File root) throws Exception;
}

+ 266 - 0
src/main/java/com/yihu/ehr/resolve/SimplePackageResolver.java

@ -0,0 +1,266 @@
package com.yihu.ehr.resolve;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.yihu.ehr.profile.exception.IllegalJsonDataException;
import com.yihu.ehr.profile.exception.IllegalJsonFileException;
import com.yihu.ehr.profile.exception.LegacyPackageException;
import com.yihu.ehr.profile.model.MetaDataRecord;
import com.yihu.ehr.profile.model.PackageDataSet;
import com.yihu.ehr.resolve.config.EventIndexConfig;
import com.yihu.ehr.resolve.dao.DataSetPackageDao;
import com.yihu.ehr.resolve.log.PackResolveLogger;
import com.yihu.ehr.resolve.model.stage1.OriginalPackage;
import com.yihu.ehr.resolve.model.stage1.SimplePackage;
import com.yihu.ehr.util.datetime.DateTimeUtil;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.stereotype.Component;
import java.io.File;
import java.io.IOException;
import java.text.ParseException;
import java.util.*;
/**
 * Resolver for data-set (non-archive) packages.
 *
 * @author 张进军
 * @created 2017.06.27 11:28
 */
@Component
public class SimplePackageResolver extends PackageResolver {
    @Autowired
    private JdbcTemplate jdbcTemplate;
    @Autowired
    private EventIndexConfig eventIndex;
    @Autowired
    private DataSetPackageDao dataSetPackageDao;
    @Override
    public void resolve(OriginalPackage originalPackage, File root) throws Exception {
        File originFolder = new File(root.getAbsolutePath());
        this.parseFiles((SimplePackage) originalPackage, originFolder.listFiles());
    }
    /**
     * Parse the JSON data in the .json files and assemble SQL statements.
     * @param simplePackage
     * @param files
     * @throws IOException
     * @throws ParseException
     */
    private void parseFiles(SimplePackage simplePackage, File[] files) throws IOException, ParseException {
        List<String> sqlList = new ArrayList<>();
        for (File file : files) {
            // "head" node
            JsonNode headNode = objectMapper.readTree(file).get("head");
            String transactionId = headNode.get("id").asText();
            String orgCode = headNode.get("orgCode").asText();
            String version = headNode.get("version").asText();
            String sourceTable = headNode.get("source").asText();
            String targetTable = headNode.get("target").asText();
            String createTime = headNode.get("createTime").asText();
            // "data" node
            JsonNode dataNode = objectMapper.readTree(file).get("data");
            String tableName = dataNode.get("table").get("name").asText();
            String[] pkArr = dataNode.get("table").get("pk").asText().split(",");
            // "columns" node
            JsonNode columnsNode = dataNode.get("columns");
            // "rows" node
            JsonNode rowsNode = dataNode.get("rows");
            // Check that the standard version exists.
            String isExistVersionSql = "SELECT 1 FROM std_cda_versions WHERE version = '" + version + "'";
            if (jdbcTemplate.queryForList(isExistVersionSql).size() == 0) {
                throw new IllegalJsonDataException("Standard version does not exist, version: " + version);
            }
            // Check that the table exists in this version of the standard.
            String isExistTableSql = "SELECT 1 FROM std_data_set_" + version + " WHERE code = '" + tableName + "'";
            if (jdbcTemplate.queryForList(isExistTableSql).size() == 0) {
                throw new IllegalJsonDataException("Table does not exist in the standard, version: " + version + ", table: " + tableName);
            }
            // Assemble insert/update statements; they are executed later in a single batch.
            for (int i = 0, length = rowsNode.size(); i < length; i++) {
                JsonNode rowNode = rowsNode.get(i);
                // For logging: the "data" child node of the log's JSON structure.
                ObjectNode logDataNode = objectMapper.createObjectNode();
                ObjectNode logDataTargetIdNode = objectMapper.createObjectNode();
                logDataNode.put("transactionId", transactionId);
                logDataNode.put("target", targetTable);
                logDataNode.set("source_id", rowNode.get("_id"));
                // Decide whether this row is an insert or an update.
                StringBuffer hasRecordSql = new StringBuffer(" SELECT 1 FROM " + tableName + " WHERE ");
                for (String pk : pkArr) {
                    String pkValue = rowNode.get(pk).asText();
                    hasRecordSql.append(pk + " = '" + pkValue + "' AND ");
                    logDataTargetIdNode.put(pk, pkValue);
                }
                logDataNode.set("target_id", logDataTargetIdNode);
                PackResolveLogger.info(logDataNode.toString());
                int hasRecordSqlLen = hasRecordSql.length();
                hasRecordSql.delete(hasRecordSqlLen - 4, hasRecordSqlLen);
                boolean isInsert = jdbcTemplate.queryForList(hasRecordSql.toString()).isEmpty();
                StringBuffer sql = new StringBuffer();
                if (isInsert) {
                    sql.append(" INSERT INTO " + tableName + " SET ");
                } else {
                    sql.append(" UPDATE " + tableName + " SET ");
                }
                for (JsonNode column : columnsNode) {
                    if (rowNode == null) {
                        // Fail fast on malformed rows.
                        throw new IllegalJsonDataException("Row data is null, transactionId: " + transactionId);
                    }
                    String fieldName = column.get("column") == null ? "" : column.get("column").asText();
                    String fieldValue = rowNode.get(fieldName) == null ? "" : rowNode.get(fieldName).asText();
                    // Check that the table field exists in the standard.
                    String fieldSql = "SELECT f.column_type AS column_type FROM std_meta_data_" + version + " f  " +
                            "LEFT JOIN std_data_set_" + version + " t ON t.id = f.dataset_id " +
                            "WHERE t.code = '" + tableName + "' AND f.column_name = '" + fieldName + "'";
                    if (jdbcTemplate.queryForList(fieldSql).size() == 0) {
                        throw new IllegalJsonDataException("No column type found in the standard for this field, version: " + version + ", table: " + tableName + ", field: " + fieldName);
                    }
                    // Branch on the column type to quote values correctly.
                    String columnType = jdbcTemplate.queryForMap(fieldSql).get("column_type").toString().toUpperCase();
                    if (columnType.contains("VARCHAR")) {
                        sql.append(fieldName + " = '" + fieldValue + "', ");
                    } else if (columnType.equals("TINYINT") || columnType.contains("NUMBER")) {
                        sql.append(fieldName + " = " + fieldValue + ", ");
                    } else if (columnType.equals("DATE")) {
                        sql.append(fieldName + " = '" + DateTimeUtil.simpleDateFormat(DateTimeUtil.simpleDateParse(fieldValue)) + "', ");
                    } else if (columnType.equals("DATETIME")) {
                        sql.append(fieldName + " = '" + DateTimeUtil.simpleDateTimeFormat(DateTimeUtil.utcDateTimeParse(fieldValue)) + "', ");
                    }
                }
                sql.deleteCharAt(sql.lastIndexOf(","));
                if (!isInsert) {
                    sql.append(" WHERE ");
                    for (String pk : pkArr) {
                        sql.append(pk + " = '" + rowNode.get(pk).asText() + "' AND ");
                    }
                    int sqlLen = sql.length();
                    sql.delete(sqlLen - 4, sqlLen);
                }
                sql.append(";");
                sqlList.add(sql.toString());
            }
            simplePackage.setOrgCode(orgCode);
            simplePackage.setCreateDate(DateTimeUtil.utcDateTimeParse(createTime));
        }
        simplePackage.setSqlList(sqlList);
        dataSetPackageDao.saveDataset(simplePackage); // Execute the assembled SQL statements.
    }
    /**
     * Generate data sets from a JSON file.
     *
     * @param jsonFile
     * @param isOrigin
     * @return
     * @throws IOException
     */
    private List<PackageDataSet> generateDataSet(File jsonFile, boolean isOrigin) throws IOException {
        JsonNode jsonNode = objectMapper.readTree(jsonFile);
        if (jsonNode.isNull()) {
            throw new IllegalJsonFileException("Invalid json file when generate data set");
        }
        List<PackageDataSet> dataSets = parseNonArchiveJsonDataSet(jsonNode);
        return dataSets;
    }
    /**
     *  TODO: incomplete logic; returns a list.
     *  Added by HZY on 2017/07/03.
     * Data set handling for structured (non-archive) packages.
     *
     * @param root
     * @return
     */
    public List<PackageDataSet> parseNonArchiveJsonDataSet(JsonNode root) {
        List<PackageDataSet> packageDataSetList = new ArrayList<>();
        PackageDataSet dataSet;
        JsonNode head = root.get("head"); // header info of the file content
        JsonNode data = root.get("data"); // main body of the file content
        String version = head.get("version").asText();
        if (version.equals("000000000000")) {
            throw new LegacyPackageException("Package is collected via cda version 000000000000, ignored.");
        }
        String dataSetCode = head.get("target").asText();
        String createTime = head.get("createTime").isNull() ? "" : head.get("createTime").asText();
        String orgCode = head.get("orgCode").asText();
        final String[] eventNo = {""};
        final String[] patientId = {""};
        final String[] eventTime = {""};    // Older data set structures may not carry this attribute.
        JsonNode table = data.get("table"); // table node
        List<String> pkList = Arrays.asList(table.path("pk").asText().split(",")); // primary key fields
        ArrayNode columns = (ArrayNode) data.get("columns"); // column names
        ArrayNode rows = (ArrayNode) data.get("rows"); // row values
        // Locate the index fields among the columns.
        columns.forEach(item -> {
            // event number field
            if (eventIndex.getEventNo().contains(item.get("column").asText())) {
                eventNo[0] = item.get("column").asText();
            }
            // patient ID field
            if (eventIndex.getPatientId().contains(item.get("column").asText())) {
                patientId[0] = item.get("column").asText();
            }
            // event time field (older data set structures may not carry it)
            if (eventIndex.getEventTime().contains(item.get("column").asText())) {
                eventTime[0] = item.path("column").isNull() ? "" : item.path("column").asText();
            }
        });
        for (int i = 0; i < rows.size(); ++i) {
            JsonNode recordNode = rows.get(i);
            // Reset per row so pk values do not accumulate across rows.
            StringBuffer pkBuffer = new StringBuffer();
            try {
                dataSet = new PackageDataSet();
                dataSet.setPatientId(recordNode.path(patientId[0]).asText());
                dataSet.setEventNo(recordNode.path(eventNo[0]).asText());
                dataSet.setCdaVersion(version);
                dataSet.setCode(dataSetCode);
                dataSet.setOrgCode(orgCode);
                dataSet.setEventTime(DateTimeUtil.simpleDateParse(recordNode.path(eventTime[0]).asText()));
                dataSet.setCreateTime(DateTimeUtil.simpleDateParse(createTime));
                MetaDataRecord record = new MetaDataRecord();
                Iterator<Map.Entry<String, JsonNode>> iterator = recordNode.fields();
                while (iterator.hasNext()) {
                    Map.Entry<String, JsonNode> item = iterator.next();
                    String metaData = item.getKey();
                    if (metaData.equals("EVENT_NO")) continue; //metaData.equals("PATIENT_ID") ||
                    if (metaData.equals("_id")) continue;//源表主键字段名
                    String value = item.getValue().asText().equals("null") ? "" : item.getValue().asText();
                    record.putMetaData(metaData, value);
                    if (pkList != null && pkList.contains(metaData)){
                        pkBuffer.append(value).append("_");
                    }
                }
                dataSet.setPk(pkBuffer.toString());
                dataSet.addRecord(Integer.toString(i), record);
                packageDataSetList.add(dataSet);
            } catch (NullPointerException e) {
                throw new IllegalJsonDataException("Null pointer occurs while generate data set, package cda version: " + version);
            } catch (ParseException e) {
                throw new IllegalJsonDataException("Invalid date time format, do not deal with fail-tolerant.");
            }
        }
        return packageDataSetList;
    }
}
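
A minimal sketch of the data-set package file this resolver consumes, reconstructed from the head/data fields read above; the identifiers, version, and values are hypothetical:

{
  "head": {
    "id": "tx-20180601-0001",
    "orgCode": "ORG001",
    "version": "v1",
    "source": "SRC_TABLE",
    "target": "HDSC01_01",
    "createTime": "2018-06-01T10:30:00Z"
  },
  "data": {
    "table": { "name": "HDSC01_01", "pk": "PATIENT_ID,EVENT_NO" },
    "columns": [ { "column": "PATIENT_ID" }, { "column": "EVENT_NO" }, { "column": "VISIT_TIME" } ],
    "rows": [
      { "_id": "1", "PATIENT_ID": "P0001", "EVENT_NO": "E0001", "VISIT_TIME": "2018-06-01" }
    ]
  }
}

Note that the SQL statements are assembled by plain string concatenation before being handed to DataSetPackageDao, so the upstream package content has to be trusted or sanitized; parameterized statements would be the safer long-term choice.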

+ 177 - 0
src/main/java/com/yihu/ehr/resolve/StdPackageResolver.java

@ -0,0 +1,177 @@
package com.yihu.ehr.resolve;
import com.fasterxml.jackson.databind.JsonNode;
import com.yihu.ehr.profile.EventType;
import com.yihu.ehr.profile.exception.IllegalJsonFileException;
import com.yihu.ehr.profile.extractor.KeyDataExtractor;
import com.yihu.ehr.profile.family.ResourceCells;
import com.yihu.ehr.profile.util.DataSetUtil;
import com.yihu.ehr.profile.model.PackageDataSet;
import com.yihu.ehr.resolve.model.stage1.OriginalPackage;
import com.yihu.ehr.resolve.model.stage1.StandardPackage;
import com.yihu.ehr.resolve.service.resource.stage1.PackModelFactory;
import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Component;
import java.io.File;
import java.io.IOException;
import java.util.*;
/**
 * Created by progr1mmer on 2018/6/8.
 */
@Component
public class StdPackageResolver extends PackageResolver {
    @Override
    public void resolve(OriginalPackage originalPackage, File root) throws Exception {
        // Resolve the standardized data.
        File standardFolder = new File(root.getAbsolutePath() + File.separator + PackModelFactory.StandardFolder);
        parseFiles((StandardPackage) originalPackage, standardFolder.listFiles(), false);
    }
    /**
     * Convert the JSON files in the standard and origin folders into data sets
     * and put them into the standard package.
     * @param standardPackage the standard package being populated
     * @param files the files in the folder
     * @param origin whether the files come from the origin folder
     * @throws Exception
     */
    private void parseFiles(StandardPackage standardPackage, File[] files, boolean origin) throws Exception {
        List<PackageDataSet> packageDataSetList = new ArrayList<>(files.length);
        // Re-upload (supplement) detection ---------------Start---------------
        for (File file : files) {
            PackageDataSet dataSet = generateDataSet(file, origin);
            packageDataSetList.add(dataSet);
            if (dataSet.isReUploadFlg() && !standardPackage.isReUploadFlg()){
                standardPackage.setReUploadFlg(true);
            }
        }
        if (standardPackage.isReUploadFlg()) {
            for (PackageDataSet dataSet : packageDataSetList) {
                String dataSetCode = origin ? DataSetUtil.originDataSetCode(dataSet.getCode()) : dataSet.getCode();
                dataSet.setCode(dataSetCode);
                standardPackage.setPatientId(dataSet.getPatientId());
                standardPackage.setEventNo(dataSet.getEventNo());
                standardPackage.setOrgCode(dataSet.getOrgCode());
                standardPackage.setCdaVersion(dataSet.getCdaVersion());
                standardPackage.setCreateDate(dataSet.getCreateTime());
                standardPackage.setEventTime(dataSet.getEventTime());
                standardPackage.insertDataSet(dataSetCode, dataSet);
            }
            return;
        }
        // Re-upload detection ---------------End---------------
        for (PackageDataSet dataSet : packageDataSetList) {
            // Each JSON file is converted into a single data set.
            String dataSetCode = origin ? DataSetUtil.originDataSetCode(dataSet.getCode()) : dataSet.getCode();
            dataSet.setCode(dataSetCode);
            // Extract key data from the data set, if present
            if (!origin) {
                // Identity information
                if (StringUtils.isEmpty(standardPackage.getDemographicId()) || StringUtils.isEmpty(standardPackage.getPatientName())) {
                    Map<String, Object> properties = extractorChain.doExtract(dataSet, KeyDataExtractor.Filter.Identity);
                    String demographicId = (String) properties.get(ResourceCells.DEMOGRAPHIC_ID);
                    String patientName = (String) properties.get(ResourceCells.PATIENT_NAME);
                    String patientAge = (String) properties.get(ResourceCells.PATIENT_AGE);
                    String patientSex = (String) properties.get(ResourceCells.PATIENT_SEX);
                    if (!StringUtils.isEmpty(demographicId)) {
                        standardPackage.setDemographicId(demographicId.trim());
                    }
                    if (!StringUtils.isEmpty(patientName)) {
                        standardPackage.setPatientName(patientName);
                    }
                    if (!StringUtils.isEmpty(patientAge)) {
                        standardPackage.setPatientAge(patientAge);
                    }
                    if (!StringUtils.isEmpty(patientSex)) {
                        standardPackage.setPatientSex(patientSex);
                    }
                }
                // Visit event information
                if (standardPackage.getEventType() == null) {
                    Map<String, Object> properties = extractorChain.doExtract(dataSet, KeyDataExtractor.Filter.EventInfo);
                    EventType eventType = (EventType) properties.get(ResourceCells.EVENT_TYPE);
                    if (eventType != null) {
                        standardPackage.setEventType(eventType);
                    }
                }
                // Outpatient or inpatient diagnosis
                if (standardPackage.getDiagnosisCode().size() <= 0 || standardPackage.getDiagnosisName().size() <= 0) {
                    Map<String, Object> properties = extractorChain.doExtract(dataSet, KeyDataExtractor.Filter.Diagnosis);
                    Set<String> diagnosisCode = (Set<String>) properties.get(ResourceCells.DIAGNOSIS);
                    Set<String> diagnosisName = (Set<String>) properties.get(ResourceCells.DIAGNOSIS_NAME);
                    if (diagnosisCode.size() > 0) {
                        standardPackage.setDiagnosisCode(diagnosisCode);
                    }
                    if (diagnosisName.size() > 0) {
                        standardPackage.setDiagnosisName(diagnosisName);
                    }
                }
                // Visit card information
                if (StringUtils.isEmpty(standardPackage.getCardId()) || StringUtils.isEmpty(standardPackage.getCardType())) {
                    Map<String, Object> properties = extractorChain.doExtract(dataSet, KeyDataExtractor.Filter.CardInfo);
                    String cardId = (String) properties.get(ResourceCells.CARD_ID);
                    String cardType = (String) properties.get(ResourceCells.CARD_TYPE);
                    if (!StringUtils.isEmpty(cardId)) {
                        standardPackage.setCardId(cardId);
                    }
                    if (!StringUtils.isEmpty(cardType)) {
                        standardPackage.setCardType(cardType);
                    }
                }
                // Department information
                if (standardPackage.getDeptCode() == null) {
                    Map<String, Object> properties = extractorChain.doExtract(dataSet, KeyDataExtractor.Filter.Dept);
                    String deptCode = (String) properties.get(ResourceCells.DEPT_CODE);
                    if (StringUtils.isNotEmpty(deptCode)) {
                        standardPackage.setDeptCode(deptCode);
                    }
                }
            }
            if (null == standardPackage.getPatientId()) {
                standardPackage.setPatientId(dataSet.getPatientId());
            }
            if (null == standardPackage.getEventNo()) {
                standardPackage.setEventNo(dataSet.getEventNo());
            }
            if (null == standardPackage.getOrgCode()) {
                standardPackage.setOrgCode(dataSet.getOrgCode());
            }
            if (null == standardPackage.getCdaVersion()) {
                standardPackage.setCdaVersion(dataSet.getCdaVersion());
            }
            if (null == standardPackage.getCreateDate()) {
                standardPackage.setCreateDate(dataSet.getCreateTime());
            }
            if (null == standardPackage.getEventTime()) {
                standardPackage.setEventTime(dataSet.getEventTime());
            }
            standardPackage.insertDataSet(dataSetCode, dataSet);
        }
    }
    /**
     * Generate a data set from a JSON file.
     * @param jsonFile
     * @param isOrigin
     * @return
     * @throws IOException
     */
    private PackageDataSet generateDataSet(File jsonFile, boolean isOrigin) throws IOException {
        JsonNode jsonNode = objectMapper.readTree(jsonFile);
        if (jsonNode.isNull()) {
            throw new IllegalJsonFileException("Invalid json file when generate data set");
        }
        PackageDataSet dataSet = dataSetParser.parseStructuredJsonDataSet(jsonNode, isOrigin);
        return dataSet;
    }
}
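
For orientation, a sketch of the unzipped package layout these resolvers assume; the standard folder name actually comes from PackModelFactory.StandardFolder, and the file names are hypothetical:

<package root>/
    origin/                       raw data sets; codes remapped via DataSetUtil.originDataSetCode
    standard/                     standardized data sets, one JSON file per data set
        HDSC01_01.json
    index/patient_index.json      present only in link-type packages (see LinkPackageResolver)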

+ 35 - 0
src/main/java/com/yihu/ehr/resolve/config/EventIndexConfig.java

@ -0,0 +1,35 @@
package com.yihu.ehr.resolve.config;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.stereotype.Component;
import java.util.ArrayList;
import java.util.List;
/**
 *  Patient event index field configuration.
 * @author HZY
 * @version 1.0
 * @created 2017.07.03 17:03
 */
@Component
@ConfigurationProperties(prefix = "ehr.eventIndex")
public class EventIndexConfig {
    private List<String> eventNo = new ArrayList<>();
    private List<String> patientId = new ArrayList<>();
    private List<String> eventTime = new ArrayList<>();
    public List<String> getEventNo() {
        return eventNo;
    }
    public List<String> getPatientId() {
        return patientId;
    }
    public List<String> getEventTime() {
        return eventTime;
    }
}
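
A hypothetical application.yml fragment for this binding; the actual field-name lists are deployment-specific:

ehr:
  eventIndex:
    eventNo:
      - EVENT_NO
    patientId:
      - PATIENT_ID
    eventTime:
      - VISIT_TIME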

+ 55 - 0
src/main/java/com/yihu/ehr/resolve/config/SchedulerConfig.java

@ -0,0 +1,55 @@
package com.yihu.ehr.resolve.config;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.quartz.SchedulerFactoryBean;
import org.springframework.util.Assert;
/**
 * @author Sand
 * @version 1.0
 * @created 2016.03.31 10:30
 */
@ConfigurationProperties(prefix = "resolve.job")
@Configuration
public class SchedulerConfig {
    private int initSize;
    private int maxSize;
    private String cronExp;
    public int getInitSize() {
        return initSize;
    }
    public void setInitSize(int initSize) {
        this.initSize = initSize;
    }
    public int getMaxSize() {
        return maxSize;
    }
    public void setMaxSize(int maxSize) {
        this.maxSize = maxSize;
    }
    public String getCronExp() {
        return cronExp;
    }
    public void setCronExp(String cronExp) {
        this.cronExp = cronExp;
    }
    @Bean
    SchedulerFactoryBean schedulerFactoryBean(){
        Assert.notNull(cronExp, "Cannot find resolve job config");
        SchedulerFactoryBean bean = new SchedulerFactoryBean();
        bean.setAutoStartup(true);
        bean.setSchedulerName("PackageResolveScheduler");
        return bean;
    }
}
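
A hypothetical application.yml fragment for this binding; the cron default mirrors the one exposed by SchedulerEndPoint below, and the pool sizes are illustrative:

resolve:
  job:
    init-size: 4
    max-size: 8
    cron-exp: "0/1 * * * * ?"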

+ 77 - 0
src/main/java/com/yihu/ehr/resolve/controller/ArchiveRelationEndPoint.java

@ -0,0 +1,77 @@
package com.yihu.ehr.resolve.controller;
import com.yihu.ehr.constants.ApiVersion;
import com.yihu.ehr.constants.ServiceApi;
import com.yihu.ehr.controller.EnvelopRestEndPoint;
import com.yihu.ehr.elasticsearch.ElasticSearchUtil;
import com.yihu.ehr.resolve.service.profile.ArchiveRelationService;
import com.yihu.ehr.resolve.service.resource.stage2.RsDictionaryEntryService;
import com.yihu.ehr.util.rest.Envelop;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.util.StringUtils;
import org.springframework.web.bind.annotation.*;
import java.util.List;
import java.util.Map;
/**
 * EndPoint - Archive relation.
 * Created by progr1mmer on 2018/4/4.
 */
@RestController
@RequestMapping(ApiVersion.Version1_0)
@Api(value = "ArchiveRelationEndPoint", description = "档案关联", tags = {"档案解析服务-档案识别关联信息"})
public class ArchiveRelationEndPoint extends EnvelopRestEndPoint {
    private static final String INDEX = "archive_relation";
    private static final String TYPE = "info";
    @Autowired
    private ElasticSearchUtil elasticSearchUtil;
    @Autowired
    private ArchiveRelationService archiveRelationService;
    @Autowired
    private RsDictionaryEntryService rsDictionaryEntryService;
    @RequestMapping(value = ServiceApi.PackageResolve.ArchiveRelation, method = RequestMethod.GET)
    @ApiOperation(value = "获取档案关联列表")
    public Envelop list(
            @ApiParam(name = "filters", value = "Filters; if empty, all records are searched")
            @RequestParam(value = "filters", required = false) String filters,
            @ApiParam(name = "sorts", value = "Sort rules; see the documentation")
            @RequestParam(value = "sorts", required = false) String sorts,
            @ApiParam(name = "page", value = "Page number", required = true, defaultValue = "1")
            @RequestParam(value = "page") int page,
            @ApiParam(name = "size", value = "Page size", required = true, defaultValue = "15")
            @RequestParam(value = "size") int size) throws Exception {
        Page<Map<String, Object>> result = elasticSearchUtil.page(INDEX, TYPE, filters, sorts, page, size);
        // Updated by zdm on 2018/07/17, bug 6343 ---start
        result.forEach(item -> {
            // Card type code is present
            if (!StringUtils.isEmpty(item.get("card_type"))) {
                // Look up the resource dictionary entry for the card type code
                String cardType = rsDictionaryEntryService.getRsDictionaryEntryByDictCode("STD_CARD_TYPE", item.get("card_type").toString());
                // Replace the code with its dictionary value
                item.put("card_type", cardType);
            }
        });
        // Updated by zdm on 2018/07/17 ---end
        Envelop envelop = getPageResult(result.getContent(), (int)result.getTotalElements(), page, size);
        return envelop;
    }
    @ApiOperation(value = "档案关联(单条)")
    @RequestMapping(value = ServiceApi.PackageResolve.ArchiveRelation, method = RequestMethod.POST)
    public Envelop archiveRelation(
            @ApiParam(name = "profileId", value = "Profile ID", required = true)
            @RequestParam(value = "profileId") String profileId,
            @ApiParam(name = "idCardNo", value = "ID card number", required = true)
            @RequestParam(value = "idCardNo") String idCardNo) throws Exception {
        archiveRelationService.archiveRelation(profileId, idCardNo);
        return success(true);
    }
}
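
A sketch of a document in the archive_relation/info Elasticsearch index queried above; only card_type is confirmed by this code, the remaining field names are assumptions:

{
  "profile_id": "<profile id>",
  "id_card_no": "<id card number>",
  "card_type": "01"
}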

+ 241 - 0
src/main/java/com/yihu/ehr/resolve/controller/ResolveEndPoint.java

@ -0,0 +1,241 @@
package com.yihu.ehr.resolve.controller;
import com.yihu.ehr.constants.ApiVersion;
import com.yihu.ehr.lang.SpringContext;
import com.yihu.ehr.profile.ArchiveStatus;
import com.yihu.ehr.profile.ProfileType;
import com.yihu.ehr.constants.ServiceApi;
import com.yihu.ehr.controller.EnvelopRestEndPoint;
import com.yihu.ehr.fastdfs.FastDFSUtil;
import com.yihu.ehr.model.packs.EsDetailsPackage;
import com.yihu.ehr.model.packs.EsSimplePackage;
import com.yihu.ehr.profile.exception.IllegalJsonDataException;
import com.yihu.ehr.profile.exception.IllegalJsonFileException;
import com.yihu.ehr.profile.exception.ResolveException;
import com.yihu.ehr.profile.family.ResourceCells;
import com.yihu.ehr.resolve.feign.PackageMgrClient;
import com.yihu.ehr.resolve.model.stage1.OriginalPackage;
import com.yihu.ehr.resolve.model.stage1.StandardPackage;
import com.yihu.ehr.resolve.model.stage2.ResourceBucket;
import com.yihu.ehr.resolve.service.resource.stage1.ResolveService;
import com.yihu.ehr.resolve.service.resource.stage2.IdentifyService;
import com.yihu.ehr.resolve.service.resource.stage2.PackMillService;
import com.yihu.ehr.resolve.service.resource.stage2.ResourceService;
import com.yihu.ehr.resolve.service.resource.stage2.StatusReportService;
import com.yihu.ehr.resolve.util.LocalTempPathUtil;
import com.yihu.ehr.util.datetime.DateUtil;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import net.lingala.zip4j.exception.ZipException;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.util.FileCopyUtils;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.multipart.MultipartFile;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.util.*;
@RestController
@RequestMapping(value = ApiVersion.Version1_0, produces = MediaType.APPLICATION_JSON_UTF8_VALUE)
@Api(value = "ResolveEndPoint", description = "资源化入库", tags = {"档案解析服务-资源化入库"})
public class ResolveEndPoint extends EnvelopRestEndPoint {
    @Autowired
    private ResourceService resourceService;
    @Autowired
    private PackMillService packMillService;
    @Autowired
    private FastDFSUtil fastDFSUtil;
    @Autowired
    private ResolveService resolveService;
    @Autowired
    private PackageMgrClient packageMgrClient;
    @Autowired
    private IdentifyService identifyService;
    @Autowired
    private StatusReportService statusReportService;
    @ApiOperation(value = "健康档案包入库", notes = "若包ID为空,则取最旧的未解析健康档案包", produces = MediaType.APPLICATION_JSON_UTF8_VALUE)
    @RequestMapping(value = ServiceApi.PackageResolve.Resolve, method = RequestMethod.GET)
    public String resolve(
            @ApiParam(name = "id", value = "Package ID", required = true)
            @PathVariable(value = "id") String id,
            @ApiParam(name = "clientId", value = "Simulated client ID")
            @RequestParam(value = "clientId", required = false) String clientId,
            @ApiParam(name = "echo", value = "Whether to echo the profile data", required = true, defaultValue = "true")
            @RequestParam(value = "echo") boolean echo) throws Throwable {
        EsSimplePackage pack = packageMgrClient.getPackage(id);
        if (null == pack) {
            Map<String, String> resultMap = new HashMap<>();
            resultMap.put("failure", "No package available!");
            return objectMapper.writeValueAsString(resultMap);
        }
        try {
            statusReportService.reportStatus(pack.get_id(), ArchiveStatus.Acquired, 0, "Persisting", null);
            OriginalPackage originalPackage = resolveService.doResolve(pack, downloadTo(pack.getRemote_path()));
            ResourceBucket resourceBucket = packMillService.grindingPackModel(originalPackage);
            identifyService.identify(resourceBucket, originalPackage);
            resourceService.save(resourceBucket, originalPackage);
            // Backfill the persistence status.
            Map<String, Object> map = new HashMap<>();
            map.put("defect", resourceBucket.getQcMetadataRecords().getRecords().isEmpty() ? 0 : 1); // whether any QC defect records exist
            map.put("patient_name", resourceBucket.getBasicRecord(ResourceCells.PATIENT_NAME));
            map.put("profile_id", resourceBucket.getId());
            map.put("demographic_id", resourceBucket.getBasicRecord(ResourceCells.DEMOGRAPHIC_ID));
            map.put("event_type", originalPackage.getEventType() == null ? -1 : originalPackage.getEventType().getType());
            map.put("event_no", originalPackage.getEventNo());
            map.put("event_date", DateUtil.toStringLong(originalPackage.getEventTime()));
            map.put("patient_id", originalPackage.getPatientId());
            map.put("dept", resourceBucket.getBasicRecord(ResourceCells.DEPT_CODE));
            // Delay between event time and package receipt, rounded up to whole days.
            long delay = pack.getReceive_date().getTime() - originalPackage.getEventTime().getTime();
            map.put("delay", delay % (1000 * 60 * 60 * 24) > 0 ? delay / (1000 * 60 * 60 * 24) + 1 : delay / (1000 * 60 * 60 * 24));
            map.put("re_upload_flg", String.valueOf(originalPackage.isReUploadFlg()));
            statusReportService.reportStatus(pack.get_id(), ArchiveStatus.Finished, 0, "resolve success", map);
            // Send the event-processing message.
            if (originalPackage.getProfileType() == ProfileType.File || originalPackage.getProfileType() == ProfileType.Link) {
                KafkaTemplate kafkaTemplate = SpringContext.getService(KafkaTemplate.class);
                kafkaTemplate.send("svr-pack-event", "resolve", objectMapper.writeValueAsString(pack));
            }
            // Echo the profile data if requested.
            if (echo) {
                return originalPackage.toJson();
            } else {
                Map<String, String> resultMap = new HashMap<>();
                resultMap.put("success", "Persisted successfully!");
                return objectMapper.writeValueAsString(resultMap);
            }
        } catch (Exception e) {
            int errorType = -1;
            if (e instanceof ZipException) {
                errorType = 1;
            } else if (e instanceof IllegalJsonFileException) {
                errorType = 2;
            } else if (e instanceof IllegalJsonDataException) {
                errorType = 3;
            } else if (e instanceof ResolveException) {
                errorType = 21; // Error codes below 21 are shared by quality control and resolution.
            }
            if (StringUtils.isBlank(e.getMessage())) {
                statusReportService.reportStatus(pack.get_id(), ArchiveStatus.Failed, errorType, "Internal Server Error", null);
            } else {
                statusReportService.reportStatus(pack.get_id(), ArchiveStatus.Failed, errorType, e.getMessage(), null);
            }
            throw e;
        }
    }
    /**
     * Execute the archiving job. The archiving flow is:
     * 1. Fetch a pending JSON document from the JSON archive manager, mark it as Acquired (archiving in progress), and record the start time.
     * 2. Unzip the archive package; if unzipping fails or the extracted directory layout is invalid, mark the document as Failed, log it, and return.
     * 3. Read and parse the JSON data in the package's origin and standard folders.
     * 4. Normalize the dictionary-bound data elements, writing the dictionary values directly into the data.
     * 5. Store the parsed data into HBase and mark the JSON document as Finished.
     * 6. If any of the steps above fails, mark the document as LegacyIgnored, i.e. its fate cannot be decided automatically and manual intervention is required.
     * <p>
     * For the ObjectMapper streaming API, see: http://wiki.fasterxml.com/JacksonStreamingApi
     */
    @ApiOperation(value = "本地档案包解析", produces = MediaType.APPLICATION_JSON_UTF8_VALUE)
    @RequestMapping(value = ServiceApi.PackageResolve.Local, method = RequestMethod.POST)
    public ResponseEntity<String> resolve(
            @ApiParam(name = "id", value = "档案包ID,忽略此值", required = true, defaultValue = "LocalPackage")
            @PathVariable(value = "id") String packageId,
            @ApiParam(name = "file", value = "档案包文件", required = true)
            @RequestPart() MultipartFile file,
            @ApiParam(name = "password", value = "档案包密码", required = true)
            @RequestParam(value = "password") String password,
            @ApiParam(name = "clientId", value = "模拟应用ID", required = true, defaultValue = "PACK-RESOLVE")
            @RequestParam(value = "clientId") String clientId,
            @ApiParam(name = "persist", value = "是否入库", required = true, defaultValue = "false")
            @RequestParam(value = "persist", defaultValue = "false") boolean persist) throws Throwable {
        BufferedOutputStream stream = null;
        try {
            String zipFile = LocalTempPathUtil.getTempPathWithUUIDSuffix() + packageId + ".zip";
            stream = new BufferedOutputStream(new FileOutputStream(new File(zipFile)));
            FileCopyUtils.copy(file.getInputStream(), stream);
            EsSimplePackage pack = new EsSimplePackage();
            pack.set_id(packageId);
            pack.setPwd(password);
            pack.setReceive_date(new Date());
            pack.setClient_id(clientId);
            OriginalPackage originalPackage = resolveService.doResolve(pack, zipFile);
            ResourceBucket resourceBucket = packMillService.grindingPackModel(originalPackage);
            if (persist) {
                identifyService.identify(resourceBucket, originalPackage);
                resourceService.save(resourceBucket, originalPackage);
            }
            return new ResponseEntity<>(originalPackage.toJson(), HttpStatus.OK);
        } finally {
            if (stream != null) {
                stream.close();
            }
        }
    }
    /**
     * Fetch the content of a resolved package.
     * Useful for quality control or for issue tracking.
     */
    @ApiOperation(value = "Fetch resolved package content", produces = MediaType.APPLICATION_JSON_UTF8_VALUE)
    @RequestMapping(value = ServiceApi.PackageResolve.Fetch, method = RequestMethod.GET)
    public String fetch(
            @ApiParam(name = "id", value = "档案包ID", required = true)
            @PathVariable(value = "id") String id) throws Exception {
        EsSimplePackage esSimplePackage = packageMgrClient.getPackage(id);
        String zipFile = downloadTo(esSimplePackage.getRemote_path());
        OriginalPackage packModel = resolveService.doResolve(esSimplePackage, zipFile);
        return packModel.toJson();
    }
    @ApiOperation(value = "即时交互档案解析入库", notes = "即时交互档案解析入库", produces = MediaType.APPLICATION_JSON_UTF8_VALUE)
    @RequestMapping(value = ServiceApi.PackageResolve.Immediate, method = RequestMethod.PUT)
    public String immediateResolve(
            @ApiParam(name = "idCardNo", value = "身份证号")
            @RequestParam(value = "idCardNo", required = false) String idCardNo,
            @ApiParam(name = "data", value = "档案数据")
            @RequestParam(value = "data", required = false) String data,
            @ApiParam(name = "clientId", value = "模拟应用ID")
            @RequestParam(value = "clientId", required = false) String clientId,
            @ApiParam(name = "echo", value = "返回档案数据")
            @RequestParam(value = "echo",required = false,defaultValue = "true") boolean echo) throws Throwable {
        EsSimplePackage esSimplePackage = new EsSimplePackage();
        esSimplePackage.set_id(UUID.randomUUID().toString());
        esSimplePackage.setReceive_date(new Date());
        StandardPackage standardPackage = resolveService.doResolveImmediateData(data, esSimplePackage);
        ResourceBucket resourceBucket = packMillService.grindingPackModel(standardPackage);
        identifyService.identify(resourceBucket, standardPackage);
        resourceService.save(resourceBucket, standardPackage);
        // Backfill the persistence status (note: this map is assembled but not reported anywhere in this method).
        Map<String, String> map = new HashMap<>();
        map.put("profileId", standardPackage.getId());
        map.put("demographicId", standardPackage.getDemographicId());
        map.put("eventType", String.valueOf(standardPackage.getEventType().getType()));
        map.put("eventNo", standardPackage.getEventNo());
        map.put("eventDate", DateUtil.toStringLong(standardPackage.getEventTime()));
        map.put("patientId", standardPackage.getPatientId());
        // Echo the profile data if requested.
        if (echo) {
            return standardPackage.toJson();
        } else {
            Map<String, String> resultMap = new HashMap<>();
            resultMap.put("success", "Persisted successfully!");
            return objectMapper.writeValueAsString(resultMap);
        }
    }
    private String downloadTo(String filePath) throws Exception {
        String[] tokens = filePath.split(":");
        return fastDFSUtil.download(tokens[0], tokens[1], LocalTempPathUtil.getTempPathWithUUIDSuffix());
    }
}
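
A hypothetical invocation of the local resolve endpoint above; the real path comes from ServiceApi.PackageResolve.Local, which is not part of this commit, so the host and path are placeholders:

curl -X POST "http://<svr-pack-resolve-host>/v1.0/packages/LocalPackage/local-resolve?password=<zip password>&clientId=PACK-RESOLVE&persist=false" \
     -F "file=@profile-package.zip"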

+ 84 - 0
src/main/java/com/yihu/ehr/resolve/controller/SchedulerEndPoint.java

@ -0,0 +1,84 @@
package com.yihu.ehr.resolve.controller;
import com.yihu.ehr.constants.ApiVersion;
import com.yihu.ehr.constants.ServiceApi;
import com.yihu.ehr.controller.EnvelopRestEndPoint;
import com.yihu.ehr.resolve.config.SchedulerConfig;
import com.yihu.ehr.resolve.job.SchedulerManager;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import org.quartz.*;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
/**
 * @author Sand
 * @version 1.0
 * @created 2016.03.31 10:30
 */
@RestController
@RequestMapping(value = ApiVersion.Version1_0, produces = MediaType.APPLICATION_JSON_UTF8_VALUE)
@Api(value = "SchedulerEndPoint", description = "资源化入库任务", tags = {"档案解析服务-资源化入库任务"})
public class SchedulerEndPoint extends EnvelopRestEndPoint {
    @Autowired
    private SchedulerConfig schedulerConfig;
    @Autowired
    private SchedulerManager schedulerManager;
    @Autowired
    private Scheduler scheduler;
    @ApiOperation(value = "设置任务调度器状态")
    @RequestMapping(value = ServiceApi.PackageResolve.Scheduler, method = RequestMethod.PUT)
    public ResponseEntity<String> updateScheduler(
            @ApiParam(name = "pause", value = "true:暂停 , false:执行", required = true, defaultValue = "true")
            @RequestParam(value = "pause") boolean pause) {
        try {
            if (pause) {
                scheduler.pauseAll();
            } else {
                scheduler.resumeAll();
            }
            return new ResponseEntity<>((String) null, HttpStatus.OK);
        } catch (SchedulerException e) {
            return new ResponseEntity<>(e.getMessage(), HttpStatus.INTERNAL_SERVER_ERROR);
        }
    }
    @ApiOperation(value = "添加任务数量,返回当前系统最大任务限制数")
    @RequestMapping(value = ServiceApi.PackageResolve.Scheduler, method = RequestMethod.POST)
    public ResponseEntity<Integer> addJob(
            @ApiParam(name = "count", value = "任务数量(不要超过系统设定值)", required = true, defaultValue = "4")
            @RequestParam(value = "count") int count,
            @ApiParam(name = "cronExp", value = "触发器CRON表达式", required = true, defaultValue = "0/1 * * * * ?")
            @RequestParam(value = "cronExp") String cronExp) throws Exception {
        if (count > schedulerConfig.getMaxSize()) {
            count = schedulerConfig.getMaxSize();
        }
        schedulerManager.addJob(count, cronExp);
        return new ResponseEntity<>(schedulerConfig.getMaxSize(), HttpStatus.OK);
    }
    @ApiOperation(value = "删除解析任务")
    @RequestMapping(value = ServiceApi.PackageResolve.Scheduler, method = RequestMethod.DELETE)
    public ResponseEntity<String> removeJob(
            @ApiParam(name = "count", value = "任务数量", required = true, defaultValue = "4")
            @RequestParam(value = "count") int count) throws Exception {
        schedulerManager.minusJob(count);
        return new ResponseEntity<>((String) null, HttpStatus.OK);
    }
    @ApiOperation(value = "获取当前任务数量")
    @RequestMapping(value = ServiceApi.PackageResolve.Scheduler, method = RequestMethod.GET)
    public ResponseEntity<Integer> count() throws Exception {
        int count = schedulerManager.getJobSize();
        return new ResponseEntity<>(count, HttpStatus.OK);
    }
}

+ 34 - 0
src/main/java/com/yihu/ehr/resolve/dao/DataSetPackageDao.java

@ -0,0 +1,34 @@
package com.yihu.ehr.resolve.dao;
import com.yihu.ehr.resolve.model.stage1.SimplePackage;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.stereotype.Repository;
import org.springframework.transaction.annotation.Transactional;
/**
 * MySQL persistence operations for data-set packages.
 *
 * @author 张进军
 * @date 2017/6/27 17:17
 */
@Repository
@Transactional(readOnly = true)
public class DataSetPackageDao {
    @Autowired
    private JdbcTemplate jdbcTemplate;
    /**
     * Save the package data to MySQL by batch-executing the assembled insert/update SQL statements.
     *
     * @param simplePackage
     */
    @Transactional
    public void saveDataset(SimplePackage simplePackage) {
        String[] sqlArr = simplePackage.getSqlList().toArray(new String[0]);
        jdbcTemplate.batchUpdate(sqlArr);
    }
}
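
For illustration, the kind of statements SimplePackageResolver assembles and this DAO batch-executes; table, columns, and values are hypothetical, using the MySQL INSERT ... SET syntax generated above:

INSERT INTO HDSC01_01 SET PATIENT_ID = 'P0001', EVENT_NO = 'E0001', VISIT_TIME = '2018-06-01';
UPDATE HDSC01_01 SET VISIT_TIME = '2018-06-02' WHERE PATIENT_ID = 'P0001' AND EVENT_NO = 'E0001';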

+ 87 - 0
src/main/java/com/yihu/ehr/resolve/dao/FileResourceDao.java

@ -0,0 +1,87 @@
package com.yihu.ehr.resolve.dao;
import com.yihu.ehr.profile.ProfileType;
import com.yihu.ehr.hbase.HBaseDao;
import com.yihu.ehr.hbase.TableBundle;
import com.yihu.ehr.profile.core.ResourceCore;
import com.yihu.ehr.profile.family.ResourceFamily;
import com.yihu.ehr.resolve.model.stage1.FilePackage;
import com.yihu.ehr.resolve.model.stage1.LinkPackage;
import com.yihu.ehr.resolve.model.stage1.OriginalPackage;
import com.yihu.ehr.resolve.model.stage1.details.CdaDocument;
import com.yihu.ehr.resolve.util.FileTableUtil;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Repository;
import java.util.Map;
/**
 * @author Sand
 * @created 2016.05.09 19:13
 */
@Deprecated
//@Repository
public class FileResourceDao {
    @Autowired
    private HBaseDao hbaseDao;
    public void save(OriginalPackage originalPackage) throws Exception {
        if (originalPackage.getProfileType() == ProfileType.File){
            FilePackage filePackage = (FilePackage) originalPackage;
            TableBundle bundle = new TableBundle();
            Map<String, CdaDocument> cdaDocuments = filePackage.getCdaDocuments();
            for (String rowkey : cdaDocuments.keySet()) {
                bundle.addRows(rowkey);
            }
            hbaseDao.delete(ResourceCore.RawFiles, bundle);
            for (String rowkey : cdaDocuments.keySet()){
                CdaDocument cdaDocument = cdaDocuments.get(rowkey);
                bundle.addValues(rowkey, ResourceFamily.Basic, FileTableUtil.getBasicFamilyCellMap(originalPackage));
                bundle.addValues(rowkey, ResourceFamily.Data, FileTableUtil.getFileFamilyCellMap(cdaDocument));
            }
            hbaseDao.save(ResourceCore.RawFiles, bundle);
        } else if (originalPackage.getProfileType() == ProfileType.Link){
            LinkPackage linkPackage = (LinkPackage) originalPackage;
            TableBundle bundle = new TableBundle();
            String rowKey = originalPackage.getId();
            bundle.addRows(rowKey);
            hbaseDao.delete(ResourceCore.RawFiles, bundle);
            bundle.addValues(rowKey, ResourceFamily.Basic, FileTableUtil.getBasicFamilyCellMap(originalPackage));
            bundle.addValues(rowKey, ResourceFamily.Data, FileTableUtil.getFileFamilyCellMap(linkPackage));
            hbaseDao.save(ResourceCore.RawFiles, bundle);
        }
    }
    /**
    public Map<String, CdaDocument> findAll(String[] rowkeys) throws IOException, ParseException {
        Map<String, CdaDocument> cdaDocuments = new HashMap<>();
        TableBundle bundle = new TableBundle();
        bundle.addRows(rowkeys);
        Object results[] = hbaseDao.get(FileTableUtil.Table, bundle);
        for (Object object : results){
            ResultUtil result = new ResultUtil(object);
            CdaDocument cdaDocument = new CdaDocument();
            cdaDocument.setId(result.getCellValue(FileFamily.Resource, FileFamily.FileColumns.CdaDocumentId, ""));
            cdaDocument.setName(result.getCellValue(FileFamily.Resource, FileFamily.FileColumns.CdaDocumentName, ""));
            String list = result.getCellValue(FileFamily.Resource, FileFamily.FileColumns.FileList, "");
            ArrayNode root = (ArrayNode) ((ObjectMapper) SpringContext.getService(ObjectMapper.class)).readTree(list);
            for (int i = 0; i < root.size(); ++i){
                ObjectNode objectNode = (ObjectNode) root.get(i);
                OriginFile originFile = new OriginFile();
                originFile.setMime(objectNode.get("mime").asText());
                originFile.setExpireDate(DateTimeUtil.utcDateTimeParse(objectNode.get("expire_date").asText()));
                originFile.setOriginUrl(objectNode.get("origin_url").asText());
                String files = objectNode.get("files").asText();
                for (String file : files.split(";")){
                    String tokens[] = file.split(":");
                    originFile.addStorageUrl(tokens[0], tokens[1]);
                }
                cdaDocument.getOriginFiles().add(originFile);
            }
            cdaDocuments.put(result.getRowKey(), cdaDocument);
        }
        return cdaDocuments;
    }*/
}

+ 72 - 0
src/main/java/com/yihu/ehr/resolve/dao/MasterResourceDao.java

@ -0,0 +1,72 @@
package com.yihu.ehr.resolve.dao;
import com.yihu.ehr.profile.EventType;
import com.yihu.ehr.hbase.HBaseDao;
import com.yihu.ehr.hbase.TableBundle;
import com.yihu.ehr.profile.exception.IllegalJsonFileException;
import com.yihu.ehr.profile.family.ResourceCells;
import com.yihu.ehr.resolve.model.stage1.OriginalPackage;
import com.yihu.ehr.resolve.model.stage2.MasterRecord;
import com.yihu.ehr.resolve.model.stage2.ResourceBucket;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Repository;
import java.util.Map;
/**
 * Master repository for archived profile resources.
 *
 * @author Sand
 * @version 1.0
 * @created 2015.08.27 10:20
 */
@Repository
public class MasterResourceDao {
    @Autowired
    private HBaseDao hbaseDao;
    public void saveOrUpdate(ResourceBucket resourceBucket, OriginalPackage originalPackage) throws Exception {
        String rowKey = resourceBucket.getId();
        TableBundle bundle = new TableBundle();
        if (originalPackage.isReUploadFlg()) { // supplementary upload: merge into the stored record
            Map<String, String> originResult = hbaseDao.get(resourceBucket.getMaster(), rowKey, resourceBucket.getdFamily());
            if (!originResult.isEmpty()) {
                MasterRecord masterRecord = resourceBucket.getMasterRecord();
                Map<String, String> supplement = masterRecord.getDataGroup();
                originResult.putAll(supplement);
                bundle.addValues(rowKey, resourceBucket.getdFamily(), originResult);
                hbaseDao.save(resourceBucket.getMaster(), bundle);
                Map<String, String> basicResult = hbaseDao.get(resourceBucket.getMaster(), rowKey, resourceBucket.getBasicFamily());
                if (StringUtils.isNotEmpty(basicResult.get(ResourceCells.EVENT_TYPE))) {
                    EventType eventType = EventType.create(basicResult.get(ResourceCells.EVENT_TYPE));
                    originalPackage.setEventType(eventType);
                }
                resourceBucket.insertBasicRecord(ResourceCells.DEMOGRAPHIC_ID, basicResult.get(ResourceCells.DEMOGRAPHIC_ID));
            } else {
                throw new IllegalJsonFileException("Please upload the complete package(" + rowKey + ") first!");
            }
        } else {
            // delete legacy data if it exists;
            // the master table can be checked with a direct GET
            String legacy = hbaseDao.get(resourceBucket.getMaster(), rowKey);
            if (StringUtils.isNotEmpty(legacy)) {
                hbaseDao.delete(resourceBucket.getMaster(), rowKey);
            }
            // now save the data to hbase
            bundle.clear();
            bundle.addValues(
                    rowKey,
                    resourceBucket.getBasicFamily(),
                    resourceBucket.getMasterBasicRecords(originalPackage.getProfileType())
            );
            bundle.addValues(
                    rowKey,
                    resourceBucket.getdFamily(),
                    resourceBucket.getMasterRecord().getDataGroup()
            );
            hbaseDao.save(resourceBucket.getMaster(), bundle);
        }
    }
}
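
Editor's note: in the re-upload branch the supplementary cells are merged over the stored row with Map.putAll, so overlapping qualifiers are overwritten while untouched qualifiers survive. A self-contained illustration with plain maps (the qualifier names are made up):

    import java.util.Map;
    import java.util.TreeMap;

    public class MergeDemo {
        public static void main(String[] args) {
            Map<String, String> stored = new TreeMap<>();
            stored.put("EHR_000001", "old value");
            stored.put("EHR_000002", "untouched value");
            Map<String, String> supplement = new TreeMap<>();
            supplement.put("EHR_000001", "new value");
            stored.putAll(supplement); // the supplement wins on conflicts
            System.out.println(stored); // {EHR_000001=new value, EHR_000002=untouched value}
        }
    }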

+ 16 - 0
src/main/java/com/yihu/ehr/resolve/dao/PatientDao.java

@ -0,0 +1,16 @@
package com.yihu.ehr.resolve.dao;
import com.yihu.ehr.entity.patient.DemographicInfo;
import org.springframework.data.repository.PagingAndSortingRepository;
import java.util.Date;
import java.util.List;
/**
 * Created by progr1mmer on 2018/1/6.
 */
public interface PatientDao extends PagingAndSortingRepository<DemographicInfo, String> {
    List<DemographicInfo> findByNameOrBirthdayOrTelephoneNo(String name, Date birthday, String telephoneNo);
}
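
Editor's note: Spring Data derives the query from the method name, i.e. three equality conditions joined with OR. A hypothetical caller (the service class and its wiring are illustrative, not part of the commit):

    import com.yihu.ehr.entity.patient.DemographicInfo;
    import com.yihu.ehr.resolve.dao.PatientDao;
    import org.springframework.beans.factory.annotation.Autowired;
    import org.springframework.stereotype.Service;
    import java.util.Date;
    import java.util.List;

    @Service
    public class PatientLookupService { // hypothetical caller
        @Autowired
        private PatientDao patientDao;

        public List<DemographicInfo> lookup(String name, Date birthday, String phone) {
            // matches rows where name OR birthday OR telephoneNo equals the argument
            return patientDao.findByNameOrBirthdayOrTelephoneNo(name, birthday, phone);
        }
    }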

+ 17 - 0
src/main/java/com/yihu/ehr/resolve/dao/RsDictionaryEntryDao.java

@ -0,0 +1,17 @@
package com.yihu.ehr.resolve.dao;
import com.yihu.ehr.resolve.model.stage1.RsDictionaryEntry;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.PagingAndSortingRepository;
import java.util.List;
/**
 * @author zdm
 * @created 2018.07.17
 */
public interface RsDictionaryEntryDao extends PagingAndSortingRepository<RsDictionaryEntry,Integer> {
    List<RsDictionaryEntry> findByDictCode(String code);
}

+ 78 - 0
src/main/java/com/yihu/ehr/resolve/dao/SubResourceDao.java

@ -0,0 +1,78 @@
package com.yihu.ehr.resolve.dao;
import com.yihu.ehr.hbase.HBaseDao;
import com.yihu.ehr.hbase.TableBundle;
import com.yihu.ehr.resolve.model.stage1.OriginalPackage;
import com.yihu.ehr.resolve.model.stage2.ResourceBucket;
import com.yihu.ehr.resolve.model.stage2.SubRecord;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Repository;
import java.util.List;
/**
 * Detail (sub) repository for archived profile resources.
 *
 * @author Sand
 * @version 1.0
 * @created 2016.04.15 16:50
 */
@Repository
public class SubResourceDao {
    @Autowired
    private HBaseDao hbaseDao;
    public void saveOrUpdate(ResourceBucket resourceBucket, OriginalPackage originalPackage) throws Exception {
        String rowKey = resourceBucket.getId();
        TableBundle bundle = new TableBundle();
        if (originalPackage.isReUploadFlg()) { // supplementary upload: replace the affected sub records
            List<SubRecord> subRecordList = resourceBucket.getSubRecords();
            if (subRecordList.size() > 0) {
                // delete the legacy rows first
                String[] legacyRowKeys = new String[subRecordList.size()];
                for (int i = 0; i < subRecordList.size(); i++) {
                    legacyRowKeys[i] = subRecordList.get(i).getRowkey();
                }
                bundle.addRows(legacyRowKeys);
                hbaseDao.delete(resourceBucket.getSlave(), bundle);
                bundle.clear();
                // then stage and save the fresh rows
                subRecordList.forEach(item -> {
                    bundle.addValues(
                            item.getRowkey(),
                            resourceBucket.getBasicFamily(),
                            resourceBucket.getSubBasicRecords(originalPackage.getProfileType())
                    );
                    bundle.addValues(
                            item.getRowkey(),
                            resourceBucket.getdFamily(),
                            item.getDataGroup()
                    );
                });
                hbaseDao.save(resourceBucket.getSlave(), bundle);
            }
        } else {
            // delete legacy data if it exists; scan for sub rowkeys sharing this profile's rowkey prefix
            String[] legacyRowKeys = hbaseDao.findRowKeys(resourceBucket.getSlave(), rowKey, rowKey.substring(0, rowKey.length() - 1) + "1", "^" + rowKey);
            if (legacyRowKeys != null && legacyRowKeys.length > 0) {
                bundle.addRows(legacyRowKeys);
                hbaseDao.delete(resourceBucket.getSlave(), bundle);
            }
            bundle.clear();
            // now save the data to hbase
            List<SubRecord> subRecordList = resourceBucket.getSubRecords();
            subRecordList.forEach(item -> {
                bundle.addValues(
                        item.getRowkey(),
                        resourceBucket.getBasicFamily(),
                        resourceBucket.getSubBasicRecords(originalPackage.getProfileType()));
                bundle.addValues(
                        item.getRowkey(),
                        resourceBucket.getdFamily(),
                        item.getDataGroup());
            });
            hbaseDao.save(resourceBucket.getSlave(), bundle);
        }
    }
}

+ 40 - 0
src/main/java/com/yihu/ehr/resolve/feign/DataSetPackageMgrClient.java

@ -0,0 +1,40 @@
package com.yihu.ehr.resolve.feign;
import com.yihu.ehr.constants.ApiVersion;
import com.yihu.ehr.profile.ArchiveStatus;
import com.yihu.ehr.constants.MicroServices;
import com.yihu.ehr.constants.ServiceApi;
import org.springframework.cloud.netflix.feign.FeignClient;
import org.springframework.web.bind.annotation.*;
import springfox.documentation.annotations.ApiIgnore;
/**
 * @author 张进军
 * @created 2017.06.27 17:56
 */
@ApiIgnore
@FeignClient(name = MicroServices.Package)
public interface DataSetPackageMgrClient {
    /**
     * Fetch a data set archive package by its ID.
     *
     * @param datasetId the data set ID
     * @return the package content
     */
    @RequestMapping(value = ApiVersion.Version1_0 + ServiceApi.DatasetPackages.Package, method = RequestMethod.GET)
    String acquireDatasetPackage(
            @RequestParam(value = "id") String datasetId);
    /**
     * Write the processing status back to the data set archive.
     *
     * @param id      the package ID
     * @param status  the archive status to report
     * @param message a detail message
     */
    @RequestMapping(value = ApiVersion.Version1_0 + ServiceApi.DatasetPackages.Package, method = RequestMethod.PUT)
    void reportStatus(@PathVariable(value = "id") String id,
                      @RequestParam(value = "status") ArchiveStatus status,
                      @RequestBody String message);
}

+ 36 - 0
src/main/java/com/yihu/ehr/resolve/feign/PackageMgrClient.java

@ -0,0 +1,36 @@
package com.yihu.ehr.resolve.feign;
import com.yihu.ehr.constants.ApiVersion;
import com.yihu.ehr.profile.ArchiveStatus;
import com.yihu.ehr.constants.MicroServices;
import com.yihu.ehr.constants.ServiceApi;
import com.yihu.ehr.model.packs.EsDetailsPackage;
import com.yihu.ehr.model.packs.EsSimplePackage;
import org.springframework.cloud.netflix.feign.FeignClient;
import org.springframework.web.bind.annotation.*;
import springfox.documentation.annotations.ApiIgnore;
import java.util.Map;
/**
 * @author Sand
 * @version 1.0
 * @created 2016.03.28 9:27
 */
@ApiIgnore
@FeignClient(name = MicroServices.Package)
@RequestMapping(ApiVersion.Version1_0)
public interface PackageMgrClient {
    @RequestMapping(value = ServiceApi.Packages.Package, method = RequestMethod.GET)
    EsSimplePackage getPackage(
            @PathVariable(value = "id") String id);
    @RequestMapping(value = ServiceApi.Packages.Package, method = RequestMethod.PUT)
    boolean resolveStatus(
            @PathVariable(value = "id") String id,
            @RequestParam(value = "status") ArchiveStatus status,
            @RequestParam(value = "errorType") int errorType,
            @RequestBody String message);
}

+ 117 - 0
src/main/java/com/yihu/ehr/resolve/job/HealthCheckTask.java

@ -0,0 +1,117 @@
package com.yihu.ehr.resolve.job;
import com.yihu.ehr.elasticsearch.ElasticSearchUtil;
import com.yihu.ehr.hbase.HBaseAdmin;
import com.yihu.ehr.resolve.config.SchedulerConfig;
import com.yihu.ehr.resolve.log.PackResolveLogger;
import org.quartz.*;
import org.quartz.impl.matchers.GroupMatcher;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cloud.client.ServiceInstance;
import org.springframework.cloud.client.discovery.DiscoveryClient;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
import java.util.Date;
import java.util.List;
import java.util.Set;
import java.util.UUID;
import static org.quartz.JobBuilder.newJob;
import static org.quartz.TriggerBuilder.newTrigger;
/**
 * Task - periodically checks cluster health to improve the fault tolerance of resolve jobs.
 * Created by progr1mmer on 2017/12/15.
 */
@Component
public class HealthCheckTask {
    private static final String SVR_PACK_MGR = "svr-pack-mgr";
    @Autowired
    private SchedulerConfig schedulerConfig;
    @Autowired
    private SchedulerManager schedulerManager;
    @Autowired
    private Scheduler scheduler;
    @Autowired
    private DiscoveryClient discoveryClient;
    @Autowired
    private HBaseAdmin hBaseAdmin;
    @Autowired
    private ElasticSearchUtil elasticSearchUtil;
    @Scheduled(cron = "0 0/20 * * * ?")
    private void startTask() {
        PackResolveLogger.info("Health Check: " + new Date());
        GroupMatcher groupMatcher = GroupMatcher.groupEquals("PackResolve");
        // check the HBase cluster
        try {
            hBaseAdmin.isTableExists("HealthProfile");
        } catch (Exception e) {
            try {
                Set<JobKey> jobKeySet = scheduler.getJobKeys(groupMatcher);
                if (jobKeySet != null) {
                    for (JobKey jobKey : jobKeySet) {
                        scheduler.deleteJob(jobKey);
                    }
                }
            } catch (SchedulerException se) {
                PackResolveLogger.error(se.getMessage());
            }
            PackResolveLogger.error(e.getMessage());
            return;
        }
        // check the Elasticsearch cluster
        try {
            elasticSearchUtil.findByField("archive_relation", "info", "sn", "sn_for_check");
        } catch (Exception e) {
            try {
                Set<JobKey> jobKeySet = scheduler.getJobKeys(groupMatcher);
                if (jobKeySet != null) {
                    for (JobKey jobKey : jobKeySet) {
                        scheduler.deleteJob(jobKey);
                    }
                }
            } catch (SchedulerException se) {
                PackResolveLogger.error(se.getMessage());
            }
            PackResolveLogger.error(e.getMessage());
            return;
        }
        // check the required microservice instances
        List<ServiceInstance> mgr = discoveryClient.getInstances(SVR_PACK_MGR);
        if (mgr.isEmpty()) {
            try {
                Set<JobKey> jobKeySet = scheduler.getJobKeys(groupMatcher);
                if (jobKeySet != null) {
                    for (JobKey jobKey : jobKeySet) {
                        scheduler.deleteJob(jobKey);
                    }
                }
            } catch (SchedulerException e) {
                PackResolveLogger.error(e.getMessage());
            }
            return;
        }
        try {
            Set<JobKey> jobKeySet = scheduler.getJobKeys(groupMatcher);
            int activeCount = jobKeySet.size();
            for (int i = 0; i < schedulerManager.getJobSetSize() - activeCount; i++) {
                String suffix = UUID.randomUUID().toString().substring(0, 8);
                JobDetail jobDetail = newJob(PackageResolveJob.class)
                        .withIdentity("PackResolveJob-" + suffix, "PackResolve")
                        .build();
                CronTrigger trigger = newTrigger()
                        .withIdentity("PackResolveTrigger-" + suffix, "PackResolve")
                        .withSchedule(CronScheduleBuilder.cronSchedule(schedulerConfig.getCronExp()))
                        .startNow()
                        .build();
                scheduler.scheduleJob(jobDetail, trigger);
            }
        } catch (Exception e) {
            PackResolveLogger.error(e.getMessage());
        }
    }
}
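
Editor's note: the three failure branches above repeat the same "delete every job in the PackResolve group" cleanup. A minimal extraction sketch, a method that could live in this class (it reuses the injected scheduler and the imports already in this file; the helper name is illustrative):

    // Hypothetical helper; each failure branch above could call dropAllResolveJobs().
    private void dropAllResolveJobs() {
        try {
            Set<JobKey> jobKeySet = scheduler.getJobKeys(GroupMatcher.groupEquals("PackResolve"));
            for (JobKey jobKey : jobKeySet) {
                scheduler.deleteJob(jobKey);
            }
        } catch (SchedulerException se) {
            PackResolveLogger.error(se.getMessage());
        }
    }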

+ 160 - 0
src/main/java/com/yihu/ehr/resolve/job/PackageResolveJob.java

@ -0,0 +1,160 @@
package com.yihu.ehr.resolve.job;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.yihu.ehr.profile.ArchiveStatus;
import com.yihu.ehr.profile.ProfileType;
import com.yihu.ehr.profile.exception.ResolveException;
import com.yihu.ehr.profile.family.ResourceCells;
import com.yihu.ehr.profile.queue.RedisCollection;
import com.yihu.ehr.fastdfs.FastDFSUtil;
import com.yihu.ehr.lang.SpringContext;
import com.yihu.ehr.model.packs.EsSimplePackage;
import com.yihu.ehr.profile.exception.IllegalJsonDataException;
import com.yihu.ehr.profile.exception.IllegalJsonFileException;
import com.yihu.ehr.resolve.model.stage1.OriginalPackage;
import com.yihu.ehr.resolve.model.stage2.ResourceBucket;
import com.yihu.ehr.resolve.service.resource.stage1.ResolveService;
import com.yihu.ehr.resolve.service.resource.stage2.IdentifyService;
import com.yihu.ehr.resolve.service.resource.stage2.PackMillService;
import com.yihu.ehr.resolve.service.resource.stage2.ResourceService;
import com.yihu.ehr.resolve.log.PackResolveLogger;
import com.yihu.ehr.resolve.service.resource.stage2.StatusReportService;
import com.yihu.ehr.resolve.util.LocalTempPathUtil;
import com.yihu.ehr.util.datetime.DateUtil;
import net.lingala.zip4j.exception.ZipException;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.quartz.*;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Component;
import java.io.Serializable;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
/**
 * Archive package resolve job.
 *
 * @author Sand
 * @version 1.0
 * @created 2016.03.28 11:30
 */
@Component
@DisallowConcurrentExecution
public class PackageResolveJob implements InterruptableJob {
    private static final long DAY = 1000 * 60 * 60 * 24;
    private final Log logger = LogFactory.getLog(this.getClass());
    @Override
    public void interrupt() throws UnableToInterruptJobException {
    }
    @Override
    public void execute(JobExecutionContext context) {
        StatusReportService statusReportService = SpringContext.getService(StatusReportService.class);
        // This bean must be looked up by name, otherwise: expected single matching bean but found 3: redisTemplate,sessionRedisTemplate,stringRedisTemplate
        RedisTemplate<String, Serializable> redisTemplate = SpringContext.getService("redisTemplate");
        ObjectMapper objectMapper = SpringContext.getService(ObjectMapper.class);
        Serializable serializable = redisTemplate.opsForList().rightPop(RedisCollection.ResolveQueue);
        if (null == serializable) {
            serializable = redisTemplate.opsForSet().pop(RedisCollection.ResolveQueueVice);
        }
        EsSimplePackage pack = null;
        try {
            if (serializable != null) {
                String packStr = serializable.toString();
                pack = objectMapper.readValue(packStr, EsSimplePackage.class);
            }
            if (pack != null) {
                // Skip packages that are already resolved (status 3) or currently resolving (status 1);
                // with multiple deployed instances, timing differences can enqueue the same package more than once.
                Map<String, Object> map = statusReportService.getJsonArchiveById(pack.get_id());
                if (map != null && ("3".equals(map.get("archive_status") + "") || "1".equals(map.get("archive_status") + ""))) {
                    logger.error(map.get("archive_status") + "");
                    logger.error("==================stop archive:" + map.get("_id"));
                    return;
                }
                }
                PackResolveLogger.info("开始入库:" + pack.get_id() + ", Timestamp:" + new Date());
                statusReportService.reportStatus(pack.get_id(), ArchiveStatus.Acquired, 0, "正在入库中", null);
                OriginalPackage originalPackage = doResolve(pack, statusReportService);
                // enqueue the provincial platform upload message
                redisTemplate.opsForList().leftPush(RedisCollection.ProvincialPlatformQueue, objectMapper.writeValueAsString(pack));
                // publish the event handling message
                if (originalPackage.getProfileType() == ProfileType.File || originalPackage.getProfileType() == ProfileType.Link) {
                    KafkaTemplate kafkaTemplate = SpringContext.getService(KafkaTemplate.class);
                    kafkaTemplate.send("svr-pack-event", "resolve", objectMapper.writeValueAsString(pack));
                }
            }
        } catch (Exception e) {
            int errorType = -2;
            if (e instanceof ZipException) {
                errorType = 1;
            } else if (e instanceof IllegalJsonFileException) {
                errorType = 2;
            } else if (e instanceof IllegalJsonDataException) {
                errorType = 3;
            } else if (e instanceof ResolveException) {
                errorType = 21; // codes below 21 are shared by quality control and resolving
            }
            if (pack != null) {
                if (StringUtils.isNotBlank(e.getMessage())) {
                    statusReportService.reportStatus(pack.get_id(), ArchiveStatus.Failed, errorType, e.getMessage(), null);
                    PackResolveLogger.error(e.getMessage(), e);
                } else {
                    statusReportService.reportStatus(pack.get_id(), ArchiveStatus.Failed, errorType, "Internal server error, please see task log for detail message.", null);
                    PackResolveLogger.error("Empty exception message, please see the following detail info.", e);
                }
            } else {
                PackResolveLogger.error("Empty pack cause by:" + e.getMessage());
            }
        }
    }
    private OriginalPackage doResolve(EsSimplePackage pack, StatusReportService statusReportService) throws Exception {
        ResolveService resolveEngine = SpringContext.getService(ResolveService.class);
        PackMillService packMill = SpringContext.getService(PackMillService.class);
        IdentifyService identifyService = SpringContext.getService(IdentifyService.class);
        ResourceService resourceService = SpringContext.getService(ResourceService.class);
        OriginalPackage originalPackage = resolveEngine.doResolve(pack, downloadTo(pack.getRemote_path()));
        ResourceBucket resourceBucket = packMill.grindingPackModel(originalPackage);
        identifyService.identify(resourceBucket, originalPackage);
        resourceService.save(resourceBucket, originalPackage);
        // report the archive status back
        Map<String, Object> map = new HashMap<>();
        map.put("defect", resourceBucket.getQcMetadataRecords().getRecords().isEmpty() ? 0 : 1); // whether resolving recorded any defect
        map.put("patient_name", resourceBucket.getBasicRecord(ResourceCells.PATIENT_NAME));
        map.put("profile_id", resourceBucket.getId());
        map.put("demographic_id", resourceBucket.getBasicRecord(ResourceCells.DEMOGRAPHIC_ID));
        map.put("event_type", originalPackage.getEventType() == null ? -1 : originalPackage.getEventType().getType());
        map.put("event_no", originalPackage.getEventNo());
        map.put("event_date", DateUtil.toStringLong(originalPackage.getEventTime()));
        map.put("patient_id", originalPackage.getPatientId());
        map.put("dept", resourceBucket.getBasicRecord(ResourceCells.DEPT_CODE));
        long delay = pack.getReceive_date().getTime() - originalPackage.getEventTime().getTime();
        map.put("delay", delay % DAY > 0 ? delay / DAY + 1 : delay / DAY);
        map.put("re_upload_flg", String.valueOf(originalPackage.isReUploadFlg()));
        statusReportService.reportStatus(pack.get_id(), ArchiveStatus.Finished, 0, "resolve success",  map);
        // back-fill resolved data onto the package
        pack.setRowkey(resourceBucket.getId());
        pack.setPatient_id(originalPackage.getPatientId());
        pack.setEvent_date(DateUtil.toStringLong(originalPackage.getEventTime()));
        pack.setEvent_no(originalPackage.getEventNo());
        pack.setEvent_type(originalPackage.getEventType() == null ? -1 : originalPackage.getEventType().getType());
        pack.setOrg_code(originalPackage.getOrgCode());
        pack.setOrg_name(resourceBucket.getBasicRecord(ResourceCells.ORG_NAME));
        pack.setOrg_area(resourceBucket.getBasicRecord(ResourceCells.ORG_AREA));
        pack.setPatient_name(resourceBucket.getBasicRecord(ResourceCells.PATIENT_NAME));
        pack.setIdcard_no(resourceBucket.getBasicRecord(ResourceCells.DEMOGRAPHIC_ID));
        return originalPackage;
    }
    private String downloadTo(String filePath) throws Exception {
        FastDFSUtil fastDFSUtil = SpringContext.getService(FastDFSUtil.class);
        String[] tokens = filePath.split(":");
        return fastDFSUtil.download(tokens[0], tokens[1], LocalTempPathUtil.getTempPathWithUUIDSuffix());
    }
}
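
Editor's note: the exception-to-errorType mapping in execute() is a fixed table; restated as a helper it reads as the contract it is. A sketch using the same codes as the catch block above (the helper itself is hypothetical, not part of the commit):

    // Hypothetical restatement of the mapping in execute(); codes match the catch block.
    private static int errorTypeOf(Exception e) {
        if (e instanceof ZipException) return 1;             // corrupt archive
        if (e instanceof IllegalJsonFileException) return 2; // malformed JSON file
        if (e instanceof IllegalJsonDataException) return 3; // invalid JSON data
        if (e instanceof ResolveException) return 21;        // shared QC/resolve error
        return -2;                                           // unclassified failure
    }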

+ 102 - 0
src/main/java/com/yihu/ehr/resolve/job/SchedulerManager.java

@ -0,0 +1,102 @@
package com.yihu.ehr.resolve.job;
import com.yihu.ehr.resolve.config.SchedulerConfig;
import org.quartz.*;
import org.quartz.impl.matchers.GroupMatcher;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import javax.annotation.PostConstruct;
import java.util.Set;
import java.util.UUID;
import static org.quartz.JobBuilder.newJob;
import static org.quartz.TriggerBuilder.newTrigger;
/**
 * Created by progr1mmer on 2018/7/13.
 */
@Component
public class SchedulerManager {
    private int jobSetSize;
    @Autowired
    private Scheduler scheduler;
    @Autowired
    private SchedulerConfig schedulerConfig;
    @PostConstruct
    private void init() throws Exception {
        try {
            for (int i = 0; i < schedulerConfig.getInitSize(); i++) {
                String suffix = UUID.randomUUID().toString().substring(0, 8);
                JobDetail jobDetail = newJob(PackageResolveJob.class)
                        .withIdentity("PackResolveJob-" + suffix, "PackResolve")
                        .build();
                CronTrigger trigger = newTrigger()
                        .withIdentity("PackResolveTrigger-" + suffix, "PackResolve")
                        .withSchedule(CronScheduleBuilder.cronSchedule(schedulerConfig.getCronExp()))
                        .startNow()
                        .build();
                scheduler.scheduleJob(jobDetail, trigger);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        this.jobSetSize = schedulerConfig.getInitSize();
    }
    public int getJobSetSize() {
        return jobSetSize;
    }
    public int getJobSize() throws Exception {
        GroupMatcher groupMatcher = GroupMatcher.groupEquals("PackResolve");
        Set<JobKey> jobKeys = scheduler.getJobKeys(groupMatcher);
        return jobKeys.size();
    }
    public void addJob (int count, String cronExp) throws Exception {
        int addCount = 0;
        GroupMatcher groupMatcher = GroupMatcher.groupEquals("PackResolve");
        Set<JobKey> jobKeys = scheduler.getJobKeys(groupMatcher);
        int activeJob = jobKeys.size();
        for (int i = 0; i < count; i++) {
            if (i + activeJob >= schedulerConfig.getMaxSize()) {
                break;
            }
            String suffix = UUID.randomUUID().toString().substring(0, 8);
            JobDetail jobDetail = newJob(PackageResolveJob.class)
                    .withIdentity("PackResolveJob-" + suffix, "PackResolve")
                    .build();
            CronTrigger trigger = newTrigger()
                    .withIdentity("PackResolveJob-" + suffix, "PackResolve")
                    .withSchedule(CronScheduleBuilder.cronSchedule(cronExp))
                    .startNow()
                    .build();
            scheduler.scheduleJob(jobDetail, trigger);
            addCount = i + 1;
        }
        this.jobSetSize += addCount;
        if (this.jobSetSize > schedulerConfig.getMaxSize()) {
            jobSetSize = schedulerConfig.getMaxSize();
        }
    }
    public void minusJob (int count) throws Exception {
        int minusCount = count;
        GroupMatcher groupMatcher = GroupMatcher.groupEquals("PackResolve");
        Set<JobKey> jobKeySet = scheduler.getJobKeys(groupMatcher);
        for (JobKey jobKey : jobKeySet) {
            scheduler.deleteJob(jobKey);
            if (--count == 0) {
                break;
            }
        }
        this.jobSetSize -= minusCount;
        if (this.jobSetSize < 0) {
            jobSetSize = 0;
        }
    }
}
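
Editor's note: a hypothetical caller scaling the resolve pool; only addJob, minusJob and getJobSize come from the class above, the surrounding component is illustrative and not part of the commit:

    import com.yihu.ehr.resolve.job.SchedulerManager;
    import org.springframework.beans.factory.annotation.Autowired;
    import org.springframework.stereotype.Component;

    @Component
    public class ResolvePoolTuner { // hypothetical caller
        @Autowired
        private SchedulerManager schedulerManager;

        public void burst() throws Exception {
            schedulerManager.addJob(2, "0/10 * * * * ?"); // capped at schedulerConfig.getMaxSize()
            System.out.println("active resolve jobs: " + schedulerManager.getJobSize());
            schedulerManager.minusJob(1); // scale back down
        }
    }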

+ 63 - 0
src/main/java/com/yihu/ehr/resolve/log/PackResolveLogger.java

@ -0,0 +1,63 @@
package com.yihu.ehr.resolve.log;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Logger - the log writer used while resolving and archiving packages.
 */
public class PackResolveLogger {
    private static final Logger LOGGER = LoggerFactory.getLogger(PackResolveLogger.class);
    /**
     * @param info
     */
    public static void info(String info) {
        LOGGER.info(info);
    }
    /**
     *
     * @param warn
     */
    public static void warn(String warn) {
        LOGGER.warn(warn);
    }
    /**
     *
     * @param error
     */
    public static void error(String error) {
        LOGGER.error(error);
    }
    /**
     *
     * @param error
     * @param e
     */
    public static void error(String error, Exception e) {
        LOGGER.error(error, e);
    }
    /**
    public static void info(String caller, JsonNode info) {
        try {
            ObjectMapper objectMapper = new ObjectMapper();
            ObjectNode log = objectMapper.createObjectNode();
            log.put("caller", caller);
            log.put("time", DateTimeUtil.simpleDateTimeFormat(new Date()));
            log.put("logType", "3"); // TODO - 2017.7.4 张进军 - 待确定。
            log.set("data", info);
            logger.info(log.toString());
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
     */
}

+ 120 - 0
src/main/java/com/yihu/ehr/resolve/model/stage1/FilePackage.java

@ -0,0 +1,120 @@
package com.yihu.ehr.resolve.model.stage1;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.yihu.ehr.profile.ProfileType;
import com.yihu.ehr.profile.exception.IllegalJsonDataException;
import com.yihu.ehr.profile.model.PackageDataSet;
import com.yihu.ehr.profile.model.ProfileId;
import com.yihu.ehr.resolve.model.stage1.details.CdaDocument;
import com.yihu.ehr.util.datetime.DateTimeUtil;
import org.apache.commons.lang3.StringUtils;
import java.util.*;
/**
 * File based health profile (unstructured).
 *
 * @author Sand
 * @created 2015.08.16 10:44
 */
public class FilePackage extends OriginalPackage {
    private String patientName; // patient name
    private String demographicId; // national ID number
    public FilePackage(String packId, Date receiveDate) {
        this.packId = packId;
        this.receiveDate = receiveDate;
        this.profileType = ProfileType.File;
    }
    // document list, keyed by the database primary key
    private Map<String, CdaDocument> cdaDocuments = new TreeMap<>();
    public String getDemographicId() {
        return demographicId;
    }
    public void setDemographicId(String demographicId) {
        this.demographicId = demographicId;
    }
    public String getPatientName() {
        return patientName;
    }
    public void setPatientName(String patientName) {
        this.patientName = patientName;
    }
    public Map<String, CdaDocument> getCdaDocuments() {
        return cdaDocuments;
    }
    public String getFileIndices(){
        return String.join(";", cdaDocuments.keySet());
    }
    @Override
    public String getId() {
        if (profileId == null) {
            if (StringUtils.isEmpty(orgCode)) {
                throw new IllegalJsonDataException("Build profile id failed, organization code is empty.");
            }
            if (StringUtils.isEmpty(eventNo)) {
                throw new IllegalJsonDataException("Build profile id failed, eventNo is empty.");
            }
            if (eventTime == null) {
                throw new IllegalJsonDataException("Build profile id failed, unable to get event date.");
            }
            if (profileType == null) {
                throw new IllegalJsonDataException("Build profile id failed, profile type is empty.");
            }
            this.profileId = ProfileId.get(orgCode, eventNo, eventTime, profileType.getType());
        }
        return profileId.toString();
    }
    @Override
    public String toJson() {
        ObjectMapper objectMapper = new ObjectMapper();
        ObjectNode root = objectMapper.createObjectNode();
        root.put("id", getId());
        root.put("orgCode", this.orgCode);
        root.put("patientId", this.patientId);
        root.put("eventNo", this.eventNo);
        root.put("cdaVersion", this.cdaVersion);
        root.put("eventTime", DateTimeUtil.utcDateTimeFormat(this.eventTime));
        root.put("eventType", this.eventType == null ? "" : this.eventType.toString());
        root.put("createTime", DateTimeUtil.utcDateTimeFormat(this.createDate));
        root.put("profileType", this.profileType.toString());
        root.put("patientName", this.patientName);
        root.put("demographicId", this.demographicId);
        root.put("reUploadFlg", this.reUploadFlg);
        root.put("identifyFlag", this.identifyFlag);
        ObjectNode dataSetsNode = root.putObject("dataSets");
        for (String dataSetCode : dataSets.keySet()) {
            PackageDataSet dataSet = dataSets.get(dataSetCode);
            dataSetsNode.putPOJO(dataSetCode, dataSet.toJson());
        }
        ArrayNode files = root.putArray("files");
        for (String key : cdaDocuments.keySet()) {
            files.add(cdaDocuments.get(key).toJson());
        }
        return root.toString();
    }
    @Override
    public void regularRowKey() {
        super.regularRowKey();
        int i = 0;
        Set<String> rowkeys = new HashSet<>(cdaDocuments.keySet());
        for (String rowkey : rowkeys){
            CdaDocument document = cdaDocuments.remove(rowkey);
            cdaDocuments.put(getId() + "$" + i++, document);
        }
    }
}

+ 138 - 0
src/main/java/com/yihu/ehr/resolve/model/stage1/LinkPackage.java

@ -0,0 +1,138 @@
package com.yihu.ehr.resolve.model.stage1;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.yihu.ehr.profile.ProfileType;
import com.yihu.ehr.profile.annotation.Table;
import com.yihu.ehr.profile.core.ResourceCore;
import com.yihu.ehr.profile.exception.IllegalJsonDataException;
import com.yihu.ehr.profile.model.PackageDataSet;
import com.yihu.ehr.profile.model.ProfileId;
import com.yihu.ehr.resolve.model.stage1.details.LinkFile;
import com.yihu.ehr.util.datetime.DateTimeUtil;
import org.apache.commons.lang3.StringUtils;
import java.util.*;
/**
 * Lightweight health profile. Its data sets store links into the organization's own health records.
 *
 * @author Sand
 * @version 1.0
 * @created 2015.08.16 10:44
 */
@Table(ResourceCore.MasterTable)
public class LinkPackage extends OriginalPackage {
    private String patientName; // patient name
    private String demographicId; // national ID number
    private String visitType;
    private Date expireDate;
    private List<LinkFile> linkFiles = new ArrayList<>(); // basic info of image files stored in FastDFS
    private Map<String, List<String>> files = new HashMap<>(); // FTP files to delete after a successful resolve; key is the path, value the file names
    public LinkPackage(String packId, Date receiveDate){
        this.packId = packId;
        this.receiveDate = receiveDate;
        this.profileType = ProfileType.Link;
    }
    @Override
    public String getId() {
        if (profileId == null) {
            if (StringUtils.isEmpty(orgCode)) {
                throw new IllegalJsonDataException("Build profile id failed, organization code is empty.");
            }
            if (StringUtils.isEmpty(eventNo)) {
                throw new IllegalJsonDataException("Build profile id failed, eventNo is empty.");
            }
            if (eventTime == null) {
                throw new IllegalJsonDataException("Build profile id failed, unable to get event date.");
            }
            if (profileType == null) {
                throw new IllegalJsonDataException("Build profile id failed, profile type is empty.");
            }
            this.profileId = ProfileId.get(orgCode, eventNo, eventTime, profileType.getType());
        }
        return profileId.toString();
    }
    @Override
    public String toJson() {
        ObjectMapper objectMapper = new ObjectMapper();
        ObjectNode root = objectMapper.createObjectNode();
        root.put("id", getId());
        root.put("orgCode", this.orgCode);
        root.put("patientId", this.patientId);
        root.put("eventNo", this.eventNo);
        root.put("cdaVersion", this.cdaVersion);
        root.put("eventTime", DateTimeUtil.utcDateTimeFormat(this.eventTime));
        root.put("eventType", this.eventType == null ?  "" : this.eventType.toString());
        root.put("profileType", this.profileType.toString());
        root.put("patientName", this.patientName);
        root.put("demographicId", this.demographicId);
        root.put("reUploadFlg", this.reUploadFlg);
        root.put("identifyFlag", this.identifyFlag);
        root.put("visitType", this.visitType);
        root.put("expireDate", DateTimeUtil.utcDateTimeFormat(this.expireDate));
        ObjectNode dataSetsNode = root.putObject("dataSets");
        for (String dataSetCode : dataSets.keySet()) {
            PackageDataSet dataSet = dataSets.get(dataSetCode);
            dataSetsNode.putPOJO(dataSetCode, dataSet.toJson());
        }
        ArrayNode files = root.putArray("files");
        for (LinkFile linkFile : linkFiles) {
            files.add(linkFile.toJson());
        }
        return root.toString();
    }
    public String getDemographicId() {
        return demographicId;
    }
    public void setDemographicId(String demographicId) {
        this.demographicId = demographicId;
    }
    public String getPatientName() {
        return patientName;
    }
    public void setPatientName(String patientName) {
        this.patientName = patientName;
    }
    public String getVisitType() {
        return visitType;
    }
    public void setVisitType(String visitType) {
        this.visitType = visitType;
    }
    public Date getExpireDate() {
        return expireDate;
    }
    public void setExpireDate(Date expireDate) {
        this.expireDate = expireDate;
    }
    public List<LinkFile> getLinkFiles() {
        return linkFiles;
    }
    public void setLinkFiles(List<LinkFile> linkFiles) {
        this.linkFiles = linkFiles;
    }
    public Map<String, List<String>> getFiles() {
        return files;
    }
    public void setFiles(Map<String, List<String>> files) {
        this.files = files;
    }
}

+ 155 - 0
src/main/java/com/yihu/ehr/resolve/model/stage1/OriginalPackage.java

@ -0,0 +1,155 @@
package com.yihu.ehr.resolve.model.stage1;
import com.yihu.ehr.profile.EventType;
import com.yihu.ehr.profile.ProfileType;
import com.yihu.ehr.profile.model.PackageDataSet;
import com.yihu.ehr.profile.model.ProfileId;
import java.util.Date;
import java.util.Map;
import java.util.TreeMap;
/**
 * Created by progr1mmer on 2018/6/8.
 */
public abstract class OriginalPackage {
    protected String packId;
    protected Date receiveDate;
    protected ProfileType profileType;
    protected ProfileId profileId;
    protected String cdaVersion;
    protected String patientId;
    protected String eventNo;
    protected Date eventTime;
    protected EventType eventType;
    protected String orgCode;
    protected Date createDate;
    protected boolean reUploadFlg;
    // identity recognition flag
    protected boolean identifyFlag;
    // original data sets
    protected Map<String, PackageDataSet> dataSets = new TreeMap<>();
    public String getPackId() {
        return packId;
    }
    public Date getReceiveDate() {
        return receiveDate;
    }
    public ProfileType getProfileType() {
        return profileType;
    }
    public void setProfileType(ProfileType profileType) {
        this.profileType = profileType;
    }
    public ProfileId getProfileId() {
        return profileId;
    }
    public void setProfileId(ProfileId profileId) {
        this.profileId = profileId;
    }
    public String getCdaVersion() {
        return cdaVersion;
    }
    public void setCdaVersion(String cdaVersion) {
        this.cdaVersion = cdaVersion;
    }
    public String getPatientId() {
        return patientId;
    }
    public void setPatientId(String patientId) {
        this.patientId = patientId;
    }
    public String getEventNo() {
        return eventNo;
    }
    public void setEventNo(String eventNo) {
        this.eventNo = eventNo;
    }
    public Date getEventTime() {
        return eventTime;
    }
    public void setEventTime(Date eventTime) {
        this.eventTime = eventTime;
    }
    public EventType getEventType() {
        return eventType;
    }
    public void setEventType(EventType eventType) {
        this.eventType = eventType;
    }
    public String getOrgCode() {
        return orgCode;
    }
    public void setOrgCode(String orgCode) {
        this.orgCode = orgCode;
    }
    public Date getCreateDate() {
        return createDate;
    }
    public void setCreateDate(Date createDate) {
        this.createDate = createDate;
    }
    public boolean isReUploadFlg() {
        return reUploadFlg;
    }
    public void setReUploadFlg(boolean reUploadFlg) {
        this.reUploadFlg = reUploadFlg;
    }
    public boolean isIdentifyFlag() {
        return identifyFlag;
    }
    public void setIdentifyFlag(boolean identifyFlag) {
        this.identifyFlag = identifyFlag;
    }
    public void insertDataSet(String dataSetCode, PackageDataSet dataSet) {
        this.dataSets.put(dataSetCode, dataSet);
    }
    public PackageDataSet getDataSet(String dataSetCode) {
        return this.dataSets.get(dataSetCode);
    }
    public Map<String, PackageDataSet> getDataSets() {
        return dataSets;
    }
    public abstract String getId();
    public abstract String toJson();
    public void regularRowKey() {
        dataSets.forEach((key, val) -> {
            int rowIndex = 0;
            String sortFormat = val.getRecordCount() > 10 ? "%s$%03d" : "%s$%1d";
            String[] rowKeys = val.getRecordKeys().toArray(new String[val.getRecordCount()]);
            for (String rowKey : rowKeys) {
                val.updateRecordKey(rowKey, String.format(sortFormat, getId(), rowIndex ++));
            }
        });
    }
}
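
Editor's note: regularRowKey switches to a zero-padded three-digit suffix once a data set holds more than ten records, so that HBase's lexicographic rowkey order matches the numeric record order. A self-contained check (the profile id is made up):

    public class RowKeyFormatDemo {
        public static void main(String[] args) {
            String id = "0de7f1e22ab8"; // illustrative profile id
            // large data set: padded, so $002 sorts before $011
            System.out.println(String.format("%s$%03d", id, 11)); // 0de7f1e22ab8$011
            // small data set: a single digit suffix is already order-safe
            System.out.println(String.format("%s$%1d", id, 3));   // 0de7f1e22ab8$3
        }
    }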

+ 81 - 0
src/main/java/com/yihu/ehr/resolve/model/stage1/RsDictionaryEntry.java

@ -0,0 +1,81 @@
package com.yihu.ehr.resolve.model.stage1;
import org.hibernate.annotations.GenericGenerator;
import javax.persistence.*;
/**
 * Resource dictionary entry.
 *
 * Created by lyr on 2016/5/13.
 */
@Entity
@Table(name = "rs_dictionary_entry")
public class RsDictionaryEntry {
    private int id;
    private int dictId;
    private String dictCode;
    private String code;
    private String name;
    private String description;
    @Id
    @GeneratedValue(generator = "Generator")
    @GenericGenerator(name = "Generator", strategy = "increment")
    @Column(name = "id", unique = true, nullable = false)
    public int getId() {
        return id;
    }
    public void setId(int id) {
        this.id = id;
    }
    @Column(name = "dict_id",nullable = false)
    public Integer getDictId() {
        return dictId;
    }
    public void setDictId(int dictId) {
        this.dictId = dictId;
    }
    @Column(name = "dict_code",nullable = false)
    public String getDictCode()
    {
        return dictCode;
    }
    public void setDictCode(String dictCode)
    {
        this.dictCode = dictCode;
    }
    @Column(name = "code",nullable = false)
    public String getCode()
    {
        return code;
    }
    public void setCode(String code)
    {
        this.code = code;
    }
    @Column(name="name",nullable = false)
    public String getName()
    {
        return name;
    }
    public void setName(String name)
    {
        this.name = name;
    }
    @Column(name="description")
    public String getDescription()
    {
        return description;
    }
    public void setDescription(String description)
    {
        this.description = description;
    }
}

+ 82 - 0
src/main/java/com/yihu/ehr/resolve/model/stage1/SimplePackage.java

@ -0,0 +1,82 @@
package com.yihu.ehr.resolve.model.stage1;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.yihu.ehr.profile.ProfileType;
import com.yihu.ehr.profile.exception.IllegalJsonDataException;
import com.yihu.ehr.profile.model.ProfileId;
import com.yihu.ehr.util.datetime.DateTimeUtil;
import org.apache.commons.lang3.StringUtils;
import java.util.Date;
import java.util.List;
/**
 * Data set archive package.
 *
 * @author 张进军
 * @created 2017.06.27 11:34
 */
public class SimplePackage extends OriginalPackage {
    private String pk; // primary key of the data set (possibly a composite key)
    private List<String> sqlList; // insert/update SQL statements built while iterating the data sets
    public SimplePackage(String packId, Date receiveDate){
        this.packId = packId;
        this.receiveDate = receiveDate;
        this.profileType = ProfileType.Simple;
    }
    public List<String> getSqlList() {
        return sqlList;
    }
    public void setSqlList(List<String> sqlList) {
        this.sqlList = sqlList;
    }
    public String getPk() {
        return pk;
    }
    public void setPk(String pk) {
        this.pk = pk;
    }
    @Override
    public String getId() {
        if (profileId == null) {
            if (StringUtils.isEmpty(orgCode)) {
                throw new IllegalJsonDataException("Build profile id failed, organization code is empty.");
            }
            if (StringUtils.isEmpty(eventNo)) {
                throw new IllegalJsonDataException("Build profile id failed, eventNo is empty.");
            }
            if (eventTime == null) {
                throw new IllegalJsonDataException("Build profile id failed, unable to get event date.");
            }
            if (profileType == null) {
                throw new IllegalJsonDataException("Build profile id failed, profile type is empty.");
            }
            this.profileId = ProfileId.get(orgCode, eventNo, eventTime, profileType.getType());
        }
        return profileId.toString();
    }
    @Override
    public String toJson() {
        ObjectMapper objectMapper = new ObjectMapper();
        ObjectNode root = objectMapper.createObjectNode();
        root.put("orgCode", this.orgCode);
        root.put("patientId", this.patientId);
        root.put("eventNo", this.eventNo);
        root.put("cdaVersion", this.cdaVersion);
        root.put("eventTime", DateTimeUtil.utcDateTimeFormat(this.eventTime));
        root.put("createTime", DateTimeUtil.utcDateTimeFormat(this.createDate));
        root.put("eventType", StringUtils.isEmpty(this.eventType) ? "" : this.eventType.toString());
        root.put("profileType", this.profileType.toString());
        return root.toString();
    }
}

+ 161 - 0
src/main/java/com/yihu/ehr/resolve/model/stage1/StandardPackage.java

@ -0,0 +1,161 @@
package com.yihu.ehr.resolve.model.stage1;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.yihu.ehr.profile.ProfileType;
import com.yihu.ehr.profile.exception.IllegalJsonDataException;
import com.yihu.ehr.profile.model.PackageDataSet;
import com.yihu.ehr.profile.model.ProfileId;
import com.yihu.ehr.resolve.model.stage2.ResourceBucket;
import com.yihu.ehr.util.datetime.DateTimeUtil;
import org.apache.commons.lang3.StringUtils;
import java.util.Date;
import java.util.HashSet;
import java.util.Set;
/**
 * Created by progr1mmer on 2018/6/8.
 */
public class StandardPackage extends OriginalPackage {
    private String cardId; // visit card number
    private String cardType; // visit card type
    private String patientName; // patient name
    private String patientAge; // age at the visit
    private String patientSex; // patient gender
    private String demographicId; // national ID number
    private String deptCode; // admission department code
    private Set<String> diagnosisCode = new HashSet<>(); // ICD-10 diagnosis codes
    private Set<String> diagnosisName = new HashSet<>(); // diagnosis names
    public StandardPackage(String packId, Date receiveDate) {
        this.packId = packId;
        this.receiveDate = receiveDate;
        this.profileType = ProfileType.Standard;
    }
    @Override
    public String getId() {
        if (profileId == null) {
            if (StringUtils.isEmpty(orgCode)) {
                throw new IllegalJsonDataException("Build profile id failed, organization code is empty.");
            }
            if (StringUtils.isEmpty(eventNo)) {
                throw new IllegalJsonDataException("Build profile id failed, eventNo is empty.");
            }
            if (eventTime == null) {
                throw new IllegalJsonDataException("Build profile id failed, unable to get event date.");
            }
            if (profileType == null) {
                throw new IllegalJsonDataException("Build profile id failed, profile type is empty.");
            }
            this.profileId = ProfileId.get(orgCode, eventNo, eventTime, profileType.getType());
        }
        return profileId.toString();
    }
    @Override
    public String toJson() {
        ObjectMapper objectMapper = new ObjectMapper();
        ObjectNode root = objectMapper.createObjectNode();
        root.put("id", getId());
        root.put("cardId", this.cardId);
        root.put("orgCode", this.orgCode);
        root.put("patientId", this.patientId);
        root.put("eventNo", this.eventNo);
        root.put("cdaVersion", this.cdaVersion);
        root.put("eventTime", DateTimeUtil.utcDateTimeFormat(this.eventTime));
        root.put("createTime", DateTimeUtil.utcDateTimeFormat(this.createDate));
        root.put("eventType", this.eventType == null ?  "" : this.eventType.toString());
        root.put("profileType", this.profileType.toString());
        root.put("cardType", this.cardType);
        root.put("patientName", this.patientName);
        root.put("patientAge", this.patientAge);
        root.put("patientSex", this.patientSex);
        root.put("demographicId", this.demographicId);
        root.put("diagnosis", StringUtils.join(this.diagnosisCode,";"));
        root.put("diagnosisName", StringUtils.join(this.diagnosisName,";"));
        root.put("reUploadFlg", this.reUploadFlg);
        root.put("identifyFlag", this.identifyFlag);
        root.put("deptCode", this.deptCode);
        ObjectNode dataSetsNode = root.putObject("dataSets");
        for (String dataSetCode : dataSets.keySet()) {
            PackageDataSet dataSet = dataSets.get(dataSetCode);
            dataSetsNode.putPOJO(dataSetCode, dataSet.toJson());
        }
        return root.toString();
    }
    public String getCardId() {
        return cardId;
    }
    public void setCardId(String cardId) {
        this.cardId = cardId;
    }
    public String getCardType() {
        return cardType;
    }
    public void setCardType(String cardType) {
        this.cardType = cardType;
    }
    public String getDemographicId() {
        return demographicId;
    }
    public void setDemographicId(String demographicId) {
        this.demographicId = demographicId;
    }
    public String getPatientName() {
        return patientName;
    }
    public void setPatientName(String patientName) {
        this.patientName = patientName;
    }
    public String getPatientAge() {
        return patientAge;
    }
    public void setPatientAge(String patientAge) {
        this.patientAge = patientAge;
    }
    public String getPatientSex() {
        return patientSex;
    }
    public void setPatientSex(String patientSex) {
        this.patientSex = patientSex;
    }
    public String getDeptCode() {
        return deptCode;
    }
    public void setDeptCode(String deptCode) {
        this.deptCode = deptCode;
    }
    public Set<String> getDiagnosisCode() {
        return diagnosisCode;
    }
    public void setDiagnosisCode(Set<String> diagnosisCode) {
        this.diagnosisCode = diagnosisCode;
    }
    public Set<String> getDiagnosisName() {
        return diagnosisName;
    }
    public void setDiagnosisName(Set<String> diagnosisName) {
        this.diagnosisName = diagnosisName;
    }
}

+ 68 - 0
src/main/java/com/yihu/ehr/resolve/model/stage1/details/CdaDocument.java

@ -0,0 +1,68 @@
package com.yihu.ehr.resolve.model.stage1.details;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.yihu.ehr.lang.SpringContext;
import com.yihu.ehr.util.datetime.DateTimeUtil;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
 * @author Sand
 * @created 2015.08.16 10:44
 */
public class CdaDocument {
    private String id;
    private String name;
    private List<OriginFile> originFiles = new ArrayList<>();
    public String getId() {
        return id;
    }
    public void setId(String id) {
        this.id = id;
    }
    public String getName() {
        return name;
    }
    public void setName(String name) {
        this.name = name;
    }
    public List<OriginFile> getOriginFiles() {
        return originFiles;
    }
    public ObjectNode toJson(){
        ObjectMapper objectMapper = SpringContext.getService(ObjectMapper.class);
        ObjectNode parent = objectMapper.createObjectNode();
        parent.put("id", id);
        parent.put("name", name);
        ArrayNode docList = parent.putArray("list");
        for (OriginFile originFile : originFiles){
            ObjectNode objectNode = docList.addObject();
            objectNode.put("mime", originFile.getMime());
            objectNode.put("urls", originFile.getUrlsStr());
            String name = originFile.getUrlScope() == null ? "" : originFile.getUrlScope().name();
            objectNode.put("url_scope", name);
            objectNode.put("expireDate", originFile.getExpireDate() == null ? null : DateTimeUtil.simpleDateFormat(originFile.getExpireDate()));
            objectNode.put("emr_id", originFile.getEmrId());
            objectNode.put("emr_name", originFile.getEmrName());
            objectNode.put("note", originFile.getNote());
            ObjectNode listNode = objectNode.putObject("files");
            Map<String, String> files = originFile.getFileUrls();
            for (String fileName : files.keySet()){
                listNode.put(fileName, files.get(fileName));
            }
        }
        return parent;
    }
}

+ 88 - 0
src/main/java/com/yihu/ehr/resolve/model/stage1/details/LinkFile.java

@ -0,0 +1,88 @@
package com.yihu.ehr.resolve.model.stage1.details;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
/**
 * A file entry parsed from a lightweight (link) profile.
 */
public class LinkFile {
    // file information
    private String url; // storage URL of the file
    private String originName; // original file name
    private String fileExtension; // file extension
    private long fileSize; // file size in bytes
    private String reportFormNo; // number of the report form this file belongs to
    private String serialNo; // serial number of the sub item
    private String md5; // MD5 checksum of the file
    public String getUrl() {
        return url;
    }
    public void setUrl(String url) {
        this.url = url;
    }
    public String getOriginName() {
        return originName;
    }
    public void setOriginName(String originName) {
        this.originName = originName;
    }
    public String getFileExtension() {
        return fileExtension;
    }
    public void setFileExtension(String fileExtension) {
        this.fileExtension = fileExtension;
    }
    public long getFileSize() {
        return fileSize;
    }
    public void setFileSize(long fileSize) {
        this.fileSize = fileSize;
    }
    public String getReportFormNo() {
        return reportFormNo;
    }
    public void setReportFormNo(String reportFormNo) {
        this.reportFormNo = reportFormNo;
    }
    public String getSerialNo() {
        return serialNo;
    }
    public void setSerialNo(String serialNo) {
        this.serialNo = serialNo;
    }
    public String getMd5() {
        return md5;
    }
    public void setMd5(String md5) {
        this.md5 = md5;
    }
    public String toJson() {
        ObjectMapper objectMapper = new ObjectMapper();
        ObjectNode root = objectMapper.createObjectNode();
        root.put("url", this.url);
        root.put("originName", this.originName);
        root.put("fileExtension", this.fileExtension);
        root.put("fileSize", this.fileSize);
        root.put("reportFormNo", this.reportFormNo);
        root.put("serialNo", this.serialNo);
        root.put("md5",this.md5);
        return root.toString();
    }
}

+ 96 - 0
src/main/java/com/yihu/ehr/resolve/model/stage1/details/OriginFile.java

@ -0,0 +1,96 @@
package com.yihu.ehr.resolve.model.stage1.details;
import com.yihu.ehr.constants.UrlScope;
import java.util.Date;
import java.util.Map;
import java.util.TreeMap;
/**
 * Original file of an unstructured profile. Holds the information found at a single document address.
 *
 * @author Sand
 * @created 2015.08.16 10:44
 */
public class OriginFile {
    private String mime;
    private Date expireDate;
    private UrlScope urlScope;
    private String emrId;
    private String emrName;
    private String note;
    // file index, keyed by file name
    private Map<String, String> fileUrls = new TreeMap<>();
    public Date getExpireDate() {
        return expireDate;
    }
    public void setExpireDate(Date expireDate) {
        this.expireDate = expireDate;
    }
    public String getMime() {
        return mime;
    }
    public void setMime(String mime) {
        this.mime = mime;
    }
    public Map<String, String> getFileUrls(){
        return fileUrls;
    }
    public void addUrl(String fileName, String storageUrl){
        fileUrls.put(fileName, storageUrl);
    }
    public UrlScope getUrlScope() {
        return urlScope;
    }
    public void setUrlScope(UrlScope urlScope) {
        this.urlScope = urlScope;
    }
    public String getUrlsStr() {
        // join the storage urls (TreeMap values, i.e. file name order) with commas
        return String.join(",", fileUrls.values());
    }
    public String getEmrId() {
        return emrId;
    }
    public void setEmrId(String emrId) {
        this.emrId = emrId;
    }
    public String getEmrName() {
        return emrName;
    }
    public void setEmrName(String emrName) {
        this.emrName = emrName;
    }
    public String getNote() {
        return note;
    }
    public void setNote(String note) {
        this.note = note;
    }
}
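
A minimal sketch of the URL bookkeeping above (hypothetical names and URLs): addUrl() stores entries in a TreeMap keyed by file name, so getUrlsStr() joins the URLs in file-name order:

public class OriginFileDemo {
    public static void main(String[] args) {
        OriginFile originFile = new OriginFile();
        originFile.addUrl("page1.jpg", "hdfs://ns1/ehr/0001");
        originFile.addUrl("page2.jpg", "hdfs://ns1/ehr/0002");
        // Prints hdfs://ns1/ehr/0001,hdfs://ns1/ehr/0002
        System.out.println(originFile.getUrlsStr());
    }
}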

+ 11 - 0
src/main/java/com/yihu/ehr/resolve/model/stage2/MasterRecord.java

@ -0,0 +1,11 @@
package com.yihu.ehr.resolve.model.stage2;
/**
 * Master record of an archive resource.
 *
 * @author Sand
 * @created 2016.05.16 15:51
 */
public class MasterRecord extends ResourceRecord {
}

+ 21 - 0
src/main/java/com/yihu/ehr/resolve/model/stage2/QcMetadataRecords.java

@ -0,0 +1,21 @@
package com.yihu.ehr.resolve.model.stage2;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
 * Created by progr1mmer on 2018/5/22.
 */
public class QcMetadataRecords {
    private List<Map<String, Object>> records = new ArrayList<>();
    public void addRecord(Map<String, Object> data){
        records.add(data);
    }
    public List<Map<String, Object>> getRecords(){
        return records;
    }
}

+ 139 - 0
src/main/java/com/yihu/ehr/resolve/model/stage2/ResourceBucket.java

@ -0,0 +1,139 @@
package com.yihu.ehr.resolve.model.stage2;
import com.yihu.ehr.profile.ProfileType;
import com.yihu.ehr.profile.family.ResourceCells;
import java.util.*;
/**
 * Temporary in-memory store for health-profile resourcing. Data stays in memory at this stage
 * and is written to HBase once resourcing completes.
 *
 * @author Sand
 * @created 2016.05.16 13:52
 */
public class ResourceBucket {
    // archive package id
    private final String packId;
    // package receive time
    private final Date receiveDate;
    // primary key (profile rowkey)
    private final String id;
    // master table name
    private final String master;
    // sub (detail) table name
    private final String slave;
    // basic column family
    private final String basicFamily;
    // data column family
    private final String dFamily;
    // basic index fields
    private Map<String, String> basicRecord = new HashMap<>();
    // master record
    private MasterRecord masterRecord = new MasterRecord();
    // sub records
    private List<SubRecord> subRecords = new ArrayList<>();
    // quality-control metadata records
    private QcMetadataRecords qcMetadataRecords = new QcMetadataRecords();
    public ResourceBucket(
            String id,
            String packId,
            Date receiveDate,
            String master,
            String slave,
            String basicFamily,
            String dFamily) {
        this.id = id;
        this.packId = packId;
        this.receiveDate = receiveDate;
        this.master = master;
        this.slave = slave;
        this.basicFamily = basicFamily;
        this.dFamily = dFamily;
    }
    public String getId() {
        return id;
    }
    public String getPackId() {
        return packId;
    }
    public Date getReceiveDate() {
        return receiveDate;
    }
    public String getMaster() {
        return master;
    }
    public String getSlave() {
        return slave;
    }
    public String getBasicFamily() {
        return basicFamily;
    }
    public String getdFamily() {
        return dFamily;
    }
    public void insertBasicRecord(String key, String val) {
        basicRecord.put(key, val);
    }
    public String getBasicRecord(String key) {
        return basicRecord.get(key);
    }
    public Map<String, String> getMasterBasicRecords(ProfileType profileType) {
        Map<String, String> _basicRecord = new HashMap<>();
        List<String> cells = ResourceCells.getMasterBasicCell(profileType);
        cells.forEach(item -> {
            _basicRecord.put(item, basicRecord.get(item));
        });
        return _basicRecord;
    }
    public Map<String, String> getSubBasicRecords(ProfileType profileType) {
        Map<String, String> _basicRecord = new HashMap<>();
        List<String> cells = ResourceCells.getSubBasicCell(profileType);
        cells.forEach(item -> {
            _basicRecord.put(item, basicRecord.get(item));
        });
        _basicRecord.put(ResourceCells.PROFILE_ID, this.id);
        return _basicRecord;
    }
    public MasterRecord getMasterRecord() {
        return masterRecord;
    }
    public void setMasterRecord(MasterRecord masterRecord) {
        this.masterRecord = masterRecord;
    }
    public List<SubRecord> getSubRecords() {
        return subRecords;
    }
    public void setSubRecords(List<SubRecord> subRecords) {
        this.subRecords = subRecords;
    }
    public QcMetadataRecords getQcMetadataRecords() {
        return qcMetadataRecords;
    }
    public void setQcMetadataRecords(QcMetadataRecords qcMetadataRecords) {
        this.qcMetadataRecords = qcMetadataRecords;
    }
    public List<String> getSubRowkeys() {
        List<String> rowkeys = new ArrayList<>();
        subRecords.forEach(item -> rowkeys.add(item.getRowkey()));
        return rowkeys;
    }
}
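
A minimal assembly sketch for the bucket above (all ids, table and family names here are hypothetical; the real ones come from ResourceCore and ResourceFamily):

import java.util.Date;

public class ResourceBucketDemo {
    public static void main(String[] args) {
        ResourceBucket bucket = new ResourceBucket(
                "profile001", "pack001", new Date(),
                "HealthProfile", "HealthProfileSub", // hypothetical master/sub table names
                "basic", "d");                       // hypothetical column families
        bucket.insertBasicRecord("org_code", "ORG01");
        SubRecord subRecord = new SubRecord();
        subRecord.setRowkey(bucket.getId(), "HDSC01_02", 0);
        bucket.getSubRecords().add(subRecord);
        // Prints [profile001$HDSC01_02$0]
        System.out.println(bucket.getSubRowkeys());
    }
}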

+ 31 - 0
src/main/java/com/yihu/ehr/resolve/model/stage2/ResourceRecord.java

@ -0,0 +1,31 @@
package com.yihu.ehr.resolve.model.stage2;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
/**
 * Resource record.
 *
 * @author Sand
 * @created 2016.05.16 16:10
 */
public class ResourceRecord {
    protected Map<String, String> dataGroup = new HashMap<>();
    public void addResource(String resourceCode, String value){
        dataGroup.put(resourceCode, value);
    }
    public String getResourceValue(String resourceCode){
        return dataGroup.get(resourceCode);
    }
    public Set<String> resourceCodes(){
        return dataGroup.keySet();
    }
    public Map<String, String> getDataGroup(){
        return dataGroup;
    }
}

+ 56 - 0
src/main/java/com/yihu/ehr/resolve/model/stage2/SubRecord.java

@ -0,0 +1,56 @@
package com.yihu.ehr.resolve.model.stage2;
import org.apache.commons.lang3.StringUtils;
/**
 * Sub-table record of an archive resource.
 *
 * Sub-record rowkey format: masterRowkey$dataSetCode$index
 *
 * @author Sand
 * @created 2016.05.16 16:06
 */
public class SubRecord extends ResourceRecord {
    private final static char Delimiter = '$';
    private final static String RowKeyFormat = "%s$%s$%s";
    private String rowkey;
    public String getRowkey() {
        return rowkey;
    }
    public void setRowkey(String rowkey) {
        this.rowkey = rowkey;
    }
    public void setRowkey(String profileId, String dataSetCode, int index){
        rowkey = String.format(RowKeyFormat, profileId, dataSetCode, index);
    }
    // Used by non-archive (dataset) packages, which supply their own primary key
    public void setRowkey(String profileId, String dataSetCode, String pk){
        rowkey = String.format(RowKeyFormat, profileId, dataSetCode, pk);
    }
    public String getProfileId(){
        if (StringUtils.isEmpty(rowkey)){
            return "";
        }
        return rowkey.substring(0, rowkey.indexOf(Delimiter));
    }
    public String getDataSetCode() {
        if (StringUtils.isEmpty(rowkey)) {
            return "";
        }
        // +1 skips the delimiter itself; without it the returned code would start with '$'
        return rowkey.substring(rowkey.indexOf(Delimiter) + 1, rowkey.lastIndexOf(Delimiter));
    }
    public int getIndex(){
        if (StringUtils.isEmpty(rowkey)) {
            return 0;
        }
        // +1 skips the delimiter; parsing a string that starts with '$' would throw
        return Integer.parseInt(rowkey.substring(rowkey.lastIndexOf(Delimiter) + 1));
    }
}
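
A minimal round-trip sketch of the rowkey scheme above (hypothetical ids; note the +1 offsets in getDataSetCode() and getIndex(), which skip the '$' delimiter itself):

public class SubRecordRowkeyDemo {
    public static void main(String[] args) {
        SubRecord record = new SubRecord();
        record.setRowkey("profile001", "HDSC01_02", 3);
        System.out.println(record.getRowkey());      // profile001$HDSC01_02$3
        System.out.println(record.getProfileId());   // profile001
        System.out.println(record.getDataSetCode()); // HDSC01_02
        System.out.println(record.getIndex());       // 3
    }
}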

+ 104 - 0
src/main/java/com/yihu/ehr/resolve/service/profile/ArchiveRelationService.java

@ -0,0 +1,104 @@
package com.yihu.ehr.resolve.service.profile;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.yihu.ehr.profile.ProfileType;
import com.yihu.ehr.elasticsearch.ElasticSearchUtil;
import com.yihu.ehr.hbase.HBaseDao;
import com.yihu.ehr.model.packs.EsArchiveRelation;
import com.yihu.ehr.profile.core.ResourceCore;
import com.yihu.ehr.profile.family.ResourceCells;
import com.yihu.ehr.profile.family.ResourceFamily;
import com.yihu.ehr.resolve.model.stage1.OriginalPackage;
import com.yihu.ehr.resolve.model.stage2.ResourceBucket;
import com.yihu.ehr.util.datetime.DateUtil;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.util.StringUtils;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
/**
 * Archive relation service: binds archives to residents.
 * Created by hzp on 2017/4/11.
 * Modified by Progr1mmer
 */
@Service
public class ArchiveRelationService {
    private static final String CHARS = "ABCDEFGHIJKLMNOPQRSTUVWXYZ";
    private static final String INDEX = "archive_relation";
    private static final String TYPE = "info";
    @Autowired
    private HBaseDao hbaseDao;
    @Autowired
    private ObjectMapper objectMapper;
    @Autowired
    private ElasticSearchUtil elasticSearchUtil;
    /**
     * Relate an archive to a resident by writing the demographic id onto its master record.
     */
    public void archiveRelation(String profileId, String idCardNo) throws Exception {
        // only update when the master record exists
        String re = hbaseDao.get(ResourceCore.MasterTable, profileId);
        if (!StringUtils.isEmpty(re)) {
            hbaseDao.put(ResourceCore.MasterTable, profileId, ResourceFamily.Basic, ResourceCells.DEMOGRAPHIC_ID, idCardNo);
        }
    }
    public void relation(ResourceBucket resourceBucket, OriginalPackage originalPackage) throws Exception {
        if (!originalPackage.isReUploadFlg()) {
            EsArchiveRelation relation = new EsArchiveRelation();
            relation.set_id(resourceBucket.getId());
            ProfileType profileType = originalPackage.getProfileType();
            if (profileType != null){
                relation.setProfile_type(profileType.getType());
                if (profileType == ProfileType.Link) {
                    relation.setUpload_flag(0);
                }
            }
            relation.setName(resourceBucket.getBasicRecord(ResourceCells.PATIENT_NAME));
            relation.setOrg_code(resourceBucket.getBasicRecord(ResourceCells.ORG_CODE));
            relation.setOrg_name(resourceBucket.getBasicRecord(ResourceCells.ORG_NAME));
            relation.setId_card_no(resourceBucket.getBasicRecord(ResourceCells.DEMOGRAPHIC_ID));
            String genderValue = resourceBucket.getMasterRecord().getResourceValue("EHR_000019");
            relation.setGender(StringUtils.isEmpty(genderValue) ? 0 : Integer.parseInt(genderValue));
            String telephone = resourceBucket.getMasterRecord().getResourceValue("EHR_000003");
            relation.setTelephone(telephone == null ? "" : telephone);
            relation.setCard_type(resourceBucket.getBasicRecord(ResourceCells.CARD_TYPE));
            relation.setCard_no(resourceBucket.getBasicRecord(ResourceCells.CARD_ID));
            String eventType = resourceBucket.getBasicRecord(ResourceCells.EVENT_TYPE);
            relation.setEvent_type(StringUtils.isEmpty(eventType) ? -1 : Integer.parseInt(eventType));
            relation.setEvent_no(resourceBucket.getBasicRecord(ResourceCells.EVENT_NO));
            relation.setEvent_date(DateUtil.strToDate(resourceBucket.getBasicRecord(ResourceCells.EVENT_DATE)));
            char prefix = CHARS.charAt((int)(Math.random() * 26));
            relation.setSn(prefix + "" + new Date().getTime());
            relation.setRelation_date(new Date());
            relation.setCreate_date(new Date());
            //relation.setApply_id(null);
            //relation.setCard_id(null);
            if (originalPackage.isIdentifyFlag()) {
                relation.setIdentify_flag(1);
            } else {
                relation.setIdentify_flag(0);
            }
            elasticSearchUtil.index(INDEX, TYPE, objectMapper.readValue(objectMapper.writeValueAsString(relation), Map.class ));
        }
    }
    public List<String> findIdCardNoByCardNo(String cardNo){
        List<String> result = new ArrayList<>();
        List<Map<String, Object>> data = elasticSearchUtil.findByField(INDEX, TYPE, "card_no", cardNo);
        data.forEach(item -> {
            if (!StringUtils.isEmpty(item.get("id_card_no"))) {
                result.add(String.valueOf(item.get("id_card_no")));
            }
        });
        return result;
    }
}
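
The sn field written by relation() above is one random uppercase letter followed by the epoch milliseconds; a minimal sketch of that format:

import java.util.Date;

public class RelationSnDemo {
    public static void main(String[] args) {
        char prefix = "ABCDEFGHIJKLMNOPQRSTUVWXYZ".charAt((int) (Math.random() * 26));
        // Prints e.g. K1527512345678
        System.out.println(prefix + "" + new Date().getTime());
    }
}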

+ 81 - 0
src/main/java/com/yihu/ehr/resolve/service/profile/PrescriptionService.java

@ -0,0 +1,81 @@
package com.yihu.ehr.resolve.service.profile;
import com.yihu.ehr.hbase.HBaseDao;
import com.yihu.ehr.hbase.TableBundle;
import com.yihu.ehr.profile.core.ResourceCore;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Service that persists prescription sheets into HBase.
 * Created by lyr on 2016/6/22.
 */
@Service
public class PrescriptionService {
    @Autowired
    HBaseDao hbaseDao;
    /**
     * Save prescription sheets to HBase.
     */
    public List<Map<String, Object>> savePrescription(String profileId, List<Map<String, String>> dataList, int existed) throws Exception {
        // table mutations
        TableBundle bundle = new TableBundle();
        // basic column family data
        Map<String, String> basicFamily = new HashMap<>();
        // rows that were saved, returned to the caller
        List<Map<String, Object>> returnMapList = new ArrayList<>();
        // add profile_id to the basic column family
        basicFamily.put("profile_id", profileId);
        // rowkeys to (re)write
        List<String> rowkeys = new ArrayList<>();
        for (int i = 0; i < dataList.size(); i++) {
            int dataCount = existed + i;
            // row key: profileId$HDSC01_16$index
            String rowkey = profileId + "$HDSC01_16$" + dataCount;
            rowkeys.add(rowkey);
        }
        // delete any existing rows before rewriting them
        if (rowkeys.size() > 0) {
            bundle.addRows(rowkeys.toArray(new String[rowkeys.size()]));
            hbaseDao.delete(ResourceCore.SubTable, bundle);
        }
        for (Map<String, String> data : dataList) {
            Map<String, Object> returnMap = new HashMap<>();
            // row key: profileId$HDSC01_16$index
            String rowkey = profileId + "$HDSC01_16$" + existed;
            // basic column family
            bundle.addValues(rowkey, "basic", basicFamily);
            // data column family
            bundle.addValues(rowkey, "d", data);
            // echo the saved row back to the caller
            returnMap.put("rowkey", rowkey);
            returnMap.put("profile_id", profileId);
            returnMap.putAll(data);
            returnMapList.add(returnMap);
            existed++;
        }
        // persist to HBase
        hbaseDao.save(ResourceCore.SubTable, bundle);
        return returnMapList;
    }
}

+ 57 - 0
src/main/java/com/yihu/ehr/resolve/service/resource/stage1/PackModelFactory.java

@ -0,0 +1,57 @@
package com.yihu.ehr.resolve.service.resource.stage1;
import com.yihu.ehr.model.packs.EsSimplePackage;
import com.yihu.ehr.profile.ProfileType;
import com.yihu.ehr.resolve.model.stage1.*;
import org.springframework.util.CollectionUtils;
import java.io.File;
import java.util.List;
/**
 * Package model factory.
 *
 * @author Sand
 * @version 1.0
 * @created 2016.04.13 15:28
 */
public class PackModelFactory {
    public final static String StandardFolder = "standard";
    public final static String OriginFolder = "origin";
    public final static String DocumentsFile = "documents.json";
    public final static String LinkFile = "index";
    /**
     * Inspect the package directory layout to determine the profile type.
     * @param root unzipped package root directory
     * @return the matching package model
     */
    public static OriginalPackage createPackModel(File root, EsSimplePackage esSimplePackage) {
        List<String> directories = CollectionUtils.arrayToList(root.list());
        if (directories.contains(StandardFolder) && directories.contains(OriginFolder)) {
            return createPackModel(ProfileType.Standard, esSimplePackage);
        } else if (directories.contains(DocumentsFile)) {
            return createPackModel(ProfileType.File, esSimplePackage);
        } else if (directories.size() == 1 && directories.contains(LinkFile)) {
            return createPackModel(ProfileType.Link, esSimplePackage);
        } else { // dataset package: the zip holds only .json data files
            return createPackModel(ProfileType.Simple, esSimplePackage);
        }
    }
    public static OriginalPackage createPackModel(ProfileType type, EsSimplePackage esSimplePackage){
        switch(type){
            case Standard:
                return new StandardPackage(esSimplePackage.get_id(), esSimplePackage.getReceive_date());
            case File:
                return new FilePackage(esSimplePackage.get_id(), esSimplePackage.getReceive_date());
            case Link:
                return new LinkPackage(esSimplePackage.get_id(), esSimplePackage.getReceive_date());
            case Simple:
                return new SimplePackage(esSimplePackage.get_id(), esSimplePackage.getReceive_date());
            default:
                return null;
        }
    }
}
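
A standalone restatement of the layout detection rule in createPackModel(File, EsSimplePackage) above, illustrative only (it returns the type name instead of a package model):

import java.io.File;
import java.util.Arrays;
import java.util.List;

public class PackLayoutDemo {
    static String detectLayout(File root) {
        List<String> entries = Arrays.asList(root.list());
        if (entries.contains("standard") && entries.contains("origin")) return "Standard";
        if (entries.contains("documents.json")) return "File";
        if (entries.size() == 1 && entries.contains("index")) return "Link";
        return "Simple"; // dataset package: only .json data files under the zip root
    }
}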

+ 122 - 0
src/main/java/com/yihu/ehr/resolve/service/resource/stage1/ResolveService.java

@ -0,0 +1,122 @@
package com.yihu.ehr.resolve.service.resource.stage1;
import com.yihu.ehr.model.packs.EsSimplePackage;
import com.yihu.ehr.profile.ProfileType;
import com.yihu.ehr.profile.exception.ResolveException;
import com.yihu.ehr.resolve.*;
import com.yihu.ehr.resolve.model.stage1.OriginalPackage;
import com.yihu.ehr.resolve.model.stage1.StandardPackage;
import com.yihu.ehr.resolve.util.LocalTempPathUtil;
import com.yihu.ehr.util.compress.Zipper;
import com.yihu.ehr.util.log.LogService;
import net.lingala.zip4j.exception.ZipException;
import org.apache.commons.io.FileUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.stereotype.Service;
import javax.annotation.PostConstruct;
import java.io.File;
import java.util.HashMap;
import java.util.Map;
import static com.yihu.ehr.profile.ProfileType.*;
/**
 * Archive resolve engine.
 *
 * @author Sand
 * @version 1.0
 * @created 2015.09.09 15:04
 */
@Service
public class ResolveService {
    @Autowired
    private ApplicationContext context;
    private Map<ProfileType, PackageResolver> packageResolvers;
    @PostConstruct
    private void init() {
        packageResolvers = new HashMap<>();
        packageResolvers.put(Standard, context.getBean(StdPackageResolver.class));
        packageResolvers.put(File, context.getBean(FilePackageResolver.class));
        packageResolvers.put(Link, context.getBean(LinkPackageResolver.class));
        packageResolvers.put(Simple, context.getBean(SimplePackageResolver.class));
    }
    /**
     * Run an archiving job. The flow is:
     * 1. Fetch a pending JSON document from the JSON archive manager and mark it Acquired,
     *    meaning it is being archived, recording the start time.
     * 2. Unzip the package. If unzipping fails, or the extracted layout is invalid, mark the
     *    document Failed, log the error and return.
     * 3. Read and parse the JSON data in the package's origin and standard folders.
     * 4. Normalize dictionary-backed metadata elements by writing dictionary values into the data.
     * 5. Store the parsed data into HBase and mark the JSON document Finished.
     * 6. If any step fails, mark the document Failed: the fate of the JSON archive cannot be
     *    decided automatically and needs manual intervention.
     */
    public OriginalPackage doResolve(EsSimplePackage pack, String zipFile) throws Exception {
        File root = null;
        try {
            root = new Zipper().unzipFile(new File(zipFile), LocalTempPathUtil.getTempPathWithUUIDSuffix() + pack.get_id(), pack.getPwd());
            if (root == null || !root.isDirectory() || root.list().length == 0) {
                throw new ZipException("Invalid package file.");
            }
            // build the package model from the unzipped layout
            OriginalPackage originalPackage = PackModelFactory.createPackModel(root, pack);
            PackageResolver packageResolver = packageResolvers.get(originalPackage.getProfileType());
            if (packageResolver == null) {
                throw new ResolveException("Failed to identify file type");
            }
            packageResolver.resolve(originalPackage, root);
            originalPackage.regularRowKey();
            return originalPackage;
        } finally {
            houseKeep(zipFile, root);
        }
    }
    private void houseKeep(String zipFile, File root) {
        try {
            FileUtils.deleteQuietly(new File(zipFile));
            FileUtils.deleteQuietly(root);
        } catch (Exception e) {
            LogService.getLogger(ResolveService.class).warn("House keep failed after package resolve: " + e.getMessage());
        }
    }
    /* -------------------- Immediate (real-time) archive ingestion ------------------------------ */
    @Autowired
    private ImmediateDataResolver immediateDataResolver;
    /**
     * Resolve and store immediate (real-time) archive data:
     * 1. Parse each dataset's data.
     * 2. Normalize dictionary-backed metadata elements by writing dictionary values into the data.
     * 3. Store the parsed data into HBase and mark the JSON document Finished.
     * @param data
     * @param esSimplePackage
     * @return
     * @throws Exception
     */
    public StandardPackage doResolveImmediateData(String data, EsSimplePackage esSimplePackage) throws Exception {
        StandardPackage standardPackage = new StandardPackage(esSimplePackage.get_id(), esSimplePackage.getReceive_date());
        immediateDataResolver.resolve(standardPackage, data);
        standardPackage.regularRowKey();
        return standardPackage;
    }
}
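
A usage sketch for doResolve() (the EsSimplePackage setters below are assumptions inferred from the getters used above, not confirmed API):

// Hypothetical caller; set_id/setReceive_date/setPwd are assumed to mirror the getters.
EsSimplePackage pack = new EsSimplePackage();
pack.set_id("pack001");           // assumption
pack.setReceive_date(new Date()); // assumption
pack.setPwd("zip-password");      // assumption
OriginalPackage parsed = resolveService.doResolve(pack, "/tmp/pack001.zip");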

+ 42 - 0
src/main/java/com/yihu/ehr/resolve/service/resource/stage2/FtpFileService.java

@ -0,0 +1,42 @@
package com.yihu.ehr.resolve.service.resource.stage2;
import com.yihu.ehr.resolve.model.stage1.LinkPackage;
import com.yihu.ehr.util.ftp.FtpUtils;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import java.util.List;
import java.util.Map;
@Service
public class FtpFileService {
    @Value("${ftp.address}")
    private String address;
    @Value("${ftp.username}")
    private String username;
    @Value("${ftp.password}")
    private String password;
    @Value("${ftp.port}")
    private int port;
    public void deleteFile(LinkPackage pack) {
        Map<String, List<String>> files = pack.getFiles();
        if (files != null && files.size() > 0) {
            FtpUtils ftpUtils = null;
            try {
                ftpUtils = new FtpUtils(username, password, address, port);
                ftpUtils.connect();
                for (String path : files.keySet()) { // the key is the directory path
                    List<String> fileNames = files.get(path); // file names under that path
                    ftpUtils.deleteFile(path, fileNames);
                }
            } finally {
                if (ftpUtils != null) {
                    ftpUtils.closeConnect();
                }
            }
        }
    }
}

+ 103 - 0
src/main/java/com/yihu/ehr/resolve/service/resource/stage2/IdentifyService.java

@ -0,0 +1,103 @@
package com.yihu.ehr.resolve.service.resource.stage2;
import com.yihu.ehr.entity.patient.DemographicInfo;
import com.yihu.ehr.profile.family.ResourceCells;
import com.yihu.ehr.resolve.model.stage1.OriginalPackage;
import com.yihu.ehr.resolve.model.stage2.ResourceBucket;
import com.yihu.ehr.resolve.service.profile.ArchiveRelationService;
import com.yihu.ehr.util.datetime.DateTimeUtil;
import com.yihu.ehr.util.validate.IdCardValidator;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.Date;
import java.util.List;
import java.util.UUID;
import java.util.regex.Pattern;
/**
 * Created by progr1mmer on 2018/4/3.
 */
@Service
public class IdentifyService {
    private static final IdCardValidator idCardValidator = new IdCardValidator();
    private static final Pattern pattern = Pattern.compile("^[A-Za-z0-9\\-]+$");
    @Autowired
    private ArchiveRelationService archiveRelationService;
    @Autowired
    private PatientService patientService;
    public void identify (ResourceBucket resourceBucket, OriginalPackage originalPackage) throws Exception {
        boolean identify = false;
        String demographicId = UUID.randomUUID().toString();
        if (StringUtils.isEmpty(resourceBucket.getBasicRecord(ResourceCells.DEMOGRAPHIC_ID)) || !pattern.matcher(resourceBucket.getBasicRecord(ResourceCells.DEMOGRAPHIC_ID)).find()) {
            boolean recognition = false;
            if (!StringUtils.isEmpty(resourceBucket.getBasicRecord(ResourceCells.CARD_ID))) {
                List<String> idCardNos = archiveRelationService.findIdCardNoByCardNo(resourceBucket.getBasicRecord(ResourceCells.CARD_ID));
                if (!idCardNos.isEmpty()) {
                    recognition = true;
                    demographicId = idCardNos.get(0);
                }
            }
            if (!recognition) {
                String random = UUID.randomUUID().toString();
                // name
                String name = StringUtils.isNotEmpty(resourceBucket.getBasicRecord(ResourceCells.PATIENT_NAME)) ?
                        resourceBucket.getBasicRecord(ResourceCells.PATIENT_NAME) : random;
                // birthday
                Date birthday = StringUtils.isNotEmpty(resourceBucket.getMasterRecord().getResourceValue("EHR_000007")) ?
                        DateTimeUtil.simpleDateParse(resourceBucket.getMasterRecord().getResourceValue("EHR_000007")) : new Date();
                // telephone number (stored as a JSON fragment, matching PatientService)
                String telephoneNo = StringUtils.isNotEmpty(resourceBucket.getMasterRecord().getResourceValue("EHR_000003")) ?
                        resourceBucket.getMasterRecord().getResourceValue("EHR_000003") : random;
                telephoneNo = "{\"联系电话\":\"" + telephoneNo + "\"}";
                // gender
                String gender = StringUtils.isNotEmpty(resourceBucket.getMasterRecord().getResourceValue("EHR_000019")) ?
                        resourceBucket.getMasterRecord().getResourceValue("EHR_000019") : random;
                // home address
                String homeAddress = StringUtils.isNotEmpty(resourceBucket.getMasterRecord().getResourceValue("EHR_001227")) ?
                        resourceBucket.getMasterRecord().getResourceValue("EHR_001227") : random;
                List<DemographicInfo> demographicInfoList = patientService.findByNameOrBirthdayOrTelephoneNo(name, birthday, telephoneNo);
                if (!demographicInfoList.isEmpty()) {
                    for (DemographicInfo demographicInfo : demographicInfoList) {
                        int match = 0;
                        if (name.equals(demographicInfo.getName())) {
                            match ++;
                        }
                        if (demographicInfo.getBirthday() != null && birthday.getTime() == demographicInfo.getBirthday().getTime()) {
                            match ++;
                        }
                        if (telephoneNo.equals(demographicInfo.getTelephoneNo())) {
                            match ++;
                        }
                        if (gender.equals(demographicInfo.getGender())) {
                            match ++;
                        }
                        if (homeAddress.equals(demographicInfo.getHomeAddress())) {
                            match ++;
                        }
                        if (match >= 3) {
                            demographicId = demographicInfo.getIdCardNo();
                            break;
                        }
                    }
                }
            }
        } else {
            demographicId = resourceBucket.getBasicRecord(ResourceCells.DEMOGRAPHIC_ID);
        }
        if (demographicId.length() == 18) {
            identify = idCardValidator.is18Idcard(demographicId);
        }
        if (demographicId.length() == 15) {
            identify = idCardValidator.is15Idcard(demographicId);
        }
        originalPackage.setIdentifyFlag(identify);
        resourceBucket.insertBasicRecord(ResourceCells.DEMOGRAPHIC_ID, demographicId);
    }
}
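
The fallback identification above counts matching fields (name, birthday, telephone, gender, home address) and accepts a candidate at three or more matches; a standalone restatement of that rule (birthday comparison simplified to string equality):

public class MatchRuleDemo {
    static boolean accept(String[] candidate, String[] stored) {
        int match = 0;
        for (int i = 0; i < candidate.length; i++) {
            if (candidate[i] != null && candidate[i].equals(stored[i])) {
                match++;
            }
        }
        return match >= 3;
    }
}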

+ 420 - 0
src/main/java/com/yihu/ehr/resolve/service/resource/stage2/PackMillService.java

@ -0,0 +1,420 @@
package com.yihu.ehr.resolve.service.resource.stage2;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.yihu.ehr.lang.SpringContext;
import com.yihu.ehr.profile.ErrorType;
import com.yihu.ehr.profile.ProfileType;
import com.yihu.ehr.profile.core.ResourceCore;
import com.yihu.ehr.profile.exception.IllegalJsonDataException;
import com.yihu.ehr.profile.exception.ResolveException;
import com.yihu.ehr.profile.family.ResourceCells;
import com.yihu.ehr.profile.family.ResourceFamily;
import com.yihu.ehr.profile.util.DataSetUtil;
import com.yihu.ehr.profile.model.MetaDataRecord;
import com.yihu.ehr.profile.model.PackageDataSet;
import com.yihu.ehr.resolve.log.PackResolveLogger;
import com.yihu.ehr.resolve.model.stage1.FilePackage;
import com.yihu.ehr.resolve.model.stage1.LinkPackage;
import com.yihu.ehr.resolve.model.stage1.OriginalPackage;
import com.yihu.ehr.resolve.model.stage1.StandardPackage;
import com.yihu.ehr.resolve.model.stage1.details.CdaDocument;
import com.yihu.ehr.resolve.model.stage1.details.LinkFile;
import com.yihu.ehr.resolve.model.stage1.details.OriginFile;
import com.yihu.ehr.resolve.model.stage2.*;
import com.yihu.ehr.util.datetime.DateTimeUtil;
import com.yihu.ehr.util.datetime.DateUtil;
import net.lingala.zip4j.exception.ZipException;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.*;
/**
 * Package mill: grinds an archive data package into resource points.
 * @author Sand
 * @created 2016.05.16 13:51
 */
@Service
public class PackMillService {
    // SimpleDateFormat is not thread-safe, so this singleton service keeps one instance per thread.
    private static final ThreadLocal<DateFormat> DATE_FORMAT =
            ThreadLocal.withInitial(() -> new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"));
    @Autowired
    private RedisService redisService;
    @Autowired
    private ObjectMapper objectMapper;
    /**
     * Grind the package model into a resource bucket.
     * @param originalPackage
     * @return
     * @throws Exception
     */
    public ResourceBucket grindingPackModel(OriginalPackage originalPackage) throws Exception {
        ResourceBucket resourceBucket = initBasicData(originalPackage);
        // all datasets carried by the package
        Map<String, PackageDataSet> packageDataSets = originalPackage.getDataSets();
        for (String dataSetCode : packageDataSets.keySet()) {
            // skip origin (raw) datasets
            if (DataSetUtil.isOriginDataSet(dataSetCode)){
                continue;
            }
            // initialize basic fields
            PackageDataSet srcDataSet = packageDataSets.get(dataSetCode);
            Boolean isMultiRecord = redisService.getDataSetMultiRecord(srcDataSet.getCdaVersion(), srcDataSet.getCode());
            if (null == isMultiRecord) {
                throw new ResolveException(srcDataSet.getCode() + " is_multi_record can not be null for std version " + srcDataSet.getCdaVersion());
            }
            Set<String> keys = srcDataSet.getRecordKeys();
            Set<String> existSet = new HashSet<>();
            if (!isMultiRecord){
                MasterRecord masterRecord = resourceBucket.getMasterRecord();
                for (String key : keys){
                    MetaDataRecord metaDataRecord = srcDataSet.getRecord(key);
                    for (String srcMetadataCode : metaDataRecord.getMetaDataCodes()){
                        // map the standard metadata code (e.g. HDSA00_01_012) to the resource metadata id (EHR_XXXXXX)
                        String resMetadata = getResMetadata(
                                originalPackage.getCdaVersion(),
                                srcDataSet.getCode(),
                                srcMetadataCode,
                                resourceBucket,
                                metaDataRecord.getMetaData(srcMetadataCode),
                                originalPackage.getProfileType(),
                                existSet
                        );
                        if (StringUtils.isEmpty(resMetadata)){
                            continue;
                        }
                        //masterRecord.addResource(resourceMetaData, metaDataRecord.getMetaData(metaDataCode));
                        dictTransform(
                                masterRecord,
                                originalPackage.getCdaVersion(),
                                resMetadata,
                                metaDataRecord.getMetaData(srcMetadataCode),
                                srcDataSet.getCode(),
                                srcMetadataCode
                        );
                    }
                    // single-record dataset: only the first record is used
                    break;
                }
            } else {
                Integer index = 0;
                char cIndex = 'a';
                for (String key : keys){
                    SubRecord subRecord = new SubRecord();
                    if (originalPackage.getProfileType() == ProfileType.Simple){
                        subRecord.setRowkey(originalPackage.getId(), srcDataSet.getCode(), srcDataSet.getPk());
                    } else {
                        if (originalPackage.isReUploadFlg()) {
                            subRecord.setRowkey(originalPackage.getId(), srcDataSet.getCode(), ("") + (cIndex ++));
                        } else {
                            subRecord.setRowkey(originalPackage.getId(), srcDataSet.getCode(), index ++);
                        }
                    }
                    MetaDataRecord metaDataRecord = srcDataSet.getRecord(key);
                    for (String srcMetadataCode : metaDataRecord.getMetaDataCodes()){
                        String resMetadata = getResMetadata(
                                originalPackage.getCdaVersion(),
                                srcDataSet.getCode(),
                                srcMetadataCode,
                                resourceBucket,
                                metaDataRecord.getMetaData(srcMetadataCode),
                                originalPackage.getProfileType(),
                                existSet
                        );
                        if (StringUtils.isEmpty(resMetadata)) {
                            continue;
                        }
                        //subRecord.addResource(resourceMetaData, metaDataRecord.getMetaData(metaDataCode));
                        dictTransform(
                                subRecord,
                                originalPackage.getCdaVersion(),
                                resMetadata,
                                metaDataRecord.getMetaData(srcMetadataCode),
                                srcDataSet.getCode(),
                                srcMetadataCode
                        );
                    }
                    if (subRecord.getDataGroup().size() > 0) {
                        resourceBucket.getSubRecords().add(subRecord);
                    }
                }
            }
        }
        if (originalPackage.getProfileType() == ProfileType.File || originalPackage.getProfileType() == ProfileType.Link) {
            resourceBucket.insertBasicRecord(ResourceCells.SUB_ROWKEYS, objectMapper.writeValueAsString(resourceBucket.getSubRowkeys()));
        }
        return resourceBucket;
    }
    /**
     * 生成基本资源包
     * @param originalPackage
     * @return
     */
    private ResourceBucket initBasicData (OriginalPackage originalPackage) throws Exception {
        ResourceBucket resourceBucket;
        if (originalPackage.getProfileType() == ProfileType.Standard) {
            resourceBucket = new ResourceBucket(
                    originalPackage.getId(),
                    originalPackage.getPackId(),
                    originalPackage.getReceiveDate(),
                    ResourceCore.MasterTable,
                    ResourceCore.SubTable,
                    ResourceFamily.Basic,
                    ResourceFamily.Data
            );
            // basic fields
            StandardPackage standardPackage = (StandardPackage) originalPackage;
            resourceBucket.insertBasicRecord(ResourceCells.PROFILE_TYPE, standardPackage.getProfileType().toString());
            resourceBucket.insertBasicRecord(ResourceCells.EVENT_NO, standardPackage.getEventNo());
            resourceBucket.insertBasicRecord(ResourceCells.EVENT_DATE, DateTimeUtil.utcDateTimeFormat(standardPackage.getEventTime()));
            resourceBucket.insertBasicRecord(ResourceCells.EVENT_TYPE, standardPackage.getEventType() == null ? "" : Integer.toString(standardPackage.getEventType().ordinal()));
            resourceBucket.insertBasicRecord(ResourceCells.CARD_ID, standardPackage.getCardId());
            resourceBucket.insertBasicRecord(ResourceCells.CARD_TYPE, standardPackage.getCardType());
            resourceBucket.insertBasicRecord(ResourceCells.PATIENT_ID, standardPackage.getPatientId());
            resourceBucket.insertBasicRecord(ResourceCells.PATIENT_NAME, standardPackage.getPatientName());
            resourceBucket.insertBasicRecord(ResourceCells.PATIENT_AGE, standardPackage.getPatientAge());
            resourceBucket.insertBasicRecord(ResourceCells.PATIENT_SEX, standardPackage.getPatientSex());
            resourceBucket.insertBasicRecord(ResourceCells.DEMOGRAPHIC_ID, standardPackage.getDemographicId());
            resourceBucket.insertBasicRecord(ResourceCells.ORG_CODE, standardPackage.getOrgCode());
            String orgName = redisService.getOrgName(standardPackage.getOrgCode());
            if (StringUtils.isEmpty(orgName)) {
                throw new ResolveException("can not get org name for code " + standardPackage.getOrgCode());
            }
            resourceBucket.insertBasicRecord(ResourceCells.ORG_NAME, orgName);
            String orgArea = redisService.getOrgArea(standardPackage.getOrgCode());
            if (StringUtils.isEmpty(orgArea)) {
                throw new ResolveException("can not get org area for code " + standardPackage.getOrgCode());
            }
            resourceBucket.insertBasicRecord(ResourceCells.ORG_AREA, orgArea);
            resourceBucket.insertBasicRecord(ResourceCells.CDA_VERSION, standardPackage.getCdaVersion());
            resourceBucket.insertBasicRecord(ResourceCells.CREATE_DATE, DateTimeUtil.utcDateTimeFormat(new Date()));
            resourceBucket.insertBasicRecord(ResourceCells.DEPT_CODE, standardPackage.getDeptCode());
            // outpatient/inpatient health problems
            if (!standardPackage.getDiagnosisCode().isEmpty()) {
                Set<String> healthProblem = new HashSet<>();
                Set<String> healthProblemName = new HashSet<>();
                standardPackage.getDiagnosisCode().forEach(item -> {
                    String _healthProblem = redisService.getHpCodeByIcd10(item); // map the ICD10 code to health problem codes
                    if (!StringUtils.isEmpty(_healthProblem)) {
                        String [] hpCodes = _healthProblem.split(";");
                        for (String hpCode : hpCodes) {
                            healthProblem.add(hpCode);
                            healthProblemName.add(redisService.getHealthProblem(hpCode));
                        }
                    }
                });
                resourceBucket.insertBasicRecord(ResourceCells.DIAGNOSIS, StringUtils.join(standardPackage.getDiagnosisCode(), ";"));
                resourceBucket.insertBasicRecord(ResourceCells.DIAGNOSIS_NAME, StringUtils.join(standardPackage.getDiagnosisName(), ";"));
                resourceBucket.insertBasicRecord(ResourceCells.HEALTH_PROBLEM, StringUtils.join(healthProblem, ";")); // health problem codes
                resourceBucket.insertBasicRecord(ResourceCells.HEALTH_PROBLEM_NAME, StringUtils.join(healthProblemName, ";")); // health problem names
            }
            return resourceBucket;
        } else if (originalPackage.getProfileType() == ProfileType.File) {
            resourceBucket = new ResourceBucket(
                    originalPackage.getId(),
                    originalPackage.getPackId(),
                    originalPackage.getReceiveDate(),
                    ResourceCore.FileMasterTable,
                    ResourceCore.FileSubTable,
                    ResourceFamily.Basic,
                    ResourceFamily.Data
            );
            // basic fields
            FilePackage filePackage = (FilePackage) originalPackage;
            resourceBucket.insertBasicRecord(ResourceCells.PROFILE_TYPE, filePackage.getProfileType().toString());
            resourceBucket.insertBasicRecord(ResourceCells.EVENT_NO, filePackage.getEventNo());
            resourceBucket.insertBasicRecord(ResourceCells.EVENT_DATE, DateTimeUtil.utcDateTimeFormat(filePackage.getEventTime()));
            resourceBucket.insertBasicRecord(ResourceCells.EVENT_TYPE, filePackage.getEventType() == null ? "" : Integer.toString(filePackage.getEventType().ordinal()));
            resourceBucket.insertBasicRecord(ResourceCells.PATIENT_ID, filePackage.getPatientId());
            resourceBucket.insertBasicRecord(ResourceCells.DEMOGRAPHIC_ID, filePackage.getDemographicId());
            resourceBucket.insertBasicRecord(ResourceCells.ORG_CODE, filePackage.getOrgCode());
            resourceBucket.insertBasicRecord(ResourceCells.ORG_NAME, redisService.getOrgName(filePackage.getOrgCode()));
            resourceBucket.insertBasicRecord(ResourceCells.ORG_AREA, redisService.getOrgArea(filePackage.getOrgCode()));
            resourceBucket.insertBasicRecord(ResourceCells.CDA_VERSION, filePackage.getCdaVersion());
            resourceBucket.insertBasicRecord(ResourceCells.CREATE_DATE, DateTimeUtil.utcDateTimeFormat(new Date()));
            resourceBucket.insertBasicRecord(ResourceCells.PATIENT_NAME, filePackage.getPatientName());
            ArrayNode root = objectMapper.createArrayNode();
            Map<String, CdaDocument> cdaDocuments = filePackage.getCdaDocuments();
            cdaDocuments.keySet().forEach(item -> {
                CdaDocument cdaDocument = cdaDocuments.get(item);
                for (OriginFile originFile : cdaDocument.getOriginFiles()) {
                    ObjectNode subNode = root.addObject();
                    subNode.put("mime", originFile.getMime());
                    subNode.put("url", originFile.getUrlsStr());
                    String name = originFile.getUrlScope() == null ? "" : originFile.getUrlScope().name();
                    subNode.put("url_score", name); // (sic) stored key kept as-is for compatibility
                    subNode.put("emr_id", originFile.getEmrId());
                    subNode.put("emr_name", originFile.getEmrName());
                    subNode.put("expire_date", originFile.getExpireDate()== null ? "" : DateTimeUtil.utcDateTimeFormat(originFile.getExpireDate()));
                    subNode.put("note", originFile.getNote());
                    StringBuilder builder = new StringBuilder();
                    for (String fileName : originFile.getFileUrls().keySet()){
                        builder.append(fileName).append(":").append(originFile.getFileUrls().get(fileName)).append(";");
                    }
                    subNode.put("files", builder.toString());
                    subNode.put("cda_document_id", cdaDocument.getId());
                    subNode.put("cda_document_name", cdaDocument.getName());
                }
            });
            resourceBucket.insertBasicRecord(ResourceCells.FILE_LIST, root.toString());
            return resourceBucket;
        } else if (originalPackage.getProfileType() == ProfileType.Link) {
            resourceBucket = new ResourceBucket(
                    originalPackage.getId(),
                    originalPackage.getPackId(),
                    originalPackage.getReceiveDate(),
                    ResourceCore.FileMasterTable,
                    ResourceCore.FileSubTable,
                    ResourceFamily.Basic,
                    ResourceFamily.Data
            );
            // basic fields
            LinkPackage linkPackage = (LinkPackage) originalPackage;
            resourceBucket.insertBasicRecord(ResourceCells.PROFILE_TYPE, linkPackage.getProfileType().toString());
            resourceBucket.insertBasicRecord(ResourceCells.EVENT_NO, linkPackage.getEventNo());
            resourceBucket.insertBasicRecord(ResourceCells.EVENT_DATE, DateTimeUtil.utcDateTimeFormat(linkPackage.getEventTime()));
            resourceBucket.insertBasicRecord(ResourceCells.EVENT_TYPE, linkPackage.getEventType() == null ? "" : Integer.toString(linkPackage.getEventType().ordinal()));
            resourceBucket.insertBasicRecord(ResourceCells.PATIENT_ID, linkPackage.getPatientId());
            resourceBucket.insertBasicRecord(ResourceCells.ORG_CODE, linkPackage.getOrgCode());
            resourceBucket.insertBasicRecord(ResourceCells.ORG_NAME, redisService.getOrgName(linkPackage.getOrgCode()));
            resourceBucket.insertBasicRecord(ResourceCells.ORG_AREA, redisService.getOrgArea(linkPackage.getOrgCode()));
            resourceBucket.insertBasicRecord(ResourceCells.CDA_VERSION, linkPackage.getCdaVersion());
            resourceBucket.insertBasicRecord(ResourceCells.CREATE_DATE, DateTimeUtil.utcDateTimeFormat(new Date()));
            resourceBucket.insertBasicRecord(ResourceCells.PATIENT_NAME, linkPackage.getPatientName());
            resourceBucket.insertBasicRecord(ResourceCells.DEMOGRAPHIC_ID, linkPackage.getDemographicId());
            ArrayNode root = objectMapper.createArrayNode();
            linkPackage.getLinkFiles().forEach(item -> {
                ObjectNode subNode = root.addObject();
                subNode.put("file_extension", item.getFileExtension());
                subNode.put("origin_name", item.getOriginName());
                subNode.put("report_form_no", item.getReportFormNo());
                subNode.put("serial_no", item.getSerialNo());
                subNode.put("file_size", item.getFileSize());
                subNode.put("url", item.getUrl());
            });
            resourceBucket.insertBasicRecord(ResourceCells.FILE_LIST, root.toString());
            return resourceBucket;
        } else if (originalPackage.getProfileType() == ProfileType.Simple) {
            resourceBucket = new ResourceBucket(
                    originalPackage.getId(),
                    originalPackage.getPackId(),
                    originalPackage.getReceiveDate(),
                    ResourceCore.FileMasterTable,
                    ResourceCore.FileSubTable,
                    ResourceFamily.Basic,
                    ResourceFamily.Data
            );
            return resourceBucket;
        }
        throw new ZipException("Invalid zip file structure");
    }
    /**
     * Resourcing of a metadata element: map a standard metadata code onto its resource id.
     * @param cdaVersion CDA version
     * @param srcDataSetCode standard dataset code
     * @param srcMetadataCode standard metadata code
     * @param resourceBucket resource bucket
     * @param value value
     * @param profileType profile type
     * @param existSet metadata already recorded by quality control
     * @return
     */
     protected String getResMetadata(String cdaVersion,
                                     String srcDataSetCode,
                                     String srcMetadataCode,
                                     ResourceBucket resourceBucket,
                                     String value,
                                     ProfileType profileType,
                                     Set<String> existSet){
         // TODO: for translation, normalize CODE and VALUE before the lookup
         if ("rBUSINESS_DATE".equals(srcMetadataCode)) {
             return null;
         }
         String resMetadata = redisService.getRsAdapterMetaData(cdaVersion, srcDataSetCode, srcMetadataCode);
         if (!StringUtils.isEmpty(resMetadata)) {
             return resMetadata;
         }
         // log the missing mapping
         PackResolveLogger.warn(String.format("Unable to get resource meta data code for ehr meta data %s of %s in %s", srcMetadataCode, srcDataSetCode, cdaVersion));
         if (profileType == ProfileType.Standard && !existSet.contains(srcDataSetCode + "$" + srcMetadataCode)) {
             // quality-control record for the missing mapping
             Map<String, Object> qcMetadataRecord = new HashMap<>();
             StringBuilder _id = new StringBuilder();
             _id.append(resourceBucket.getPackId())
                     .append("$")
                     .append(srcDataSetCode)
                     .append("$")
                     .append(srcMetadataCode);
             qcMetadataRecord.put("_id", _id.toString());
             qcMetadataRecord.put("pack_id", resourceBucket.getPackId());
             qcMetadataRecord.put("patient_id", resourceBucket.getBasicRecord(ResourceCells.PATIENT_ID));
             qcMetadataRecord.put("org_code", resourceBucket.getBasicRecord(ResourceCells.ORG_CODE));
             qcMetadataRecord.put("org_name", resourceBucket.getBasicRecord(ResourceCells.ORG_NAME));
             qcMetadataRecord.put("org_area", resourceBucket.getBasicRecord(ResourceCells.ORG_AREA));
             qcMetadataRecord.put("dept", resourceBucket.getBasicRecord(ResourceCells.DEPT_CODE));
             qcMetadataRecord.put("diagnosis_name", resourceBucket.getBasicRecord(ResourceCells.DIAGNOSIS_NAME));
             qcMetadataRecord.put("event_date", DateUtil.toStringLong(DateUtil.strToDate(resourceBucket.getBasicRecord(ResourceCells.EVENT_DATE))));
             qcMetadataRecord.put("event_type", resourceBucket.getBasicRecord(ResourceCells.EVENT_TYPE) == "" ? -1 : new Integer(resourceBucket.getBasicRecord(ResourceCells.EVENT_TYPE)));
             qcMetadataRecord.put("event_no", resourceBucket.getBasicRecord(ResourceCells.EVENT_NO));
             qcMetadataRecord.put("receive_date", DATE_FORMAT.format(resourceBucket.getReceiveDate()));
             qcMetadataRecord.put("version", cdaVersion);
             qcMetadataRecord.put("dataset", srcDataSetCode);
             qcMetadataRecord.put("metadata", srcMetadataCode);
             qcMetadataRecord.put("value", value);
             qcMetadataRecord.put("qc_step", 2); //资源化质控环节
             qcMetadataRecord.put("qc_error_type", ErrorType.FieldAdaptationError.getType()); //资源适配错误
             qcMetadataRecord.put("qc_error_name", ErrorType.FieldAdaptationError.getName()); //资源适配错误
             qcMetadataRecord.put("qc_error_message", String.format("Unable to get resource meta data code for ehr meta data %s of %s in %s", srcMetadataCode, srcDataSetCode, cdaVersion));
             qcMetadataRecord.put("create_date", DATE_FORMAT.format(new Date()));
             resourceBucket.getQcMetadataRecords().addRecord(qcMetadataRecord);
             existSet.add(srcDataSetCode + "$" + srcMetadataCode);
         }
         return null;
     }
    /**
     * Dictionary transformation: store the code and, when available, its dictionary label.
     * @param dataRecord data record
     * @param cdaVersion CDA version
     * @param metadataId resource metadata id
     * @param value value
     * @param srcDataSetCode standard dataset code
     * @param srcMetadataCode standard metadata code
     * @throws Exception
     */
    protected void dictTransform(ResourceRecord dataRecord,
                                 String cdaVersion,
                                 String metadataId,
                                 String value,
                                 String srcDataSetCode,
                                 String srcMetadataCode) throws Exception {
        // look up the dictionary id bound to this metadata element, if any
        String dictId = redisService.getMetaDataDict(cdaVersion, srcDataSetCode, srcMetadataCode);
        // translate only when both the dictionary id and the original value are present
        if (StringUtils.isNotBlank(dictId) && StringUtils.isNotBlank(value)) {
            // resolve the dictionary entry label
            String _value = redisService.getDictEntryValue(cdaVersion, dictId, value);
            // when an entry exists, store both the original code and its label
            if (StringUtils.isNotBlank(_value)) {
                // standard dictionary entry code
                dataRecord.addResource(metadataId, value);
                // standard dictionary entry label
                dataRecord.addResource(metadataId + "_VALUE", _value);
            } else {
                dataRecord.addResource(metadataId, value);
            }
        } else {
            dataRecord.addResource(metadataId, value);
        }
    }
}
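
The net effect of dictTransform() on a record, sketched with a hypothetical dictionary-backed element (the EHR_000019 code/label pair below is invented for illustration):

public class DictTransformDemo {
    public static void main(String[] args) {
        ResourceRecord record = new ResourceRecord();
        record.addResource("EHR_000019", "1");        // standard dictionary entry code
        record.addResource("EHR_000019_VALUE", "男"); // label as resolved via redis
        // Prints 男
        System.out.println(record.getResourceValue("EHR_000019_VALUE"));
    }
}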

+ 92 - 0
src/main/java/com/yihu/ehr/resolve/service/resource/stage2/PatientService.java

@ -0,0 +1,92 @@
package com.yihu.ehr.resolve.service.resource.stage2;
import com.yihu.ehr.entity.patient.DemographicInfo;
import com.yihu.ehr.profile.family.ResourceCells;
import com.yihu.ehr.query.BaseJpaService;
import com.yihu.ehr.resolve.dao.PatientDao;
import com.yihu.ehr.resolve.model.stage2.ResourceBucket;
import com.yihu.ehr.util.datetime.DateTimeUtil;
import org.apache.commons.codec.digest.DigestUtils;
import org.hibernate.FlushMode;
import org.hibernate.Query;
import org.hibernate.Session;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.math.BigInteger;
import java.util.Date;
import java.util.List;
/**
 * Service - resident demographic registration.
 * Created by progr1mmer on 2017/12/10.
 */
@Service
@Transactional
public class PatientService extends BaseJpaService<DemographicInfo, PatientDao>{
    @Autowired
    private PatientDao patientDao;
    public void checkPatient(ResourceBucket resourceBucket) throws Exception {
        // check whether this resident is already registered
        String idCardNo = resourceBucket.getBasicRecord(ResourceCells.DEMOGRAPHIC_ID).trim();
        boolean isRegistered = this.isExists(idCardNo);
        if (!isRegistered) {
            DemographicInfo demographicInfo = new DemographicInfo();
            demographicInfo.setIdCardNo(idCardNo);
            String name = resourceBucket.getBasicRecord(ResourceCells.PATIENT_NAME) == null ? "" : resourceBucket.getBasicRecord(ResourceCells.PATIENT_NAME);
            demographicInfo.setName(name);
            String telephoneNo = resourceBucket.getMasterRecord().getResourceValue("EHR_000003") == null ? "" : resourceBucket.getMasterRecord().getResourceValue("EHR_000003");
            demographicInfo.setTelephoneNo("{\"联系电话\":\"" + telephoneNo + "\"}");
            String email = resourceBucket.getMasterRecord().getResourceValue("EHR_000008") == null ? "" : resourceBucket.getMasterRecord().getResourceValue("EHR_000008");
            demographicInfo.setEmail(email);
            String birthPlace = resourceBucket.getMasterRecord().getResourceValue("EHR_000013") == null ? "" : resourceBucket.getMasterRecord().getResourceValue("EHR_000013");
            demographicInfo.setBirthPlace(birthPlace);
            String martialStatus = resourceBucket.getMasterRecord().getResourceValue("EHR_000014") == null ? "" : resourceBucket.getMasterRecord().getResourceValue("EHR_000014");
            demographicInfo.setMartialStatus(martialStatus);
            String nativePlace = resourceBucket.getMasterRecord().getResourceValue("EHR_000015") == null ? "" : resourceBucket.getMasterRecord().getResourceValue("EHR_000015");
            demographicInfo.setNativePlace(nativePlace);
            String nation = resourceBucket.getMasterRecord().getResourceValue("EHR_000016") ==  null ? "" : resourceBucket.getMasterRecord().getResourceValue("EHR_000016");
            demographicInfo.setNation(nation);
            String gender = resourceBucket.getMasterRecord().getResourceValue("EHR_000019") == null ? "0" : resourceBucket.getMasterRecord().getResourceValue("EHR_000019");
            demographicInfo.setGender(gender);
            Date birthday = resourceBucket.getMasterRecord().getResourceValue("EHR_000007") == null ? null : DateTimeUtil.simpleDateParse(resourceBucket.getMasterRecord().getResourceValue("EHR_000007"));
            demographicInfo.setBirthday(birthday);
            String homeAddress = resourceBucket.getMasterRecord().getResourceValue("EHR_001227") == null ? "" : resourceBucket.getMasterRecord().getResourceValue("EHR_001227");
            demographicInfo.setHomeAddress(homeAddress);
            // register
            this.registered(demographicInfo);
        }
    }
    @Transactional(readOnly = true)
    private boolean isExists(String idCardNo) {
        Session session = currentSession();
        String sql = "SELECT COUNT(1) FROM demographics WHERE id = :id";
        Query query = session.createSQLQuery(sql);
        query.setFlushMode(FlushMode.COMMIT);
        query.setString("id", idCardNo);
        BigInteger count = (BigInteger) query.uniqueResult();
        return count.compareTo(new BigInteger("0")) > 0;
    }
    private void registered(DemographicInfo demographicInfo) {
        // Default password: the last 8 characters of the id card number, or "12345678" when it is too short.
        String password = "12345678";
        if (demographicInfo.getIdCardNo().length() > 9) {
            password = demographicInfo.getIdCardNo().substring(demographicInfo.getIdCardNo().length() - 8);
        }
        demographicInfo.setPassword(DigestUtils.md5Hex(password));
        demographicInfo.setRegisterTime(new Date());
        patientDao.save(demographicInfo);
    }
    @Transactional(readOnly = true)
    public List<DemographicInfo> findByNameOrBirthdayOrTelephoneNo(String name, Date birthday, String telephoneNo) {
        return patientDao.findByNameOrBirthdayOrTelephoneNo(name, birthday, telephoneNo);
    }
}

+ 22 - 0
src/main/java/com/yihu/ehr/resolve/service/resource/stage2/QcRecordService.java

@ -0,0 +1,22 @@
package com.yihu.ehr.resolve.service.resource.stage2;
import com.yihu.ehr.elasticsearch.ElasticSearchUtil;
import com.yihu.ehr.resolve.model.stage2.ResourceBucket;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
/**
 * Created by progr1mmer on 2018/5/23.
 */
@Service
public class QcRecordService {
    @Autowired
    private ElasticSearchUtil elasticSearchUtil;
    /**
     * Bulk-index the package's QC metadata records into ES
     * (index "json_archives_qc", type "qc_metadata_info").
     */
    public void record(ResourceBucket resourceBucket) throws Exception {
        elasticSearchUtil.bulkIndex("json_archives_qc", "qc_metadata_info", resourceBucket.getQcMetadataRecords().getRecords());
    }
}

+ 100 - 0
src/main/java/com/yihu/ehr/resolve/service/resource/stage2/RedisService.java

@ -0,0 +1,100 @@
package com.yihu.ehr.resolve.service.resource.stage2;
import com.yihu.ehr.redis.schema.*;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
/**
 * Redis access facade for the dictionary and standard caches used during resolution.
 * @author hzp add at 20170425
 */
@Service
public class RedisService {
    @Autowired
    private HealthProblemDictKeySchema healthProblemDictKeySchema;
    @Autowired
    private Icd10KeySchema icd10KeySchema;
    @Autowired
    private OrgKeySchema orgKeySchema;
    @Autowired
    private RsAdapterMetaKeySchema rsAdapterMetaKeySchema;
    @Autowired
    private StdDataSetKeySchema stdDataSetKeySchema;
    @Autowired
    private StdMetaDataKeySchema stdMetaDataKeySchema;
    /**
     * Get a health problem entry from redis.
     */
    public String getHealthProblem(String key) {
        return healthProblemDictKeySchema.get(key);
    }
    /**
     * Get the health problem code mapped to an ICD10 code, from redis.
     */
    public String getHpCodeByIcd10(String key) {
        return icd10KeySchema.getHpCode(key);
    }
    /**
     * Get chronic disease info for an ICD10 code.
     */
    public String getChronicInfo(String key) {
        return icd10KeySchema.getChronicInfo(key);
    }
    /**
     * Get an organization name from redis.
     */
    public String getOrgName(String key) {
        return orgKeySchema.get(key);
    }
    /**
     * Get an organization's area from redis.
     */
    public String getOrgArea(String key) {
        return orgKeySchema.getOrgArea(key);
    }
    /**
     * Get the resourcing metadata adapter mapping, from redis.
     */
    public String getRsAdapterMetaData(String cdaVersion, String dictCode, String srcDictEntryCode) {
        return rsAdapterMetaKeySchema.getMetaData(cdaVersion, dictCode, srcDictEntryCode);
    }
    /**
     * Get whether a standard data set is a multi-record (master/detail) set, from redis.
     */
    public Boolean getDataSetMultiRecord(String version, String code) {
        return stdDataSetKeySchema.dataSetMultiRecord(version, code);
    }
    /**
     * Get the dictionary bound to a standard metadata element, from redis.
     */
    public String getMetaDataDict(String version, String dataSetCode, String innerCode) {
        return stdMetaDataKeySchema.metaDataDict(version, dataSetCode, innerCode);
    }
    /**
     * Get the value of a standard dictionary entry, from redis.
     */
    public String getDictEntryValue(String version, String dictId, String entryCode) {
        return stdMetaDataKeySchema.dictEntryValue(version, dictId, entryCode);
    }
}
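
For orientation, a hedged sketch of how a resolver might chain these caches to turn a coded value into display text. The helper below is an illustration only, not part of this commit:

    // Hypothetical helper (not in this commit): resolve a coded metadata value
    // via the standard dictionary caches exposed by RedisService.
    public static String translateCodedValue(RedisService redisService, String cdaVersion,
                                             String dataSetCode, String innerCode, String entryCode) {
        // First look up which dictionary the metadata element is bound to...
        String dictId = redisService.getMetaDataDict(cdaVersion, dataSetCode, innerCode);
        if (dictId == null || dictId.isEmpty()) {
            return entryCode; // not dictionary-bound, keep the raw value
        }
        // ...then map the coded entry onto its dictionary value.
        return redisService.getDictEntryValue(cdaVersion, dictId, entryCode);
    }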

+ 66 - 0
src/main/java/com/yihu/ehr/resolve/service/resource/stage2/ResourceService.java

@ -0,0 +1,66 @@
package com.yihu.ehr.resolve.service.resource.stage2;
import com.yihu.ehr.resolve.dao.MasterResourceDao;
import com.yihu.ehr.resolve.dao.SubResourceDao;
import com.yihu.ehr.resolve.model.stage1.LinkPackage;
import com.yihu.ehr.resolve.model.stage1.OriginalPackage;
import com.yihu.ehr.resolve.model.stage2.ResourceBucket;
import com.yihu.ehr.resolve.service.profile.ArchiveRelationService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.List;
import java.util.Map;
/**
 * Resource service: stage-2 persistence of a resolved archive package.
 *
 * @author Sand
 * @version 1.0
 * @created 2016.04.15 16:50
 */
@Service
public class ResourceService {
    @Autowired
    private MasterResourceDao masterResRepo;
    @Autowired
    private SubResourceDao subResRepo;
    @Autowired
    private ArchiveRelationService archiveRelationService;
    @Autowired
    private PatientService patientService;
    @Autowired
    private QcRecordService qcRecordService;
    @Autowired
    private FtpFileService ftpFileService;
    public void save(ResourceBucket resourceBucket, OriginalPackage originalPackage) throws Exception {
        // master resource table
        masterResRepo.saveOrUpdate(resourceBucket, originalPackage);
        // sub (detail) resource tables
        subResRepo.saveOrUpdate(resourceBucket, originalPackage);
        // save archive relation records to ES
        archiveRelationService.relation(resourceBucket, originalPackage);
        // save QC data to ES
        qcRecordService.record(resourceBucket);
        // save the resident (patient) info record
        if (originalPackage.isIdentifyFlag()) {
            patientService.checkPatient(resourceBucket);
        }
        // once the data is persisted, delete the files on the FTP server
        if (originalPackage instanceof LinkPackage) {
            LinkPackage pack = (LinkPackage) originalPackage;
            ftpFileService.deleteFile(pack);
        }
    }
}
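
A hedged sketch of the intended call site, e.g. a resolve job handing a parsed package to stage 2. The resolver variable and its method name are assumptions, not APIs confirmed by this commit:

    // Hypothetical pipeline step: stage 1 parses the package into a ResourceBucket,
    // stage 2 persists, indexes and registers it in one pass.
    ResourceBucket resourceBucket = packageResolver.resolve(originalPackage); // assumed API
    resourceService.save(resourceBucket, originalPackage);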

+ 34 - 0
src/main/java/com/yihu/ehr/resolve/service/resource/stage2/RsDictionaryEntryService.java

@ -0,0 +1,34 @@
package com.yihu.ehr.resolve.service.resource.stage2;
import com.yihu.ehr.query.BaseJpaService;
import com.yihu.ehr.resolve.dao.RsDictionaryEntryDao;
import com.yihu.ehr.resolve.model.stage1.RsDictionaryEntry;
import org.hibernate.FlushMode;
import org.hibernate.Query;
import org.hibernate.Session;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
/**
 * @author zdm
 * @created 2018.07.17
 */
@Service
@Transactional
public class RsDictionaryEntryService extends BaseJpaService<RsDictionaryEntry, RsDictionaryEntryDao> {
    @Transactional(readOnly = true)
    public String getRsDictionaryEntryByDictCode(String dictCode, String code) {
        Session session = currentSession();
        String sql = "SELECT NAME FROM rs_dictionary_entry WHERE DICT_CODE = :dictCode AND CODE = :code";
        Query query = session.createSQLQuery(sql);
        query.setFlushMode(FlushMode.COMMIT);
        query.setString("dictCode", dictCode);
        query.setString("code", code);
        // Evaluate once; each uniqueResult() call re-runs the query
        Object result = query.uniqueResult();
        return result == null ? "" : result.toString();
    }
}
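
A minimal usage sketch; the dictionary code and entry code below are made-up values for illustration:

    // Hypothetical lookup: translate a coded entry into its display name,
    // receiving "" when the dictionary has no such entry.
    String entryName = rsDictionaryEntryService.getRsDictionaryEntryByDictCode("GENDER", "1");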

+ 74 - 0
src/main/java/com/yihu/ehr/resolve/service/resource/stage2/StatusReportService.java

@ -0,0 +1,74 @@
package com.yihu.ehr.resolve.service.resource.stage2;
import com.yihu.ehr.elasticsearch.ElasticSearchUtil;
import com.yihu.ehr.profile.ArchiveStatus;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
/**
 * Created by progr1mmer on 2018/6/19.
 */
@Service
public class StatusReportService {
    // SimpleDateFormat is not thread-safe, so each thread gets its own instance
    private static final ThreadLocal<DateFormat> DATE_FORMAT =
            ThreadLocal.withInitial(() -> new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"));
    private static final String MAIN_INDEX = "json_archives";
    private static final String MAIN_INFO = "info";
    @Autowired
    private ElasticSearchUtil elasticSearchUtil;
    public void reportStatus(String _id, ArchiveStatus archiveStatus, int errorType, String message, Map<String, Object> callback) {
        Map<String, Object> updateSource = new HashMap<>();
        if (archiveStatus == ArchiveStatus.Finished) {
            // persisted successfully
            updateSource.put("profile_id", callback.get("profile_id"));
            updateSource.put("demographic_id", callback.get("demographic_id"));
            updateSource.put("event_type", callback.get("event_type"));
            updateSource.put("event_no", callback.get("event_no"));
            updateSource.put("event_date", callback.get("event_date"));
            updateSource.put("patient_id", callback.get("patient_id"));
            updateSource.put("dept", callback.get("dept"));
            updateSource.put("delay", callback.get("delay"));
            updateSource.put("re_upload_flg", callback.get("re_upload_flg"));
            updateSource.put("finish_date", DATE_FORMAT.get().format(new Date()));
            updateSource.put("resourced", 1);
            updateSource.put("defect", callback.get("defect"));
            updateSource.put("patient_name", callback.get("patient_name"));
        } else if (archiveStatus == ArchiveStatus.Acquired) {
            // persistence started
            updateSource.put("parse_date", DATE_FORMAT.get().format(new Date()));
        } else {
            // persistence failed
            updateSource.put("finish_date", null);
            if (3 <= errorType && errorType <= 7) {
                // error types 3 through 7 cap the fail count at 3 immediately
                updateSource.put("fail_count", 3);
            } else {
                Map<String, Object> sourceMap = elasticSearchUtil.findById(MAIN_INDEX, MAIN_INFO, _id);
                if (null == sourceMap) {
                    return;
                }
                int failCount = (int) sourceMap.get("fail_count");
                if (failCount < 3) {
                    updateSource.put("fail_count", failCount + 1);
                }
            }
            updateSource.put("resourced", 0);
        }
        updateSource.put("message", message);
        updateSource.put("error_type", errorType);
        updateSource.put("archive_status", archiveStatus.ordinal());
        elasticSearchUtil.voidUpdate(MAIN_INDEX, MAIN_INFO, _id, updateSource);
    }
    public Map<String, Object> getJsonArchiveById(String id) {
        return elasticSearchUtil.findById(MAIN_INDEX, MAIN_INFO, id);
    }
}
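
A hedged sketch of how a resolve job might report these transitions. Only Finished and Acquired appear above, so the failure constant, packageId, and the exception variable are assumptions:

    // On pickup: stamps parse_date.
    statusReportService.reportStatus(packageId, ArchiveStatus.Acquired, 0, "Acquired", new HashMap<>());
    // On an error in the 3..7 band: fail_count goes straight to its cap of 3.
    statusReportService.reportStatus(packageId, ArchiveStatus.Failed /* assumed constant */, 3, e.getMessage(), new HashMap<>());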

+ 77 - 0
src/main/java/com/yihu/ehr/resolve/util/FileTableUtil.java

@ -0,0 +1,77 @@
package com.yihu.ehr.resolve.util;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.yihu.ehr.lang.SpringContext;
import com.yihu.ehr.profile.family.ResourceCells;
import com.yihu.ehr.resolve.model.stage1.LinkPackage;
import com.yihu.ehr.resolve.model.stage1.OriginalPackage;
import com.yihu.ehr.resolve.model.stage1.details.CdaDocument;
import com.yihu.ehr.resolve.model.stage1.details.LinkFile;
import com.yihu.ehr.resolve.model.stage1.details.OriginFile;
import com.yihu.ehr.util.datetime.DateTimeUtil;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * @author linaz
 * @created 2016.04.15
 */
@Deprecated
public class FileTableUtil {
    public static Map<String, String> getBasicFamilyCellMap(OriginalPackage originalPackage) {
        Map<String, String> map = new HashMap<>();
        map.put(ResourceCells.PATIENT_ID, originalPackage.getPatientId());
        map.put(ResourceCells.EVENT_NO, originalPackage.getEventNo());
        map.put(ResourceCells.ORG_CODE, originalPackage.getOrgCode());
        return map;
    }
    public static Map<String, String> getFileFamilyCellMap(CdaDocument cdaDocument) {
        ArrayNode root = (SpringContext.getService(ObjectMapper.class)).createArrayNode();
        for (OriginFile originFile : cdaDocument.getOriginFiles()) {
            // addObject() both creates the element and appends it to the array
            ObjectNode subNode = root.addObject();
            subNode.put("mime", originFile.getMime());
            subNode.put("urls", originFile.getUrlsStr());
            String urlScope = originFile.getUrlScope() == null ? "" : originFile.getUrlScope().name();
            // "url_score" looks like a typo for "url_scope", but the key is kept for compatibility
            subNode.put("url_score", urlScope);
            subNode.put("emr_id", originFile.getEmrId());
            subNode.put("emr_name", originFile.getEmrName());
            subNode.put("expire_date", originFile.getExpireDate() == null ? "" : DateTimeUtil.utcDateTimeFormat(originFile.getExpireDate()));
            subNode.put("note", originFile.getNote());
            // flatten the name -> url map into "name:url;name:url;..."
            StringBuilder builder = new StringBuilder();
            for (String fileName : originFile.getFileUrls().keySet()) {
                builder.append(fileName).append(":").append(originFile.getFileUrls().get(fileName)).append(";");
            }
            subNode.put("files", builder.toString());
        }
        Map<String, String> map = new HashMap<>();
        map.put(ResourceCells.CDA_DOCUMENT_ID, cdaDocument.getId());
        map.put(ResourceCells.CDA_DOCUMENT_NAME, cdaDocument.getName());
        map.put(ResourceCells.FILE_LIST, root.toString());
        return map;
    }
    public static Map<String, String> getFileFamilyCellMap(LinkPackage linkPackage) {
        Map<String, String> map = new HashMap<>();
        if (null == linkPackage) {
            return map;
        }
        List<LinkFile> linkFiles = linkPackage.getLinkFiles();
        ArrayNode root = (SpringContext.getService(ObjectMapper.class)).createArrayNode();
        for (LinkFile linkFile : linkFiles) {
            // addObject() both creates and appends the element, so no explicit
            // root.add(subNode) is needed (it would insert every entry twice)
            ObjectNode subNode = root.addObject();
            subNode.put("url", linkFile.getUrl());
            subNode.put("originName", linkFile.getOriginName());
            subNode.put("fileExtension", linkFile.getFileExtension());
            subNode.put("fileSize", linkFile.getFileSize());
        }
        map.put(ResourceCells.FILE_LIST, root.toString());
        return map;
    }
}

+ 22 - 0
src/main/java/com/yihu/ehr/resolve/util/LocalTempPathUtil.java

@ -0,0 +1,22 @@
package com.yihu.ehr.resolve.util;
import java.util.UUID;
/**
 * Created by progr1mmer on 2018/5/15.
 */
public class LocalTempPathUtil {
    private static final String TEMP_PATH = System.getProperty("java.io.tmpdir") + java.io.File.separator;
    /**
     * The system temp directory, with a trailing separator.
     */
    public static String getTempPath() {
        return TEMP_PATH;
    }
    /**
     * A temp-directory prefix ending with a random UUID and an underscore,
     * for building collision-free file names.
     */
    public static String getTempPathWithUUIDSuffix() {
        return TEMP_PATH + UUID.randomUUID() + "_";
    }
}
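
A one-line usage sketch (packageId is an assumed variable): building a collision-free local path for a downloaded archive before unzipping it.

    // e.g. /tmp/550e8400-e29b-41d4-a716-446655440000_pack123.zip
    String zipPath = LocalTempPathUtil.getTempPathWithUUIDSuffix() + packageId + ".zip";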

+ 168 - 0
src/main/resources/application.yml

@ -0,0 +1,168 @@
server:
  port: ${svr-pack-resolve.server.port}
info:
  app:
    name: SVR-PACK-RESOLVE
    description: EHR Platform Microservice.
    version: 1.0.0
spring:
  datasource:
    driver-class-name: com.mysql.jdbc.Driver
    max-active: 20
    max-idle: 8
    min-idle: 8
    validation-query: SELECT 1
    test-on-borrow: true
  # REDIS
  redis:
    database: 0 # Database index used by the connection factory.
    port: 6379 # Redis server port.
    password: # Login password of the redis server.
    timeout: 0 # Connection timeout in milliseconds.
    #sentinel:
    #  master: # Name of Redis server.
    #  nodes: # Comma-separated list of host:port pairs.
    pool:
      max-active: 8 # Max number of connections that can be allocated by the pool at a given time. Use a negative value for no limit.
      max-idle: 8 # Max number of "idle" connections in the pool. Use a negative value to indicate an unlimited number of idle connections.
      max-wait: -1 # Maximum amount of time (in milliseconds) a connection allocation should block before throwing an exception when the pool is exhausted. Use a negative value to block indefinitely.
      min-idle: 1 # Target for the minimum number of idle connections to maintain in the pool. This setting only has an effect if it is positive.
fast-dfs:
  connect-timeout: 20
  network-timeout: 60
  charset: ISO8859-1
  pool:
    init-size: 10
    max-size: 20
    wait-time: 500
  http:
    tracker-http-port: 80
    anti-steal-token: no
    secret-key: FastDFS1234567890
hadoop:
  hbase-properties:
    hbase.zookeeper.property.clientPort: 2181
    zookeeper.znode.parent: /hbase-unsecure
  user:
    name: root
resolve:
  job:
    init-size: 10 # initial number of resolve/persist tasks
    max-size: 10 # maximum number of resolve/persist tasks
    cron-exp: 0/1 * * * * ? # cron expression controlling how often resolve tasks are triggered
ehr:
  # Archive package extractor parameters: pull the summary, event time and identity from the data sets
  pack-extractor:
    # Event extraction parameters, used to build the event summary
    # Outpatient events are extracted from the "outpatient summary" and "registration" data sets
    # Inpatient events are extracted from "patient summary", "admission record" and "medical record front page"; Clinic=0 outpatient, Resident=1 inpatient, MedicalExam=2 physical exam
    event:
      data-sets:
        - HDSC01_02: Clinic # outpatient - registration v1.0
        - HDSD00_85: Clinic # outpatient - registration v1.3
        - HDSC02_09: Resident # inpatient - admission record v1.0
        - HDSD00_13: Resident # inpatient - admission record v1.3
        - HDSD00_16: Resident # inpatient - discharge summary v1.3
        - HDSB05_03: MedicalExam # physical exam - registration info v1.3
    # Diagnosis information
    diagnosis:
      data-sets:
        - HDSC01_03 # outpatient - diagnosis record v1.0
        - HDSD00_73 # outpatient - diagnosis record v1.3
        - HDSC02_17 # inpatient - diagnosis record v1.0
        - HDSD00_69 # inpatient - diagnosis record v1.3
        - HDSB05_84 # physical exam - diagnosis record v1.3
      code-meta-data:
        - HDSD00_01_550 # outpatient - clinical diagnosis code in a specific classification scheme v1.0 & v1.3
        - HDSD00_69_002 # inpatient - clinical diagnosis code in a specific classification scheme v1.3
        - JDSB05_84_003 # physical exam - clinical diagnosis code in a specific classification scheme v1.3
      name-meta-data:
        - HDSD00_01_549 # outpatient - clinical diagnosis name in a specific classification scheme v1.0 & v1.3
        - HDSD00_69_001 # inpatient - clinical diagnosis name in a specific classification scheme v1.3
        - JDSB05_84_004 # physical exam - clinical diagnosis name in a specific classification scheme v1.3
    # Identity extraction parameters, taken from the demographics data set
    identity:
      data-sets:
        - HDSA00_01 # demographic information v1.0 & v1.3
      meta-data:
        id-card-no: HDSA00_01_017 # ID card number v1.0 & v1.3
        id-card-type: HDSA00_01_016 # ID card type v1.0 & v1.3
        patient-name: HDSA00_01_009 # patient name v1.3 (v1.0: HDSD00_01_002)
        patient-sex: HDSA00_01_011 # gender code
        birthday: HDSA00_01_012 # date of birth
    # Card extraction parameters, taken from the visit summary
    card:
      data-sets:
        - HDSC01_02 # outpatient - registration v1.0
        - HDSD00_85 # outpatient - registration v1.3
        - HDSC02_09 # inpatient - admission record v1.0
        - HDSD00_13 # inpatient - admission record v1.3
      card-num:
        - JDSD00_85_005 # outpatient - visit card number CARD_NUM
        - JDSD00_13_006 # inpatient - visit card number CARD_NUM
      card-type:
        - JDSD00_85_006 # outpatient - visit card type CARD_TYPE
        - JDSD00_13_007 # inpatient - visit card type CARD_TYPE
    # Department information
    dept:
      data-sets:
        - HDSD00_85 # outpatient - registration v1.3
        - HDSD00_13 # inpatient - admission record v1.3
      meta-data:
        - JDSD00_85_001 # outpatient - visit department code
        - JDSD00_13_004 # inpatient - admission department code
  # Patient event index information
  eventIndex:
    patientId:
      - JDSA00_01_001 # v1.3
      - PATIENT_ID
    eventNo:
      - HDSD00_02_040 # outpatient (emergency) number v1.3
      - HDSD00_02_066 # inpatient number v1.3
      - HDSD03_01_031 # outpatient number (registration) v1.0
      - HDSD00_01_579 # inpatient number v1.0; consultation record form number v1.3
      - EVENT_NO
    eventTime:
      - HDSD00_01_457 # visit datetime v1.0 & v1.3
      - HDSD00_16_031 # admission time v1.0
      - HDSD00_13_057 # admission datetime v1.3
      - HDSD00_01_185 # discharge date v1.0
      - HDSD00_16_005 # discharge datetime v1.3
eip:
  schemaVersion: 5a6951bff0bb
---
spring:
  profiles: dev
  datasource:
    url: jdbc:mysql://172.19.103.50:3306/healtharchive?useUnicode=true&characterEncoding=UTF-8&useSSL=false
    username: chenweishan
    password: chenweishan
  redis:
    host: 172.19.103.47 # Redis server host.
    port: 6379
    password: redis!@456
  kafka:
    bootstrap-servers: node4.hde.h3c.com:6667
fast-dfs:
  tracker-server: 172.19.103.13:22122
  public-server: http://172.19.103.52:80
  pacs-group-name: group1 # imaging files land on group1
hadoop:
  hbase-properties:
    hbase.zookeeper.quorum: node1.hde.h3c.com,node2.hde.h3c.com,node3.hde.h3c.com
elasticsearch:
  cluster-name: elasticsearch
  cluster-nodes: 172.19.103.9:9300
ftp:
  address: 172.19.103.58
  username: pacs
  password: pacssr2018
  port: 21
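
For reference, a minimal sketch of how the resolve.job values above are typically bound on the Java side. The property-holder class below is an illustration only; in this commit the actual wiring lives in SchedulerConfig/SchedulerManager:

    import org.springframework.beans.factory.annotation.Value;
    import org.springframework.stereotype.Component;

    // Hypothetical holder (class name assumed); mirrors the resolve.job.* keys above.
    @Component
    public class ResolveJobProperties {
        @Value("${resolve.job.init-size}")
        private int initSize;   // initial number of resolve tasks
        @Value("${resolve.job.max-size}")
        private int maxSize;    // upper bound on concurrent resolve tasks
        @Value("${resolve.job.cron-exp}")
        private String cronExp; // Quartz-style cron expression driving the task trigger
    }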

+ 29 - 0
src/main/resources/banner.txt

@ -0,0 +1,29 @@
                                   _oo8oo_
                                  o8888888o
                                  88" . "88
                                  (| -_- |)
                                  0\  =  /0
                                ___/'==='\___
                              .' \\|     |// '.
                             / \\|||  :  |||// \
                            / _||||| -:- |||||_ \
                           |   | \\\  -  /// |   |
                           | \_|  ''\---/''  |_/ |
                           \  .-\__  '-'  __/-.  /
                         ___'. .'  /--.--\  '. .'___
                      ."" '<  '.___\_<|>_/___.'  >' "".
                     | | :  `- \`.:`\ _ /`:.`/ -`  : | |
                     \  \ `-.   \_ __\ /__ _/   .-` /  /
                 =====`-.____`.___ \_____/ ___.`____.-`=====
                                   `=---=`
                ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
                 Buddha bless us: no downtime, and never a bug
 __   _      ___       ___    __    __    _         ___   ____  __   ___   _     _      ____
( (` \ \  / | |_)     | |_)  / /\  / /`  | |_/     | |_) | |_  ( (` / / \ | |   \ \  / | |_
_)_)  \_\/  |_| \     |_|   /_/--\ \_\_, |_| \     |_| \ |_|__ _)_) \_\_/ |_|__  \_\/  |_|__

+ 23 - 0
src/main/resources/bootstrap.yml

@ -0,0 +1,23 @@
spring:
  application:
    name: svr-pack-resolve
  cloud:
    config:
      username: user
      password: configuration
---
spring:
  profiles: dev
  cloud:
    config:
      uri: ${spring.config.uri:http://172.19.103.73:1221}
      label: ${spring.config.label:dev}
---
spring:
  profiles: prod
  cloud:
    config:
      uri: ${spring.config.uri}
      label: ${spring.config.label}

+ 138 - 0
src/main/resources/logback-spring.xml

@ -0,0 +1,138 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true">
	<springProperty scope="context" name="appName" source="spring.application.name"/>
	<property name="log_home" value="/ehr-logs/${appName}" />
	<conversionRule conversionWord="clr" converterClass="org.springframework.boot.logging.logback.ColorConverter" />
	<conversionRule conversionWord="wex" converterClass="org.springframework.boot.logging.logback.WhitespaceThrowableProxyConverter" />
	<conversionRule conversionWord="wEx" converterClass="org.springframework.boot.logging.logback.ExtendedWhitespaceThrowableProxyConverter" />
	<!-- Colored console log pattern -->
	<property name="CONSOLE_LOG_PATTERN" value="${CONSOLE_LOG_PATTERN:-%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr([${appName}]){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(---){faint} %clr([%15.15t]){faint} %clr(%-40.40logger{39}){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}" />
	<!-- File log pattern -->
	<property name="FILE_LOG_PATTERN" value="%d{yyyy-MM-dd HH:mm:ss} [${appName}] %-5level --- [%thread] - %logger{36}: %msg%n" />
	<!-- Standard output -->
	<appender name="console" class="ch.qos.logback.core.ConsoleAppender">
		<encoder>
			<pattern>${CONSOLE_LOG_PATTERN}</pattern>
			<charset class="java.nio.charset.Charset">UTF-8</charset>
		</encoder>
	</appender>
    <!-- Api Log Start -->
    <appender name="ApiInfoFile" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            <fileNamePattern>${log_home}/api/info/%d{yyyy-MM-dd}.log</fileNamePattern>
        </rollingPolicy>
        <encoder>
			<pattern>${FILE_LOG_PATTERN}</pattern>
            <charset class="java.nio.charset.Charset">UTF-8</charset>
        </encoder>
        <filter class="ch.qos.logback.classic.filter.LevelFilter">
            <level>INFO</level>
            <onMatch>ACCEPT</onMatch>
            <onMismatch>DENY</onMismatch>
        </filter>
    </appender>
    <appender name="ApiWarnFile" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            <fileNamePattern>${log_home}/api/warn/%d{yyyy-MM-dd}.log</fileNamePattern>
        </rollingPolicy>
        <encoder>
			<pattern>${FILE_LOG_PATTERN}</pattern>
            <charset class="java.nio.charset.Charset">UTF-8</charset>
        </encoder>
        <filter class="ch.qos.logback.classic.filter.LevelFilter">
            <level>WARN</level>
            <onMatch>ACCEPT</onMatch>
            <onMismatch>DENY</onMismatch>
        </filter>
    </appender>
    <appender name="ApiErrorFile" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            <fileNamePattern>${log_home}/api/error/%d{yyyy-MM-dd}.log</fileNamePattern>
        </rollingPolicy>
        <encoder>
			<pattern>${FILE_LOG_PATTERN}</pattern>
            <charset class="java.nio.charset.Charset">UTF-8</charset>
        </encoder>
        <filter class="ch.qos.logback.classic.filter.LevelFilter">
            <level>ERROR</level>
            <onMatch>ACCEPT</onMatch>
            <onMismatch>DENY</onMismatch>
        </filter>
    </appender>
    <logger name="com.yihu.ehr.exception.GlobalExceptionHandler" level="WARN" additivity="false">
		<appender-ref ref="console" />
        <appender-ref ref="ApiInfoFile" />
        <appender-ref ref="ApiWarnFile" />
        <appender-ref ref="ApiErrorFile" />
    </logger>
    <!-- Api Log End -->
    <!-- If this microservice is covered by global exception handling, do not modify the code above; append any new log handling below!!! -->
    <!-- Task Log Start -->
	<appender name="TaskInfoFile" class="ch.qos.logback.core.rolling.RollingFileAppender">
		<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
			<fileNamePattern>${log_home}/task/info/%d{yyyy-MM-dd}.log</fileNamePattern>
		</rollingPolicy>
		<encoder>
			<pattern>${FILE_LOG_PATTERN}</pattern>
			<charset class="java.nio.charset.Charset">UTF-8</charset>
		</encoder>
		<filter class="ch.qos.logback.classic.filter.LevelFilter">
			<level>INFO</level>
			<onMatch>ACCEPT</onMatch>
			<onMismatch>DENY</onMismatch>
		</filter>
	</appender>
	<appender name="TaskWarnFile" class="ch.qos.logback.core.rolling.RollingFileAppender">
		<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
			<fileNamePattern>${log_home}/task/warn/%d{yyyy-MM-dd}.log</fileNamePattern>
		</rollingPolicy>
		<encoder>
			<pattern>${FILE_LOG_PATTERN}</pattern>
			<charset class="java.nio.charset.Charset">UTF-8</charset>
		</encoder>
		<filter class="ch.qos.logback.classic.filter.LevelFilter">
			<level>WARN</level>
			<onMatch>ACCEPT</onMatch>
			<onMismatch>DENY</onMismatch>
		</filter>
	</appender>
	<appender name="TaskErrorFile" class="ch.qos.logback.core.rolling.RollingFileAppender">
		<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
			<fileNamePattern>${log_home}/task/error/%d{yyyy-MM-dd}.log</fileNamePattern>
		</rollingPolicy>
		<encoder>
			<pattern>${FILE_LOG_PATTERN}</pattern>
			<charset class="java.nio.charset.Charset">UTF-8</charset>
		</encoder>
		<filter class="ch.qos.logback.classic.filter.LevelFilter">
			<level>ERROR</level>
			<onMatch>ACCEPT</onMatch>
			<onMismatch>DENY</onMismatch>
		</filter>
	</appender>
	<logger name="com.yihu.ehr.resolve.log.PackResolveLogger" level="WARN" additivity="false">
		<appender-ref ref="TaskInfoFile" />
		<appender-ref ref="TaskWarnFile" />
		<appender-ref ref="TaskErrorFile" />
	</logger>
    <!-- Task Log End -->
	<root level="INFO">
		<appender-ref ref="console" />
	</root>
</configuration>

+ 15 - 0
src/test/java/com/yihu/ehr/SvrPackResolveApplicationTests.java

@ -0,0 +1,15 @@
package com.yihu.ehr;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
@RunWith(SpringJUnit4ClassRunner.class)
//@SpringApplicationConfiguration(classes = SvrPackResolve.class)
public class SvrPackResolveApplicationTests {
	@Test
	public void contextLoads() {
	}
}

+ 33 - 0
src/test/java/com/yihu/ehr/Test.java

@ -0,0 +1,33 @@
package com.yihu.ehr;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
/**
 * Created by progr1mmer on 2018/4/10.
 */
public class Test {
    @org.junit.Test
    public void test() throws Exception {
        String day1 = "2018-01-02 01:02:03";
        String day2 = "2018-01-04 01:02:04";
        DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        long time = dateFormat.parse(day2).getTime() - dateFormat.parse(day1).getTime();
        System.out.println(time);
        long day;
        // integer division floors, so round up when there is a partial-day remainder
        if (time % (1000 * 60 * 60 * 24) > 0) {
            day = time / (1000 * 60 * 60 * 24) + 1;
        } else {
            day = time / (1000 * 60 * 60 * 24);
        }
        System.out.println(day);
    }
}