
Merge branch 'master' of http://192.168.1.220:10080/esb/esb

Airhead committed 8 years ago (commit 8379f2dade)
100 changed files with 13261 additions and 42 deletions
  1. +48 -1  Hos-Framework-dependencies/pom.xml
  2. +131 -41  hos-admin/pom.xml
  3. +56 -0  hos-admin/src/main/java/com/yihu/hos/common/ApplicationStart.java
  4. +84 -0  hos-admin/src/main/java/com/yihu/hos/common/CheckLoginFilter.java
  5. +87 -0  hos-admin/src/main/java/com/yihu/hos/common/CommonPageController.java
  6. +349 -0  hos-admin/src/main/java/com/yihu/hos/common/JXLUtil.java
  7. +150 -0  hos-admin/src/main/java/com/yihu/hos/common/ReflectUtil.java
  8. +121 -0  hos-admin/src/main/java/com/yihu/hos/common/Services.java
  9. +98 -0  hos-admin/src/main/java/com/yihu/hos/common/mongo/IMongoDBAdminer.java
  10. +376 -0  hos-admin/src/main/java/com/yihu/hos/common/mongo/IMongoDBRunner.java
  11. +172 -0  hos-admin/src/main/java/com/yihu/hos/common/mongo/MongoDB.java
  12. +65 -0  hos-admin/src/main/java/com/yihu/hos/common/mongo/MongoDBConfig.java
  13. +57 -0  hos-admin/src/main/java/com/yihu/hos/common/mongo/MongoDBKit.java
  14. +92 -0  hos-admin/src/main/java/com/yihu/hos/common/mongo/MongoDBOperator.java
  15. +566 -0  hos-admin/src/main/java/com/yihu/hos/common/mongo/MongoDBPro.java
  16. +15 -0  hos-admin/src/main/java/com/yihu/hos/config/BeanConfig.java
  17. +51 -0  hos-admin/src/main/java/com/yihu/hos/config/WebMvcConfig.java
  18. +263 -0  hos-admin/src/main/java/com/yihu/hos/crawler/controller/CrawlerController.java
  19. +40 -0  hos-admin/src/main/java/com/yihu/hos/crawler/dao/CrawlerDatasetDao.java
  20. +40 -0  hos-admin/src/main/java/com/yihu/hos/crawler/dao/CrawlerFlowDao.java
  21. +33 -0  hos-admin/src/main/java/com/yihu/hos/crawler/dao/CrawlerFlowHeadDao.java
  22. +95 -0  hos-admin/src/main/java/com/yihu/hos/crawler/format/AdapterBase.java
  23. +65 -0  hos-admin/src/main/java/com/yihu/hos/crawler/format/AdapterScheme.java
  24. +159 -0  hos-admin/src/main/java/com/yihu/hos/crawler/format/DataSetTransformer.java
  25. +54 -0  hos-admin/src/main/java/com/yihu/hos/crawler/format/DocumentTransformer.java
  26. +27 -0  hos-admin/src/main/java/com/yihu/hos/crawler/format/IDataTransformer.java
  27. +109 -0  hos-admin/src/main/java/com/yihu/hos/crawler/model/adapter/AdapterDataSet.java
  28. +46 -0  hos-admin/src/main/java/com/yihu/hos/crawler/model/adapter/AdapterDict.java
  29. +13 -0  hos-admin/src/main/java/com/yihu/hos/crawler/model/adapter/AdapterDictEntry.java
  30. +63 -0  hos-admin/src/main/java/com/yihu/hos/crawler/model/adapter/AdapterMetaData.java
  31. +198 -0  hos-admin/src/main/java/com/yihu/hos/crawler/model/config/SysConfig.java
  32. +63 -0  hos-admin/src/main/java/com/yihu/hos/crawler/model/flow/CrawlerDataSetModel.java
  33. +53 -0  hos-admin/src/main/java/com/yihu/hos/crawler/model/flow/CrawlerFlowHeadModel.java
  34. +93 -0  hos-admin/src/main/java/com/yihu/hos/crawler/model/flow/CrawlerFlowModel.java
  35. +21 -0  hos-admin/src/main/java/com/yihu/hos/crawler/model/flow/resultModel/CrawlerDatasetResultDetailModel.java
  36. +21 -0  hos-admin/src/main/java/com/yihu/hos/crawler/model/flow/resultModel/CrawlerDatasetResultModel.java
  37. +30 -0  hos-admin/src/main/java/com/yihu/hos/crawler/model/flow/resultModel/FlowEntrance.java
  38. +48 -0  hos-admin/src/main/java/com/yihu/hos/crawler/model/flow/resultModel/FlowLines.java
  39. +41 -0  hos-admin/src/main/java/com/yihu/hos/crawler/model/flow/resultModel/FlowMapping.java
  40. +47 -0  hos-admin/src/main/java/com/yihu/hos/crawler/model/flow/resultModel/MappingDataset.java
  41. +36 -0  hos-admin/src/main/java/com/yihu/hos/crawler/model/flow/resultModel/MappingMetadata.java
  42. +103 -0  hos-admin/src/main/java/com/yihu/hos/crawler/model/patient/Patient.java
  43. +59 -0  hos-admin/src/main/java/com/yihu/hos/crawler/model/patient/PatientIdentity.java
  44. +110 -0  hos-admin/src/main/java/com/yihu/hos/crawler/model/patient/PatientIndex.java
  45. +41 -0  hos-admin/src/main/java/com/yihu/hos/crawler/model/standard/StdDict.java
  46. +37 -0  hos-admin/src/main/java/com/yihu/hos/crawler/model/standard/StdMetaData.java
  47. +9 -0  hos-admin/src/main/java/com/yihu/hos/crawler/model/transform/DictDataType.java
  48. +65 -0  hos-admin/src/main/java/com/yihu/hos/crawler/model/transform/EhrCondition.java
  49. +15 -0  hos-admin/src/main/java/com/yihu/hos/crawler/model/transform/IVerifier.java
  50. +12 -0  hos-admin/src/main/java/com/yihu/hos/crawler/model/transform/LogicValues.java
  51. +45 -0  hos-admin/src/main/java/com/yihu/hos/crawler/model/transform/MetaDataType.java
  52. +37 -0  hos-admin/src/main/java/com/yihu/hos/crawler/model/transform/MetaDataVerify.java
  53. +21 -0  hos-admin/src/main/java/com/yihu/hos/crawler/model/transform/TransformType.java
  54. +235 -0  hos-admin/src/main/java/com/yihu/hos/crawler/origin/FileSystemOrigin.java
  55. +47 -0  hos-admin/src/main/java/com/yihu/hos/crawler/origin/IDataOrigin.java
  56. +323 -0  hos-admin/src/main/java/com/yihu/hos/crawler/service/CrawlerFlowManager.java
  57. +243 -0  hos-admin/src/main/java/com/yihu/hos/crawler/service/CrawlerManager.java
  58. +516 -0  hos-admin/src/main/java/com/yihu/hos/crawler/service/CrawlerService.java
  59. +381 -0  hos-admin/src/main/java/com/yihu/hos/crawler/service/DataCollectDispatcher.java
  60. +379 -0  hos-admin/src/main/java/com/yihu/hos/crawler/service/EsbHttp.java
  61. +149 -0  hos-admin/src/main/java/com/yihu/hos/crawler/service/OldPatientCDAUpload.java
  62. +83 -0  hos-admin/src/main/java/com/yihu/hos/crawler/service/PatientCDAIndex.java
  63. +113 -0  hos-admin/src/main/java/com/yihu/hos/crawler/service/PatientCDAUpload.java
  64. +37 -0  hos-admin/src/main/java/com/yihu/hos/crawler/storage/DataSetStorage.java
  65. +232 -0  hos-admin/src/main/java/com/yihu/hos/crawler/storage/DocumentStorage.java
  66. +36 -0  hos-admin/src/main/java/com/yihu/hos/crawler/storage/IDataStorage.java
  67. +390 -0  hos-admin/src/main/java/com/yihu/hos/crawler/storage/MongodbStorage.java
  68. +474 -0  hos-admin/src/main/java/com/yihu/hos/datacollect/controller/DataCollectController.java
  69. +41 -0  hos-admin/src/main/java/com/yihu/hos/datacollect/controller/DataPushController.java
  70. +280 -0  hos-admin/src/main/java/com/yihu/hos/datacollect/dao/DatacollectDao.java
  71. +86 -0  hos-admin/src/main/java/com/yihu/hos/datacollect/dao/DatacollectLogDao.java
  72. +92 -0  hos-admin/src/main/java/com/yihu/hos/datacollect/dao/intf/IDatacollectDao.java
  73. +33 -0  hos-admin/src/main/java/com/yihu/hos/datacollect/dao/intf/IDatacollectLogDao.java
  74. +48 -0  hos-admin/src/main/java/com/yihu/hos/datacollect/model/DtoDatasetCol.java
  75. +48 -0  hos-admin/src/main/java/com/yihu/hos/datacollect/model/DtoDictCol.java
  76. +37 -0  hos-admin/src/main/java/com/yihu/hos/datacollect/model/DtoJobConfig.java
  77. +74 -0  hos-admin/src/main/java/com/yihu/hos/datacollect/model/DtoJobDataset.java
  78. +36 -0  hos-admin/src/main/java/com/yihu/hos/datacollect/model/DtoJobLog.java
  79. +58 -0  hos-admin/src/main/java/com/yihu/hos/datacollect/model/RsDatapushLog.java
  80. +155 -0  hos-admin/src/main/java/com/yihu/hos/datacollect/model/RsJobConfig.java
  81. +117 -0  hos-admin/src/main/java/com/yihu/hos/datacollect/model/RsJobDataset.java
  82. +87 -0  hos-admin/src/main/java/com/yihu/hos/datacollect/model/RsJobLog.java
  83. +191 -0  hos-admin/src/main/java/com/yihu/hos/datacollect/model/RsJobLogDetail.java
  84. +575 -0  hos-admin/src/main/java/com/yihu/hos/datacollect/service/DatacollectManager.java
  85. +1043 -0  hos-admin/src/main/java/com/yihu/hos/datacollect/service/DatacollectService.java
  86. +168 -0  hos-admin/src/main/java/com/yihu/hos/datacollect/service/DatapushService.java
  87. +96 -0  hos-admin/src/main/java/com/yihu/hos/datacollect/service/intf/IDatacollectManager.java
  88. +24 -0  hos-admin/src/main/java/com/yihu/hos/datacollect/service/intf/IDatacollectService.java
  89. +15 -0  hos-admin/src/main/java/com/yihu/hos/datacollect/service/intf/IDatapushService.java
  90. +143 -0  hos-admin/src/main/java/com/yihu/hos/resource/controller/RsCategoryController.java
  91. +283 -0  hos-admin/src/main/java/com/yihu/hos/resource/controller/RsDimensionController.java
  92. +826 -0  hos-admin/src/main/java/com/yihu/hos/resource/controller/RsResourceController.java
  93. +200 -0  hos-admin/src/main/java/com/yihu/hos/resource/controller/RsResourceRestController.java
  94. +45 -0  hos-admin/src/main/java/com/yihu/hos/resource/controller/StdController.java
  95. +21 -0  hos-admin/src/main/java/com/yihu/hos/resource/dao/IRsAppResourceDao.java
  96. +17 -0  hos-admin/src/main/java/com/yihu/hos/resource/dao/IRsAppResourceDetailDao.java
  97. +27 -0  hos-admin/src/main/java/com/yihu/hos/resource/dao/IRsCategoryDao.java
  98. +17 -0  hos-admin/src/main/java/com/yihu/hos/resource/dao/IRsDemensionCategoryDao.java
  99. +10 -0  hos-admin/src/main/java/com/yihu/hos/resource/dao/IRsDimensionDao.java
  100. +0 -0  hos-admin/src/main/java/com/yihu/hos/resource/dao/IRsResourceCategoryDao.java

+ 48 - 1
Hos-Framework-dependencies/pom.xml

@@ -8,6 +8,12 @@
    <artifactId>Hos-Framework-dependencies</artifactId>
    <version>1.0.1</version>
    <packaging>pom</packaging>
    <parent>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-parent</artifactId>
        <version>1.3.5.RELEASE</version>
        <relativePath/> <!-- lookup parent from repository -->
    </parent>
    <build>
        <plugins>
            <plugin>
@@ -18,12 +24,16 @@
                    <target>1.7</target>
                </configuration>
            </plugin>
            <plugin>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-maven-plugin</artifactId>
            </plugin>
        </plugins>
    </build>
    <properties>
        <aspectj.version>1.8.7</aspectj.version>
        <jackson.version>2.4.6</jackson.version>
        <spring.version>4.1.8.RELEASE</spring.version>
        <spring.version>4.2.5.RELEASE</spring.version>
        <spring.security.version>4.0.2.RELEASE</spring.security.version>
    </properties>
    <pluginRepositories>
@@ -64,6 +74,43 @@
        </repository>
    </repositories>
    <dependencies>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-web</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework</groupId>
            <artifactId>spring-webmvc</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework</groupId>
            <artifactId>spring-context</artifactId>
        </dependency>
        <dependency>
            <groupId>io.springfox</groupId>
            <artifactId>springfox-swagger2</artifactId>
            <version>2.4.0</version>
            <exclusions>
                <exclusion>
                    <artifactId>classmate</artifactId>
                    <groupId>com.fasterxml</groupId>
                </exclusion>
            </exclusions>
        </dependency>
        <dependency>
            <groupId>io.springfox</groupId>
            <artifactId>springfox-swagger-ui</artifactId>
            <version>2.4.0</version>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-test</artifactId>
            <scope>test</scope>
        </dependency>
        <!--=============httpclient start=============-->
        <dependency>
            <groupId>org.apache.httpcomponents</groupId>

+ 131 - 41
hos-admin/pom.xml

@@ -8,15 +8,14 @@
    <version>0.0.1-SNAPSHOT</version>
    <packaging>jar</packaging>
    <name>hos-admin</name>
    <description>ESB management platform</description>
    <parent>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-parent</artifactId>
        <version>1.4.0.RELEASE</version>
        <relativePath/> <!-- lookup parent from repository -->
    </parent>
	<name>hos-admin</name>
	<description>ESB management platform</description>
	<parent>
		<groupId>com.yihu.core</groupId>
		<artifactId>Hos-Framework-dependencies</artifactId>
		<version>1.0.1</version>
		<relativePath>../Hos-Framework-dependencies</relativePath>
	</parent>
    <properties>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
@@ -24,38 +23,129 @@
        <java.version>1.8</java.version>
    </properties>
    <dependencies>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-web</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-activemq</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-test</artifactId>
            <scope>test</scope>
        </dependency>
        <!--<dependency>-->
        <!--<groupId>org.apache.activemq</groupId>-->
        <!--<artifactId>activemq-spring</artifactId>-->
        <!--<version>5.10.0</version>-->
        <!--</dependency>-->
    </dependencies>
    <build>
        <plugins>
            <plugin>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-maven-plugin</artifactId>
            </plugin>
        </plugins>
    </build>
	<dependencies>
		<dependency>
			<groupId>org.springframework.boot</groupId>
			<artifactId>spring-boot-starter-web</artifactId>
		</dependency>
		<dependency>
			<groupId>org.springframework.boot</groupId>
			<artifactId>spring-boot-starter-logging</artifactId>
		</dependency>
		<dependency>
			<groupId>org.springframework.boot</groupId>
			<artifactId>spring-boot-starter-test</artifactId>
			<scope>test</scope>
		</dependency>
		<dependency>
			<groupId>org.springframework.boot</groupId>
			<artifactId>spring-boot-starter-jersey</artifactId>
			<exclusions>
				<exclusion>
					<artifactId>log4j-over-slf4j</artifactId>
					<groupId>org.slf4j</groupId>
				</exclusion>
			</exclusions>
		</dependency>
		<dependency>
			<groupId>org.mongodb</groupId>
			<artifactId>mongo-java-driver</artifactId>
			<version>3.1.0</version>
		</dependency>
		<dependency>
			<groupId>com.yihu.hos.resource</groupId>
			<artifactId>Hos-Framework</artifactId>
			<version>1.0.7</version>
		</dependency>
		<!--<dependency>-->
		<!--<groupId>com.yihu.ehr</groupId>-->
		<!--<artifactId>EHR</artifactId>-->
		<!--<version>0.99</version>-->
		<!--</dependency>-->
		<dependency>
			<groupId>com.yihu.core</groupId>
			<artifactId>ehr-redishelper</artifactId>
			<version>1.0.0</version>
		</dependency>
		<dependency>
			<groupId>com.yihu.core</groupId>
			<artifactId>html2image</artifactId>
			<version>0.9</version>
		</dependency>
		<!--============= Oracle JDBC driver start =============-->
		<dependency>
			<groupId>com.oracle</groupId>
			<artifactId>ojdbc6</artifactId>
			<version>11.2.0.3.0</version>
		</dependency>
		<dependency>
			<groupId>commons-dbutils</groupId>
			<artifactId>commons-dbutils</artifactId>
			<version>1.6</version>
		</dependency>
		<!--============= Oracle JDBC driver end =============-->
		<!-- jxl Excel export utility -->
		<dependency>
			<groupId>net.sourceforge.jexcelapi</groupId>
			<artifactId>jxl</artifactId>
			<version>2.6.10</version>
		</dependency>
		<dependency>
			<groupId>com.yihu.core</groupId>
			<artifactId>ehr-dbhelper</artifactId>
			<version>1.1.9</version>
		</dependency>
		<!-- ============= WEBSERVICE invocation ====================-->
		<dependency>
			<groupId>org.apache.cxf</groupId>
			<artifactId>cxf-rt-frontend-jaxws</artifactId>
			<version>3.0.0</version>
		</dependency>
		<dependency>
			<groupId>org.apache.cxf</groupId>
			<artifactId>cxf-rt-transports-http</artifactId>
			<version>3.0.0</version>
		</dependency>
		<dependency>
			<groupId>org.apache.cxf</groupId>
			<artifactId>cxf-rt-bindings-soap</artifactId>
			<version>3.0.0</version>
		</dependency>
		<!-- Using IntelliJ as the integrated development environment -->
		<!--<dependency>-->
			<!--<groupId>javax.servlet.jsp</groupId>-->
			<!--<artifactId>jsp-api</artifactId>-->
			<!--<version>2.2.1-b03</version>-->
		<!--</dependency>-->
		<dependency>
			<groupId>org.apache.tomcat.embed</groupId>
			<artifactId>tomcat-embed-jasper</artifactId>
			<scope>compile</scope>
		</dependency>
		<dependency>
			<groupId>javax.servlet</groupId>
			<artifactId>jstl</artifactId>
			<scope>compile</scope>
		</dependency>
	</dependencies>
	<build>
		<plugins>
			<plugin>
				<groupId>org.springframework.boot</groupId>
				<artifactId>spring-boot-maven-plugin</artifactId>
			</plugin>
		</plugins>
	</build>
</project>

+ 56 - 0
hos-admin/src/main/java/com/yihu/hos/common/ApplicationStart.java

@@ -0,0 +1,56 @@
package com.yihu.hos.common;
import com.yihu.ehr.framework.constrant.DateConvert;
import com.yihu.ehr.framework.util.log.LogService;
import org.apache.commons.beanutils.ConvertUtils;
import javax.servlet.ServletContextEvent;
import javax.servlet.ServletContextListener;
import java.util.Date;
/**
 * Executed on application startup
 * add by hzp at 2016-01-25
 */
public class ApplicationStart implements ServletContextListener {
    private static int rebootCount = 0;
    @Override
    public void contextInitialized(ServletContextEvent context) {
        // Use a custom converter for date formats
        ConvertUtils.register(new DateConvert(), Date.class);
        //initQuartz();// start the scheduled jobs
    }
    @Override
    public void contextDestroyed(ServletContextEvent context) {
        destroyedQuartz();
    }
    /**
     * Initialize scheduled jobs
     * @return
     */
    public void initQuartz(){
        try {
            //QuartzManager.startJobs();
        } catch(Exception e){
            LogService.getLogger().error(" Quartz job initing Exception " + e.getMessage());
        }
    }
    /**
     * Shut down scheduled jobs
     */
    public void destroyedQuartz(){
        try {
            //QuartzManager.shutdownJobs();
        } catch(Exception e){
            LogService.getLogger().error(" Quartz job destroying Exception " + e.getMessage());
        }
    }
}

+ 84 - 0
hos-admin/src/main/java/com/yihu/hos/common/CheckLoginFilter.java

@@ -0,0 +1,84 @@
package com.yihu.hos.common;
import javax.servlet.*;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
/**
 * Filter that checks whether the user has already logged in
 * add by hzp at 2016-03-03
 */
public class CheckLoginFilter implements Filter {
    private FilterConfig filterConfig = null;
    private String redirectURL = null;
    private List notCheckURLList = new ArrayList();
    public CheckLoginFilter() {
    }
    public void destroy() {
        notCheckURLList.clear();
    }
    public void doFilter(ServletRequest servletRequest, ServletResponse servletResponse, FilterChain chain) throws IOException, ServletException {
        HttpServletRequest request = (HttpServletRequest)servletRequest;
        HttpServletResponse response = (HttpServletResponse)servletResponse;
        HttpSession session = request.getSession();
        Object userInfo = session.getAttribute("userInfo");
        if(!filterList(request) && userInfo==null){
            // Redirect to the configured page
            response.sendRedirect(request.getContextPath() + redirectURL);
        }else{
            // Continue processing the request
            chain.doFilter(servletRequest, servletResponse);
        }
    }
    public void init(FilterConfig fConfig) throws ServletException {
        this.filterConfig = fConfig;
        redirectURL = filterConfig.getInitParameter("redirectURL");
        String notCheckURLListStr = filterConfig.getInitParameter("notCheckURLList");
        if(notCheckURLListStr!=null && notCheckURLListStr.length()>0){
            // Split the string into a list
            String[] list = notCheckURLListStr.split(";");
            for(String str : list)
            {
                notCheckURLList.add(str);
            }
        }
    }
    /**
     * Check whether the current request is in the list of URLs that are not filtered
     * @param request
     * @return
     */
    private boolean filterList(HttpServletRequest request)
    {
        String uri = request.getServletPath() + (request.getPathInfo() == null ? "" : request.getPathInfo());
        if(notCheckURLList.contains(uri)) {
            return true;
        }
        else{
            // URIs that are not filtered
            String[] notFilter = new String[] {"/develop"};
            for(String s : notFilter)
            {
                if (uri.indexOf(s) == 0)
                {
                    // If the URI contains an excluded URI prefix, skip filtering
                    return true;
                }
            }
        }
        return false;
    }
}
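Below is a minimal, hypothetical registration sketch (not part of this commit) showing how CheckLoginFilter's two init-params could be supplied in a Spring Boot setup; the class name, URL pattern, redirect target and exclusion list are illustrative assumptions. On Spring Boot 1.3 the FilterRegistrationBean class lives under org.springframework.boot.context.embedded instead of org.springframework.boot.web.servlet.

package com.yihu.hos.config;

import com.yihu.hos.common.CheckLoginFilter;
import org.springframework.boot.web.servlet.FilterRegistrationBean;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class CheckLoginFilterConfig {

    // Hypothetical wiring: registers the filter and passes the init-params
    // (redirectURL, notCheckURLList) that CheckLoginFilter.init() reads.
    @Bean
    public FilterRegistrationBean checkLoginFilterRegistration() {
        FilterRegistrationBean registration = new FilterRegistrationBean();
        registration.setFilter(new CheckLoginFilter());
        registration.addUrlPatterns("/*");                                       // check every request
        registration.addInitParameter("redirectURL", "/loginPage");              // where unauthenticated users go
        registration.addInitParameter("notCheckURLList", "/loginPage;/develop"); // ';'-separated exclusions
        return registration;
    }
}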

+ 87 - 0
hos-admin/src/main/java/com/yihu/hos/common/CommonPageController.java

@@ -0,0 +1,87 @@
package com.yihu.hos.common;
import com.yihu.ehr.framework.util.PKUtil;
import com.yihu.ehr.framework.util.controller.BaseController;
import com.yihu.hos.system.model.SystemUser;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.RequestMapping;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpSession;
/**
 * Data collection configuration pages
 * Created by hzp on 2015/8/12.
 */
@RequestMapping("/")
@Controller("commonPageController")
public class CommonPageController extends BaseController {
    /*
    Login page
     */
    @RequestMapping("loginPage")
    public String login(Model model) {
        System.out.println("=================登录成功===============");
        PKUtil.getUuid();
        System.out.println("=================登录结束===============");
        model.addAttribute("contentPage","/common/login");
        return "pageView";
    }
    /*
    Index page
     */
    @RequestMapping("indexPage")
    public String index(HttpServletRequest request,Model model) {
        HttpSession session = request.getSession();
        SystemUser user = (SystemUser) session.getAttribute("userInfo");
        model.addAttribute("userName", user.getUserName());
        // Build the menu
        String menu = "[{id: 1, text: '任务管理',icon:'${staticRoot}/images/index/menu2_icon.png'},\n" +
                "        {id: 11, pid: 1, text: '任务跟踪', url: '${contextRoot}/datacollect/trackJob',targetType:'1'},\n" +
                "        {id: 12, pid: 1, text: '任务补采', url: '${contextRoot}/datacollect/repeatDatacollect'},\n" +
                "        {id: 13, pid: 1, text: '任务配置', url: '${contextRoot}/datacollect/configJob'},\n" +
                "        {id: 2, text: '标准管理',icon:'${staticRoot}/images/index/menu3_icon.png'},\n" +
                "        {id: 21, pid: 2, text: '集成标准', url: '${contextRoot}/integration/initial/standard'},\n" +
                "        {id: 22, pid: 2, text: '应用标准', url: '${contextRoot}/integration/initial/application'},\n" +
                "        {id: 23, pid: 2, text: '适配方案', url: '${contextRoot}/adapterPlan/initial'},\n" +
                "        {id: 3, text: '资源管理',icon:'${staticRoot}/images/index/menu4_icon.png'},\n" +
                "        {id: 31, pid: 3, text: '资源注册', url: '${contextRoot}/resource/resource/initial'},\n" +
                "        {id: 32, pid: 3, text: '资源浏览', url: '${contextRoot}/resource/resourcePage'},\n" +
                "        {id: 34, pid: 3, text: '资源分类', url: '${contextRoot}/resource/rsCategory/initial'},\n" +
                "        {id: 35, pid: 3, text: '业务资源', url: '${contextRoot}/resourceRest/initial'},\n" +
                "        {id: 4, text: '维度管理',icon:'${staticRoot}/images/index/menu5_icon.png'},\n" +
                "        {id: 41, pid: 4, text: '维度配置', url: '${contextRoot}/dimension/dimension'},\n" +
                "        {id: 42, pid: 4, text: '维度类别配置', url: '${contextRoot}/dimension/dimensioncatetory'},\n" +
                "        {id: 9, text: '系统配置',icon:'${staticRoot}/images/index/menu6_icon.png'},\n" +
                "        {id: 91, pid: 9, text: '机构配置', url: '${contextRoot}/org/initial'},\n" +
                "        {id: 92, pid: 9, text: '数据源配置', url: '${contextRoot}/datasource/configSources'},\n" +
                "        {id: 93, pid: 9, text: '菜单配置', url: '${contextRoot}/menu/initial'},\n" +
                "        {id: 100, pid: 9, text: '菜单按钮配置', url: '${contextRoot}/menu/menuAction/initial'},\n" +
                "        {id: 94, pid: 9, text: '用户管理', url: '${contextRoot}/user/initial'},\n" +
                "        {id: 95, pid: 9, text: '角色管理', url: '${contextRoot}/role/initial'},\n" +
                "        {id: 96, pid: 9, text: '权限管理', url: '${contextRoot}/authority/initial'},\n" +
                "        {id: 97, pid: 9, text: '字典管理', url: '${contextRoot}/dict/initial' },\n" +
                "        {id: 98, pid: 9, text: '系统参数', url: '${contextRoot}/param/initial'},\n" +
                "        {id: 99, pid: 9, text: '<spring:message code=\"title.app.manage\"/>', url: '${contextRoot}/app/initial'}]";
        model.addAttribute("menu", menu);
        model.addAttribute("contentPage","/common/index");
        return "pageView";
    }
    /*
    System home page
     */
    @RequestMapping("homePage")
    public String home(Model model) {
        model.addAttribute("contentPage","/common/home");
        return "partView";
    }
}

+ 349 - 0
hos-admin/src/main/java/com/yihu/hos/common/JXLUtil.java

@@ -0,0 +1,349 @@
package com.yihu.hos.common;
import jxl.Cell;
import jxl.Sheet;
import jxl.Workbook;
import jxl.write.Label;
import jxl.write.WritableSheet;
import jxl.write.WritableWorkbook;
import javax.servlet.http.HttpServletResponse;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.io.OutputStream;
import java.text.SimpleDateFormat;
import java.util.*;
/**
 * Created by chenweida on 2016/2/26.
 */
public class JXLUtil {
    public static void main(String[] args) throws Exception {
        OutputStream os = new FileOutputStream("D:/AAA.xlsx");
        List data = new ArrayList<>();
        Map<String, Object> param = new HashMap<String, Object>();
        param.put("aaa", "aaa1");
        data.add(param);
        data.add(param);
        data.add(param);
        data.add(param);
        HashMap<String, String> fieldMap = new LinkedHashMap<String, String>();
        fieldMap.put("aaa", "aaa1");
        fieldMap.put("aaa1", "aaa2");
        fieldMap.put("aaa2", "aaa3");
        JXLUtil.listMapToExcel(data, fieldMap, "aaaaaa", os);
    }
    /**
     * Export to Excel 2003 (.xls)
     *
     * @param list      data collection
     * @param fieldMap  mapping from the class's English property names to Chinese column names in Excel
     * @param sheetName worksheet name
     * @param out       output stream to export to
     */
    public static <T> void listObjectToExcel(List<T> list, Map<String, String> fieldMap, String sheetName, OutputStream out) throws Exception {
        if (list == null || list.size() == 0) {
            throw new Exception("数据源中没有任何数据");
        }
        int sheetSize = list.size();
        if (sheetSize > 65535 || sheetSize < 1) {
            sheetSize = 65535;
        }
        // Create the workbook and send it to the destination given by the OutputStream
        WritableWorkbook wwb;
        try {
            wwb = Workbook.createWorkbook(out);
            // 1. Work out how many worksheets are needed
            int sheetNum = list.size() % sheetSize == 0 ? list.size() / sheetSize : (list.size() / sheetSize + 1);
            // 2. Create the worksheets and fill them with data
            for (int i = 0; i < sheetNum; i++) {
                // Case: only one worksheet
                if (1 == sheetNum) {
                    WritableSheet sheet = wwb.createSheet(sheetName, i);
                    fillObjectSheet(sheet, list, fieldMap, 0, list.size() - 1);
                    // Case: multiple worksheets
                } else {
                    WritableSheet sheet = wwb.createSheet(sheetName + (i + 1), i);
                    // Compute the start and end indexes
                    int firstIndex = i * sheetSize;
                    int lastIndex = (i + 1) * sheetSize - 1 > list.size() - 1 ? list
                            .size() - 1 : (i + 1) * sheetSize - 1;
                    // Fill the worksheet
                    fillObjectSheet(sheet, list, fieldMap, firstIndex, lastIndex);
                }
            }
            wwb.write();
            wwb.close();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
    /**
     * Export to Excel 2003 (.xls)
     *
     * @param list      data collection
     * @param fieldMap  mapping from map keys to Chinese column names in Excel
     * @param sheetName worksheet name
     * @param out       output stream to export to
     */
    public static <T> void listMapToExcel(List<Map<String, Object>> list, Map<String, String> fieldMap, String sheetName, OutputStream out) throws Exception {
        if (list == null || list.size() == 0) {
            throw new Exception("数据源中没有任何数据");
        }
        int sheetSize = list.size();
        if (sheetSize > 65535 || sheetSize < 1) {
            sheetSize = 65535;
        }
        // Create the workbook and send it to the destination given by the OutputStream
        WritableWorkbook wwb;
        try {
            wwb = Workbook.createWorkbook(out);
            // 1. Work out how many worksheets are needed
            int sheetNum = list.size() % sheetSize == 0 ? list.size() / sheetSize : (list.size() / sheetSize + 1);
            // 2. Create the worksheets and fill them with data
            for (int i = 0; i < sheetNum; i++) {
                // Case: only one worksheet
                if (1 == sheetNum) {
                    WritableSheet sheet = wwb.createSheet(sheetName, i);
                    fillMapSheet(sheet, list, fieldMap, 0, list.size() - 1);
                    // Case: multiple worksheets
                } else {
                    WritableSheet sheet = wwb.createSheet(sheetName + (i + 1), i);
                    // Compute the start and end indexes
                    int firstIndex = i * sheetSize;
                    int lastIndex = (i + 1) * sheetSize - 1 > list.size() - 1 ? list
                            .size() - 1 : (i + 1) * sheetSize - 1;
                    // Fill the worksheet
                    fillMapSheet(sheet, list, fieldMap, firstIndex, lastIndex);
                }
            }
            wwb.write();
            wwb.close();
            out.close();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
    /**
     * <--------------------- list to Excel --------------------->
     * Export to the browser
     */
    public static <T> void listMapToExcel(List<Map<String, Object>> list, Map<String, String> fieldMap, String sheetName, HttpServletResponse response) {
        // Default file name is the current timestamp (year-month-day hour-minute-second)
        String fileName = new SimpleDateFormat("yyyyMMddhhmmss").format(new Date()).toString();
        // Set the response headers
        response.reset();
        response.setContentType("application/vnd.ms-excel"); // output as an Excel file
        response.setHeader("Content-disposition", "attachment; filename=" + fileName + ".xls");
        // Create the workbook and send it to the browser
        try {
            OutputStream out = response.getOutputStream();
            listMapToExcel(list, fieldMap, sheetName, out);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
    /**
     * <--------------------- list to Excel --------------------->
     * Export to the browser
     */
    public static <T> void listObjectToExcel(List<T> list, Map<String, String> fieldMap, String sheetName, HttpServletResponse response) {
        // Default file name is the current timestamp (year-month-day hour-minute-second)
        String fileName = new SimpleDateFormat("yyyyMMddhhmmss").format(new Date()).toString();
        // Set the response headers
        response.reset();
        response.setContentType("application/vnd.ms-excel"); // output as an Excel file
        response.setHeader("Content-disposition", "attachment; filename=" + fileName + ".xls");
        // Create the workbook and send it to the browser
        try {
            OutputStream out = response.getOutputStream();
            listObjectToExcel(list, fieldMap, sheetName, out);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
    // Set automatic column widths for the sheet (and bold the header row)
    private static void setColumnAutoSize(WritableSheet ws, int extraWith) {
        // Find the width of the widest cell in each column
        for (int i = 0; i < ws.getColumns(); i++) {
            int colWith = 0;
            for (int j = 0; j < ws.getRows(); j++) {
                String content = ws.getCell(i, j).getContents().toString();
                int cellWith = content.length();
                if (colWith < cellWith) {
                    colWith = cellWith;
                }
            }
            // Set the column width to the widest cell width plus the extra width
            ws.setColumnView(i, colWith + extraWith);
        }
    }
    /**
     * @param in          : input stream
     * @param entityClass : entity class
     * @param fieldMap    : map of Chinese column headers in Excel to the class's English property names
     * @MethodName : excelToList
     * @Description : convert an Excel sheet into a List
     */
    public static <T> List<T> excelToList(InputStream in, String sheetName, Class<T> entityClass, HashMap<String, String> fieldMap) {
        // The list to be returned
        List<T> resultList = new ArrayList<T>();
        try {
            // Create a Workbook from the Excel input
            Workbook wb = Workbook.getWorkbook(in);
            // Get the worksheet
            Sheet sheet = wb.getSheet(sheetName);
            // Count the sheet's non-empty rows
            int realRows = 0;
            for (int i = 0; i < sheet.getRows(); i++) {
                int nullCols = 0;
                for (int j = 0; j < sheet.getColumns(); j++) {
                    Cell currentCell = sheet.getCell(j, i);
                    if (currentCell == null
                            || "".equals(currentCell.getContents().toString())) {
                        nullCols++;
                    }
                }
                if (nullCols == sheet.getColumns()) {
                    break;
                } else {
                    realRows++;
                }
            }
            // Report an error if the Excel sheet contains no data
            if (realRows <= 1) {
                throw new Exception("Excel文件中没有任何数据");
            }
            Cell[] firstRow = sheet.getRow(0);
            String[] excelFieldNames = new String[firstRow.length];
            // Read the column names from Excel
            for (int i = 0; i < firstRow.length; i++) {
                excelFieldNames[i] = firstRow[i].getContents().toString().trim();
            }
            // Check that all required columns exist in the Excel sheet
            boolean isExist = true;
            List<String> excelFieldList = Arrays.asList(excelFieldNames);
            for (String cnName : fieldMap.keySet()) {
                if (!excelFieldList.contains(cnName)) {
                    isExist = false;
                    break;
                }
            }
            // If any column name is missing, throw an exception
            if (!isExist) {
                throw new Exception("Excel中缺少必要的字段,或字段名称有误");
            }
            // Map column names to column indexes so an index can be looked up by name
            LinkedHashMap<String, Integer> colMap = new LinkedHashMap<String, Integer>();
            for (int i = 0; i < excelFieldNames.length; i++) {
                colMap.put(excelFieldNames[i], firstRow[i].getColumn());
            }
            // Convert the sheet into a list
            for (int i = 1; i < realRows; i++) {
                // Create the target object
                T entity = entityClass.newInstance();
                // Assign values to the object's fields
                for (Map.Entry<String, String> entry : fieldMap.entrySet()) {
                    // Chinese column name
                    String cnNormalName = entry.getKey();
                    // English property name
                    String enNormalName = entry.getValue();
                    // Look up the column index by the Chinese column name
                    int col = colMap.get(cnNormalName);
                    // Read the content of the current cell
                    String content = sheet.getCell(col, i).getContents().toString().trim();
                    ReflectUtil.setProperty(entity, enNormalName, content);
                }
                resultList.add(entity);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        return resultList;
    }
    // Fill the worksheet with data
    private static <T> void fillMapSheet(WritableSheet sheet, List<Map<String, Object>> list,
                                         Map<String, String> fieldMap, int firstIndex, int lastIndex) throws Exception {
        // Arrays holding the English property names and Chinese column names
        String[] enFields = new String[fieldMap.size()];
        String[] cnFields = new String[fieldMap.size()];
        // Populate the arrays
        int count = 0;
        for (Map.Entry<String, String> entry : fieldMap.entrySet()) {
            enFields[count] = entry.getKey();
            cnFields[count] = entry.getValue();
            count++;
        }
        // Write the header row
        for (int i = 0; i < cnFields.length; i++) {
            Label label = new Label(i, 0, cnFields[i]);
            sheet.addCell(label);
        }
        // Write the data rows
        int rowNo = 1;
        for (int index = firstIndex; index <= lastIndex; index++) {
            // Get a single item
            Map<String, Object> item = list.get(index);
            for (int i = 0; i < enFields.length; i++) {
                Object objValue = null;
                objValue = item.get(enFields[i]);
                String fieldValue = objValue == null ? "" : objValue.toString();
                Label label = new Label(i, rowNo, fieldValue);
                sheet.addCell(label);
            }
            rowNo++;
        }
        // Auto-size the columns
        setColumnAutoSize(sheet, 5);
    }
    // Fill the worksheet with data
    private static <T> void fillObjectSheet(WritableSheet sheet, List<T> list,
                                            Map<String, String> fieldMap, int firstIndex, int lastIndex) throws Exception {
        // Arrays holding the English property names and Chinese column names
        String[] enFields = new String[fieldMap.size()];
        String[] cnFields = new String[fieldMap.size()];
        // Populate the arrays
        int count = 0;
        for (Map.Entry<String, String> entry : fieldMap.entrySet()) {
            enFields[count] = entry.getKey();
            cnFields[count] = entry.getValue();
            count++;
        }
        // Write the header row
        for (int i = 0; i < cnFields.length; i++) {
            Label label = new Label(i, 0, cnFields[i]);
            sheet.addCell(label);
        }
        // Write the data rows
        int rowNo = 1;
        for (int index = firstIndex; index <= lastIndex; index++) {
            // Get a single item
            T item = list.get(index);
            for (int i = 0; i < enFields.length; i++) {
                Object objValue = null;
                objValue = ReflectUtil.getNestedProperty(item, enFields[i]);
                String fieldValue = objValue == null ? "" : objValue.toString();
                Label label = new Label(i, rowNo, fieldValue);
                sheet.addCell(label);
            }
            rowNo++;
        }
        // Auto-size the columns
        setColumnAutoSize(sheet, 5);
    }
}
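As a usage illustration only (not code from this commit), the sketch below calls the HttpServletResponse overload of listMapToExcel from a Spring MVC controller; the mapping path, column mapping and row data are invented for the example.

package com.yihu.hos.common;

import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;

import javax.servlet.http.HttpServletResponse;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

@Controller
public class ExportDemoController {

    // Hypothetical endpoint: JXLUtil sets the Content-Disposition header itself
    // and streams the generated .xls workbook to the browser.
    @RequestMapping("/demo/exportJobLog")
    public void exportJobLog(HttpServletResponse response) {
        List<Map<String, Object>> rows = new ArrayList<>();
        Map<String, Object> row = new HashMap<>();
        row.put("jobName", "daily-collect");
        row.put("status", "success");
        rows.add(row);

        // LinkedHashMap preserves column order: key = map key, value = column header
        Map<String, String> columns = new LinkedHashMap<>();
        columns.put("jobName", "Job Name");
        columns.put("status", "Status");

        JXLUtil.listMapToExcel(rows, columns, "job_log", response);
    }
}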

+ 150 - 0
hos-admin/src/main/java/com/yihu/hos/common/ReflectUtil.java

@@ -0,0 +1,150 @@
package com.yihu.hos.common;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
/**
 * Created by chenweida on 2016/2/26.
 */
public class ReflectUtil {
    @SuppressWarnings("all")
    public static void main(String[] args) throws Exception {
    }
    /**
     * @description Set a property value on an object
     * @param obj
     *            target object
     * @param fieldName
     *            property name
     * @param value
     *            property value
     */
    public static void setProperty(Object obj, String fieldName, Object value) {
        try {
            Field field = obj.getClass().getDeclaredField(fieldName);
            if (field != null) {
                Class<?> fieldType = field.getType();
                field.setAccessible(true);
                // Assign the value according to the field type
                if (String.class == fieldType) {
                    field.set(obj, String.valueOf(value));
                } else if ((Integer.TYPE == fieldType)
                        || (Integer.class == fieldType)) {
                    field.set(obj, Integer.parseInt(value.toString()));
                } else if ((Long.TYPE == fieldType)
                        || (Long.class == fieldType)) {
                    field.set(obj, Long.valueOf(value.toString()));
                } else if ((Float.TYPE == fieldType)
                        || (Float.class == fieldType)) {
                    field.set(obj, Float.valueOf(value.toString()));
                } else if ((Short.TYPE == fieldType)
                        || (Short.class == fieldType)) {
                    field.set(obj, Short.valueOf(value.toString()));
                } else if ((Double.TYPE == fieldType)
                        || (Double.class == fieldType)) {
                    field.set(obj, Double.valueOf(value.toString()));
                } else if (Character.TYPE == fieldType) {
                    if ((value != null) && (value.toString().length() > 0)) {
                        field.set(obj,
                                Character.valueOf(value.toString().charAt(0)));
                    }
                } else if (Date.class == fieldType) {
                    if (value instanceof Date) {
                        field.set(obj, value);
                    } else if (value instanceof String) {
                        field.set(obj, new SimpleDateFormat(
                                "yyyy-MM-dd HH:mm:ss").parse(value.toString()));
                    }
                } else {
                    field.set(obj, value);
                }
                field.setAccessible(false);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
    /**
     * @description Get a property value from an object
     * @param obj
     *            target object
     * @param fieldName
     *            property name
     */
    public static Object getProperty(Object obj, String fieldName) {
        Field field = getFieldName(obj, fieldName);
        Object value = null;
        try {
            if (field != null) {
                Class<?> fieldType = field.getType();
                field.setAccessible(true);
                // Read the value according to the field type
                if (Date.class == fieldType) {
                    Object o = field.get(obj);
                    if (o != null) {
                        value = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
                                .format(o);
                    }
                } else {
                    value = field.get(obj);
                }
                field.setAccessible(false);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        return value;
    }
    // Get a nested property value such as "department.name"
    public static Object getNestedProperty(Object obj, String fieldName) {
        Object value = null;
        String[] attributes = fieldName.split("\\.");
        try {
            value = getProperty(obj, attributes[0]);
            for (int i = 1; i < attributes.length; i++) {
                value = getProperty(value, attributes[i]);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        return value;
    }
    // Look up the declared field, searching up the class hierarchy
    public static Field getFieldName(Object obj, String fieldName) {
        for (Class<?> superClass = obj.getClass(); superClass != Object.class; superClass = superClass
                .getSuperclass()) {
            try {
                return superClass.getDeclaredField(fieldName);
            } catch (NoSuchFieldException e) {
            }
        }
        return null;
    }
    /**
     * Get the names of all fields of an object
     * @param obj target object
     * @return array of field names
     */
    public static String[] getFieldNames(Object obj) {
        Class<?> clazz = obj.getClass();
        Field[] fields = clazz.getDeclaredFields();
        List<String> fieldNames = new ArrayList<String>();
        for (int i = 0; i < fields.length; i++) {
            if ((fields[i].getModifiers() & Modifier.STATIC) == 0) {
                fieldNames.add(fields[i].getName());
            }
        }
        return fieldNames.toArray(new String[fieldNames.size()]);
    }
    public void say(String name){
    }
}
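A short, self-contained usage sketch (not part of the commit) for the ReflectUtil helpers above; the Employee and Department classes are made up for the example.

package com.yihu.hos.common;

import java.util.Arrays;

public class ReflectUtilDemo {

    static class Department { private String name; }

    static class Employee {
        private String title;
        private Integer age;
        private Department department = new Department();
    }

    public static void main(String[] args) {
        Employee emp = new Employee();

        // setProperty converts the value to the declared field type before assigning it
        ReflectUtil.setProperty(emp, "title", "engineer");
        ReflectUtil.setProperty(emp, "age", "30");               // String "30" becomes Integer 30
        ReflectUtil.setProperty(emp.department, "name", "ESB");

        // getNestedProperty walks dot-separated paths such as "department.name"
        System.out.println(ReflectUtil.getProperty(emp, "title"));                  // engineer
        System.out.println(ReflectUtil.getNestedProperty(emp, "department.name"));  // ESB
        System.out.println(Arrays.toString(ReflectUtil.getFieldNames(emp)));        // [title, age, department]
    }
}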

+ 121 - 0
hos-admin/src/main/java/com/yihu/hos/common/Services.java

@@ -0,0 +1,121 @@
package com.yihu.hos.common;
/**
 * Top-level platform service list.
 *
 */
public class Services {
    public final static String EnvironmentOption = "com.yihu.ha.EnvironmentOption";
    public final static String LogService = "com.yihu.ha.LogService";
    public final static String RedisClient = "com.yihu.ha.RedisClient";
    public final static String HBaseClient= "com.yihu.ha.HBaseClient";
    public final static String SolrClient = "com.yihu.ha.SolrClient";
    public final static String SQLGeneralDAO = "com.yihu.ha.SQLGeneralDAO";
    public final static String AddressManager = "com.yihu.ha.AddressManager";
    public final static String AppManager = "com.yihu.ha.AppManager";
    public final static String JsonPackageManager = "com.yihu.ha.JsonPackageManager";
    public final static String EhrArchiveManager = "com.yihu.ha.EhrArchiveManager";
    public final static String SNSPlatformManager = "com.yihu.ha.SNSPlatformManager";
    public final static String DemographicIndex = "com.yihu.ha.DemographicIndex";
    public final static String DemographicIndexStragety = "com.yihu.ha.DemographicIndexStragety";
    public final static String SystemDictManager = "com.yihu.ha.SystemDictManager";
    public final static String OrgManager = "com.yihu.ha.OrgManager";
    public final static String CardManager = "com.yihu.ha.CardManager";
    public final static String TextResource = "com.yihu.ha.TextResource";
    public final static String SecurityManager = "com.yihu.ha.SecurityManager";
    public final static String CDAVersionManager = "com.yihu.ha.CDAVersionManager";
    public final static String CDAManager="com.yihu.ha.CDADocumentManager";
    public final static String StandardSourceManager="com.yihu.ha.StandardSourceManager";
    public final static String DataSetManager = "com.yihu.ha.DataSetManager";
    public final static String MetaDataManager = "com.yihu.ha.MetaDataManager";
    public final static String DictEntryManager = "com.yihu.ha.DictEntryManager";
    public final static String DictManager = "com.yihu.ha.DictManager";
    public final static String StdDiffer = "com.yihu.ha.StdDiffer";
    public final static String StdDispatchManager = "com.yihu.ha.StdDispatchManager";
    public final static String StdVersionFileManager = "com.yihu.ha.StdVersionFileManager";
    public final static String SchemaManager = "com.yihu.ha.SchemaManager";
    public final static String UserManager = "com.yihu.ha.UserManager";
    public final static String TokenManager = "com.yihu.ha.TokenManager";
    public final static String ObjectMapper = "com.yihu.ha.ObjectMapper";
    public final static String ArchiveTemplateManager = "com.yihu.ha.ArchiveTemplateManager";
    public final static String DataSetRelationshipManager = "com.yihu.ha.CdaDatasetRelationshipManager";
    public final static String ConventionalDictEntry = "com.yihu.ha.ConventionalDictEntry";
    public final static String SolrIndexer = "com.yihu.ha.SolrIndexer";
    public final static String JobManager = "com.yihu.ha.JobManager";
    public final static String SchedulerFactory = "com.yihu.ha.SchedulerFactory";
    public final static String MonitorManager = "com.yihu.ha.MonitorManager";
    public final static String OrgDictManager = "com.yihu.ha.OrgDictManager";
    public final static String OrgDictItemManager = "com.yihu.ha.OrgDictItemManager";
    public final static String OrgDataSetManager = "com.yihu.ha.OrgDataSetManager";
    public final static String OrgMetaDataManager = "com.yihu.ha.OrgMetaDataManager";
    public final static String OrgAdapterPlanManager = "com.yihu.ha.OrgAdapterPlanManager";
    public final static String AdapterDataSetManager = "com.yihu.ha.AdapterDataSetManager";
    public final static String AdapterDictManager = "com.yihu.ha.AdapterDictManager";
    public final static String AdapterOrgManager = "com.yihu.ha.AdapterOrgManager";
    public final static String AdapterInfoSendManager = "com.yihu.ha.AdapterInfoSendManager";
    public final static String RsCategoryServiceImpl = "com.yihu.ha.resource.service.impl.RsCategoryServiceImpl";
    /******************************************************************************************/
    public final static String Organization = "com.yihu.ehr.system.service.OrganizationManager";
    public final static String Datacollect = "com.yihu.ehr.datacollect.service.DatacollectManager";
    public final static String DatacollectService = "com.yihu.ehr.datacollect.service.DatacollectService";
    public final static String DatapushService = "com.yihu.ehr.datacollect.service.DatapushService";
    public final static String Datasource = "com.yihu.ehr.system.service.DatasourceManager";
    public final static String BaseDict = "com.yihu.ehr.system.service.BaseDictManager";
    public final static String StdService = "com.yihu.ehr.resource.service.impl.StdService";
}

+ 98 - 0
hos-admin/src/main/java/com/yihu/hos/common/mongo/IMongoDBAdminer.java

@@ -0,0 +1,98 @@
package com.yihu.hos.common.mongo;
import java.util.List;
/**
 * @created Airhead 2016/2/17.
 */
public interface IMongoDBAdminer {
    /**
     * Drops this collection from the Database.
     *
     * @mongodb.driver.manual reference/command/drop/ Drop Collection
     */
    void drop(String collectionName);
    /**
     * Create an index with the given keys.
     *
     * @param keys an object describing the index key(s), which may not be null.
     * @return the index name
     * @mongodb.driver.manual reference/command/createIndexes Create indexes
     */
    String createIndex(String collectionName, String keys);
    /**
     * Create an index with the given keys and options.
     *
     * @param keys         an object describing the index key(s), which may not be null.
     * @param indexOptions the options for the index
     * @return the index name
     * @mongodb.driver.manual reference/command/createIndexes Create indexes
     */
    String createIndex(String collectionName, String keys, String indexOptions);
    /**
     * Create multiple indexes.
     *
     * @param indexes the list of indexes
     * @return the list of index names
     * @mongodb.driver.manual reference/command/createIndexes Create indexes
     * @mongodb.server.release 2.6
     */
//    List<String> createIndexes(List<IndexModel> indexes);
    /**
     * Get all the indexes in this collection.
     *
     * @return the list indexes iterable interface
     * @mongodb.driver.manual reference/command/listIndexes/ List indexes
     */
    List<String> listIndexes(String collectionName);
    /**
     * Get all the indexes in this collection.
     *
     * @param resultClass the class to decode each document into
     * @param <TResult>   the target document type of the iterable.
     * @return the list indexes iterable interface
     * @mongodb.driver.manual reference/command/listIndexes/ List indexes
     */
//    <TResult> ListIndexesIterable<TResult> listIndexes(Class<TResult> resultClass);
    /**
     * Drops the index given its name.
     *
     * @param indexName the name of the index to remove
     * @mongodb.driver.manual reference/command/dropIndexes/ Drop indexes
     */
    void dropIndex(String collectionName, String indexName);
    /**
     * Drop all the indexes on this collection, except for the default on _id.
     *
     * @mongodb.driver.manual reference/command/dropIndexes/ Drop indexes
     */
    void dropIndexes(String collectionName);
    /**
     * Rename the collection with oldCollectionName to the newCollectionName.
     *
     * @param newCollectionName the namespace the collection will be renamed to
     * @throws com.mongodb.MongoServerException if you provide a newCollectionName that is the name of an existing collection, or if the
     *                                          oldCollectionName is the name of a collection that doesn't exist
     * @mongodb.driver.manual reference/commands/renameCollection Rename collection
     */
    void renameCollection(String collectionName, String newCollectionName);
    /**
     * Rename the collection with oldCollectionName to the newCollectionName.
     *
     * @param newCollectionName       the name the collection will be renamed to
     * @param renameCollectionOptions the options for renaming a collection
     * @throws com.mongodb.MongoServerException if you provide a newCollectionName that is the name of an existing collection and dropTarget
     *                                          is false, or if the oldCollectionName is the name of a collection that doesn't exist
     * @mongodb.driver.manual reference/commands/renameCollection Rename collection
     */
    void renameCollection(String collectionName, String newCollectionName, String renameCollectionOptions);
}
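For orientation only, here is a hypothetical caller of the interface above; adminer stands in for whatever implementation this commit provides elsewhere (e.g. the MongoDB* classes), and the collection name and JSON strings are illustrative.

package com.yihu.hos.common.mongo;

public class MongoAdminExample {

    // Illustrative index management using only the IMongoDBAdminer methods declared above.
    public static void manageIndexes(IMongoDBAdminer adminer) {
        // Create a single-field index; keys are passed as a JSON document string.
        String indexName = adminer.createIndex("patient", "{\"card_no\": 1}");

        // Create a compound index, passing index options as a second JSON string.
        adminer.createIndex("patient", "{\"org_code\": 1, \"event_no\": 1}", "{\"unique\": true}");

        // List the collection's indexes, then drop the first one created.
        for (String index : adminer.listIndexes("patient")) {
            System.out.println(index);
        }
        adminer.dropIndex("patient", indexName);
    }
}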

+ 376 - 0
hos-admin/src/main/java/com/yihu/hos/common/mongo/IMongoDBRunner.java

@@ -0,0 +1,376 @@
package com.yihu.hos.common.mongo;
import java.util.List;
/**
 * MongoDB CRUD interface; these methods are taken from MongoCollection
 *
 * @created Airhead 2016/2/17.
 */
public interface IMongoDBRunner {
    long count(String collectionName);
    /**
     * Counts the number of documents in the collection according to the given options.
     *
     * @param filter the query filter
     * @return the number of documents in the collection
     */
    long count(String collectionName, String filter);
    /**
     * Counts the number of documents in the collection according to the given options.
     *
     * @param filter  the query filter
     * @param options the options describing the count
     * @return the number of documents in the collection
     */
    long count(String collectionName, String filter, String options);
    /**
     * Gets the distinct values of the specified field name.
     *
     * @param fieldName   the field name
     * @param resultClass the class to cast any distinct items into.
     * @param <TResult>   the target type of the iterable.
     * @return an iterable of distinct values
     * @mongodb.driver.manual reference/command/distinct/ Distinct
     */
//    <TResult> DistinctIterable<TResult> distinct(String fieldName, Class<TResult> resultClass);
    /**
     * Gets the distinct values of the specified field name.
     *
     * @param fieldName   the field name
     * @param filter      the query filter
     * @param resultClass the class to cast any distinct items into.
     * @param <TResult>   the target type of the iterable.
     * @return an iterable of distinct values
     * @mongodb.driver.manual reference/command/distinct/ Distinct
     */
//    <TResult> DistinctIterable<TResult> distinct(String fieldName, Bson filter, Class<TResult> resultClass);
    /**
     * Finds all documents in the collection.
     *
     * @return the find iterable interface
     * @mongodb.driver.manual tutorial/query-documents/ Find
     */
    List<String> find(String collectionName);
    /**
     * Finds all documents in the collection.
     *
     * @param resultClass the class to decode each document into
     * @param <TResult>   the target document type of the iterable.
     * @return the find iterable interface
     * @mongodb.driver.manual tutorial/query-documents/ Find
     */
//    <TResult> FindIterable<TResult> find(Class<TResult> resultClass);
    /**
     * Finds all documents in the collection.
     *
     * @param filter the query filter
     * @return the find iterable interface
     * @mongodb.driver.manual tutorial/query-documents/ Find
     */
    List<String> find(String collectionName, String filter);
    /**
     * Finds all documents in the collection.
     *
     * @param filter the query filter
     * @return the find iterable interface
     * @mongodb.driver.manual tutorial/query-documents/ Find
     */
    List<String> find(String collectionName, String filter, String projection);
    /**
     * Finds all documents in the collection.
     *
     * @param filter the query filter
     * @return the find iterable interface
     * @mongodb.driver.manual tutorial/query-documents/ Find
     */
    List<String> find(String collectionName, String filter, String projection, String options);
    /**
     * Finds all documents in the collection.
     *
     * @param filter      the query filter
     * @param resultClass the class to decode each document into
     * @param <TResult>   the target document type of the iterable.
     * @return the find iterable interface
     * @mongodb.driver.manual tutorial/query-documents/ Find
     */
//    <TResult> FindIterable<TResult> find(Bson filter, Class<TResult> resultClass);
    /**
     * Aggregates documents according to the specified aggregation pipeline.
     *
     * @param pipeline the aggregate pipeline
     * @return an iterable containing the result of the aggregation operation
     * @mongodb.driver.manual aggregation/ Aggregation
     * @mongodb.server.release 2.2
     */
//    List<String> aggregate(String collectionName, List<? extends String> pipeline);
    /**
     * Aggregates documents according to the specified aggregation pipeline.
     *
     * @param pipeline    the aggregate pipeline
     * @param resultClass the class to decode each document into
     * @param <TResult>   the target document type of the iterable.
     * @return an iterable containing the result of the aggregation operation
     * @mongodb.driver.manual aggregation/ Aggregation
     * @mongodb.server.release 2.2
     */
//    <TResult> AggregateIterable<TResult> aggregate(List<? extends Bson> pipeline, Class<TResult> resultClass);
    /**
     * Aggregates documents according to the specified map-reduce function.
     *
     * @param mapFunction    A JavaScript function that associates or "maps" a value with a key and emits the key and value pair.
     * @param reduceFunction A JavaScript function that "reduces" to a single object all the values associated with a particular key.
     * @return an iterable containing the result of the map-reduce operation
     * @mongodb.driver.manual reference/command/mapReduce/ map-reduce
     */
//    List<String> mapReduce(String collectionName, String mapFunction, String reduceFunction);
    /**
     * Aggregates documents according to the specified map-reduce function.
     *
     * @param mapFunction    A JavaScript function that associates or "maps" a value with a key and emits the key and value pair.
     * @param reduceFunction A JavaScript function that "reduces" to a single object all the values associated with a particular key.
     * @param resultClass    the class to decode each resulting document into.
     * @param <TResult>      the target document type of the iterable.
     * @return an iterable containing the result of the map-reduce operation
     * @mongodb.driver.manual reference/command/mapReduce/ map-reduce
     */
//    <TResult> MapReduceIterable<TResult> mapReduce(String mapFunction, String reduceFunction, Class<TResult> resultClass);
    /**
     * Executes a mix of inserts, updates, replaces, and deletes.
     *
     * @param requests the writes to execute
     * @return the result of the bulk write
     * @throws com.mongodb.MongoBulkWriteException if there's an exception in the bulk write operation
     * @throws com.mongodb.MongoException          if there's an exception running the operation
     */
//    BulkWriteResult bulkWrite(List<? extends WriteModel<? extends TDocument>> requests);
    /**
     * Executes a mix of inserts, updates, replaces, and deletes.
     *
     * @param requests the writes to execute
     * @param options  the options to apply to the bulk write operation
     * @return the result of the bulk write
     * @throws com.mongodb.MongoBulkWriteException if there's an exception in the bulk write operation
     * @throws com.mongodb.MongoException          if there's an exception running the operation
     */
//    BulkWriteResult bulkWrite(List<? extends WriteModel<? extends TDocument>> requests, BulkWriteOptions options);
    /**
     * Inserts the provided document. If the document is missing an identifier, the driver should generate one.
     *
     * @param document the document to insert
     * @throws com.mongodb.MongoWriteException        if the write failed due to some failure specific to the insert command
     * @throws com.mongodb.MongoWriteConcernException if the write failed due to being unable to fulfil the write concern
     * @throws com.mongodb.MongoException             if the write failed due to some other failure
     */
    void insertOne(String collectionName, String document);
    /**
     * Inserts one or more documents.  A call to this method is equivalent to a call to the {@code bulkWrite} method
     *
     * @param documents the documents to insert
     * @throws com.mongodb.MongoBulkWriteException if there's an exception in the bulk write operation
     * @throws com.mongodb.MongoException          if the write failed due to some other failure
     * @see com.mongodb.client.MongoCollection#bulkWrite
     */
    void insertMany(String collectionName, List<String> documents);
    /**
     * Inserts one or more documents.  A call to this method is equivalent to a call to the {@code bulkWrite} method
     *
     * @param documents the documents to insert
     * @param options   the options to apply to the operation
     * @throws com.mongodb.DuplicateKeyException if the write failed due to a duplicate unique key
     * @throws com.mongodb.WriteConcernException if the write failed due to being unable to fulfil the write concern
     * @throws com.mongodb.MongoException        if the write failed due to some other failure
     */
    void insertMany(String collectionName, List<String> documents, String options);
    /**
     * Removes at most one document from the collection that matches the given filter.  If no documents match, the collection is not
     * modified.
     *
     * @param filter the query filter to apply to the delete operation
     * @return the result of the remove one operation
     * @throws com.mongodb.MongoWriteException        if the write failed due to some failure specific to the delete command
     * @throws com.mongodb.MongoWriteConcernException if the write failed due to being unable to fulfil the write concern
     * @throws com.mongodb.MongoException             if the write failed due to some other failure
     */
    long deleteOne(String collectionName, String filter);
    /**
     * Removes all documents from the collection that match the given query filter.  If no documents match, the collection is not modified.
     *
     * @param filter the query filter to apply to the delete operation
     * @return the result of the remove many operation
     * @throws com.mongodb.MongoWriteException        if the write failed due to some failure specific to the delete command
     * @throws com.mongodb.MongoWriteConcernException if the write failed due to being unable to fulfil the write concern
     * @throws com.mongodb.MongoException             if the write failed due to some other failure
     */
    long deleteMany(String collectionName, String filter);
    /**
     * Replace a document in the collection according to the specified arguments.
     *
     * @param filter      the query filter to apply to the replace operation
     * @param replacement the replacement document
     * @return the result of the replace one operation
     * @throws com.mongodb.MongoWriteException        if the write failed due to some failure specific to the replace command
     * @throws com.mongodb.MongoWriteConcernException if the write failed due to being unable to fulfil the write concern
     * @throws com.mongodb.MongoException             if the write failed due to some other failure
     * @mongodb.driver.manual tutorial/modify-documents/#replace-the-document Replace
     */
    long replaceOne(String collectionName, String filter, String replacement);
    /**
     * Replace a document in the collection according to the specified arguments.
     *
     * @param filter        the query filter to apply to the replace operation
     * @param replacement   the replacement document
     * @param updateOptions the options to apply to the replace operation
     * @return the result of the replace one operation
     * @throws com.mongodb.MongoWriteException        if the write failed due to some failure specific to the replace command
     * @throws com.mongodb.MongoWriteConcernException if the write failed due to being unable to fulfil the write concern
     * @throws com.mongodb.MongoException             if the write failed due to some other failure
     * @mongodb.driver.manual tutorial/modify-documents/#replace-the-document Replace
     */
    long replaceOne(String collectionName, String filter, String replacement, String updateOptions);
    /**
     * Update a single document in the collection according to the specified arguments.
     *
     * @param filter a document describing the query filter, which may not be null.
     * @param update a document describing the update, which may not be null. The update to apply must include only update operators.
     * @return the result of the update one operation
     * @throws com.mongodb.MongoWriteException        if the write failed due to some failure specific to the update command
     * @throws com.mongodb.MongoWriteConcernException if the write failed due to being unable to fulfil the write concern
     * @throws com.mongodb.MongoException             if the write failed due to some other failure
     * @mongodb.driver.manual tutorial/modify-documents/ Updates
     * @mongodb.driver.manual reference/operator/update/ Update Operators
     */
    long updateOne(String collectionName, String filter, String update);
    /**
     * Update a single document in the collection according to the specified arguments.
     *
     * @param filter        a document describing the query filter, which may not be null.
     * @param update        a document describing the update, which may not be null. The update to apply must include only update operators.
     * @param updateOptions the options to apply to the update operation
     * @return the result of the update one operation
     * @throws com.mongodb.MongoWriteException        if the write failed due to some failure specific to the update command
     * @throws com.mongodb.MongoWriteConcernException if the write failed due to being unable to fulfil the write concern
     * @throws com.mongodb.MongoException             if the write failed due to some other failure
     * @mongodb.driver.manual tutorial/modify-documents/ Updates
     * @mongodb.driver.manual reference/operator/update/ Update Operators
     */
    long updateOne(String collectionName, String filter, String update, String updateOptions);
    /**
     * Update all documents in the collection according to the specified arguments.
     *
     * @param filter a document describing the query filter, which may not be null.
     * @param update a document describing the update, which may not be null. The update to apply must include only update operators.
     * @return the result of the update many operation
     * @throws com.mongodb.MongoWriteException        if the write failed due to some failure specific to the update command
     * @throws com.mongodb.MongoWriteConcernException if the write failed due to being unable to fulfil the write concern
     * @throws com.mongodb.MongoException             if the write failed due to some other failure
     * @mongodb.driver.manual tutorial/modify-documents/ Updates
     * @mongodb.driver.manual reference/operator/update/ Update Operators
     */
    long updateMany(String collectionName, String filter, String update);
    /**
     * Update all documents in the collection according to the specified arguments.
     *
     * @param filter        a document describing the query filter, which may not be null.
     * @param update        a document describing the update, which may not be null. The update to apply must include only update operators.
     * @param updateOptions the options to apply to the update operation
     * @return the result of the update many operation
     * @throws com.mongodb.MongoWriteException        if the write failed due to some failure specific to the update command
     * @throws com.mongodb.MongoWriteConcernException if the write failed due to being unable to fulfil the write concern
     * @throws com.mongodb.MongoException             if the write failed due to some other failure
     * @mongodb.driver.manual tutorial/modify-documents/ Updates
     * @mongodb.driver.manual reference/operator/update/ Update Operators
     */
    long updateMany(String collectionName, String filter, String update, String updateOptions);
    /**
     * Atomically find a document and remove it.
     *
     * @param filter the query filter to find the document with
     * @return the document that was removed.  If no documents matched the query filter, then null will be returned
     */
    String findOneAndDelete(String collectionName, String filter);
    /**
     * Atomically find a document and remove it.
     *
     * @param filter  the query filter to find the document with
     * @param options the options to apply to the operation
     * @return the document that was removed.  If no documents matched the query filter, then null will be returned
     */
    String findOneAndDelete(String collectionName, String filter, String options);
    /**
     * Atomically find a document and replace it.
     *
     * @param filter      the query filter to apply to the replace operation
     * @param replacement the replacement document
     * @return the document that was replaced.  Depending on the value of the {@code returnOriginal} property, this will either be the
     * document as it was before the update or as it is after the update.  If no documents matched the query filter, then null will be
     * returned
     */
    String findOneAndReplace(String collectionName, String filter, String replacement);
    /**
     * Atomically find a document and replace it.
     *
     * @param filter      the query filter to apply to the replace operation
     * @param replacement the replacement document
     * @param options     the options to apply to the operation
     * @return the document that was replaced.  Depending on the value of the {@code returnOriginal} property, this will either be the
     * document as it was before the update or as it is after the update.  If no documents matched the query filter, then null will be
     * returned
     */
    String findOneAndReplace(String collectionName, String filter, String replacement, String options);
    /**
     * Atomically find a document and update it.
     *
     * @param filter a document describing the query filter, which may not be null.
     * @param update a document describing the update, which may not be null. The update to apply must include only update operators.
     * @return the document that was updated before the update was applied.  If no documents matched the query filter, then null will be
     * returned
     */
    String findOneAndUpdate(String collectionName, String filter, String update);
    /**
     * Atomically find a document and update it.
     *
     * @param filter  a document describing the query filter, which may not be null.
     * @param update  a document describing the update, which may not be null. The update to apply must include only update operators.
     * @param options the options to apply to the operation
     * @return the document that was updated.  Depending on the value of the {@code returnOriginal} property, this will either be the
     * document as it was before the update or as it is after the update.  If no documents matched the query filter, then null will be
     * returned
     */
    String findOneAndUpdate(String collectionName, String filter, String update, String options);
}

+ 172 - 0
hos-admin/src/main/java/com/yihu/hos/common/mongo/MongoDB.java

@ -0,0 +1,172 @@
package com.yihu.hos.common.mongo;
import com.mongodb.client.MongoCollection;
import org.bson.Document;
import java.util.List;
/**
 * A thin wrapper around MongoDB that simplifies everyday use.
 * It mainly flattens the call hierarchy and hides housekeeping such as releasing resources.
 * Note:
 * for performance-critical cases it is still recommended to use the native driver directly;
 * getCollection() returns the underlying MongoCollection<Document>.
 * usage (see the example below):
 * 1. register a config with MongoDBKit.addConfig()
 * 2. call MongoDBKit.start();
 * 3. query through the static MongoDB methods
 * 4. call use() to switch to another configured connection
 *
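 * Example: a minimal sketch of the bootstrap sequence. The URI, the database name "esb"
 * and the collection name "patient" are illustrative assumptions, not values taken from
 * this project.
 * <pre>
 *     MongoDBKit.addConfig(new MongoDBConfig("main", "mongodb://localhost:27017", "esb"));
 *     MongoDBKit.start();
 *     List&lt;String&gt; active = MongoDB.find("patient", "{\"status\": \"active\"}");
 * </pre>
 *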
 * @created Airhead 2016/2/17.
 */
public class MongoDB {
    private static MongoDBPro mongoDBPro;
    static void init() {
        mongoDBPro = MongoDBPro.use();
    }
    public static MongoDBPro use(String configName) {
        return MongoDBPro.use(configName);
    }
    public static MongoDBPro db(String databaseName) {
        return mongoDBPro.db(databaseName);
    }
    public static long count(String collectionName) {
        return mongoDBPro.count(collectionName);
    }
    public static long count(String collectionName, String filter) {
        return mongoDBPro.count(collectionName, filter);
    }
    public static long count(String collectionName, String filter, String options) {
        return mongoDBPro.count(collectionName, filter, options);
    }
    public static List<String> find(String collectionName) {
        return mongoDBPro.find(collectionName);
    }
    public static List<String> find(String collectionName, String filter) {
        return mongoDBPro.find(collectionName, filter);
    }
    public static List<String> find(String collectionName, String filter, String projection) {
        return mongoDBPro.find(collectionName, filter, projection);
    }
    public static List<String> find(String collectionName, String filter, String projection, String options) {
        return mongoDBPro.find(collectionName, filter, projection, options);
    }
    public static void insertOne(String collectionName, String document) {
        mongoDBPro.insertOne(collectionName, document);
    }
    public static void insertMany(String collectionName, List<String> documents) {
        mongoDBPro.insertMany(collectionName, documents);
    }
    public static void insertMany(String collectionName, List<String> documents, String options) {
        mongoDBPro.insertMany(collectionName, documents, options);
    }
    public static long deleteOne(String collectionName, String filter) {
        return mongoDBPro.deleteOne(collectionName, filter);
    }
    public static long deleteMany(String collectionName, String filter) {
        return mongoDBPro.deleteMany(collectionName, filter);
    }
    public static long replaceOne(String collectionName, String filter, String replacement) {
        return mongoDBPro.replaceOne(collectionName, filter, replacement);
    }
    public static long replaceOne(String collectionName, String filter, String replacement, String updateOptions) {
        return mongoDBPro.replaceOne(collectionName, filter, replacement, updateOptions);
    }
    public static long updateOne(String collectionName, String filter, String update) {
        return mongoDBPro.updateOne(collectionName, filter, update);
    }
    public static long updateOne(String collectionName, String filter, String update, String updateOptions) {
        return mongoDBPro.updateOne(collectionName, filter, update, updateOptions);
    }
    public static long updateMany(String collectionName, String filter, String update) {
        return mongoDBPro.updateMany(collectionName, filter, update);
    }
    public static long updateMany(String collectionName, String filter, String update, String updateOptions) {
        return mongoDBPro.updateMany(collectionName, filter, update, updateOptions);
    }
    public static String findOneAndDelete(String collectionName, String filter) {
        return mongoDBPro.findOneAndDelete(collectionName, filter);
    }
    public static String findOneAndDelete(String collectionName, String filter, String options) {
        return mongoDBPro.findOneAndDelete(collectionName, filter, options);
    }
    public static String findOneAndReplace(String collectionName, String filter, String replacement) {
        return mongoDBPro.findOneAndReplace(collectionName, filter, replacement);
    }
    public static String findOneAndReplace(String collectionName, String filter, String replacement, String options) {
        return mongoDBPro.findOneAndReplace(collectionName, filter, replacement, options);
    }
    public static String findOneAndUpdate(String collectionName, String filter, String update, String options) {
        return mongoDBPro.findOneAndUpdate(collectionName, filter, update, options);
    }
    public static String findOneAndUpdate(String collectionName, String filter, String update) {
        return mongoDBPro.findOneAndUpdate(collectionName, filter, update);
    }
    public static void drop(String collectionName) {
        mongoDBPro.drop(collectionName);
    }
    public static String createIndex(String collectionName, String keys) {
        return mongoDBPro.createIndex(collectionName, keys);
    }
    public static String createIndex(String collectionName, String keys, String indexOptions) {
        return mongoDBPro.createIndex(collectionName, keys, indexOptions);
    }
    public static List<String> listIndexes(String collectionName) {
        return mongoDBPro.listIndexes(collectionName);
    }
    public static void dropIndex(String collectionName, String indexName) {
        mongoDBPro.dropIndex(collectionName, indexName);
    }
    public static void dropIndexes(String collectionName) {
        mongoDBPro.dropIndexes(collectionName);
    }
    public static void renameCollection(String collectionName, String newCollectionName) {
        mongoDBPro.renameCollection(collectionName, newCollectionName);
    }
    public static void renameCollection(String collectionName, String newCollectionName, String renameCollectionOptions) {
        mongoDBPro.renameCollection(collectionName, newCollectionName, renameCollectionOptions);
    }
    public static MongoCollection<Document> getCollection(String collectionName) {
        return mongoDBPro.getCollection(collectionName);
    }
    public static List<String> listCollectionNames(){
        return mongoDBPro.listCollectionNames();
    }
}

+ 65 - 0
hos-admin/src/main/java/com/yihu/hos/common/mongo/MongoDBConfig.java

@ -0,0 +1,65 @@
package com.yihu.hos.common.mongo;
import com.mongodb.MongoClient;
import com.mongodb.MongoClientOptions;
import com.mongodb.MongoClientURI;
import com.mongodb.client.MongoDatabase;
/**
 * @created Airhead 2016/2/17.
 */
public class MongoDBConfig {
    String name;
    String uri;
    String defaultDatabaseName;
    MongoClient mongoClient;                    // one client per config; a shared static client would ignore per-config URIs
    MongoClientOptions mongoClientOptions;      // not used yet; the MongoDB driver defaults apply
    MongoDatabase mongoDatabase;
    public MongoDBConfig(String uri) {
        this.name = MongoDBKit.MAIN_CONFIG_NAME;
        this.uri = uri;
        this.defaultDatabaseName = MongoDBKit.DEFAULT_DB_NAME;
    }
    public MongoDBConfig(String name, String uri) {
        this.name = name;
        this.uri = uri;
        this.defaultDatabaseName = MongoDBKit.DEFAULT_DB_NAME;
    }
    public MongoDBConfig(String name, String uri, String databaseName) {
        this.name = name;
        this.uri = uri;
        this.defaultDatabaseName = databaseName;
    }
    public String getName() {
        return name;
    }
    public MongoDatabase getDatabase(String databaseName) {
        if (mongoClient == null) {
            MongoClientURI mongoClientURI = new MongoClientURI(uri);
            mongoClient = new MongoClient(mongoClientURI);
        }
        if (mongoDatabase != null) {
            if (mongoDatabase.getName().equals(databaseName)) {
                return mongoDatabase;
            }
        }
        mongoDatabase = mongoClient.getDatabase(databaseName);
        return mongoDatabase;
    }
    public MongoDatabase getDatabase() {
        if (mongoDatabase != null) {
            return mongoDatabase;
        }
        return getDatabase(defaultDatabaseName);
    }
}

+ 57 - 0
hos-admin/src/main/java/com/yihu/hos/common/mongo/MongoDBKit.java

@ -0,0 +1,57 @@
package com.yihu.hos.common.mongo;
import java.util.HashMap;
import java.util.Map;
/**
 * @created Airhead 2016/2/17.
 */
public class MongoDBKit {
    public static final String MAIN_CONFIG_NAME = "main";
    public static final String DEFAULT_DB_NAME = "test";
    static MongoDBConfig config = null;
    private static Map<String, MongoDBConfig> configNameToConfig = new HashMap<>();
    public static void start() {
        MongoDB.init();
    }
    public static MongoDBConfig getConfig() {
        return config;
    }
    public static MongoDBConfig getConfig(String configName) {
        return configNameToConfig.get(configName);
    }
    /**
     * Add Config object
     *
     * @param config the config that carries the MongoDB URI, MongoClientOptions, etc.
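     *               e.g. an illustrative call (the URI and database name are assumptions):
     *               {@code addConfig(new MongoDBConfig("main", "mongodb://localhost:27017", "esb"))}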
     */
    public static void addConfig(MongoDBConfig config) {
        if (config == null) {
            throw new IllegalArgumentException("Config can not be null");
        }
        if (configNameToConfig.containsKey(config.getName())) {
            throw new IllegalArgumentException("Config already exists: " + config.getName());
        }
        configNameToConfig.put(config.getName(), config);
        // Replace the main config if the current config name is MAIN_CONFIG_NAME
        if (MAIN_CONFIG_NAME.equals(config.getName())) {
            MongoDBKit.config = config;
        }
        // The config name may not be MAIN_CONFIG_NAME; if the main config is still null,
        // fall back to the first config that was added
        if (MongoDBKit.config == null) {
            MongoDBKit.config = config;
        }
    }
}

+ 92 - 0
hos-admin/src/main/java/com/yihu/hos/common/mongo/MongoDBOperator.java

@ -0,0 +1,92 @@
package com.yihu.hos.common.mongo;
/**
 * Query Selectors
 * $eq	Matches values that are equal to a specified value.
 * $gt	Matches values that are greater than a specified value.
 * $gte	Matches values that are greater than or equal to a specified value.
 * $lt	Matches values that are less than a specified value.
 * $lte	Matches values that are less than or equal to a specified value.
 * $ne	Matches all values that are not equal to a specified value.
 * $in	Matches any of the values specified in an array.
 * $nin	Matches none of the values specified in an array.
 *
 * Logical
 * $or	Joins query clauses with a logical OR returns all documents that match the conditions of either clause.
 * $and	Joins query clauses with a logical AND returns all documents that match the conditions of both clauses.
 * $not	Inverts the effect of a query expression and returns documents that do not match the query expression.
 * $nor	Joins query clauses with a logical NOR returns all documents that fail to match both clauses.
 *
 * Element
 * $exists	Matches documents that have the specified field.
 * $type	Selects documents if a field is of the specified type.
 *
 * Evaluation
 * $mod	Performs a modulo operation on the value of a field and selects documents with a specified result.
 * $regex	Selects documents where values match a specified regular expression.
 * $text	Performs text search.
 * $where	Matches documents that satisfy a JavaScript expression.
 *
 * Geospatial
 * $geoWithin	Selects geometries within a bounding GeoJSON geometry. The 2dsphere and 2d indexes support $geoWithin.
 * $geoIntersects	Selects geometries that intersect with a GeoJSON geometry. The 2dsphere index supports $geoIntersects.
 * $near	Returns geospatial objects in proximity to a point. Requires a geospatial index. The 2dsphere and 2d indexes support $near.
 * $nearSphere	Returns geospatial objects in proximity to a point on a sphere. Requires a geospatial index. The 2dsphere and 2d indexes support $nearSphere.
 *
 * Array
 * $all	Matches arrays that contain all elements specified in the query.
 * $elemMatch	Selects documents if element in the array field matches all the specified $elemMatch conditions.
 * $size	Selects documents if the array field is a specified size.
 *
 * Bitwise
 * $bitsAllSet	Matches numeric or binary values in which a set of bit positions all have a value of 1.
 * $bitsAnySet	Matches numeric or binary values in which any bit from a set of bit positions has a value of 1.
 * $bitsAllClear	Matches numeric or binary values in which a set of bit positions all have a value of 0.
 * $bitsAnyClear	Matches numeric or binary values in which any bit from a set of bit positions has a value of 0.
 *
 * Comments
 * $comment	Adds a comment to a query predicate.
 *
 * Projection Operators
 * $	Projects the first element in an array that matches the query condition.
 * $elemMatch	Projects the first element in an array that matches the specified $elemMatch condition.
 * $meta	Projects the document’s score assigned during $text operation.
 * $slice	Limits the number of elements projected from an array. Supports skip and limit slices.
 *
 * Update Operators
 * $inc	Increments the value of the field by the specified amount.
 * $mul	Multiplies the value of the field by the specified amount.
 * $rename	Renames a field.
 * $setOnInsert	Sets the value of a field if an update results in an insert of a document. Has no effect on update operations that modify existing documents.
 * $set	Sets the value of a field in a document.
 * $unset	Removes the specified field from a document.
 * $min	Only updates the field if the specified value is less than the existing field value.
 * $max	Only updates the field if the specified value is greater than the existing field value.
 * $currentDate	Sets the value of a field to current date, either as a Date or a Timestamp.
 *
 * Array
 * $	Acts as a placeholder to update the first element that matches the query condition in an update.
 * $addToSet	Adds elements to an array only if they do not already exist in the set.
 * $pop	Removes the first or last item of an array.
 * $pullAll	Removes all matching values from an array.
 * $pull	Removes all array elements that match a specified query.
 * $pushAll	Deprecated. Adds several items to an array.
 * $push	Adds an item to an array.
 *
 * Modifiers
 * $each	Modifies the $push and $addToSet operators to append multiple items for array updates.
 * $slice	Modifies the $push operator to limit the size of updated arrays.
 * $sort	Modifies the $push operator to reorder documents stored in an array.
 * $position	Modifies the $push operator to specify the position in the array to add elements.
 *
 * Bitwise
 * $bit	Performs bitwise AND, OR, and XOR updates of integer values.
 *
 * Isolation
 * $isolated	Modifies the behavior of a write operation to increase the isolation of the operation.
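 *
 * Example: a hedged sketch of how these operators appear in the JSON strings passed to the
 * MongoDB/MongoDBPro wrappers; the collection "patient" and the field names are illustrative
 * assumptions only.
 * <pre>
 *     // all patients aged 18 or older whose name starts with "Zh"
 *     String filter = "{\"age\": {\"$gte\": 18}, \"name\": {\"$regex\": \"^Zh\"}}";
 *     // mark them as active and increment a visit counter
 *     String update = "{\"$set\": {\"status\": \"active\"}, \"$inc\": {\"visits\": 1}}";
 *     long modified = MongoDB.updateMany("patient", filter, update);
 * </pre>
 *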
 * @created Airhead 2016/2/17.
 */
public class MongoDBOperator {
}

+ 566 - 0
hos-admin/src/main/java/com/yihu/hos/common/mongo/MongoDBPro.java

@ -0,0 +1,566 @@
package com.yihu.hos.common.mongo;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.mongodb.MongoNamespace;
import com.mongodb.client.*;
import com.mongodb.client.model.*;
import com.mongodb.client.result.DeleteResult;
import com.mongodb.client.result.UpdateResult;
import org.apache.commons.lang3.StringUtils;
import org.bson.Document;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * MongoDBPro. Professional database CRUD and management tool.
 *
 * @created Airhead 2016/2/17.
 */
public class MongoDBPro implements IMongoDBRunner, IMongoDBAdminer {
    private static final Map<String, MongoDBPro> map = new HashMap<String, MongoDBPro>();
    private final MongoDBConfig config;
    public MongoDBPro() {
        if (MongoDBKit.config == null) {
            throw new RuntimeException("The main config is null, initialize MongoDBKit first");
        }
        this.config = MongoDBKit.config;
    }
    public MongoDBPro(String configName) {
        this.config = MongoDBKit.getConfig(configName);
        if (this.config == null) {
            throw new IllegalArgumentException("Config not found by configName: " + configName);
        }
    }
    public static MongoDBPro use() {
        return use(MongoDBKit.config.name);
    }
    public static MongoDBPro use(String configName) {
        MongoDBPro result = map.get(configName);
        if (result == null) {
            result = new MongoDBPro(configName);
            map.put(configName, result);
        }
        return result;
    }
    public MongoDBPro db(String databaseName) {
        config.getDatabase(databaseName);
        return this;
    }
    @Override
    public long count(String collectionName) {
        return count(collectionName, null);
    }
    @Override
    public long count(String collectionName, String filter) {
        return count(collectionName, filter, null);
    }
    /**
     * @param collectionName
     * @param filter         the query filter
     * @param options        the options describing the count
     *                       <p>
     *                       {
     *                       limit: <integer>,
     *                       skip: <integer>,
     *                       hint: <hint>
     *                       }
     * @return
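     * <p>
     * Example: an illustrative call (the collection name is an assumption):
     * <pre>
     *     long active = MongoDB.count("patient", "{\"status\": \"active\"}", "{\"limit\": 100, \"skip\": 0}");
     * </pre>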
     */
    @Override
    public long count(String collectionName, String filter, String options) {
        MongoCollection<Document> collection = getCollection(collectionName);
        Document filterDocument = new Document();
        if (filter != null) {
            filterDocument = Document.parse(filter);
        }
        CountOptions countOptions = new CountOptions();
        if (options != null) {
            ObjectMapper mapper = new ObjectMapper();
            try {
                JsonNode rootNode = mapper.readValue(options, JsonNode.class);
                String hintString = rootNode.path("hint").toString();
                if (!StringUtils.isEmpty(hintString) && !"null".equals(hintString)) {
                    // only hint when an actual hint document was supplied; an empty hint document is not a valid hint
                    countOptions.hint(Document.parse(hintString));
                }
                countOptions.limit(rootNode.path("limit").asInt());
                countOptions.skip(rootNode.path("skip").asInt());
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        return collection.count(filterDocument, countOptions);
    }
    @Override
    public List<String> find(String collectionName) {
        return find(collectionName, null);
    }
    @Override
    public List<String> find(String collectionName, String filter) {
        return find(collectionName, filter, null);
    }
    @Override
    public List<String> find(String collectionName, String filter, String projection) {
        return find(collectionName, filter, projection, null);
    }
    @Override
    public List<String> find(String collectionName, String filter, String projection, String options) {
        MongoCollection<Document> collection = getCollection(collectionName);
        Document filterDocument = new Document();
        if (filter != null) {
            filterDocument = Document.parse(filter);
        }
        Document projectionDocument = new Document();
        if (projection != null) {
            projectionDocument = Document.parse(projection);
        }
        FindIterable<Document> documents = collection.find(filterDocument).projection(projectionDocument);
        List<String> list = new ArrayList<>();
        try (MongoCursor<Document> cursor = documents.iterator()) {
            while (cursor.hasNext()) {
                Document doc = cursor.next();
                list.add(doc.toJson());
            }
        }
        return list;
    }
//    @Override
//    public List<String> aggregate(String collectionName, List<? extends String> pipeline) {
//        return null;
//    }
//    @Override
//    public List<String> mapReduce(String collectionName, String mapFunction, String reduceFunction) {
//        return null;
//    }
    @Override
    public void insertOne(String collectionName, String document) {
        MongoCollection<Document> collection = getCollection(collectionName);
        Document doc = Document.parse(document);
        collection.insertOne(doc);
    }
    @Override
    public void insertMany(String collectionName, List<String> documents) {
        insertMany(collectionName, documents, null);
    }
    /**
     * @param collectionName
     * @param documents      the documents to insert
     * @param options        the options to apply to the operation
     *                       {
     *                       ordered:<ordered>
     *                       }
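     *                       <p>
     *                       e.g. an illustrative call (collection name and documents are assumptions):
     *                       {@code insertMany("patient", docs, "{\"ordered\": true}")}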
     */
    @Override
    public void insertMany(String collectionName, List<String> documents, String options) {
        MongoCollection<Document> collection = getCollection(collectionName);
        List<Document> list = new ArrayList<>();
        for (String document : documents) {
            Document doc = Document.parse(document);
            list.add(doc);
        }
        InsertManyOptions insertManyOptions = new InsertManyOptions();
        if (options != null) {
            ObjectMapper mapper = new ObjectMapper();
            try {
                JsonNode rootNode = mapper.readValue(options, JsonNode.class);
                // default to ordered inserts when the option is absent, matching the driver default
                insertManyOptions.ordered(rootNode.path("ordered").asBoolean(true));
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        collection.insertMany(list, insertManyOptions);
    }
    @Override
    public long deleteOne(String collectionName, String filter) {
        MongoCollection<Document> collection = getCollection(collectionName);
        Document filterDocument = Document.parse(filter);
        DeleteResult deleteResult = collection.deleteOne(filterDocument);
        return deleteResult.getDeletedCount();
    }
    @Override
    public long deleteMany(String collectionName, String filter) {
        MongoCollection<Document> collection = getCollection(collectionName);
        Document filterDocument = Document.parse(filter);
        DeleteResult deleteResult = collection.deleteMany(filterDocument);
        return deleteResult.getDeletedCount();
    }
    @Override
    public long replaceOne(String collectionName, String filter, String replacement) {
        return replaceOne(collectionName, filter, replacement, null);
    }
    /**
     * @param collectionName
     * @param filter         the query filter to apply the the replace operation
     * @param replacement    the replacement document
     * @param updateOptions  the options to apply to the replace operation
     *                       {
     *                       upsert:<upsert>
     *                       }
     * @return
     */
    @Override
    public long replaceOne(String collectionName, String filter, String replacement, String updateOptions) {
        MongoCollection<Document> collection = getCollection(collectionName);
        Document filterDocument = Document.parse(filter);
        Document document = Document.parse(replacement);
        UpdateOptions options = new UpdateOptions();
        if (updateOptions != null) {
            ObjectMapper mapper = new ObjectMapper();
            try {
                JsonNode rootNode = mapper.readValue(updateOptions, JsonNode.class);
                options.upsert(rootNode.path("upsert").asBoolean());
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        UpdateResult updateResult = collection.replaceOne(filterDocument, document, options);
        return updateResult.getModifiedCount();
    }
    @Override
    public long updateOne(String collectionName, String filter, String update) {
        return updateOne(collectionName, filter, update, null);
    }
    /**
     * @param collectionName
     * @param filter         a document describing the query filter, which may not be null.
     * @param update         a document describing the update, which may not be null. The update to apply must include only update operators.
     * @param updateOptions  the options to apply to the update operation
     *                       {
     *                       upsert:<upsert>
     *                       }
     * @return
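     * <p>
     * Example: an illustrative call (the collection and field names are assumptions):
     * <pre>
     *     long modified = MongoDB.updateOne("patient", "{\"cardNo\": \"123456\"}",
     *             "{\"$set\": {\"name\": \"Test\"}}", "{\"upsert\": true}");
     * </pre>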
     */
    @Override
    public long updateOne(String collectionName, String filter, String update, String updateOptions) {
        MongoCollection<Document> collection = getCollection(collectionName);
        Document filterDocument = Document.parse(filter);
        Document document = Document.parse(update);
        UpdateOptions options = new UpdateOptions();
        if (updateOptions != null) {
            ObjectMapper mapper = new ObjectMapper();
            try {
                JsonNode rootNode = mapper.readValue(updateOptions, JsonNode.class);
                options.upsert(rootNode.path("upsert").asBoolean());
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        UpdateResult updateResult = collection.updateOne(filterDocument, document, options);
        return updateResult.getModifiedCount();
    }
    @Override
    public long updateMany(String collectionName, String filter, String update) {
        return updateMany(collectionName, filter, update, null);
    }
    /**
     * @param collectionName
     * @param filter         a document describing the query filter, which may not be null.
     * @param update         a document describing the update, which may not be null. The update to apply must include only update operators.
     * @param updateOptions  the options to apply to the update operation
     *                       {
     *                       upsert:<upsert>
     *                       }
     * @return
     */
    @Override
    public long updateMany(String collectionName, String filter, String update, String updateOptions) {
        MongoCollection<Document> collection = getCollection(collectionName);
        Document filterDocument = Document.parse(filter);
        Document document = Document.parse(update);
        UpdateOptions options = new UpdateOptions();
        if (updateOptions != null) {
            ObjectMapper mapper = new ObjectMapper();
            try {
                JsonNode rootNode = mapper.readValue(updateOptions, JsonNode.class);
                options.upsert(rootNode.path("upsert").asBoolean());
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        UpdateResult updateResult = collection.updateMany(filterDocument, document, options);
        return updateResult.getModifiedCount();
    }
    @Override
    public String findOneAndDelete(String collectionName, String filter) {
        return findOneAndDelete(collectionName, filter, null);
    }
    /**
     * @param collectionName
     * @param filter         the query filter to find the document with
     * @param options        the options to apply to the operation
     *                       {
     *                       projection:<document>,
     *                       sort:<document>
     *                       }
     * @return
     */
    @Override
    public String findOneAndDelete(String collectionName, String filter, String options) {
        MongoCollection<Document> collection = getCollection(collectionName);
        Document filterDocument = Document.parse(filter);
        FindOneAndDeleteOptions findOneAndDeleteOptions = new FindOneAndDeleteOptions();
        if (options != null) {
            ObjectMapper mapper = new ObjectMapper();
            try {
                JsonNode rootNode = mapper.readValue(options, JsonNode.class);
                String projection = rootNode.path("projection").toString();
                Document projectionDoc = new Document();
                if (!StringUtils.isEmpty(projection)) {
                    projectionDoc = Document.parse(projection);
                }
                String sort = rootNode.path("sort").toString();
                Document sortDoc = new Document();
                if (!StringUtils.isEmpty(sort)) {
                    sortDoc = Document.parse(sort);
                }
                findOneAndDeleteOptions.projection(projectionDoc);
                findOneAndDeleteOptions.sort(sortDoc);
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        Document document = collection.findOneAndDelete(filterDocument, findOneAndDeleteOptions);
        return document == null ? "{}" : document.toJson();
    }
    @Override
    public String findOneAndReplace(String collectionName, String filter, String replacement) {
        return findOneAndReplace(collectionName, filter, replacement, null);
    }
    /**
     * @param collectionName
     * @param filter         the query filter to apply the the replace operation
     * @param replacement    the replacement document
     * @param options        the options to apply to the operation
     *                       {
     *                       projection:<document>,
     *                       sort:<document>,
     *                       upsert:<upsert>
     *                       }
     * @return
     */
    @Override
    public String findOneAndReplace(String collectionName, String filter, String replacement, String options) {
        MongoCollection<Document> collection = getCollection(collectionName);
        Document filterDocument = Document.parse(filter);
        Document replacementDocument = Document.parse(replacement);
        FindOneAndReplaceOptions findOneAndReplaceOptions = new FindOneAndReplaceOptions();
        if (options != null) {
            ObjectMapper mapper = new ObjectMapper();
            try {
                JsonNode rootNode = mapper.readValue(options, JsonNode.class);
                String projection = rootNode.path("projection").toString();
                Document projectionDoc = new Document();
                if (!StringUtils.isEmpty(projection)) {
                    projectionDoc = Document.parse(projection);
                }
                String sort = rootNode.path("sort").toString();
                Document sortDoc = new Document();
                if (!StringUtils.isEmpty(sort)) {
                    sortDoc = Document.parse(sort);
                }
                findOneAndReplaceOptions.projection(projectionDoc);
                findOneAndReplaceOptions.sort(sortDoc);
                findOneAndReplaceOptions.upsert(rootNode.path("upsert").asBoolean());
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        Document document = collection.findOneAndReplace(filterDocument, replacementDocument, findOneAndReplaceOptions);
        return document == null ? "{}" : document.toJson();
    }
    @Override
    public String findOneAndUpdate(String collectionName, String filter, String update) {
        return findOneAndUpdate(collectionName, filter, update, null);
    }
    /**
     * @param collectionName
     * @param filter         a document describing the query filter, which may not be null.
     * @param update         a document describing the update, which may not be null. The update to apply must include only update operators.
     * @param options        the options to apply to the operation
     *                       {
     *                       projection:<document>,
     *                       sort:<document>,
     *                       upsert:<upsert>
     *                       }
     * @return
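     * <p>
     * Example: an illustrative call (the collection and field names are assumptions):
     * <pre>
     *     String updated = MongoDB.findOneAndUpdate("patient",
     *             "{\"cardNo\": \"123456\"}",
     *             "{\"$set\": {\"status\": \"archived\"}}",
     *             "{\"sort\": {\"eventDate\": -1}, \"upsert\": false}");
     * </pre>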
     */
    @Override
    public String findOneAndUpdate(String collectionName, String filter, String update, String options) {
        MongoCollection<Document> collection = getCollection(collectionName);
        Document filterDocument = Document.parse(filter);
        Document updateDocument = Document.parse(update);
        FindOneAndUpdateOptions findOneAndUpdateOptions = new FindOneAndUpdateOptions();
        if (options != null) {
            ObjectMapper mapper = new ObjectMapper();
            try {
                JsonNode rootNode = mapper.readValue(options, JsonNode.class);
                String projection = rootNode.path("projection").toString();
                Document projectionDoc = new Document();
                if (!StringUtils.isEmpty(projection)) {
                    projectionDoc = Document.parse(projection);
                }
                String sort = rootNode.path("sort").toString();
                Document sortDoc = new Document();
                if (!StringUtils.isEmpty(sort)) {
                    sortDoc = Document.parse(sort);
                }
                findOneAndUpdateOptions.projection(projectionDoc);
                findOneAndUpdateOptions.sort(sortDoc);
                findOneAndUpdateOptions.upsert(rootNode.path("upsert").asBoolean());
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        Document document = collection.findOneAndUpdate(filterDocument, updateDocument, findOneAndUpdateOptions);
        return document == null ? "{}" : document.toJson();
    }
    @Override
    public void drop(String collectionName) {
        getCollection(collectionName).drop();
    }
    @Override
    public String createIndex(String collectionName, String keys) {
        MongoCollection<Document> collection = getCollection(collectionName);
        Document keysDocument = Document.parse(keys);
        return collection.createIndex(keysDocument);
    }
    @Override
    public String createIndex(String collectionName, String keys, String indexOptions) {
        MongoCollection<Document> collection = getCollection(collectionName);
        Document keysDocument = Document.parse(keys);
        IndexOptions options = new IndexOptions();
//TODO: parse indexOptions (e.g. unique, name, expireAfterSeconds)
//        try {
//            ObjectMapper mapper = new ObjectMapper();
//            JsonNode rootNode = mapper.readValue(indexOptions, JsonNode.class);
//
//
//        } catch (IOException e) {
//            e.printStackTrace();
//        }
        return collection.createIndex(keysDocument, options);
    }
    @Override
    public List<String> listIndexes(String collectionName) {
        MongoCollection<Document> collection = getCollection(collectionName);
        ListIndexesIterable<Document> indexes = collection.listIndexes();
        List<String> list = new ArrayList<>();
        try (MongoCursor<Document> cursor = indexes.iterator()) {
            while (cursor.hasNext()) {
                Document doc = cursor.next();
                list.add(doc.toJson());
            }
        }
        return list;
    }
    @Override
    public void dropIndex(String collectionName, String indexName) {
        getCollection(collectionName).dropIndex(indexName);
    }
    @Override
    public void dropIndexes(String collectionName) {
        getCollection(collectionName).dropIndexes();
    }
    @Override
    public void renameCollection(String collectionName, String newCollectionName) {
        MongoCollection<Document> collection = getCollection(collectionName);
        MongoNamespace namespace = collection.getNamespace();
        collection.renameCollection(new MongoNamespace(namespace.getDatabaseName(), newCollectionName));
    }
    @Override
    public void renameCollection(String collectionName, String newCollectionName, String renameCollectionOptions) {
        MongoCollection<Document> collection = getCollection(collectionName);
        MongoNamespace namespace = collection.getNamespace();
        RenameCollectionOptions options = new RenameCollectionOptions();
        try {
            ObjectMapper mapper = new ObjectMapper();
            JsonNode rootNode = mapper.readValue(renameCollectionOptions, JsonNode.class);
            options.dropTarget(rootNode.path("dropTarget").asBoolean());
        } catch (IOException e) {
            e.printStackTrace();
        }
        collection.renameCollection(new MongoNamespace(namespace.getDatabaseName(), newCollectionName), options);
    }
    public MongoCollection<Document> getCollection(String collectionName) {
        MongoDatabase database = config.getDatabase();
        return database.getCollection(collectionName);
    }
    public List<String> listCollectionNames() {
        MongoDatabase database = config.getDatabase();
        MongoIterable<String> listCollectionNames = database.listCollectionNames();
        List<String> list = new ArrayList<>();
        for (String collectionName : listCollectionNames) {
            list.add(collectionName);
        }
        return list;
    }
}

+ 15 - 0
hos-admin/src/main/java/com/yihu/hos/config/BeanConfig.java

@ -0,0 +1,15 @@
package com.yihu.hos.config;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.ImportResource;
/**
 * @author HZY
 * @version 1.0
 * Created at 2016/8/5.
 */
@Configuration
@ImportResource({"classpath:spring/applicationContext.xml", "classpath:spring/dispatcher-servlet.xml"}) // import the XML configuration files
public class BeanConfig {
}

+ 51 - 0
hos-admin/src/main/java/com/yihu/hos/config/WebMvcConfig.java

@ -0,0 +1,51 @@
package com.yihu.hos.config;
import org.springframework.context.annotation.Bean;
import org.springframework.context.support.ResourceBundleMessageSource;
import org.springframework.web.servlet.config.annotation.DefaultServletHandlerConfigurer;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurerAdapter;
import org.springframework.web.servlet.view.InternalResourceViewResolver;
import org.springframework.web.servlet.view.tiles3.TilesConfigurer;
import org.springframework.web.servlet.view.tiles3.TilesViewResolver;
//@Configuration
//@ComponentScan("com.yihu")
public class WebMvcConfig extends WebMvcConfigurerAdapter {
    @Override
    public void configureDefaultServletHandling(DefaultServletHandlerConfigurer configurer) {
        configurer.enable();
    }
    @Bean
    public InternalResourceViewResolver viewResolver() {
        InternalResourceViewResolver resolver = new InternalResourceViewResolver();
        resolver.setPrefix("/WEB-INF/ehr/jsp/");
        resolver.setSuffix(".jsp");
        resolver.setOrder(1);
        return resolver;
    }
    @Bean
    public ResourceBundleMessageSource messageSource() {
        ResourceBundleMessageSource resolver = new ResourceBundleMessageSource();
        resolver.setBasenames("text/message");
        resolver.setDefaultEncoding("UTF-8");
        return resolver;
    }
    @Bean
    public TilesViewResolver tilesViewResolver(){
        TilesViewResolver tilesViewResolver = new TilesViewResolver();
        tilesViewResolver.setOrder(0);
        return tilesViewResolver;
    }
    @Bean
    public TilesConfigurer tilesConfigurer(){
        TilesConfigurer tilesConfigurer = new TilesConfigurer();
        tilesConfigurer.setDefinitions("/WEB-INF/ehr/commons/layout/layout.xml");
        return tilesConfigurer;
    }
}

+ 263 - 0
hos-admin/src/main/java/com/yihu/hos/crawler/controller/CrawlerController.java

@ -0,0 +1,263 @@
package com.yihu.hos.crawler.controller;
import com.yihu.hos.crawler.model.patient.Patient;
import com.yihu.hos.crawler.service.CrawlerManager;
import com.yihu.hos.crawler.service.CrawlerService;
import com.yihu.ehr.framework.model.ActionResult;
import com.yihu.ehr.framework.model.DetailModelResult;
import com.yihu.ehr.framework.model.Result;
import com.yihu.ehr.framework.util.operator.StringUtil;
import com.yihu.hos.standard.service.adapter.AdapterSchemeService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import javax.annotation.Resource;
import java.io.IOException;
/**
 * The current version only needs to collect, upload, and register patient archives.
 * <p>
 * Created by Airhead on 2015/12/16.
 */
@RequestMapping("/crawler")
@Controller("crawlerController")
@Api(protocols = "http", value = "CrawlerController", description = "档案采集接口", tags = {"采集"})
public class CrawlerController {
    @Resource
    CrawlerService crawlerService;
    @Resource(name = AdapterSchemeService.BEAN_ID)
    private AdapterSchemeService adapterSchemeService;
    /**
     * Job orchestration
     */
    @RequestMapping("jobLayout")
    public String jobLayout(Model model) {
        model.addAttribute("contentPage", "/crawler/jobLayout");
        return "partView";
    }
    /*
     * Data mapping for job orchestration
     */
    @RequestMapping("jobDataMapping")
    public String jobDataMapping(Model model, Integer versionId, String datasetIdStr) {
        try {
            model.addAttribute("versionId", versionId);
            model.addAttribute("datasetIdStr", datasetIdStr);
            model.addAttribute("contentPage", "/crawler/dataMapping");
            return "pageView";
        } catch (Exception ex) {
            model.addAttribute("contentPage", "/crawler/dataMapping");
            return "pageView";
        }
    }
    @RequestMapping("datasetDetail")
    public String datasetDetail(Model model, Integer schemeId, String schemeNm, String cacheDatasetId, String versionId) {
        model.addAttribute("contentPage", "/crawler/datasetDetail");
        model.addAttribute("schemeId", schemeId);
        model.addAttribute("versionId", versionId);
        model.addAttribute("schemeNm", schemeNm);
        model.addAttribute("cacheDatasetId", cacheDatasetId);
        return "pageView";
    }
    @RequestMapping(value = "patient", method = RequestMethod.POST)
    @ApiOperation(value = "采集病人健康档案", produces = "application/json", notes = "采集病人健康档案")
    @ResponseBody
    public Result crawler(
            @ApiParam(name = "patient", value = "病人索引信息", required = true)
            @RequestParam(value = "patient") String patientInfo) {
        CrawlerManager crawlerManager = new CrawlerManager();
        Patient patient = crawlerManager.parsePatient(patientInfo);
        if (patient != null) {
            Boolean result = crawlerManager.collectProcess(patient);
            if (result) {
                return Result.success("采集上传成功");
            } else {
                return Result.error("采集上传失败");
            }
        } else {
            return Result.error("参数转换病人实体失败");
        }
    }
    /**
     * Save job orchestration data
     */
    @RequestMapping(value = "saveDateSet", method = RequestMethod.POST)
    @ApiOperation(value = "保存任务编排", produces = "application/json", notes = "保存任务编排")
    @ResponseBody
    public Result saveJobData(
            @ApiParam(name = "job", value = "任务编排信息", required = true)
            @RequestParam(value = "job") String jobInfo,
            @ApiParam(name = "rows", value = "当前行数", required = true)
            @RequestParam(value = "rows") Integer rows,
            @ApiParam(name = "page", value = "当前页数", required = true)
            @RequestParam(value = "page") Integer page) {
        try {
            crawlerService.saveJobData(jobInfo, rows, page);
        } catch (Exception e) {
            e.printStackTrace();
            return Result.error("保存失败");
        }
        return Result.success("保存成功");
    }
    /**
     * Get the dataset relations of an adapter scheme
     *
     * @param versionId adapter scheme version id
     * @return datasets associated with the given version
     */
    @RequestMapping("getSchemeDataSets")
    @ResponseBody
    public DetailModelResult getSchemeDataSets(
            @RequestParam(value = "versionId", required = true) Integer versionId,
            @RequestParam(value = "name", required = false) String name
    ) {
        try {
            return crawlerService.getSchemeDataset(versionId, name);
        } catch (Exception e) {
            e.printStackTrace();
            return new DetailModelResult();
        }
    }
    /**
     * Save the data mapping relations of an adapter scheme
     *
     * @param version adapter scheme version
     * @param data    mapping relations
     * @return result of the save operation
     */
    @RequestMapping("saveSchemeRelation")
    @ResponseBody
    public ActionResult addSchemeRelation(
            @RequestParam(value = "version", required = true) String version,
            @RequestParam(value = "data", required = true) String data
    ) {
        try {
            return crawlerService.saveDataSetRelation(version, data);
        } catch (Exception ex) {
            ex.printStackTrace();
            return new ActionResult();
        }
    }
    @RequestMapping("getRelations")
    @ResponseBody
    public String getRelations(
            @RequestParam(value = "versionId", required = true) Integer versionId,
            @RequestParam(value = "datasetIdStr", required = false) String datasetIdStr,
            @RequestParam(value = "lines", required = false) String lines) {
        try {
            return crawlerService.getRelations(versionId, datasetIdStr, lines);
        } catch (Exception e) {
            e.printStackTrace();
            return null;
        }
    }
    @RequestMapping("setRelationsCache")
    @ResponseBody
    public Result setRelationsCache(
            @RequestParam(value = "versionId", required = true) Integer versionId,
            @RequestParam(value = "lines", required = false) String lines
    ) {
        try {
            crawlerService.setLinesCache(versionId, lines);
            return Result.success("缓存映射关系成功");
        } catch (IOException e) {
            e.printStackTrace();
        }
        return Result.error("缓存映射关系失败");
    }
    /**
     * Delete job orchestration data
     */
    @RequestMapping(value = "deleteDateSet", method = RequestMethod.POST)
    @ApiOperation(value = "删除任务编排", produces = "application/json", notes = "删除任务编排")
    @ResponseBody
    public Result deleteJobData(
            @ApiParam(name = "version", value = "版本号", required = true)
            @RequestParam(value = "version") String version) {
        if (version != null) {
            String message = crawlerService.deleteJobData(version);
            if (StringUtil.isEmpty(message)) {
                return Result.success("删除成功");
            } else {
                return Result.error(message);
            }
        } else {
            return Result.error("删除失败");
        }
    }
    /**
     * List job orchestration data with paging
     */
    @RequestMapping(value = "list", method = RequestMethod.POST)
    @ApiOperation(value = "分页显示任务编排", produces = "application/json", notes = "分页显示任务编排")
    @ResponseBody
    public DetailModelResult listJobData(
            @ApiParam(name = "rows", value = "Limit the size of result set. Must be an integer")
            @RequestParam(value = "rows", required = false) Integer rows,
            @ApiParam(name = "page", value = "Start position of result set. Must be an integer")
            @RequestParam(value = "page", required = false) Integer page) throws Exception {
        return crawlerService.getDataSetResult(rows, page);
    }
    @RequestMapping(value = "getSchemeList", method = RequestMethod.POST)
    @ApiOperation(value = "获取适配方案-方案版本下拉框", produces = "application/json", notes = "获取适配方案-方案版本下拉框")
    @ResponseBody
    public DetailModelResult getSchemeList() {
        return adapterSchemeService.getAdapterSchemeResultModelList();
    }
    /**
     * Get the datasets saved for a job orchestration
     */
    @RequestMapping(value = "savedJobData", method = RequestMethod.POST)
    @ApiOperation(value = "获取保存的数据集",produces = "application/json", notes = "保存的数据集")
    @ResponseBody
    public DetailModelResult listSavedJobData(
            @ApiParam(name = "version", value = "版本号", required = true)
            @RequestParam(value = "version") Integer version
    ) {
        return crawlerService.getDataSetSavedResult(version);
    }
    /**
     * Get the saved adapter scheme/version of a job orchestration
     */
    @RequestMapping(value = "savedSchemeList", method = RequestMethod.POST)
    @ApiOperation(value = "获取保存的适配方案",produces = "application/json", notes = "保存的适配方案")
    @ResponseBody
    public DetailModelResult savedSchemeList() {
        return crawlerService.getSchemeSavedResult();
    }
}
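
A hypothetical client-side sketch of how the /crawler/patient endpoint above might be invoked with Spring's RestTemplate; the base URL and the JSON field names inside the "patient" form parameter are assumptions for illustration (the accepted shape is defined by CrawlerManager.parsePatient, which is not shown here).

import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
import org.springframework.web.client.RestTemplate;

public class CrawlerPatientClientSketch {
    public static void main(String[] args) {
        RestTemplate restTemplate = new RestTemplate();

        HttpHeaders headers = new HttpHeaders();
        headers.setContentType(MediaType.APPLICATION_FORM_URLENCODED);

        // The controller reads a form parameter named "patient" that carries
        // the patient index serialized as JSON.
        MultiValueMap<String, String> form = new LinkedMultiValueMap<>();
        form.add("patient", "{\"patient_id\":\"P001\",\"event_no\":\"E001\",\"org_code\":\"ORG01\"}");

        // Assumed host and port; adjust to the actual hos-admin deployment.
        String response = restTemplate.postForObject(
                "http://localhost:8080/crawler/patient",
                new HttpEntity<>(form, headers),
                String.class);
        System.out.println(response);
    }
}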

+ 40 - 0
hos-admin/src/main/java/com/yihu/hos/crawler/dao/CrawlerDatasetDao.java

@ -0,0 +1,40 @@
package com.yihu.hos.crawler.dao;
import com.yihu.hos.crawler.model.flow.CrawlerDataSetModel;
import com.yihu.ehr.framework.common.dao.SQLGeneralDAO;
import org.hibernate.Query;
import org.springframework.stereotype.Repository;
import java.math.BigInteger;
import java.util.List;
/**
 * Created by HZY on 2016/4/27.
 */
@Repository("CrawlerDatasetDao")
public class CrawlerDatasetDao extends SQLGeneralDAO {
    public static final String BEAN_ID = "CrawlerDatasetDao";
    public List<CrawlerDataSetModel> getCrawlerDatasetList(Integer versionId) throws Exception {
        String sql = "select * from crawler_dataset where scheme_version_id='" + versionId + "'";
        return super.queryListBySql(sql, CrawlerDataSetModel.class);
    }
    public void deleteCrawlerDatasetList(Integer versionId) throws Exception {
        String sql = "delete from crawler_dataset where scheme_version_id= :scheme_version_id";
        Query query = getCurrentSession().createSQLQuery(sql);
        query.setInteger("scheme_version_id", versionId);
        query.executeUpdate();
    }
    public Integer getTotalRows() throws Exception {
        String countSql = "SELECT COUNT(*) FROM (SELECT a.scheme_id, a.scheme_version_id, GROUP_CONCAT(a.dataset_id SEPARATOR ',') AS datasetId, GROUP_CONCAT(a.dataset_name SEPARATOR ',') AS datasetName" +
                "  FROM crawler_dataset a " +
                "  GROUP BY a.scheme_id, a.scheme_version_id ) b WHERE 1= 1";
        Query query = getCurrentSession().createSQLQuery(countSql);
        List<BigInteger> list = query.list();
        int count = list.get(0).intValue();
        return count;
    }
}

+ 40 - 0
hos-admin/src/main/java/com/yihu/hos/crawler/dao/CrawlerFlowDao.java

@ -0,0 +1,40 @@
package com.yihu.hos.crawler.dao;
import com.yihu.hos.crawler.model.flow.CrawlerFlowModel;
import com.yihu.ehr.framework.common.dao.SQLGeneralDAO;
import org.hibernate.Query;
import org.springframework.stereotype.Repository;
import java.util.List;
/**
 * Created by HZY on 2016/4/27.
 */
@Repository("CrawlerFlowDao")
public class CrawlerFlowDao extends SQLGeneralDAO {
    public static final String BEAN_ID = "CrawlerFlowDao";
    public List<CrawlerFlowModel> getCrawlerFlowList(Integer versionId) {
        String adapterVersionHql = "FROM CrawlerFlowModel where schemeVersionId= :schemeVersionId";
        Query query = getCurrentSession().createQuery(adapterVersionHql);
        query.setParameter("schemeVersionId", versionId);
        List<CrawlerFlowModel> modelList = query.list();
        return modelList;
    }
    public void deleteCrawlerFlowList(Integer versionId) throws Exception {
        String sql = "delete from crawler_flow where scheme_version_id= :scheme_version_id";
        Query query = getCurrentSession().createSQLQuery(sql);
        query.setInteger("scheme_version_id", versionId);
        query.executeUpdate();
    }
    public List<CrawlerFlowModel> getCrawlerFlowList(Integer versionId, List<String> datasetCodes) {
        String adapterVersionHql = "FROM CrawlerFlowModel where schemeVersionId= :schemeVersionId and datasetCode in (:datasetCode) and inputDatasetCode in (:inputDatasetCode)";
        Query query = getCurrentSession().createQuery(adapterVersionHql);
        query.setParameter("schemeVersionId", versionId);
        query.setParameterList("datasetCode", datasetCodes);
        query.setParameterList("inputDatasetCode", datasetCodes);
        List<CrawlerFlowModel> modelList = query.list();
        return modelList;
    }
}

+ 33 - 0
hos-admin/src/main/java/com/yihu/hos/crawler/dao/CrawlerFlowHeadDao.java

@ -0,0 +1,33 @@
package com.yihu.hos.crawler.dao;
import com.yihu.hos.crawler.model.flow.CrawlerFlowHeadModel;
import com.yihu.ehr.framework.common.dao.SQLGeneralDAO;
import org.hibernate.Query;
import org.springframework.stereotype.Repository;
import java.util.List;
/**
 * Created by HZY on 2016/4/27.
 */
@Repository("CrawlerFlowHeadDao")
public class CrawlerFlowHeadDao extends SQLGeneralDAO {
    public static final String BEAN_ID = "CrawlerFlowHeadDao";
    public void deleteCrawlerFlowHeadList(Integer versionId) throws Exception {
        String sql = "delete from crawler_flow_head where scheme_version_id= :scheme_version_id";
        Query query = getCurrentSession().createSQLQuery(sql);
        query.setInteger("scheme_version_id", versionId);
        query.executeUpdate();
    }
    public List<CrawlerFlowHeadModel> getCrawlerFlowHeadList(Integer versionId) {
        String sql = "FROM CrawlerFlowHeadModel where schemeVersionId= :schemeVersionId";
        Query query = getCurrentSession().createQuery(sql);
        query.setParameter("schemeVersionId", versionId);
        List<CrawlerFlowHeadModel> modelList = query.list();
        return modelList;
    }
}

+ 95 - 0
hos-admin/src/main/java/com/yihu/hos/crawler/format/AdapterBase.java

@ -0,0 +1,95 @@
package com.yihu.hos.crawler.format;
import com.yihu.ehr.framework.util.springutil.SpringBeanUtil;
import com.yihu.hos.standard.model.adapter.AdapterSchemeVersionModel;
import com.yihu.hos.standard.service.adapter.*;
import com.yihu.hos.standard.service.bo.AdapterVersion;
import com.yihu.hos.standard.service.standard.StdDictEntryService;
import com.yihu.hos.standard.service.standard.StdDictService;
import javax.annotation.Resource;
/**
 * Base class for adapters; caches DAO/service lookups
 *
 * @created Created by Air on 2015/6/10.
 */
public class AdapterBase {
    @Resource(name = AdapterDatasetService.BEAN_ID)
    private AdapterDatasetService adapterDatasetService;
    @Resource(name = AdapterSchemeVersionService.BEAN_ID)
    private AdapterSchemeVersionService adapterSchemeVersionService;
    @Resource(name = AdapterMetadataService.BEAN_ID)
    private AdapterMetadataService adapterMetadataService;
    @Resource(name = AdapterDictEntryService.BEAN_ID)
    private AdapterDictEntryService adapterDictEntryService;
    @Resource(name = AdapterDictService.BEAN_ID)
    private AdapterDictService adapterDictService;
    @Resource(name = StdDictEntryService.BEAN_ID)
    private StdDictEntryService stdDictEntryService;
    @Resource(name = StdDictService.BEAN_ID)
    private StdDictService stdDictService;
    protected AdapterVersion adapterVersion;
    public AdapterVersion getAdapterVersion() {
        AdapterSchemeVersionModel adapterSchemeVersionModel = getAdapterSchemeVersionService().getEhrAdapterVersionLasted();
        if (adapterSchemeVersionModel == null) {
            adapterSchemeVersionModel = new AdapterSchemeVersionModel();
        }
        String version = adapterSchemeVersionModel.getVersion();
        adapterVersion = new AdapterVersion(version);
        return adapterVersion;
    }
    public AdapterDatasetService getAdapterDatasetService() {
        if (adapterDatasetService == null) {
            adapterDatasetService = SpringBeanUtil.getService(AdapterDatasetService.BEAN_ID);
        }
        return adapterDatasetService;
    }
    public AdapterSchemeVersionService getAdapterSchemeVersionService() {
        if (adapterSchemeVersionService == null) {
            adapterSchemeVersionService = SpringBeanUtil.getService(AdapterSchemeVersionService.BEAN_ID);
        }
        return adapterSchemeVersionService;
    }
    public AdapterMetadataService getAdapterMetadataService() {
        if (adapterMetadataService == null) {
            adapterMetadataService = SpringBeanUtil.getService(AdapterMetadataService.BEAN_ID);
        }
        return adapterMetadataService;
    }
    public AdapterDictService getAdapterDictService() {
        if (adapterDictService == null) {
            adapterDictService = SpringBeanUtil.getService(AdapterDictService.BEAN_ID);
        }
        return adapterDictService;
    }
    public AdapterDictEntryService getAdapterDictEntryService() {
        if (adapterDictEntryService == null) {
            adapterDictEntryService = SpringBeanUtil.getService(AdapterDictEntryService.BEAN_ID);
        }
        return adapterDictEntryService;
    }
    public StdDictEntryService getStdDictEntryService() {
        if (stdDictEntryService == null) {
            stdDictEntryService = SpringBeanUtil.getService(StdDictEntryService.BEAN_ID);
        }
        return stdDictEntryService;
    }
    public StdDictService getStdDictService() {
        if (stdDictService == null) {
            stdDictService = SpringBeanUtil.getService(StdDictService.BEAN_ID);
        }
        return stdDictService;
    }
}
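
The lazy getters above exist because adapter objects are sometimes created with new outside the Spring container, leaving the @Resource fields null; a minimal usage sketch under that assumption (it requires a running Spring context so SpringBeanUtil can resolve the beans by id):

import com.yihu.hos.standard.service.bo.AdapterVersion;

public class AdapterBaseSketch {
    public static void main(String[] args) {
        AdapterBase base = new AdapterBase();             // not Spring-managed: injected fields are null
        AdapterVersion latest = base.getAdapterVersion(); // getters fall back to SpringBeanUtil lookups
        System.out.println(latest);
    }
}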

+ 65 - 0
hos-admin/src/main/java/com/yihu/hos/crawler/format/AdapterScheme.java

@ -0,0 +1,65 @@
package com.yihu.hos.crawler.format;
import com.yihu.hos.standard.model.adapter.AdapterDatasetModel;
import com.yihu.hos.standard.model.adapter.AdapterMetadataModel;
import com.yihu.hos.standard.model.adapter.AdapterSchemeVersionModel;
import com.yihu.hos.standard.service.adapter.AdapterDatasetService;
import com.yihu.hos.standard.service.bo.AdapterVersion;
import net.sf.json.JSONObject;
import javax.annotation.Resource;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Adapter scheme
 * One adapter scheme may involve one or more organizations whose data is to be collected
 *
 * @created Created by Air on 2015/6/3.
 */
public class AdapterScheme extends AdapterBase {
    @Resource(name = AdapterDatasetService.BEAN_ID)
    private AdapterDatasetService adapterDatasetService;
    private HashMap<String, List<AdapterMetadataModel>> adapterMetaDataMap;
    public AdapterScheme() {
    }
    public synchronized HashMap<String, List<AdapterMetadataModel>> getAdapterMetaDataMap() {
        if (adapterMetaDataMap == null) {
            AdapterSchemeVersionModel adapterSchemeVersionModel = getAdapterSchemeVersionService().getEhrAdapterVersionLasted();
            if (adapterSchemeVersionModel == null) {
                adapterSchemeVersionModel = new AdapterSchemeVersionModel();
            }
            String version = adapterSchemeVersionModel.getVersion();
            AdapterVersion adapterVersion = new AdapterVersion(version);
            adapterMetaDataMap = new HashMap<>();
            Map<String, String> condition = new HashMap<>();
            condition.put("column", "adapter_dataset_code");
            JSONObject jsonObject = JSONObject.fromObject(condition);
            List<AdapterDatasetModel> adapterDataSets = adapterDatasetService.getAdapterDatasetNotNullList(adapterVersion, jsonObject.toString());
            for (AdapterDatasetModel adapterDataSet : adapterDataSets) {
                List<AdapterMetadataModel> adapterMetaDataList = getAdapterMetadataService().getAdapterMetadataByDataset(version, adapterDataSet.getId());
                if (adapterMetaDataList != null && adapterMetaDataList.size() > 0) {
                    adapterMetaDataMap.put(adapterDataSet.getStdDatasetCode(), adapterMetaDataList);
                }
            }
        }
        return adapterMetaDataMap;
    }
    public List<AdapterMetadataModel> getAdapterMetaDatas(String dataSetCode) {
        if (dataSetCode == null) {
            return new ArrayList<>();
        }
        return getAdapterMetaDataMap().get(dataSetCode);
    }
}

+ 159 - 0
hos-admin/src/main/java/com/yihu/hos/crawler/format/DataSetTransformer.java

@ -0,0 +1,159 @@
package com.yihu.hos.crawler.format;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.yihu.hos.crawler.model.adapter.AdapterDataSet;
import com.yihu.hos.crawler.model.adapter.AdapterDict;
import com.yihu.hos.crawler.model.adapter.AdapterMetaData;
import com.yihu.hos.crawler.model.patient.Patient;
import com.yihu.hos.crawler.model.transform.DictDataType;
import com.yihu.hos.crawler.model.transform.TransformType;
import com.yihu.ehr.dbhelper.jdbc.DBHelper;
import com.yihu.ehr.framework.constrant.Constants;
import com.yihu.ehr.framework.util.operator.StringUtil;
import java.util.Iterator;
import java.util.Map;
/**
 * Currently only JSON input is handled.
 * <p>
 * JSON format:
 * {
 * "inner_version":"xxxxx",
 * "patient_id":"xxxx",
 * "event_no":"xxxx",
 * "code":"dataset_code",
 * "org_code":"xxxx",
 * "data":
 * [{"metadata_code1":"5","metadata_code2":"6"},
 *  {"metadata_code1":"1","metadata_code2":"2"}]}
 * <p>
 * Created by Air on 2015/6/4.
 */
public class DataSetTransformer implements IDataTransformer {
    private JsonNode jsonObject;
    private Patient patient;
    private static DBHelper db;
    public JsonNode getJsonObject() {
        return jsonObject;
    }
    public DataSetTransformer() {
        if (db == null) {
            db = new DBHelper();
        }
    }
    public boolean transfer(Map<String, AdapterDataSet> dataSetMap) {
        if (jsonObject != null) {
            String dataSetCode = jsonObject.get("code").asText();
            ArrayNode jsonArray = (ArrayNode) jsonObject.get("data");
            AdapterDataSet adapterDataSet = dataSetMap.get(dataSetCode);
            boolean transfer = transferJson(jsonArray, adapterDataSet);
            return transfer;
        }
        return false;
    }
    public String getData() {
        // Make sure the returned document carries its version information
        return jsonObject.toString();
    }
    public void setData(JsonNode data) {
        jsonObject = data;
        setPatient();
    }
    @Override
    public Patient getPatient() {
        return patient;
    }
    public TransformType getTransformType() {
        return TransformType.DATA_SET_JSON;
    }
    /**
     * JSON format:
     * {
     * "inner_version":"xxxxx",
     * "patient_id":"xxxx",
     * "event_no":"xxxx",
     * "code":"dataset_code",
     * "data":
     * [{"metadata_code1":"5","metadata_code2":"6"},
     *  {"metadata_code1":"1","metadata_code2":"2"}]}
     *
     * @param jsonArray      rows of the dataset
     * @param adapterDataSet adapter dataset used to standardize each row
     * @return true once every row has been transformed
     */
    public boolean transferJson(ArrayNode jsonArray, AdapterDataSet adapterDataSet) {
        for (Object objectRow : jsonArray) {
            if (objectRow instanceof JsonNode) {
                transferJsonRow((ObjectNode) objectRow, adapterDataSet);
            }
        }
        return true;
    }
    public void transferJsonRow(ObjectNode jsonObject, AdapterDataSet adapterDataSet) {
        Iterator<Map.Entry<String, JsonNode>> fields = jsonObject.fields();
        while (fields.hasNext()) {
            Map.Entry<String, JsonNode> next = fields.next();
            String key = next.getKey();
            JsonNode jsonNode = next.getValue();
            String value = jsonNode.asText();
            String stdValue = transferElem(adapterDataSet, key, value);
            // Write the standardized value back into the current row
            jsonObject.put(key, stdValue);
        }
    }
    /**
     * @param adapterDataSet adapter dataset
     * @param code           metadata (data element) code
     * @param esbData        source data value
     * @return String standardized value
     * @modify 2015.09.16 airhead added value/code to code conversion
     */
    public String transferElem(AdapterDataSet adapterDataSet, String code, String esbData) {
        Map<String, AdapterMetaData> adapterMetaDataMap = adapterDataSet.getAdapterMetaDataMap();
        // Metadata codes are stored upper-cased by AdapterDataSet.prepareData()
        AdapterMetaData adapterMetaData = adapterMetaDataMap.get(code.toUpperCase());
        if (adapterMetaData == null) {
            return esbData;
        }
        AdapterDict adapterDict = adapterMetaData.getAdapterDict();
        if (adapterDict == null) {
            return esbData;
        }
        String ehrData = null;
        DictDataType adapterDictDataType = adapterMetaData.getAdapterDictDataType();
        if (adapterDictDataType == DictDataType.VALUE) {
            ehrData = adapterDict.getAdapterValueToCodeMap().get(esbData);
        } else if (adapterDictDataType == DictDataType.CODE) {
            ehrData = adapterDict.getAdapterCodeToCodeMap().get(esbData);
        }
        if (StringUtil.isEmpty(ehrData)) {
            return Constants.EMPTY;
        }
        return ehrData;
    }
    /**
     * Build the Patient from the DataSet information
     */
    private void setPatient() {
        patient = new Patient();
        patient.setPatientId(jsonObject.get("patient_id").asText());
        patient.setEventNo(jsonObject.get("event_no").asText());
        patient.setOrgCode(jsonObject.get("org_code").asText());
    }
}
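
A minimal sketch of the JSON shape described in the class comment and how a caller feeds it to the transformer, assuming Jackson is on the classpath and that the DBHelper default constructor can initialize in this environment; the dataset code "HDSA00_01" and the commented-out dataSetMap are illustrative assumptions.

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class DataSetTransformerSketch {
    public static void main(String[] args) throws Exception {
        String json = "{"
                + "\"inner_version\":\"v1\","
                + "\"patient_id\":\"P001\","
                + "\"event_no\":\"E001\","
                + "\"org_code\":\"ORG01\","
                + "\"code\":\"HDSA00_01\","
                + "\"data\":[{\"metadata_code1\":\"5\",\"metadata_code2\":\"6\"}]"
                + "}";
        JsonNode node = new ObjectMapper().readTree(json);

        DataSetTransformer transformer = new DataSetTransformer();
        transformer.setData(node);  // also builds the Patient from patient_id / event_no / org_code
        System.out.println(transformer.getPatient().getEventNo());

        // transformer.transfer(dataSetMap);  // dataSetMap: Map<String, AdapterDataSet> keyed by dataset
        //                                    // code, normally assembled from the current adapter version
    }
}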

+ 54 - 0
hos-admin/src/main/java/com/yihu/hos/crawler/format/DocumentTransformer.java

@ -0,0 +1,54 @@
package com.yihu.hos.crawler.format;
import com.fasterxml.jackson.databind.JsonNode;
import com.yihu.hos.crawler.model.adapter.AdapterDataSet;
import com.yihu.hos.crawler.model.patient.Patient;
import com.yihu.hos.crawler.model.transform.TransformType;
import java.util.Map;
/**
 * Created by Administrator on 2015/10/15.
 */
public class DocumentTransformer implements IDataTransformer {
    private JsonNode jsonObject;
    protected AdapterScheme adapterScheme;
    protected Patient patient;
    public DocumentTransformer(AdapterScheme adapterScheme) {
        this.adapterScheme = adapterScheme;
    }
    @Override
    public Patient getPatient() {
        return patient;
    }
    /**
     * Unstructured documents need no transformation
     *
     * @return always true
     */
    public boolean transfer(Map<String, AdapterDataSet> dataSetMap) {
        return true;
    }
    public JsonNode getJsonObject() {
        return jsonObject;
    }
    public String getData() {
        return jsonObject.asText();
    }
    public void setData(JsonNode data) {
        jsonObject = data;
    }
    public TransformType getTransformType() {
        return TransformType.DOCUMENT;
    }
}

+ 27 - 0
hos-admin/src/main/java/com/yihu/hos/crawler/format/IDataTransformer.java

@ -0,0 +1,27 @@
package com.yihu.hos.crawler.format;
import com.fasterxml.jackson.databind.JsonNode;
import com.yihu.hos.crawler.model.adapter.AdapterDataSet;
import com.yihu.hos.crawler.model.patient.Patient;
import com.yihu.hos.crawler.model.transform.TransformType;
import java.util.Map;
/**
 * @author Airhead
 * @version 1.0
 * @created 22-May-2015 11:24:26
 */
public interface IDataTransformer {
    boolean transfer(Map<String, AdapterDataSet> dataSetMap);
    String getData();
    void setData(JsonNode data);
    Patient getPatient();
    TransformType getTransformType();
}

+ 109 - 0
hos-admin/src/main/java/com/yihu/hos/crawler/model/adapter/AdapterDataSet.java

@ -0,0 +1,109 @@
package com.yihu.hos.crawler.model.adapter;
import com.yihu.hos.crawler.model.patient.PatientIdentity;
import com.yihu.ehr.framework.util.operator.CollectionUtil;
import com.yihu.ehr.framework.util.springutil.SpringBeanUtil;
import com.yihu.hos.standard.model.adapter.AdapterDatasetModel;
import com.yihu.hos.standard.model.adapter.AdapterDictEntryModel;
import com.yihu.hos.standard.model.adapter.AdapterMetadataModel;
import com.yihu.hos.standard.model.adapter.AdapterSchemeModel;
import com.yihu.hos.standard.service.adapter.AdapterMetadataService;
import com.yihu.hos.standard.service.adapter.AdapterSchemeService;
import com.yihu.hos.standard.service.bo.AdapterVersion;
import com.yihu.hos.standard.service.bo.StandardVersion;
import net.sf.json.JSONObject;
import java.util.*;
/**
 * @author Air
 * @version 1.0
 * @created 2015.08.11 11:29
 */
public class AdapterDataSet {
    private List<AdapterMetaData> adapterMetaDataList;
    private AdapterDatasetModel adapterDatasetModel;
    private Map<String, AdapterMetaData> adapterMetaDataMap;
    private String eventNoCode;
    private AdapterVersion adapterVersion;
    public AdapterDataSet(AdapterDatasetModel adapterDatasetModel, AdapterVersion adapterVersion, Map<Integer, List<AdapterDictEntryModel>> entryMap) {
        this.adapterDatasetModel = adapterDatasetModel;
        this.adapterVersion = adapterVersion;
        prepareData(entryMap);
    }
    public AdapterDatasetModel getAdapterDataSetT() {
        return adapterDatasetModel;
    }
    public void prepareData(Map<Integer, List<AdapterDictEntryModel>> entryMap) {
        try {
            adapterMetaDataList = new ArrayList<>();
            adapterMetaDataMap = new HashMap<>();
            Map<String, Object> condition = new HashMap<>();
            condition.put("column", "adapter_metadata_code");
            JSONObject jsonpObject = JSONObject.fromObject(condition);
            AdapterMetadataService metadataService = SpringBeanUtil.getService(AdapterMetadataService.BEAN_ID);
            List<AdapterMetadataModel> adapterMetaDataModelList = metadataService.getAdapterMetadataNotNullList(adapterVersion, adapterDatasetModel.getStdDatasetId(), jsonpObject.toString());
            List<Integer> stdMetaDataIdList = new ArrayList<>();
            if (!CollectionUtil.isEmpty(adapterMetaDataModelList)) {
                for (AdapterMetadataModel adapterMetadataModel : adapterMetaDataModelList) {
                    stdMetaDataIdList.add(adapterMetadataModel.getStdMetadataId());
                }
                for (AdapterMetadataModel adapterMetadataModel : adapterMetaDataModelList) {
                    AdapterMetaData adapterMetaData = new AdapterMetaData(adapterMetadataModel, adapterVersion, entryMap);
                    adapterMetaDataList.add(adapterMetaData);
                    adapterMetaDataMap.put(adapterMetadataModel.getStdMetadataCode().toUpperCase(), adapterMetaData);
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
    public Map<String, AdapterMetaData> getAdapterMetaDataMap() {
        return adapterMetaDataMap;
    }
    public List<AdapterMetaData> getAdapterMetaDataList() {
        return adapterMetaDataList;
    }
    public boolean isHavePatientID() {
        return findPatientIdentity(PatientIdentity.getPatientIDCode());
    }
    public boolean isHaveEventNo() {
        Set<String> eventNoSet = PatientIdentity.getEventNoSet();
        for (String eventNo : eventNoSet) {
            if (findPatientIdentity(eventNo)) {
                eventNoCode = eventNo;
                return true;
            }
        }
        return false;
    }
    public boolean isHaveLocalCardNo() {
        return findPatientIdentity(PatientIdentity.getLocalCardNoCode());
    }
    public boolean isHaveIdCard() {
        return findPatientIdentity(PatientIdentity.getIdCardCode());
    }
    public boolean findPatientIdentity(String code) {
        return getAdapterMetaDataMap().get(code) != null;
    }
    public String getEventNoCode() {
        return eventNoCode;
    }
}

+ 46 - 0
hos-admin/src/main/java/com/yihu/hos/crawler/model/adapter/AdapterDict.java

@ -0,0 +1,46 @@
package com.yihu.hos.crawler.model.adapter;
import com.yihu.hos.crawler.format.AdapterBase;
import com.yihu.hos.standard.model.adapter.AdapterDictEntryModel;
import com.yihu.hos.standard.model.adapter.AdapterDictModel;
import com.yihu.hos.standard.service.bo.AdapterVersion;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * @author Air
 * @version 1.0
 * @created 2015.08.11 11:31
 */
public class AdapterDict extends AdapterBase {
    private AdapterDictModel adapterDictModel;
    private AdapterDictModel adapterDictT;
    private Map<String, String> adapterValueToCodeMap;
    private Map<String, String> adapterCodeToCodeMap;
    public AdapterDict(AdapterDictModel adapterDictModel, AdapterVersion adapterVersion, Map<Integer, List<AdapterDictEntryModel>> entryMap) {
        this.adapterDictModel = adapterDictModel;
        this.adapterDictT = adapterDictModel;
        prepareData(entryMap);
    }
    public void prepareData(Map<Integer, List<AdapterDictEntryModel>> entryMap) {
        adapterValueToCodeMap = new HashMap<>();
        adapterCodeToCodeMap = new HashMap<>();
        List<AdapterDictEntryModel> adapterDictEntryModelList = entryMap.get(adapterDictModel.getStdDictId());
        if (adapterDictEntryModelList == null) {
            return;
        }
        for (AdapterDictEntryModel adapterDictEntryModel : adapterDictEntryModelList) {
            adapterValueToCodeMap.put(adapterDictEntryModel.getStdEntryValue(), adapterDictEntryModel.getAdapterEntryCode());
            adapterCodeToCodeMap.put(adapterDictEntryModel.getStdEntryCode(), adapterDictEntryModel.getAdapterEntryCode());
        }
    }
    public Map<String, String> getAdapterValueToCodeMap() {
        return adapterValueToCodeMap;
    }
    public Map<String, String> getAdapterCodeToCodeMap() {
        return adapterCodeToCodeMap;
    }
}

+ 13 - 0
hos-admin/src/main/java/com/yihu/hos/crawler/model/adapter/AdapterDictEntry.java

@ -0,0 +1,13 @@
package com.yihu.hos.crawler.model.adapter;
import com.yihu.hos.standard.model.adapter.AdapterDictEntryModel;
public class AdapterDictEntry {
    private AdapterDictEntryModel adapterDictEntryModel;
    private String version;
    public AdapterDictEntry(AdapterDictEntryModel adapterDictEntryModel, String version) {
        this.adapterDictEntryModel = adapterDictEntryModel;
        this.version = version;
    }
}

+ 63 - 0
hos-admin/src/main/java/com/yihu/hos/crawler/model/adapter/AdapterMetaData.java

@ -0,0 +1,63 @@
package com.yihu.hos.crawler.model.adapter;
import com.yihu.hos.crawler.model.transform.DictDataType;
import com.yihu.hos.crawler.model.standard.StdMetaData;
import com.yihu.ehr.framework.util.operator.NumberUtil;
import com.yihu.ehr.framework.util.springutil.SpringBeanUtil;
import com.yihu.hos.standard.model.adapter.AdapterDictEntryModel;
import com.yihu.hos.standard.model.adapter.AdapterDictModel;
import com.yihu.hos.standard.model.adapter.AdapterMetadataModel;
import com.yihu.hos.standard.model.standard.StdMetaDataModel;
import com.yihu.hos.standard.service.adapter.AdapterDictService;
import com.yihu.hos.standard.service.bo.AdapterVersion;
import java.util.List;
import java.util.Map;
/**
 * @author Air
 * @version 1.0
 * @created 2015.08.11 11:31
 */
public class AdapterMetaData {
    private AdapterMetadataModel adapterMetadataModel;
    private AdapterDict adapterDict;
    private AdapterVersion adapterVersion;
    public AdapterMetaData(AdapterMetadataModel adapterMetadataModel, AdapterVersion adapterVersion, Map<Integer, List<AdapterDictEntryModel>> entryMap) {
        this.adapterMetadataModel = adapterMetadataModel;
        this.adapterVersion = adapterVersion;
        prepareData(entryMap);
    }
    public AdapterMetadataModel getAdapterMetadataModel() {
        return adapterMetadataModel;
    }
    public void prepareData(Map<Integer, List<AdapterDictEntryModel>> entryMap) {
        if (!NumberUtil.isZero(adapterMetadataModel.getStdDictId())) {
            AdapterDictService adapterDictService = SpringBeanUtil.getService(AdapterDictService.BEAN_ID);
            // By default the adapter dict id matches the metadata's standard dict id
            AdapterDictModel adapterDictModel = (AdapterDictModel) adapterDictService.get(AdapterDictModel.class, adapterVersion.getDictTableName(), adapterMetadataModel.getStdDictId());
            if (adapterDictModel != null) {
                adapterDict = new AdapterDict(adapterDictModel, adapterVersion, entryMap);
            }
        }
    }
    public AdapterDict getAdapterDict() {
        return adapterDict;
    }
    public DictDataType getAdapterDictDataType() {
        Integer orgDictDataType = adapterMetadataModel.getAdapterDataType();
        if (orgDictDataType == null){
            return DictDataType.VALUE;
        }
        return DictDataType.values()[orgDictDataType];
    }
}

+ 198 - 0
hos-admin/src/main/java/com/yihu/hos/crawler/model/config/SysConfig.java

@ -0,0 +1,198 @@
package com.yihu.hos.crawler.model.config;
import com.yihu.hos.crawler.model.patient.PatientIdentity;
import com.yihu.hos.crawler.model.patient.PatientIndex;
import com.yihu.ehr.framework.util.operator.StringUtil;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dom4j.Document;
import org.dom4j.DocumentException;
import org.dom4j.Element;
import org.dom4j.io.SAXReader;
import java.io.InputStream;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class SysConfig {
    private static Logger logger = LogManager.getLogger(SysConfig.class);
    public static final String HOS_RESOURCES_CONFIG = "/config/sys.config.xml";
    private static volatile SysConfig instance = null;
    private HashMap<String, PatientIdentity> patientIdentityHashMap;
    private Map<String, PatientIndex> patientIndexMap; // patient summary index content
    private Map<String, String> publicKeyMap;
    private Map<String, String> versionMap;
    public static String tempFile;
    public static String orgcode;
    public static String registerDataSet;
    public static String registerIdCardNo;
    private SysConfig() {
        patientIdentityHashMap = new HashMap<>();
        patientIndexMap = new HashMap<>();
        publicKeyMap = new HashMap<>();
        versionMap = new HashMap<>();
        init();
    }
    public static SysConfig getInstance() {
        if (instance == null) {
            synchronized (SysConfig.class) {
                if (instance == null) {
                    try {
                        instance = new SysConfig();
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                }
            }
        }
        return instance;
    }
    public void finalize() throws Throwable {
    }
    public String getTempFile() {
        return this.tempFile;
    }
    public void setTempFile(String tempFile) {
        this.tempFile = tempFile;
    }
    public PatientIdentity getPatientIdentity(String dataSetCode) {
        return patientIdentityHashMap.get(dataSetCode);
    }
    public HashMap<String, PatientIdentity> getPatientIdentityHashMap() {
        return patientIdentityHashMap;
    }
    public String getRegisterDataSet() {
        return registerDataSet;
    }
    public String getRegisterIdCardNo() {
        return registerIdCardNo;
    }
    public Map<String, PatientIndex> getPatientIndexMap() {
        return patientIndexMap;
    }
    public Map<String, String> getPublicKeyMap() {
        return publicKeyMap;
    }
    public Map<String, String> getVersionMap() {
        return versionMap;
    }
    public void setVersionMap(Map<String, String> versionMap) {
        this.versionMap = versionMap;
    }
    private Document getDocument() throws DocumentException {
        SAXReader reader = new SAXReader();
        Document document = null;
        try {
            InputStream inputStream = SysConfig.class.getResourceAsStream(HOS_RESOURCES_CONFIG);
            document = reader.read(inputStream);
            return document;
        } catch (DocumentException de) {
            logger.error("Failed to read the XML configuration from the classpath", de);
            return null;
        }
    }
    private void init() {
        try {
            Document document = this.getDocument();
            Element rootElement = null;
            if (document != null) {
                rootElement = document.getRootElement();
            }
            if (rootElement == null) {
                return;
            }
            this.initCrawler(rootElement);
            this.initEventNo(rootElement);
            this.initVersion(rootElement);
//            this.initPatientIndex(rootElement);
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
        }
    }
    private void initVersion(Element rootElement){
        String tempFile = rootElement.elementTextTrim("temp_file");
        if (!StringUtil.isEmpty(tempFile)) {
            this.tempFile = tempFile;
        } else {
            String home = System.getProperty("catalina.home").replace('\\','/');
            String homeUrl = home.substring(0,home.lastIndexOf('/')+1);
            this.tempFile = homeUrl + "temp";
        }
        List queueDataSets = rootElement.element("ehr_version").elements("org_code");
        for (Object obj : queueDataSets) {
            if (obj instanceof Element) {
                Element element = (Element) obj;
                String dataSetCode = element.attributeValue("code");
                String version = element.elementTextTrim("version");
                versionMap.put(dataSetCode, version);
            }
        }
        Element registerDataSet = rootElement.element("register").element("dataset");
        this.registerDataSet = registerDataSet.attributeValue("code");
        this.registerIdCardNo = registerDataSet.elementTextTrim("id_card");
    }
    private void initCrawler(Element rootElement) {
        String tempFile = rootElement.elementTextTrim("temp_file");
        if (!StringUtil.isEmpty(tempFile)) {
            this.tempFile = tempFile;
        } else {
            String home = System.getProperty("catalina.home").replace('\\','/');
            String homeUrl = home.substring(0,home.lastIndexOf('/')+1);
            this.tempFile = homeUrl + "temp";
        }
        List queueDataSets = rootElement.element("patient_queue").elements("dataset");
        for (Object obj : queueDataSets) {
            if (obj instanceof Element) {
                Element element = (Element) obj;
                String dataSetCode = element.attributeValue("code");
                String eventNo = element.elementTextTrim("event_no");
                String refTime = element.elementTextTrim("ref_time");
                PatientIdentity patientIdentity = new PatientIdentity(eventNo, refTime);
                patientIdentityHashMap.put(dataSetCode, patientIdentity);
            }
        }
        Element registerDataSet = rootElement.element("register").element("dataset");
        this.registerDataSet = registerDataSet.attributeValue("code");
        this.registerIdCardNo = registerDataSet.elementTextTrim("id_card");
    }
    private void initEventNo(Element rootElement) {
        List eventItems = rootElement.element("event_no").elements("item");
        for (Object obj : eventItems) {
            if (obj instanceof Element) {
                Element element = (Element) obj;
                String eventNoCode = element.getTextTrim();
                PatientIdentity.addEventNoCode(eventNoCode);
            }
        }
    }
}//end SysConfig
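
A hypothetical sketch of reading the parsed configuration, assuming /config/sys.config.xml is on the classpath and contains the elements the init methods above look for (temp_file, patient_queue/dataset, register/dataset, event_no/item, ehr_version/org_code):

public class SysConfigSketch {
    public static void main(String[] args) {
        SysConfig config = SysConfig.getInstance();        // lazily parses /config/sys.config.xml
        System.out.println(config.getTempFile());          // temp_file, or <catalina.home parent>/temp as fallback
        System.out.println(config.getRegisterDataSet());   // register/dataset "code" attribute
        System.out.println(config.getPatientIdentityHashMap().keySet()); // dataset codes from patient_queue
        System.out.println(config.getVersionMap());        // org_code -> version entries from ehr_version
    }
}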

+ 63 - 0
hos-admin/src/main/java/com/yihu/hos/crawler/model/flow/CrawlerDataSetModel.java

@ -0,0 +1,63 @@
package com.yihu.hos.crawler.model.flow;
import com.yihu.ehr.framework.model.Result;
import java.io.Serializable;
/**
 * Job orchestration - dataset relation
 *
 * @created HZY 2016/4/27.
 */
public class CrawlerDataSetModel extends Result implements Serializable {
    private Integer schemeId;
    private Integer schemeVersionId;
    private Integer datasetId;
    private String datasetCode;
    private String datasetName;
    public Integer getSchemeId() {
        return schemeId;
    }
    public void setSchemeId(Integer schemeId) {
        this.schemeId = schemeId;
    }
    public Integer getSchemeVersionId() {
        return schemeVersionId;
    }
    public void setSchemeVersionId(Integer schemeVersionId) {
        this.schemeVersionId = schemeVersionId;
    }
    public Integer getDatasetId() {
        return datasetId;
    }
    public void setDatasetId(Integer datasetId) {
        this.datasetId = datasetId;
    }
    public String getDatasetCode() {
        return datasetCode;
    }
    public void setDatasetCode(String datasetCode) {
        this.datasetCode = datasetCode;
    }
    public String getDatasetName() {
        return datasetName;
    }
    public void setDatasetName(String datasetName) {
        this.datasetName = datasetName;
    }
}

+ 53 - 0
hos-admin/src/main/java/com/yihu/hos/crawler/model/flow/CrawlerFlowHeadModel.java

@ -0,0 +1,53 @@
package com.yihu.hos.crawler.model.flow;
import com.yihu.ehr.framework.model.Result;
import java.io.Serializable;
/**
 * Head (entry point) record of a crawler flow
 *
 * @created Airhead 2015/12/23.
 */
public class CrawlerFlowHeadModel extends Result implements Serializable {
    private Integer schemeVersionId;
    private String datasetCode;
    private String metadataCode;
    private String identityCode;
    public Integer getSchemeVersionId() {
        return schemeVersionId;
    }
    public void setSchemeVersionId(Integer schemeVersionId) {
        this.schemeVersionId = schemeVersionId;
    }
    public String getDatasetCode() {
        return datasetCode;
    }
    public void setDatasetCode(String datasetCode) {
        this.datasetCode = datasetCode;
    }
    public String getMetadataCode() {
        return metadataCode;
    }
    public void setMetadataCode(String metadataCode) {
        this.metadataCode = metadataCode;
    }
    public String getIdentityCode() {
        return identityCode;
    }
    public void setIdentityCode(String identityCode) {
        this.identityCode = identityCode;
    }
}

+ 93 - 0
hos-admin/src/main/java/com/yihu/hos/crawler/model/flow/CrawlerFlowModel.java

@ -0,0 +1,93 @@
package com.yihu.hos.crawler.model.flow;
import com.yihu.ehr.framework.model.Result;
import java.io.Serializable;
/**
 * A single mapping line of a crawler flow
 *
 * @created Airhead 2015/12/23.
 */
public class CrawlerFlowModel extends Result implements Serializable {
    private Integer id;
    private Integer schemeVersionId;
    private String datasetCode;
    private String metadataCode;
    private String inputDatasetCode;
    private String inputMetadataCode;
    private String inputMetadataCode2;
    private String inputDefaultValue;
    public Integer getId() {
        return id;
    }
    public void setId(Integer id) {
        this.id = id;
    }
    public Integer getSchemeVersionId() {
        return schemeVersionId;
    }
    public void setSchemeVersionId(Integer schemeVersionId) {
        this.schemeVersionId = schemeVersionId;
    }
    public String getDatasetCode() {
        return datasetCode;
    }
    public void setDatasetCode(String datasetCode) {
        this.datasetCode = datasetCode;
    }
    public String getMetadataCode() {
        return metadataCode;
    }
    public void setMetadataCode(String metadataCode) {
        this.metadataCode = metadataCode;
    }
    public String getInputDatasetCode() {
        return inputDatasetCode;
    }
    public void setInputDatasetCode(String inputDatasetCode) {
        this.inputDatasetCode = inputDatasetCode;
    }
    public String getInputMetadataCode() {
        return inputMetadataCode;
    }
    public void setInputMetadataCode(String inputMetadataCode) {
        this.inputMetadataCode = inputMetadataCode;
    }
    public String getInputMetadataCode2() {
        return inputMetadataCode2;
    }
    public void setInputMetadataCode2(String inputMetadataCode2) {
        this.inputMetadataCode2 = inputMetadataCode2;
    }
    public String getInputDefaultValue() {
        return inputDefaultValue;
    }
    public void setInputDefaultValue(String inputDefaultValue) {
        this.inputDefaultValue = inputDefaultValue;
    }
}

+ 21 - 0
hos-admin/src/main/java/com/yihu/hos/crawler/model/flow/resultModel/CrawlerDatasetResultDetailModel.java

@ -0,0 +1,21 @@
package com.yihu.hos.crawler.model.flow.resultModel;
import com.yihu.hos.crawler.model.flow.CrawlerDataSetModel;
/**
 * Job orchestration dataset association
 */
public class CrawlerDatasetResultDetailModel extends CrawlerDataSetModel implements java.io.Serializable {
	private String checked;
	public String getChecked() {
		return checked;
	}
	public void setChecked(String checked) {
		this.checked = checked;
	}
}

+ 21 - 0
hos-admin/src/main/java/com/yihu/hos/crawler/model/flow/resultModel/CrawlerDatasetResultModel.java

@ -0,0 +1,21 @@
package com.yihu.hos.crawler.model.flow.resultModel;
import com.yihu.hos.standard.model.adapter.AdapterSchemeVersionModel;
import java.io.Serializable;
/**
 * @author HZY
 * @version 1.0
 * Created at 2016/4/27.
 */
public class CrawlerDatasetResultModel extends AdapterSchemeVersionModel implements Serializable {
    private String dataSets;
    public String getDataSets() {
        return dataSets;
    }
    public void setDataSets(String dataSets) {
        this.dataSets = dataSets;
    }
}

+ 30 - 0
hos-admin/src/main/java/com/yihu/hos/crawler/model/flow/resultModel/FlowEntrance.java

@ -0,0 +1,30 @@
package com.yihu.hos.crawler.model.flow.resultModel;
import java.io.Serializable;
/**
 * Orchestration flow entrance
 *
 * @author HZY
 * @version 1.0
 * Created at 2016/4/27.
 */
public class FlowEntrance implements Serializable {
    private String dataSet;
    private String meta;
    public String getDataSet() {
        return dataSet;
    }
    public void setDataSet(String dataSet) {
        this.dataSet = dataSet;
    }
    public String getMeta() {
        return meta;
    }
    public void setMeta(String meta) {
        this.meta = meta;
    }
}

+ 48 - 0
hos-admin/src/main/java/com/yihu/hos/crawler/model/flow/resultModel/FlowLines.java

@ -0,0 +1,48 @@
package com.yihu.hos.crawler.model.flow.resultModel;
import java.io.Serializable;
/**
 * Orchestration mapping relation (a line between ports)
 *
 * @author HZY
 * @version 1.0
 * Created at 2016/4/27.
 */
public class FlowLines implements Serializable {
    private String from;
    private String to;
    private String fromPort;
    private String toPort;
    public String getFrom() {
        return from;
    }
    public void setFrom(String from) {
        this.from = from;
    }
    public String getTo() {
        return to;
    }
    public void setTo(String to) {
        this.to = to;
    }
    public String getFromPort() {
        return fromPort;
    }
    public void setFromPort(String fromPort) {
        this.fromPort = fromPort;
    }
    public String getToPort() {
        return toPort;
    }
    public void setToPort(String toPort) {
        this.toPort = toPort;
    }
}

+ 41 - 0
hos-admin/src/main/java/com/yihu/hos/crawler/model/flow/resultModel/FlowMapping.java

@ -0,0 +1,41 @@
package com.yihu.hos.crawler.model.flow.resultModel;
import java.io.Serializable;
import java.util.List;
/**
 * Mapping data of an adapter scheme
 *
 * @author HZY
 * @version 1.0
 * Created at 2016/4/28.
 */
public class FlowMapping implements Serializable {
    private List<FlowEntrance> entrances;
    private List<FlowLines> lines;
    private Integer schemeVersionId;
    public List<FlowEntrance> getEntrances() {
        return entrances;
    }
    public void setEntrances(List<FlowEntrance> entrances) {
        this.entrances = entrances;
    }
    public List<FlowLines> getLines() {
        return lines;
    }
    public void setLines(List<FlowLines> lines) {
        this.lines = lines;
    }
    public Integer getSchemeVersionId() {
        return schemeVersionId;
    }
    public void setSchemeVersionId(Integer schemeVersionId) {
        this.schemeVersionId = schemeVersionId;
    }
}

+ 47 - 0
hos-admin/src/main/java/com/yihu/hos/crawler/model/flow/resultModel/MappingDataset.java

@ -0,0 +1,47 @@
package com.yihu.hos.crawler.model.flow.resultModel;
import java.util.List;
/**
 * @author HZY
 * @version 1.0
 * Created at 2016/4/29.
 */
public class MappingDataset {
    private Integer id;
    private String code;
    private String name;
    private List<MappingMetadata> data;
    public List<MappingMetadata> getData() {
        return data;
    }
    public void setData(List<MappingMetadata> data) {
        this.data = data;
    }
    public Integer getId() {
        return id;
    }
    public void setId(Integer id) {
        this.id = id;
    }
    public String getCode() {
        return code;
    }
    public void setCode(String code) {
        this.code = code;
    }
    public String getName() {
        return name;
    }
    public void setName(String name) {
        this.name = name;
    }
}

+ 36 - 0
hos-admin/src/main/java/com/yihu/hos/crawler/model/flow/resultModel/MappingMetadata.java

@ -0,0 +1,36 @@
package com.yihu.hos.crawler.model.flow.resultModel;
/**
 * @author HZY
 * @version 1.0
 * Created at 2016/4/29.
 */
public class MappingMetadata {
    private Integer id;
    private String code;
    private String name;
    public Integer getId() {
        return id;
    }
    public void setId(Integer id) {
        this.id = id;
    }
    public String getCode() {
        return code;
    }
    public void setCode(String code) {
        this.code = code;
    }
    public String getName() {
        return name;
    }
    public void setName(String name) {
        this.name = name;
    }
}

+ 103 - 0
hos-admin/src/main/java/com/yihu/hos/crawler/model/patient/Patient.java

@ -0,0 +1,103 @@
package com.yihu.hos.crawler.model.patient;
/**
 * @author Air
 * @version 1.0
 * @created 2015.08.14 10:12
 */
public class Patient {
    private String patientId;
    private String eventNo;
    private String localCardNo;
    private String idCard;
    private String orgCode;
    private String latestTime;
    private String referenceTime;
    private String reUploadFlg;
    private String jobTimeStamp;
    private String failType; // collection failure type
    public Patient() {
    }
    public String getPatientId() {
        return patientId;
    }
    public void setPatientId(String patientId) {
        this.patientId = patientId;
    }
    public String getEventNo() {
        return eventNo;
    }
    public void setEventNo(String eventNo) {
        this.eventNo = eventNo;
    }
    public String getLocalCardNo() {
        return localCardNo;
    }
    public void setLocalCardNo(String localCardNo) {
        this.localCardNo = localCardNo;
    }
    public String getIdCard() {
        return idCard;
    }
    public void setIdCard(String idCard) {
        this.idCard = idCard;
    }
    public String getOrgCode() {
        return orgCode;
    }
    public void setOrgCode(String orgCode) {
        this.orgCode = orgCode;
    }
    public String getLatestTime() {
        return latestTime;
    }
    public void setLatestTime(String latestTime) {
        this.latestTime = latestTime;
    }
    public String getReferenceTime() {
        return referenceTime;
    }
    public void setReferenceTime(String referenceTime) {
        this.referenceTime = referenceTime;
    }
    public String getJobTimeStamp() {
        return jobTimeStamp;
    }
    public void setJobTimeStamp(String jobTimeStamp) {
        this.jobTimeStamp = jobTimeStamp;
    }
    public String getReUploadFlg() {
        return reUploadFlg;
    }
    public void setReUploadFlg(String reUploadFlg) {
        this.reUploadFlg = reUploadFlg;
    }
    public String getFailType() {
        return failType;
    }
    public void setFailType(String failType) {
        this.failType = failType;
    }
}

+ 59 - 0
hos-admin/src/main/java/com/yihu/hos/crawler/model/patient/PatientIdentity.java

@ -0,0 +1,59 @@
package com.yihu.hos.crawler.model.patient;
import java.util.HashSet;
import java.util.Set;
/**
 * Patient identity
 * Patient ID, event number (outpatient number, inpatient number, etc.), card number, ID card number
 * Currently only the patient ID and the event number are used
 *
 * @author Air
 * @version 1.0
 * @created 2015.06.16 16:29
 */
public class PatientIdentity {
    public static final String PATIENT_ID = "PATIENT_ID";
    public static final String LOCAL_CARD_NO = "CARD_NO";
    public static final String ID_CARD = "DE02_01_030_00";
    public static Set<String> EVENT_NO_SET = new HashSet<>();
    private String eventNoCode;
    private String refTimeCode;
    public PatientIdentity(String eventNoCode, String refTimeCode) {
        this.eventNoCode = eventNoCode;
        this.refTimeCode = refTimeCode;
        EVENT_NO_SET.add(eventNoCode);
    }
    public static String getPatientIDCode() {
        return PATIENT_ID;
    }
    public static String getLocalCardNoCode() {
        return LOCAL_CARD_NO;
    }
    public static String getIdCardCode() {
        return ID_CARD;
    }
    public static Set<String> getEventNoSet() {
        return EVENT_NO_SET;
    }
    public static void addEventNoCode(String eventNoCode) {
        EVENT_NO_SET.add(eventNoCode);
    }
    public String getEventNoCode() {
        return eventNoCode;
    }
    public String getRefTimeCode() {
        return refTimeCode;
    }
}
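
A small illustrative sketch of how the static event-number registry above is filled and consulted; the codes used here are assumptions, in practice they come from the event_no items in sys.config.xml and from the PatientIdentity instances built per dataset in SysConfig.initCrawler():

public class PatientIdentitySketch {
    public static void main(String[] args) {
        PatientIdentity.addEventNoCode("INPATIENT_NO");    // as SysConfig.initEventNo() does per configured item
        PatientIdentity outpatient = new PatientIdentity("CLINIC_NO", "VISIT_TIME"); // constructor also registers its event-no code
        System.out.println(PatientIdentity.getEventNoSet()); // contains INPATIENT_NO and CLINIC_NO
        System.out.println(outpatient.getRefTimeCode());     // VISIT_TIME
    }
}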

+ 110 - 0
hos-admin/src/main/java/com/yihu/hos/crawler/model/patient/PatientIndex.java

@ -0,0 +1,110 @@
package com.yihu.hos.crawler.model.patient;
/**
 * Patient summary index
 *
 * @author HXY
 * @version 1.0
 * @created 2016.03.01 13:50
 */
public class PatientIndex {
    public static final String PATIENT_ID = "PATIENT_ID";
    private String eventNoCode;
    private String refTimeCode;
//    private String organization;
    private String officeCode;
    private String officeName;
    private String leaveTime;
    private String diagDataSet;
    private String diagCode;
    private String diagName;
    private String diagType;
    public PatientIndex() {
    }
    public String getDiagType() {
        return diagType;
    }
    public void setDiagType(String diagType) {
        this.diagType = diagType;
    }
    public static String getPatientId() {
        return PATIENT_ID;
    }
    public String getEventNoCode() {
        return eventNoCode;
    }
    public void setEventNoCode(String eventNoCode) {
        this.eventNoCode = eventNoCode;
    }
    public String getRefTimeCode() {
        return refTimeCode;
    }
    public void setRefTimeCode(String refTimeCode) {
        this.refTimeCode = refTimeCode;
    }
//    public String getOrganization() {
//        return organization;
//    }
//
//    public void setOrganization(String organization) {
//        this.organization = organization;
//    }
    public String getOfficeCode() {
        return officeCode;
    }
    public void setOfficeCode(String officeCode) {
        this.officeCode = officeCode;
    }
    public String getOfficeName() {
        return officeName;
    }
    public void setOfficeName(String officeName) {
        this.officeName = officeName;
    }
    public String getLeaveTime() {
        return leaveTime;
    }
    public void setLeaveTime(String leaveTime) {
        this.leaveTime = leaveTime;
    }
    public String getDiagDataSet() {
        return diagDataSet;
    }
    public void setDiagDataSet(String diagDataSet) {
        this.diagDataSet = diagDataSet;
    }
    public String getDiagCode() {
        return diagCode;
    }
    public void setDiagCode(String diagCode) {
        this.diagCode = diagCode;
    }
    public String getDiagName() {
        return diagName;
    }
    public void setDiagName(String diagName) {
        this.diagName = diagName;
    }
}

+ 41 - 0
hos-admin/src/main/java/com/yihu/hos/crawler/model/standard/StdDict.java

@ -0,0 +1,41 @@
package com.yihu.hos.crawler.model.standard;
import com.yihu.hos.crawler.format.AdapterBase;
import com.yihu.hos.standard.model.standard.StdDictionaryEntryModel;
import com.yihu.hos.standard.model.standard.StdDictionaryModel;
/**
 * @author Air
 * @version 1.0
 * @created 2015.08.11 15:25
 */
public class StdDict extends AdapterBase {
    private StdDictionaryModel stdDictT;
    public StdDict(StdDictionaryModel stdDictT) {
        this.stdDictT = stdDictT;
    }
    public StdDictionaryModel getStdDictT() {
        return stdDictT;
    }
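    /**
     * Looks up the dictionary entry for the given code under the current adapter
     * version and returns its standard value, or null when no entry matches.
     */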
    public String toValue(String code) {
        StdDictionaryEntryModel entryModel=getStdDictEntryService().getStEntryValueByCode(adapterVersion.getVersion(), stdDictT.getId(), code);
        if (entryModel==null){
            return null;
        }else {
            return entryModel.getValue();
        }
    }
    public String toCode(String value) {
        StdDictionaryEntryModel entryModel=getStdDictEntryService().getStEntryValueByCode(adapterVersion.getVersion(), stdDictT.getId(), value);
        if (entryModel==null){
            return null;
        }else {
            return entryModel.getCode();
        }
    }
}

+ 37 - 0
hos-admin/src/main/java/com/yihu/hos/crawler/model/standard/StdMetaData.java

@ -0,0 +1,37 @@
package com.yihu.hos.crawler.model.standard;
import com.yihu.hos.crawler.format.AdapterBase;
import com.yihu.hos.crawler.model.transform.DictDataType;
import com.yihu.hos.standard.model.standard.StdMetaDataModel;
import static com.yihu.hos.crawler.model.transform.MetaDataType.S2;
import static com.yihu.hos.crawler.model.transform.MetaDataType.S3;
/**
 * @author Air
 * @version 1.0
 * @created 2015.08.11 15:25
 */
public class StdMetaData extends AdapterBase {
    private StdMetaDataModel stdMetaDataT;
    public StdMetaData(StdMetaDataModel stdMetaDataT) {
        this.stdMetaDataT = stdMetaDataT;
    }
    public StdMetaDataModel getStdMetaDataT() {
        return stdMetaDataT;
    }
    public DictDataType getDictDataType() {
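        // Metadata of type S2/S3 that is bound to a dictionary carries dictionary codes;
        // everything else is treated as a plain value.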
        Integer dict = stdMetaDataT.getDictId();
        String type = stdMetaDataT.getType();
        if (dict != null && (type.equals(S2.name()) || type.equals(S3.name()))) {
            return DictDataType.CODE;
        }
        return DictDataType.VALUE;
    }
}

+ 9 - 0
hos-admin/src/main/java/com/yihu/hos/crawler/model/transform/DictDataType.java

@ -0,0 +1,9 @@
package com.yihu.hos.crawler.model.transform;
/**
 * Created by Administrator on 2015/9/16.
 */
public enum DictDataType {
    VALUE, // plain value
    CODE   // dictionary code
}

+ 65 - 0
hos-admin/src/main/java/com/yihu/hos/crawler/model/transform/EhrCondition.java

@ -0,0 +1,65 @@
package com.yihu.hos.crawler.model.transform;
/**
 * Base class for collection query conditions sent to the integration platform.
 * Created by HZY on 2016/2/25.
 */
public class EhrCondition {
    private String andOr;
    private String field;
    private String condition;
    private String value;
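    // Holds one query fragment (andOr / field / condition / value). Presumably the
    // collection endpoint assembles these into a WHERE clause such as
    // " AND <field> <condition> <value>"; the rendering itself happens outside this class.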
    public EhrCondition(String condition, String field, String value){
        this.andOr=" AND ";
        this.field=field;
        this.condition=condition;
        this.value=value;
    }
    public void andQuery(String condition,String field,String value){
        this.andOr=" AND ";
        this.field=field;
        this.condition=condition;
        this.value=value;
    }
    public void orQuery(String condition,String field,String value){
        this.andOr=" OR ";
        this.field=field;
        this.condition=condition;
        this.value=value;
    }
    public String getAndOr() {
        return andOr;
    }
    public void setAndOr(String andOr) {
        this.andOr = andOr;
    }
    public String getField() {
        return field;
    }
    public void setField(String field) {
        this.field = field;
    }
    public String getCondition() {
        return condition;
    }
    public void setCondition(String condition) {
        this.condition = condition;
    }
    public String getValue() {
        return value;
    }
    public void setValue(String value) {
        this.value = value;
    }
}

+ 15 - 0
hos-admin/src/main/java/com/yihu/hos/crawler/model/transform/IVerifier.java

@ -0,0 +1,15 @@
package com.yihu.hos.crawler.model.transform;
/**
 * @author Airhead
 * @version 1.0
 * @created 22-5-2015 11:24:26
 */
public interface IVerifier {
    /**
     * Runs the verification and returns true when the value passes the check.
     */
    boolean check();
    String getErrorInfo();
}

+ 12 - 0
hos-admin/src/main/java/com/yihu/hos/crawler/model/transform/LogicValues.java

@ -0,0 +1,12 @@
package com.yihu.hos.crawler.model.transform;
/**
 * Created by HZY on 2016/1/29.
 */
public class LogicValues {
    public static final String LOGIC_OK = "ok";
    public static final String LOGIC_NO = "no";
    public static final String LOGIC_TRUE = "true";
    public static final String LOGIC_FALSE = "false";
}

+ 45 - 0
hos-admin/src/main/java/com/yihu/hos/crawler/model/transform/MetaDataType.java

@ -0,0 +1,45 @@
package com.yihu.hos.crawler.model.transform;
/**
 * Data element (metadata) types.
 *
 * @created Created by Air on 2015/6/9.
 */
public enum MetaDataType {
    /**
     * Character, not enumerable
     */
    S1,
    /**
     * Character, enumerable with no more than 3 values
     */
    S2,
    /**
     * Character, backed by a code table
     */
    S3,
    /**
     * Boolean: 0 (false), 1 (true)
     */
    L,
    /**
     * Numeric
     */
    N,
    /**
     * Date
     */
    D,
    /**
     * Date-time
     */
    DT,
    /**
     * Time
     */
    T,
    /**
     * Binary
     */
    BY
}

+ 37 - 0
hos-admin/src/main/java/com/yihu/hos/crawler/model/transform/MetaDataVerify.java

@ -0,0 +1,37 @@
package com.yihu.hos.crawler.model.transform;
import com.yihu.hos.crawler.model.standard.StdMetaData;
/**
 * @author Airhead
 * @version 1.0
 * @created 22-5-2015 11:24:25
 */
public class MetaDataVerify implements IVerifier {
    private StdMetaData stdMetaData;
    private String value;
    private String errorInfo;
    public MetaDataVerify(StdMetaData stdMetaData, String value) {
        this.stdMetaData = stdMetaData;
        this.value = value;
    }
    public void finalize() throws Throwable {
    }
    /**
     * No validation is performed for now.
     * If needed, validate against the type and format of StdMetaData.
     */
    public boolean check() {
        return true;
    }
    @Override
    public String getErrorInfo() {
        return errorInfo;
    }
}//end MetaDataVerify

+ 21 - 0
hos-admin/src/main/java/com/yihu/hos/crawler/model/transform/TransformType.java

@ -0,0 +1,21 @@
package com.yihu.hos.crawler.model.transform;
/**
 * @created  Air on 2015/6/8.
 */
public enum TransformType {
    /**
     * Data set
     */
    DATA_SET_JSON,
    DATA_SET_XML,
    /**
     * CDA
     */
    CDA_JSON,
    CDA_XML,
    /**
     * Unstructured document
     */
    DOCUMENT,
}

+ 235 - 0
hos-admin/src/main/java/com/yihu/hos/crawler/origin/FileSystemOrigin.java

@ -0,0 +1,235 @@
package com.yihu.hos.crawler.origin;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.yihu.hos.common.Services;
import com.yihu.hos.crawler.format.AdapterScheme;
import com.yihu.hos.crawler.model.adapter.AdapterDataSet;
import com.yihu.hos.crawler.model.patient.Patient;
import com.yihu.hos.crawler.model.transform.LogicValues;
import com.yihu.hos.crawler.service.EsbHttp;
import com.yihu.ehr.framework.util.file.FtpFileUtil;
import com.yihu.ehr.framework.util.operator.DateUtil;
import com.yihu.ehr.framework.util.operator.StringUtil;
import com.yihu.ehr.framework.util.springutil.SpringBeanUtil;
import com.yihu.hos.system.model.SystemDatasource;
import com.yihu.hos.system.model.SystemOrganization;
import com.yihu.hos.system.service.OrganizationManager;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
/**
 * File-system (FTP) data origin.
 *
 * @author Airhead
 * @version 1.0
 * @created 22-5-2015 11:24:24
 */
public class FileSystemOrigin implements IDataOrigin {
    private static Logger logger = LogManager.getLogger(FileSystemOrigin.class);
    public static String dirHear = "/home/test/patient/";        // root directory of patient data files
    public static String fileType = "/image/";                    // sub-folder for the collected file type
    protected AdapterScheme adapterScheme;
    public FileSystemOrigin(AdapterScheme adapterScheme) {
        this.adapterScheme=adapterScheme;
    }
    /**
     * Collects data over FTP.
     * For unstructured archives the key_words format is tentatively "dataset-element";
     * when the generated file is uploaded it is converted to "dataset.element"
     * (mainly because MongoDB keys do not allow the "." character).
     *
     * @param patient         patient (id / event info)
     * @param orgAgencyOrigin data source
     * @param adapterDataSet  adapted data set
     * @return collected data as a JSON string
     */
    @Override
    public String fecthData(Patient patient, SystemDatasource orgAgencyOrigin, AdapterDataSet adapterDataSet) {
        try {
            String data = null;
            String innerVersion= EsbHttp.getRemoteVersion(patient.getOrgCode());
            List<String> datas = null;
            String agencyCode = patient.getOrgCode();
            String ftpConfig = orgAgencyOrigin.getConfig();
            FtpFileUtil ftp = genFtpUtil(ftpConfig);
            String filePath = "";// remote FTP file path
            ObjectNode jsonObject = new ObjectMapper().createObjectNode();
            boolean patientId = true;
            boolean eventNo = true;
            if (patient.getPatientId() != null && !"".equals(patient.getPatientId())) {
                if (patient.getEventNo() != null && !"".equals(patient.getEventNo())) {
                    // file path
                    filePath = dirHear + agencyCode + "/" + patient.getPatientId() + "/" + patient.getEventNo() +  fileType;
                } else {
                    eventNo = false;
                }
            } else {
                patientId = false;
            }
            if (!patientId || !eventNo) {
                throw new Exception("采集病人数据集必须要有病人ID,事件号,数据集名:" + adapterDataSet.getAdapterDataSetT().getStdDatasetCode());
            }
            datas = ftp.readFileData(filePath);
            if (datas != null && datas.size() > 0) {
                data = datas.get(0);
            }
            // TODO: implement the "data" content, mainly key_words and content
            // build the JSON document
            jsonObject.put("patient_id", patient.getPatientId());
            jsonObject.put("event_no", patient.getEventNo());
            jsonObject.put("org_code", agencyCode);
            jsonObject.put("inner_version", innerVersion);
            jsonObject.put("create_date", DateUtil.toString(new Date(), DateUtil.DEFAULT_YMDHMSDATE_FORMAT));
            jsonObject.put("event_time", patient.getReferenceTime());//DateUtil.toString(patient.getReferenceTime(), DateUtil.DEFAULT_YMDHMSDATE_FORMAT)
            if (StringUtil.isEmpty(patient.getReUploadFlg())) {
                jsonObject.put("reUploadFlg", LogicValues.LOGIC_FALSE);
            } else {
                jsonObject.put("reUploadFlg", patient.getReUploadFlg());
            }
            return jsonObject.toString();
        } catch (SQLException e) {
//            e.printStackTrace();
            logger.error("", e);
        } catch (Exception e) {
//            e.printStackTrace();
            logger.error("", e);
        }
        return null;
    }
    /**
     * Gets the patient list.
     *
     * @param orgAgencyOrigin data source
     * @param adapterDataSet  adapted data set
     * @param condition       query condition
     * @return patient list
     */
    @Override
    public List<Patient> getPatientList(SystemDatasource orgAgencyOrigin, AdapterDataSet adapterDataSet, Map<String, Object> condition) {
        ArrayList<Patient> patientList = new ArrayList<>();
        try {
            String ftpConfig = orgAgencyOrigin.getConfig();
            FtpFileUtil ftp = genFtpUtil(ftpConfig);
//			StdDataSet stdDataSet = adapterDataSet.getStdDataSet();
            OrganizationManager organizationManager= SpringBeanUtil.getService(Services.Organization);
            SystemOrganization orgAgency = organizationManager.getOrgById(orgAgencyOrigin.getOrgId());
            if (orgAgency == null) {
                logger.error("获取病人列表错误,无法获取机构代码.");
                return patientList;
            }
            String agencyCode = orgAgency.getCode();
            List<Map<String, String>> patientMaps = ftp.getPatientList(dirHear, agencyCode);
            if (patientMaps != null && patientMaps.size() > 0) {
                for (Map<String, String> patientMap : patientMaps) {
                    Patient patient = new Patient();
                    String patientId = patientMap.get("patient_id");
                    String eventNo = patientMap.get("event_no");
                    patient.setPatientId(patientId);
                    patient.setEventNo(eventNo);
                    patient.setReferenceTime(DateUtil.toString(new Date(), DateUtil.DEFAULT_YMDHMSDATE_FORMAT));// temporarily set to the current time
                    patient.setOrgCode(agencyCode);
                    patientList.add(patient);
                }
            }
            }
        } catch (Exception e) {
//            e.printStackTrace();
            logger.error("", e);
        }
        return patientList;
    }
    /**
     * Removes the collected patient data from the FTP server.
     *
     * @param patient         patient
     * @param orgAgencyOrigin data source
     * @param adapterDataSet  adapted data set
     * @return true when the data was removed
     */
    @Override
    public boolean clearData(Patient patient, SystemDatasource orgAgencyOrigin, AdapterDataSet adapterDataSet) {
        try {
            boolean clear = false;
            String agencyCode = patient.getOrgCode();
            String ftpConfig = orgAgencyOrigin.getConfig();
            FtpFileUtil ftp = genFtpUtil(ftpConfig);
            String filePath = "";// remote FTP file path
            boolean patientId = true;
            boolean eventNo = true;
            if (patient.getPatientId() != null && !"".equals(patient.getPatientId())) {
                if (patient.getEventNo() != null && !"".equals(patient.getEventNo())) {
                    // file path
                    filePath = dirHear + agencyCode + "/" + patient.getPatientId() + "/" + patient.getEventNo()  + fileType;
                } else {
                    eventNo = false;
                }
            } else {
                patientId = false;
            }
            if (!patientId || !eventNo) {
                throw new Exception("清除病人数据集必须要有病人ID,事件号,数据集名:" + adapterDataSet.getAdapterDataSetT().getStdDatasetCode());
            }
            ftp.connect();
            clear = ftp.removeData(filePath);
            ftp.closeConnect();
            return clear;
        } catch (SQLException e) {
            e.printStackTrace();
            logger.error("", e);
        } catch (Exception e) {
            e.printStackTrace();
            logger.error("", e);
        }
        return false;
    }
    @Override
    public Date getServerDateTime(SystemDatasource orgAgencyOrigin) {
        return null;
    }
    public void finalize() throws Throwable {
    }
    public FtpFileUtil genFtpUtil(String ftpConfig) {
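        // ftpConfig is expected to be a JSON object with "username", "password", "host"
        // and "port" fields, e.g. {"username":"ftp","password":"***","host":"192.168.1.100","port":21}
        // (the sample values here are illustrative only).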
        ObjectMapper mapper = new ObjectMapper();
        FtpFileUtil ftpUtil = null;
        JsonNode rootNode = null;
        try {
            rootNode = mapper.readValue(ftpConfig, JsonNode.class);
            String username = rootNode.path("username").asText();
            String password = rootNode.path("password").asText();
            String host = rootNode.path("host").asText();
            int port = rootNode.path("port").asInt();
            ftpUtil = new FtpFileUtil(username, password, host, port);
        } catch (IOException e) {
            logger.error("获取Ftp服务器配置失败", e);
            e.printStackTrace();
        }
        return ftpUtil;
    }
}//end FileSystemOrigin

+ 47 - 0
hos-admin/src/main/java/com/yihu/hos/crawler/origin/IDataOrigin.java

@ -0,0 +1,47 @@
package com.yihu.hos.crawler.origin;
import com.yihu.hos.crawler.model.adapter.AdapterDataSet;
import com.yihu.hos.crawler.model.patient.Patient;
import com.yihu.hos.system.model.SystemDatasource;
import java.util.Date;
import java.util.List;
import java.util.Map;
/**
 * @author Airhead
 * @version 1.0
 * @created 22-5-2015 11:24:26
 */
public interface IDataOrigin {
    String fecthData(Patient patient, SystemDatasource orgAgencyOrigin, AdapterDataSet adapterDataSet);
    List<Patient> getPatientList(SystemDatasource orgAgencyOrigin, AdapterDataSet adapterDataSet, Map<String, Object> condition);
    boolean clearData(Patient patient, SystemDatasource orgAgencyOrigin, AdapterDataSet adapterDataSet);
    Date getServerDateTime(SystemDatasource orgAgencyOrigin);
    enum OriginType {
        /**
         * Database
         */
        DB,
        /**
         * RESTful Web Service
         */
        REST,
        /**
         * FileSystem
         */
        FS,
        /**
         * SOAP Web Service
         */
        SOAP
    }
}

+ 323 - 0
hos-admin/src/main/java/com/yihu/hos/crawler/service/CrawlerFlowManager.java

@ -0,0 +1,323 @@
package com.yihu.hos.crawler.service;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.yihu.hos.crawler.dao.CrawlerDatasetDao;
import com.yihu.hos.crawler.dao.CrawlerFlowDao;
import com.yihu.hos.crawler.dao.CrawlerFlowHeadDao;
import com.yihu.hos.crawler.model.adapter.AdapterDataSet;
import com.yihu.hos.crawler.model.config.SysConfig;
import com.yihu.hos.crawler.model.flow.CrawlerDataSetModel;
import com.yihu.hos.crawler.model.flow.CrawlerFlowHeadModel;
import com.yihu.hos.crawler.model.flow.CrawlerFlowModel;
import com.yihu.hos.crawler.model.patient.Patient;
import com.yihu.ehr.framework.model.DictItem;
import com.yihu.ehr.framework.util.operator.CollectionUtil;
import com.yihu.ehr.framework.util.operator.StringUtil;
import com.yihu.ehr.framework.util.springutil.SpringBeanUtil;
import com.yihu.hos.standard.model.adapter.AdapterDatasetModel;
import com.yihu.hos.standard.model.adapter.AdapterDictEntryModel;
import com.yihu.hos.standard.model.adapter.AdapterSchemeVersionModel;
import com.yihu.hos.standard.service.adapter.AdapterDatasetService;
import com.yihu.hos.standard.service.adapter.AdapterSchemeVersionService;
import com.yihu.hos.standard.service.bo.AdapterVersion;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.util.*;
public class CrawlerFlowManager {
    private static Logger logger = LogManager.getLogger(CrawlerFlowManager.class);
    private static DataCollectDispatcher dispatch = DataCollectDispatcher.getInstance();
    private static SysConfig sysConfig = SysConfig.getInstance();
    private Map<String, AdapterDataSet> adapterDataSetMap;
    private List<CrawlerFlowHeadModel> crawlerFlowHeadModelList;
    private Map<String, List<CrawlerFlowModel>> crawlerFlowDatasetMap;
    private Map<String, List<CrawlerFlowModel>> crawlerFlowMetadataMap;
    private Boolean adapterFlg = false;
    private List<DictItem> datasetList;
    private String schemeVersion;
    public CrawlerFlowManager(List datasetList, String schemeVersion) {
        this.datasetList = datasetList;
        this.schemeVersion = schemeVersion;
    }
    public CrawlerFlowManager() {
    }
    public void finalize() throws Throwable {
    }
    public String dataCrawler(Map<String, Object> condition) {
        Integer count = 0;
        Integer totalCount = 0;
        String message;
        /**
         * Prepare the adapter base data
         */
        if (!getDataForPrepare()) {
            message = "适配数据尚未准备";
            logger.error(message);
            return message;
        }
        List<Patient> patientList = dispatch.getPatientList(condition, adapterDataSetMap);
        if (!CollectionUtil.isEmpty(patientList)) {
            totalCount = patientList.size();
            for (Patient patient : patientList) {
                boolean result = collectProcess(patient);
                if (result) {
                    count++;
                }
            }
        }
        message = "本次采集病人共" + totalCount + "条,成功采集信息" + count + "条";
        return message;
    }
    public boolean collectProcess(Patient patient) {
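        // Single-patient pipeline: prepare the adapter data, obtain a token, check the
        // version for the patient's org, then walk every flow entrance, fetch its data set,
        // recurse through the configured flow, register the patient when the register
        // data set is hit, and finally upload everything that was collected.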
        if (!getDataForPrepare()) {
            logger.error("适配数据尚未准备完毕");
            return false;
        }
        patient.setReUploadFlg(StringUtil.toString(false));
        logger.trace("采集->注册->打包上传,任务ID:,patient_id:" + patient.getPatientId() + ", event_no:" + patient.getEventNo());
        try {
            /**
             * Get the token
             */
            if (!dispatch.getToken()) {
                logger.error("token获取失败");
                return false;
            }
            /**
             * Get the remote version
             */
//            if (!dispatch.getRemoteVersion(patient.getOrgCode())) {
//                logger.error("远程版本获取失败");
//                return false;
//            }
            /**
             * Check the version
             */
            if (StringUtil.isEmpty(sysConfig.getVersionMap().get(patient.getOrgCode()))) {
                logger.error("版本获取失败");
                return false;
            }
            Map<String, JsonNode> dataMap = new HashMap<>();
            for (CrawlerFlowHeadModel crawlerFlowHeadModel : crawlerFlowHeadModelList) {
                /**
                 * Collect the data
                 */
                String datasetCode = crawlerFlowHeadModel.getDatasetCode();
                AdapterDataSet adapterDataSet = adapterDataSetMap.get(datasetCode);
                JsonNode data = dispatch.fecthData(patient, adapterDataSet);
                if (StringUtil.isEmpty(data)) {
                    continue;
                }
                dataMap.put(datasetCode, data);
                /**
                 * Query recursively according to the crawler flow
                 */
                getDataByCrawlerFlow(datasetCode, patient, dataMap);
                if (sysConfig.getRegisterDataSet().equals(adapterDataSet.getAdapterDataSetT().getStdDatasetCode())) {
                    if (!StringUtil.isEmpty(data.get("data")) && !StringUtil.isEmpty(data.get("data").get(0))) {
                        if (!StringUtil.isEmpty(data.get("data").get(0).get(SysConfig.getInstance().getRegisterIdCardNo()))) {
                            logger.info("注册病人");
                            dispatch.register(patient, data.toString());
                        }
                    }
                }
            }
            logger.info("采集病人成功,patient_id:" + patient.getPatientId() + ", event_no:" + patient.getEventNo());
            /**
             * Upload the archive
             */
            try {
                if (!CollectionUtil.isEmpty(dataMap.keySet())) {
                    if (!dispatch.upload(dataMap, patient, adapterDataSetMap)) {
                        logger.error("上传档案失败");
                        return false;
                    }
                }
            } catch (Exception e) {
                logger.error("档案上传失败,patient_id:" + patient.getPatientId() + ", event_no:" + patient.getEventNo());
                return false;
            }
        } catch (Exception e) {
            logger.error("采集病人失败,patient_id:" + patient.getPatientId() + ", event_no:" + patient.getEventNo(), e);
            return false;
        }
        return true;
    }
    public Boolean getDataByCrawlerFlow(String preDatasetCode, Patient patient, Map<String, JsonNode> dataMap) {
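        // Recursive step of the crawler flow: for every data set that takes input from
        // preDatasetCode, copy the related metadata values out of the previous result
        // into relationValueMap, fetch the dependent data set with those values and
        // recurse until no further downstream data sets are configured.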
        try {
            JsonNode preData = dataMap.get(preDatasetCode);
            Map<String, String> relationValueMap = new HashMap<>();
            List<CrawlerFlowModel> crawlerFlowDatasetList = crawlerFlowDatasetMap.get(preDatasetCode);
            for (CrawlerFlowModel crawlerFlowDataset : crawlerFlowDatasetList) {
                List<CrawlerFlowModel> crawlerFlowMetadataList = crawlerFlowMetadataMap.get(crawlerFlowDataset.getDatasetCode());
                for (CrawlerFlowModel crawlerFlowMetadata : crawlerFlowMetadataList) {
                    String metadataCode = crawlerFlowMetadata.getMetadataCode();
                    metadataCode = StringUtil.substring(metadataCode, metadataCode.indexOf("-") + 1, metadataCode.length());
                    String inputMetadataCode = crawlerFlowMetadata.getInputMetadataCode();
                    inputMetadataCode = StringUtil.substring(inputMetadataCode, inputMetadataCode.indexOf("-") + 1, inputMetadataCode.length());
                    Iterator<JsonNode> array = preData.get("data").iterator();
                    while (array.hasNext()) {
                        JsonNode dataNode = array.next();
                        relationValueMap.put(metadataCode, dataNode.get(inputMetadataCode).asText());
                    }
                }
                String datasetCode = crawlerFlowDataset.getDatasetCode();
                AdapterDataSet adapterDataSet = adapterDataSetMap.get(datasetCode);
                String data = dispatch.fecthData(patient, adapterDataSet, relationValueMap);
                if (StringUtil.isEmpty(data)) {
                    continue;
                } else {
                    ObjectMapper objectMapper = new ObjectMapper();
                    JsonNode jsonObject = objectMapper.readTree(data);
                    dataMap.put(datasetCode, jsonObject);
                }
                getDataByCrawlerFlow(datasetCode, patient, dataMap);
            }
            return true;
        } catch (Exception e) {
            logger.error("getDataByCrawlerFlow failed", e);
            return false;
        }
    }
    public Boolean getDataForPrepare() {
        if (adapterFlg) {
            return true;
        }
        logger.info("适配基本相关数据准备");
        try {
            adapterDataSetMap = new HashMap<>();
            AdapterVersion adapterVersion;
            List<Integer> datasetIdList = new ArrayList<>();
            /**
             * Push-mode call: by default only the latest adapter version is used
             */
            AdapterSchemeVersionService adapterSchemeVersionService = SpringBeanUtil.getService(AdapterSchemeVersionService.BEAN_ID);
            AdapterSchemeVersionModel adapterSchemeVersionModel;
            if (datasetList.isEmpty()) {
                adapterSchemeVersionModel = adapterSchemeVersionService.getEhrAdapterVersionLasted();
                if (adapterSchemeVersionModel == null) {
                    logger.error("获取最新ehr适配版本错误");
                    return false;
                } else {
                    this.schemeVersion = adapterSchemeVersionModel.getVersion();
                    adapterVersion = new AdapterVersion(schemeVersion);
                }
                /**
                 * Get the data sets under this version
                 */
                CrawlerDatasetDao crawlerDatasetDao = SpringBeanUtil.getService(CrawlerDatasetDao.BEAN_ID);
                List<CrawlerDataSetModel> crawlerDataSetModelList = crawlerDatasetDao.getCrawlerDatasetList(adapterSchemeVersionModel.getId());
                if (CollectionUtil.isEmpty(crawlerDataSetModelList)) {
                    return false;
                }
                for (CrawlerDataSetModel crawlerDataSetModel : crawlerDataSetModelList) {
                    datasetIdList.add(crawlerDataSetModel.getDatasetId());
                }
            } else {
                /**
                 * Pull-mode call: the adapter version is decided by the job configuration
                 */
                adapterSchemeVersionModel = adapterSchemeVersionService.getByVersion(schemeVersion);
                adapterVersion = new AdapterVersion(schemeVersion);
                for (DictItem dictItem : datasetList) {
                    datasetIdList.add(Integer.valueOf(dictItem.getCode()));
                }
            }
            AdapterDatasetService adapterDatasetService = SpringBeanUtil.getService(AdapterDatasetService.BEAN_ID);
            /**
             * Initialize dictionary entries
             */
            List<AdapterDictEntryModel> adapterDictEntryModelList = adapterDatasetService.getList(AdapterDictEntryModel.class, adapterVersion.getDictEntryTableName(), null, null, null, null);
            Map<Integer, List<AdapterDictEntryModel>> adapterDictEntryModelMap = new HashMap<>();
            for (AdapterDictEntryModel adapterDictEntryModel : adapterDictEntryModelList) {
                List<AdapterDictEntryModel> entryModelList = adapterDictEntryModelMap.get(adapterDictEntryModel.getStdDictId());
                if (CollectionUtil.isEmpty(entryModelList)) {
                    entryModelList = new ArrayList<>();
                }
                entryModelList.add(adapterDictEntryModel);
                adapterDictEntryModelMap.put(adapterDictEntryModel.getStdDictId(), entryModelList);
            }
            /**
             * Initialize data sets
             */
            List<AdapterDatasetModel> adapterDataSetModelList = adapterDatasetService.getListByAdapterDatasetIdList(adapterVersion, datasetIdList);
            for (AdapterDatasetModel adapterDatasetModel : adapterDataSetModelList) {
                adapterDataSetMap.put(adapterDatasetModel.getStdDatasetCode(), new AdapterDataSet(adapterDatasetModel, adapterVersion, adapterDictEntryModelMap));
            }
            /**
             * Get the crawler flow entrance (head) records
             */
            CrawlerFlowHeadDao crawlerFlowHeadDao = SpringBeanUtil.getService(CrawlerFlowHeadDao.BEAN_ID);
            CrawlerFlowDao crawlerFlowDao = SpringBeanUtil.getService(CrawlerFlowDao.BEAN_ID);
            crawlerFlowHeadModelList = crawlerFlowHeadDao.getCrawlerFlowHeadList(adapterSchemeVersionModel.getId());
            List<CrawlerFlowModel> crawlerFlowModelList = crawlerFlowDao.getCrawlerFlowList(adapterSchemeVersionModel.getId());
            crawlerFlowDatasetMap = new HashMap<>();
            crawlerFlowMetadataMap = new HashMap<>();
            /**
             * Build the relation maps
             */
            for (CrawlerFlowModel crawlerFlowModel : crawlerFlowModelList) {
                List<CrawlerFlowModel> datasetList = new ArrayList<>();
                List<CrawlerFlowModel> metadataList = new ArrayList<>();
                String inputDatasetCode = crawlerFlowModel.getInputDatasetCode();
                String datasetCode = crawlerFlowModel.getDatasetCode();
                if (StringUtil.isEmpty(inputDatasetCode)) {
                    continue;
                }
                if (crawlerFlowDatasetMap.containsKey(inputDatasetCode)) {
                    datasetList = crawlerFlowDatasetMap.get(inputDatasetCode);
                }
                datasetList.add(crawlerFlowModel);
                crawlerFlowDatasetMap.put(inputDatasetCode, datasetList);
                if (crawlerFlowMetadataMap.containsKey(datasetCode)) {
                    metadataList = crawlerFlowMetadataMap.get(datasetCode);
                }
                metadataList.add(crawlerFlowModel);
                crawlerFlowMetadataMap.put(datasetCode, metadataList);
            }
//            SysConfig.getInstance().setVersionMap(new HashMap<>());
            adapterFlg = true;
            return true;
        } catch (Exception e) {
            logger.error("getDataForPrepare failed", e);
            adapterFlg = false;
            return false;
        }
    }
    /**
     * Parses patient index information.
     *
     * @param patientInfo patient index information (JSON)
     * @return the parsed patient, or null on error
     */
    public Patient parsePatient(String patientInfo) {
        try {
            ObjectMapper mapper = new ObjectMapper();
            Patient patient = mapper.readValue(patientInfo, Patient.class);
            return patient;
        } catch (Exception e) {
            logger.error("patient参数错误:" + patientInfo, e);
            return null;
        }
    }
    public void setAdapterFlg(Boolean adapterFlg) {
        this.adapterFlg = adapterFlg;
    }
}

+ 243 - 0
hos-admin/src/main/java/com/yihu/hos/crawler/service/CrawlerManager.java

@ -0,0 +1,243 @@
package com.yihu.hos.crawler.service;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.yihu.hos.crawler.model.adapter.AdapterDataSet;
import com.yihu.hos.crawler.model.config.SysConfig;
import com.yihu.hos.crawler.model.patient.Patient;
import com.yihu.ehr.framework.model.DictItem;
import com.yihu.ehr.framework.util.operator.CollectionUtil;
import com.yihu.ehr.framework.util.operator.StringUtil;
import com.yihu.ehr.framework.util.springutil.SpringBeanUtil;
import com.yihu.hos.standard.model.adapter.AdapterDatasetModel;
import com.yihu.hos.standard.model.adapter.AdapterDictEntryModel;
import com.yihu.hos.standard.model.adapter.AdapterSchemeVersionModel;
import com.yihu.hos.standard.service.adapter.AdapterDatasetService;
import com.yihu.hos.standard.service.adapter.AdapterSchemeVersionService;
import com.yihu.hos.standard.service.bo.AdapterVersion;
import net.sf.json.JSONObject;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class CrawlerManager {
    private static Logger logger = LogManager.getLogger(CrawlerManager.class);
    private static DataCollectDispatcher dispatch = DataCollectDispatcher.getInstance();
    private static SysConfig sysConfig=SysConfig.getInstance();
    private static Map<String, Map<String, AdapterDataSet>> adapterDataSetVersionMap = new HashMap<>();
    private Map<String, AdapterDataSet> adapterDataSetMap;
    private Boolean adapterFlg = false;
    private String schemeVersion;
    private List<DictItem> datasetList;
    public CrawlerManager(List datasetList, String schemeVersion) {
        this.datasetList = datasetList;
        this.schemeVersion = schemeVersion;
    }
    public CrawlerManager() {
    }
    public void finalize() throws Throwable {
    }
    public String dataCrawler(Map<String, Object> condition) {
        Integer count = 0;
        Integer totalCount = 0;
        String message;
        /**
         * Prepare the adapter base data
         */
        if (!getDataForPrepare()) {
            message = "适配数据尚未准备";
            logger.error(message);
            return message;
        }
        List<Patient> patientList = dispatch.getPatientList(condition, adapterDataSetMap);
        if (!CollectionUtil.isEmpty(patientList)) {
            totalCount = patientList.size();
            for (Patient patient : patientList) {
                Boolean result = collectProcess(patient);
                if (result) {
                    count++;
                }
            }
        }
        message = "本次采集病人共" + totalCount + "条,成功采集信息"+ count + "条";
        return message;
    }
    // collect and upload a single patient
    public Boolean collectProcess(Patient patient) {
        if (!getDataForPrepare()) {
            logger.error("适配数据尚未准备");
            return false;
        }
        patient.setReUploadFlg(StringUtil.toString(false));
        logger.trace("采集->注册->打包上传,patient_id:" + patient.getPatientId() + ", event_no:" + patient.getEventNo());
        try {
            //getToken
            if (!dispatch.getToken()) {
                logger.error("token获取失败");
                return false;
            }
            //getRemoteVersion
//            if (!dispatch.getRemoteVersion(patient.getOrgCode())) {
//                logger.error("远程版本获取失败");
//                return false;
//            }
            if(StringUtil.isEmpty(sysConfig.getVersionMap().get(patient.getOrgCode()))){
                logger.error("版本获取失败");
                return false;
            }
            Map<String, AdapterDataSet> dataSetMap = new HashMap<>();
            Map<String, JsonNode> dataMap = new HashMap<>();
            for (String key : adapterDataSetMap.keySet()) {
                /**
                 * Fetch the data
                 */
                AdapterDataSet adapterDataSet = adapterDataSetMap.get(key);
                JsonNode jsonObject = dispatch.fecthData(patient, adapterDataSet);
                if (StringUtil.isEmpty(jsonObject)) {
                    continue;
                }
                dataSetMap.put(adapterDataSet.getAdapterDataSetT().getStdDatasetCode(), adapterDataSet);
                dataMap.put(key, jsonObject);
                /**
                 * Register the patient
                 */
                if (SysConfig.getInstance().getRegisterDataSet().equals(adapterDataSet.getAdapterDataSetT().getStdDatasetCode())) {
                    if (!StringUtil.isEmpty(jsonObject.get("data")) && !StringUtil.isEmpty(jsonObject.get("data").get(0))) {
                        if (!StringUtil.isEmpty(jsonObject.get("data").get(0).get(SysConfig.getInstance().getRegisterIdCardNo()))) {
                            logger.info("注册病人");
                            dispatch.register(patient, jsonObject.toString());
                        }
                    }
                }
            }
            logger.info("采集病人成功,patient_id:" + patient.getPatientId() + ", event_no:" + patient.getEventNo());
            // upload the archive
            logger.info("上传病人档案");
            try {
                if (!CollectionUtil.isEmpty(dataMap.keySet())) {
                    if (!dispatch.upload(dataMap, patient, dataSetMap)) {
                        logger.error("上传档案失败");
                        return false;
                    }
                }
            } catch (Exception e) {
                logger.error("档案上传失败,patient_id:" + patient.getPatientId() + ", event_no:" + patient.getEventNo());
                return false;
            }
        } catch (Exception e) {
            logger.error("采集病人失败,patient_id:" + patient.getPatientId() + ", event_no:" + patient.getEventNo(), e);
            return false;
        }
        return true;
    }
    public Boolean getDataForPrepare() {
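        // Prepared adapter data is cached per scheme version in adapterDataSetVersionMap,
        // so repeated push-mode calls for the same (latest) version skip the database work.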
        if (adapterFlg) {
            return true;
        }
        logger.info("适配基本相关数据准备");
        try {
            adapterDataSetMap = new HashMap<>();
            AdapterVersion adapterVersion;
            AdapterDatasetService adapterDatasetService = SpringBeanUtil.getService(AdapterDatasetService.BEAN_ID);
            List<AdapterDatasetModel> adapterDataSetModelList = new ArrayList<>();
            if (!CollectionUtil.isEmpty(datasetList)) {
                /**
                 * Pull-mode call: the adapter version is decided by the job configuration
                 */
                adapterVersion = new AdapterVersion(schemeVersion);
                List<Integer> datasetIdList = new ArrayList<>();
                for (DictItem dictItem : datasetList) {
                    datasetIdList.add(Integer.parseInt(dictItem.getCode()));
                }
                adapterDataSetModelList = adapterDatasetService.getAdapterDatasetByAdapterIdList(adapterVersion, datasetIdList);
            } else {
                /**
                 * Push-mode call: by default only the latest adapter version is used
                 */
                AdapterSchemeVersionService adapterSchemeVersionService = SpringBeanUtil.getService(AdapterSchemeVersionService.BEAN_ID);
                AdapterSchemeVersionModel adapterSchemeVersionModel = adapterSchemeVersionService.getEhrAdapterVersionLasted();
                if (adapterSchemeVersionModel == null) {
                    logger.error("获取最新ehr适配版本错误");
                    return false;
                } else {
                    this.schemeVersion = adapterSchemeVersionModel.getVersion();
                    adapterVersion = new AdapterVersion(schemeVersion);
                }
                if (adapterDataSetVersionMap.get(schemeVersion) != null) {
                    adapterDataSetMap = adapterDataSetVersionMap.get(schemeVersion);
                    adapterFlg = true;
                    return true;
                }
                Map<String, String> condition = new HashMap<>();
                condition.put("column", "adapter_dataset_code");
                JSONObject jsonpObject = JSONObject.fromObject(condition);
                adapterDataSetModelList = adapterDatasetService.getAdapterDatasetNotNullList(adapterVersion, jsonpObject.toString());
            }
            /**
             * Initialize dictionary entries
             */
            List<AdapterDictEntryModel> adapterDictEntryModelList = adapterDatasetService.getList(AdapterDictEntryModel.class, adapterVersion.getDictEntryTableName(), null, null, null, null);
            Map<Integer, List<AdapterDictEntryModel>> adapterDictEntryModelMap = new HashMap<>();
            for (AdapterDictEntryModel adapterDictEntryModel : adapterDictEntryModelList) {
                List<AdapterDictEntryModel> entryModelList = adapterDictEntryModelMap.get(adapterDictEntryModel.getStdDictId());
                if (CollectionUtil.isEmpty(entryModelList)) {
                    entryModelList = new ArrayList<>();
                }
                entryModelList.add(adapterDictEntryModel);
                adapterDictEntryModelMap.put(adapterDictEntryModel.getStdDictId(), entryModelList);
            }
            /**
             * Initialize data sets
             */
            for (AdapterDatasetModel adapterDatasetModel : adapterDataSetModelList) {
                adapterDataSetMap.put(adapterDatasetModel.getStdDatasetCode(), new AdapterDataSet(adapterDatasetModel, adapterVersion, adapterDictEntryModelMap));
            }
            adapterDataSetVersionMap.put(schemeVersion, adapterDataSetMap);
            adapterFlg = true;
            return true;
        } catch (Exception e) {
            logger.error("getDataForPrepare failed", e);
            return false;
        }
    }
    /**
     * Parses patient index information.
     *
     * @param patientInfo patient index information (JSON)
     * @return the parsed patient, or null on error
     */
    public Patient parsePatient(String patientInfo) {
        try {
            ObjectMapper mapper = new ObjectMapper();
            Patient patient = mapper.readValue(patientInfo, Patient.class);
            return patient;
        } catch (Exception e) {
            logger.error("patient参数错误:" + patientInfo, e);
            return null;
        }
    }
    public void setSchemeVersion(String schemeVersion) {
        this.schemeVersion = schemeVersion;
    }
    public void setDatasetList(List<DictItem> datasetList) {
        this.datasetList = datasetList;
    }
}

+ 516 - 0
hos-admin/src/main/java/com/yihu/hos/crawler/service/CrawlerService.java

@ -0,0 +1,516 @@
package com.yihu.hos.crawler.service;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.yihu.hos.crawler.dao.CrawlerDatasetDao;
import com.yihu.hos.crawler.dao.CrawlerFlowDao;
import com.yihu.hos.crawler.dao.CrawlerFlowHeadDao;
import com.yihu.hos.crawler.model.flow.CrawlerDataSetModel;
import com.yihu.hos.crawler.model.flow.CrawlerFlowHeadModel;
import com.yihu.hos.crawler.model.flow.CrawlerFlowModel;
import com.yihu.hos.crawler.model.flow.resultModel.*;
import com.yihu.hos.datacollect.model.DtoJobDataset;
import com.yihu.ehr.framework.constrant.Constants;
import com.yihu.ehr.framework.model.ActionResult;
import com.yihu.ehr.framework.model.DetailModelResult;
import com.yihu.ehr.framework.util.operator.StringUtil;
import com.yihu.hos.standard.model.adapter.AdapterDatasetModel;
import com.yihu.hos.standard.model.adapter.AdapterMetadataModel;
import com.yihu.hos.standard.model.adapter.AdapterSchemeVersionModel;
import com.yihu.hos.standard.model.adapter.resultModel.AdapterSchemeResultModel;
import com.yihu.hos.standard.model.adapter.resultModel.AdapterSchemeVersionResultDetailModel;
import com.yihu.hos.standard.service.adapter.AdapterDatasetService;
import com.yihu.hos.standard.service.adapter.AdapterMetadataService;
import com.yihu.hos.standard.service.adapter.AdapterSchemeService;
import com.yihu.hos.standard.service.adapter.AdapterSchemeVersionService;
import com.yihu.hos.standard.service.bo.AdapterVersion;
import net.sf.json.JSONArray;
import net.sf.json.JSONObject;
import org.apache.commons.lang.StringUtils;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import javax.annotation.Resource;
import java.io.IOException;
import java.util.*;
@Transactional
@Service("CrawlerService")
public class CrawlerService {
    public static final String BEAN_ID = "CrawlerService";
    @Resource(name = AdapterSchemeVersionService.BEAN_ID)
    private AdapterSchemeVersionService adapterSchemeVersionService;
    @Resource(name = AdapterDatasetService.BEAN_ID)
    private AdapterDatasetService adapterDatasetService;
    @Resource(name = CrawlerDatasetDao.BEAN_ID)
    private CrawlerDatasetDao crawlerDatasetDao;
    @Resource(name = CrawlerFlowDao.BEAN_ID)
    private CrawlerFlowDao crawlerFlowDao;
    @Resource(name = CrawlerFlowHeadDao.BEAN_ID)
    private CrawlerFlowHeadDao crawlerFlowHeadDao;
    @Resource(name = AdapterMetadataService.BEAN_ID)
    private AdapterMetadataService adapterMetadataService;
    private static Map<Integer, List<FlowLines>> lineCache = new HashMap<>();
    @Resource(name = AdapterSchemeService.BEAN_ID)
    private AdapterSchemeService adapterSchemeService;
    public static Map<Integer, List<FlowLines>> getLineCache() {
        return lineCache;
    }
    /**
     * Saves the orchestration mapping relations.
     *
     * @param version adapter scheme version
     * @param json    mapping data (JSON)
     * @return
     * @throws Exception
     */
    public ActionResult saveDataSetRelation(String version, String json) throws Exception {
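        // Expected json shape (keys match the parsing below):
        // {"lines":[{"from":"<dataset>","to":"<dataset>","fromPort":"<metadata>","toPort":"<metadata>"}, ...],
        //  "entrances":[{"dataSet":"<dataset>","meta":"<metadata>"}, ...]}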
        JSONObject root = JSONObject.fromObject(json);
        JSONArray jsonList = root.getJSONArray("lines");
        JSONArray entrances = root.getJSONArray("entrances");
        AdapterSchemeVersionModel versionModel = (AdapterSchemeVersionModel) adapterSchemeVersionService.get(Integer.valueOf(version));
        if (jsonList != null && jsonList.size() > 0) {
            // delete the old relations
            crawlerFlowDao.deleteCrawlerFlowList(versionModel.getId());
            for (Object item : jsonList) {
                JSONObject obj = JSONObject.fromObject(item);
                String from = obj.getString("from");
                String to = obj.getString("to");
                String fromPort = obj.getString("fromPort");
                String toPort = obj.getString("toPort");
                // save the orchestration relation
                CrawlerFlowModel crawlerFlow = new CrawlerFlowModel();
                crawlerFlow.setDatasetCode(to);
                crawlerFlow.setInputDatasetCode(from);
                crawlerFlow.setMetadataCode(toPort);
                crawlerFlow.setInputMetadataCode(fromPort);
                crawlerFlow.setSchemeVersionId(versionModel.getId());
                crawlerFlowDao.saveEntity(crawlerFlow);
            }
            // delete the orchestration entrance (head) records
            crawlerFlowHeadDao.deleteCrawlerFlowHeadList(versionModel.getId());
            for (Object item : entrances) {
                JSONObject obj = JSONObject.fromObject(item);
                String dataSet = obj.getString("dataSet");
                String meta = obj.getString("meta");
                // save the entrance data set
                CrawlerFlowHeadModel headModel = new CrawlerFlowHeadModel();
                headModel.setSchemeVersionId(versionModel.getId());
                headModel.setDatasetCode(dataSet);
                headModel.setMetadataCode(meta);
                crawlerFlowHeadDao.saveEntity(headModel);
            }
        }
        return new ActionResult(true, "保存成功!");
    }
    /**
     * Gets the data-set list for job orchestration.
     *
     * @param schemeVersionId
     * @param datasetName
     * @return
     * @throws Exception
     */
    public DetailModelResult getSchemeDataset(Integer schemeVersionId, String datasetName) throws Exception {
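        // Returns every adapted data set of the scheme version that has adapted metadata,
        // marking the ones already chosen for orchestration with checked = "1".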
        AdapterSchemeVersionModel versionModel = (AdapterSchemeVersionModel) adapterSchemeVersionService.get(schemeVersionId);
        // get all adapted data sets of the version
        Map<String, Object> map = new HashMap<String, Object>();
        String condition = null;
        if (datasetName != null && !"".equals(datasetName)) {
            map.put("name", datasetName);
            condition = net.sf.json.JSONObject.fromObject(map).toString();
        }
        List<AdapterDatasetModel> adapterDatasetModelList = adapterDatasetService.getDatasetList(AdapterDatasetModel.class, versionModel.getVersion(), condition, null, null, null);
        // filter out data sets that have not been adapted
        List<AdapterDatasetModel> nAdapterDataSetModelList = new ArrayList<>();
        for (AdapterDatasetModel datasetModel : adapterDatasetModelList) {
            if (datasetModel.getAdapterDatasetId() != null && datasetModel.getAdapterDatasetName() != null && datasetModel.getAdapterDatasetCode() != null) {
                nAdapterDataSetModelList.add(datasetModel);
            }
        }
        // get the orchestrated data sets
        List<CrawlerDataSetModel> crawlerDataset = crawlerDatasetDao.getCrawlerDatasetList(versionModel.getId());
        DetailModelResult re = new DetailModelResult();
        List<CrawlerDatasetResultDetailModel> list = new ArrayList<>();
        for (AdapterDatasetModel datasetModel : nAdapterDataSetModelList) {
            if (!StringUtil.isStrEmpty(datasetModel.getAdapterDatasetCode())) {
                List<AdapterMetadataModel> metadatas = adapterMetadataService.getAdapterMetadataByDataset(versionModel.getVersion(), datasetModel.getStdDatasetId());
                if (metadatas != null && metadatas.size() > 0) {
                    CrawlerDatasetResultDetailModel obj = new CrawlerDatasetResultDetailModel();
                    obj.setSchemeVersionId(schemeVersionId);
                    obj.setDatasetId(datasetModel.getStdDatasetId());
                    obj.setDatasetCode(datasetModel.getStdDatasetCode());
                    obj.setDatasetName(datasetModel.getStdDatasetName());
                    obj.setSchemeId(datasetModel.getSchemeId());
                    if (crawlerDataset != null && crawlerDataset.size() > 0) {
                        for (CrawlerDataSetModel cDataSet : crawlerDataset) {
                            if (cDataSet.getDatasetId().equals(datasetModel.getStdDatasetId())) {
                                obj.setSchemeVersionId(cDataSet.getSchemeVersionId());
                                obj.setChecked("1");
                                break;
                            }
                        }
                    }
                    list.add(obj);
                }
            }
        }
        re.setDetailModelList(list);
        return re;
    }
    /**
     * Saves the job orchestration data.
     *
     * @param json
     * @param rows
     * @param page
     * @throws Exception
     */
    public void saveJobData(String json, Integer rows, Integer page) throws Exception {
        JSONArray jsonList = JSONArray.fromObject(json);
        // clear the data of the current page
        deleteCurrentPage(rows, page);
        for (Object item : jsonList) {
            JSONObject obj = JSONObject.fromObject(item);
            if (obj.containsKey("schemeId") && obj.containsKey("versionId")) {
                String schemeId = obj.getString("schemeId");
                String versionId = obj.getString("versionId");
                AdapterSchemeVersionModel versionModel = (AdapterSchemeVersionModel) adapterSchemeVersionService.get(Integer.valueOf(versionId));
                if (versionModel != null) {
                    AdapterVersion adapterVersion = new AdapterVersion(versionModel.getVersion());
                    // delete the existing data sets
                    crawlerDatasetDao.deleteCrawlerDatasetList(versionModel.getId());
                    List<AdapterDatasetModel> adapterDatasetModelList;
                    // get the orchestrated data sets from the id string
                    if (obj.containsKey("dataSets")) {
                        List<Integer> newDatasetIdList = new ArrayList<>();
                        String dataSetStr = obj.getString("dataSets");
                        if (StringUtils.isNotBlank(dataSetStr)) {
                            String[] IdList = dataSetStr.split(",");
                            for (String aIdList : IdList) {
                                if (!Objects.equals(aIdList, "")) {
                                    Integer DaSetId = Integer.valueOf(aIdList);
                                    newDatasetIdList.add(DaSetId);
                                }
                            }
                        }
                        adapterDatasetModelList = adapterDatasetService.getListByAdapterDatasetIdList(adapterVersion, newDatasetIdList);
                        for (AdapterDatasetModel model : adapterDatasetModelList) {
                            CrawlerDataSetModel dataSetModel = new CrawlerDataSetModel();
                            dataSetModel.setSchemeId(Integer.valueOf(schemeId));
                            dataSetModel.setSchemeVersionId(versionModel.getId());
                            dataSetModel.setDatasetId(model.getStdDatasetId());
                            dataSetModel.setDatasetCode(model.getStdDatasetCode());
                            dataSetModel.setDatasetName(model.getStdDatasetName());
                            crawlerDatasetDao.saveEntity(dataSetModel);
                        }
                    }
                    // if mapping relations are passed in, save them as well
                    if (obj.containsKey("relation") && !Objects.equals(obj.getString("relation"), "")) {
                        saveDataSetRelation(versionId, obj.getString("relation"));
                    }
                }
            }
        }
    }
    public List<FlowEntrance> getFlowEntrances(Integer schemeVersionId) {
        List<FlowEntrance> entrances = new ArrayList<>();
        List<CrawlerFlowHeadModel> modelList = crawlerFlowHeadDao.getCrawlerFlowHeadList(schemeVersionId);
        for (CrawlerFlowHeadModel headModel : modelList) {
            FlowEntrance entrance = new FlowEntrance();
            entrance.setDataSet(headModel.getDatasetCode());
            entrance.setMeta(headModel.getMetadataCode());
            entrances.add(entrance);
        }
        return entrances;
    }
    public List<FlowLines> getFlowLines(Integer schemeVersionId) {
        List<FlowLines> lines = new ArrayList<>();
        List<CrawlerFlowModel> modelList = crawlerFlowDao.getCrawlerFlowList(schemeVersionId);
        for (CrawlerFlowModel model : modelList) {
            FlowLines line = new FlowLines();
            line.setFrom(model.getInputDatasetCode());
            line.setFromPort(model.getInputMetadataCode());
            line.setTo(model.getDatasetCode());
            line.setToPort(model.getMetadataCode());
            lines.add(line);
        }
        return lines;
    }
    /**
     * Deletes the orchestration data.
     *
     * @param version
     */
    @Transactional
    public String deleteJobData(String version) {
        try {
            AdapterSchemeVersionModel versionModel = (AdapterSchemeVersionModel) adapterSchemeVersionService.get(Integer.valueOf(version));
            if (versionModel == null || versionModel.getId() == null) {
                return "删除失败";
            }
            // delete the corresponding table records
            crawlerDatasetDao.deleteCrawlerDatasetList(versionModel.getId());
            crawlerFlowHeadDao.deleteCrawlerFlowHeadList(versionModel.getId());
            crawlerFlowDao.deleteCrawlerFlowList(versionModel.getId());
        } catch (Exception e) {
            e.printStackTrace();
            return "删除失败";
        }
        return Constants.EMPTY;
    }
    /**
     * Lists the orchestrated data sets, grouped by scheme version.
     *
     * @param limit  rows per page
     * @param offset page number
     * @return
     */
    public DetailModelResult getDataSetResult(Integer limit, Integer offset) {
        try {
            StringBuffer stringBuffer = new StringBuffer();
            String sql = "SELECT 1 as status, a.scheme_id, a.scheme_version_id, GROUP_CONCAT(a.dataset_id SEPARATOR ',') AS datasetId, GROUP_CONCAT(a.dataset_name SEPARATOR ',') AS datasetName" +
                    " FROM crawler_dataset a " +
                    " GROUP BY a.scheme_id, a.scheme_version_id ";
            stringBuffer.append(sql);
            if (limit != null && offset != null) {
                if (limit > 0 && offset > 0) {
                    stringBuffer.append("  LIMIT " + (offset - 1) * limit + "," + limit);
                }
            }
            stringBuffer.append(" ;");
            Integer total = crawlerDatasetDao.getTotalRows();
            List<Map<String, Object>> list = crawlerDatasetDao.queryListBySql(stringBuffer.toString());
            DetailModelResult detailModelResult = DetailModelResult.success("获取数据集成功");
            detailModelResult.setDetailModelList(list);
            detailModelResult.setTotalCount(total);
            return detailModelResult;
        } catch (Exception e) {
            e.printStackTrace();
            return DetailModelResult.error("获取数据集失败");
        }
    }
    /**
     * Gets the adapted data sets already selected for orchestration.
     *
     * @param schemeVersionId
     * @param datasetIdStr
     * @return
     * @throws Exception
     */
    public List<MappingDataset> getSchemeDatasetByChecked(Integer schemeVersionId, String datasetIdStr) throws Exception {
        AdapterSchemeVersionModel versionModel = (AdapterSchemeVersionModel) adapterSchemeVersionService.get(schemeVersionId);
        // get all adapted data sets of the version
        AdapterVersion adapterVersion = new AdapterVersion(versionModel.getVersion());
        List<AdapterDatasetModel> adapterDatasetModelList = new ArrayList<>();
        if (datasetIdStr != null && !"".equals(datasetIdStr)) {
            String[] datasetIdList = datasetIdStr.split(",");
            List<Integer> newDatasetIdList = new ArrayList<>();
            for (String datasetId : datasetIdList) {
                if (!StringUtil.isStrEmpty(datasetId)) {
                    Integer newDatasetId = Integer.parseInt(datasetId);
                    newDatasetIdList.add(newDatasetId);
                }
            }
            adapterDatasetModelList = adapterDatasetService.getListByAdapterDatasetIdList(adapterVersion, newDatasetIdList);
        } else {
            adapterDatasetModelList = adapterDatasetService.getDatasetList(AdapterDatasetModel.class, versionModel.getVersion(), null, null, null, null);
        }
        //orchestration data sets already saved for this version
        List<CrawlerDataSetModel> crawlerDataset = crawlerDatasetDao.getCrawlerDatasetList(versionModel.getId());
        List<MappingDataset> list = new ArrayList<>();
        for (AdapterDatasetModel datasetModel : adapterDatasetModelList) {
            MappingDataset obj = new MappingDataset();
            if (!StringUtil.isStrEmpty(datasetModel.getAdapterDatasetCode())) {
                List<MappingMetadata> metadatas = getMappingMetaDatasByDataset(versionModel.getVersion(), datasetModel.getStdDatasetId());
                obj.setId(datasetModel.getStdDatasetId());
                obj.setCode(datasetModel.getStdDatasetCode());
                obj.setName(datasetModel.getStdDatasetName());
                obj.setData(metadatas);
            }
            list.add(obj);
        }
        return list;
    }
    /**
     * Build the mapping metadata returned to the front end.
     *
     * @param adapterVersion adapter version number
     * @param dataSetId      adapter data set id
     * @return
     */
    public List<MappingMetadata> getMappingMetaDatasByDataset(String adapterVersion, Integer dataSetId) {
        List<AdapterMetadataModel> adapterMetadataModels = adapterMetadataService.getAdapterMetadataByDataset(adapterVersion, dataSetId);
        AdapterDatasetModel adapterDatasetModel = adapterMetadataService.getAdapterDataset(adapterVersion, dataSetId);
        List<MappingMetadata> resultList = new ArrayList<>();
        if (adapterMetadataModels != null && adapterMetadataModels.size() > 0) {
            for (AdapterMetadataModel metadataModel : adapterMetadataModels) {
                if (!StringUtil.isStrEmpty(metadataModel.getAdapterMetadataCode())) {
                    MappingMetadata metadata = new MappingMetadata();
                    metadata.setId(metadataModel.getAdapterMetadataId());
                    metadata.setCode(adapterDatasetModel.getAdapterDatasetCode() + "-" + metadataModel.getAdapterMetadataCode());
                    metadata.setName(metadataModel.getAdapterMetadataName());
                    resultList.add(metadata);
                }
            }
        }
        return resultList;
    }
    /**
     * Get the mapping data of an adapter scheme.
     *
     * @param schemeVersionId adapter scheme version id
     * @param datasetIdStr    comma-separated data set ids
     * @param lineStr         when not empty, flow lines are read from the cache instead of the database
     * @return JSON string with "tables" (data sets) and "rels" (flow lines)
     */
    public String getRelations(Integer schemeVersionId, String datasetIdStr, String lineStr) {
        JSONObject jsonObject = new JSONObject();
        try {
            List<MappingDataset> datasets = getSchemeDatasetByChecked(schemeVersionId, datasetIdStr);
            List<FlowLines> lines;
            if (StringUtil.isEmpty(lineStr)) {
                lines = getFlowLines(schemeVersionId);
            } else {
                lines = lineCache.get(schemeVersionId);
            }
            jsonObject.put("tables", datasets);
            jsonObject.put("rels", lines);
        } catch (Exception e) {
            e.printStackTrace();
        }
        return jsonObject.toString();
    }
    public void setLinesCache(Integer schemeVersionId, String lines) throws IOException {
        ObjectMapper objectMapper = new ObjectMapper();
        ObjectNode rootNode = objectMapper.readValue(lines, ObjectNode.class);
        String lineJson = rootNode.get("lines").toString();
        //deserialize into FlowLines objects rather than raw maps
        List<FlowLines> line = objectMapper.readValue(lineJson,
                objectMapper.getTypeFactory().constructCollectionType(List.class, FlowLines.class));
        lineCache.put(schemeVersionId, line);
    }
    public void deleteCurrentPage(Integer rows, Integer page) {
        DetailModelResult currentResult = getDataSetResult(rows, page);
        List<Map<String, Object>> list = currentResult.getDetailModelList();
        for (Map<String, Object> map : list) {
            String version = String.valueOf(map.get("scheme_version_id"));
            deleteJobData(version);
        }
    }
    public DetailModelResult getDataSetSavedResult(Integer version) {
        try {
            DetailModelResult checkedSchemeLs = getDataSetResult(null, null);
            List<Map<String, Object>> list = checkedSchemeLs.getDetailModelList();
            for (Map<String, Object> objectMap : list) {
                String versionID = objectMap.get("scheme_version_id").toString();
                if (versionID.equals(version.toString())) {
                    String datasetIdStr = objectMap.get("datasetId").toString();    //key matches the datasetId alias built in getDataSetResult
                    List<MappingDataset> datasetList = getSchemeDatasetByChecked(version, datasetIdStr);
                    List<DtoJobDataset>  rsJobDatasetList = new ArrayList<>();
                    for (MappingDataset dataset : datasetList) {
                        DtoJobDataset rsJobDataset = new DtoJobDataset();
                        rsJobDataset.setJobDatasetId(dataset.getId().toString());
                        rsJobDataset.setJobDatasetName(dataset.getName());
                        rsJobDataset.setJobDatasetCode(dataset.getCode());
                        rsJobDatasetList.add(rsJobDataset);
                    }
                    DetailModelResult result = new DetailModelResult();
                    result.setDetailModelList(rsJobDatasetList);
                    return result;
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
            return DetailModelResult.error("获取已存任务编排数据集错误");
        }
        return DetailModelResult.error("获取已存任务编排数据集错误");
    }
    public DetailModelResult getSchemeSavedResult() {
        DetailModelResult allScheme = adapterSchemeService.getAdapterSchemeResultModelList();
        List<AdapterSchemeResultModel> allSchemeLs = allScheme.getDetailModelList();
        if (allSchemeLs != null && allSchemeLs.size() > 0) {
            DetailModelResult checkedSchemeLs = getDataSetResult(null, null);
            List<Map<String, Object>> list = checkedSchemeLs.getDetailModelList();
            Set<String> schemeIdSets = new HashSet<>();
            Set<String> versionSets = new HashSet<>();
            DetailModelResult result = new DetailModelResult();
            List<AdapterSchemeVersionResultDetailModel> versionModelList = new ArrayList<>();
            if (list != null && list.size() > 0) {
                for (Map<String, Object> objectMap : list) {
                    if (objectMap.get("scheme_id") != null && objectMap.get("scheme_version_id") != null) {
                        schemeIdSets.add(objectMap.get("scheme_id").toString());
                        versionSets.add(objectMap.get("scheme_version_id").toString());
                    }
                }
                if (schemeIdSets.size() > 0 && versionSets.size() > 0) {
                    for (AdapterSchemeResultModel schemeL : allSchemeLs) {
                        String schemeID = schemeL.getSchemeId().toString();
                        if (schemeIdSets.contains(schemeID)) {
                            String name = schemeL.getName();
                            List<AdapterSchemeVersionModel> versionModels = schemeL.getVersionList();
                            for (AdapterSchemeVersionModel versionModel : versionModels) {
                                String versionID = versionModel.getId().toString();
                                if (versionSets.contains(versionID)) {
                                    AdapterSchemeVersionResultDetailModel model = new AdapterSchemeVersionResultDetailModel();
                                    model.setSchemeName(name);
                                    model.setId(versionModel.getId());
                                    model.setName(versionModel.getName());
                                    model.setSchemeId(versionModel.getSchemeId());
                                    model.setBaseVersion(versionModel.getBaseVersion());
                                    model.setPath(versionModel.getPath());
                                    model.setPublishStatus(versionModel.getPublishStatus());
                                    model.setPublishTime(versionModel.getPublishTime());
                                    model.setPublishUser(versionModel.getPublishUser());
                                    model.setVersion(versionModel.getVersion());
                                    versionModelList.add(model);
                                }
                            }
                        }
                    }
                    result.setDetailModelList(versionModelList);
                    return result;
                }
            }
        }
        return DetailModelResult.error("获取已编排任务适配方案失败!");
    }
}
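A minimal, self-contained sketch of the paging clause that getDataSetResult(limit, offset) builds above, where limit is the rows per page and offset is the 1-based page number; the sketch class and method names are illustrative only.

public class CrawlerPagingSketch {
    //mirrors the LIMIT construction in getDataSetResult: skip (offset - 1) * limit rows, read limit rows
    static String pagedSql(String baseSql, Integer limit, Integer offset) {
        StringBuilder sb = new StringBuilder(baseSql);
        if (limit != null && offset != null && limit > 0 && offset > 0) {
            sb.append(" LIMIT ").append((offset - 1) * limit).append(",").append(limit);
        }
        return sb.append(" ;").toString();
    }

    public static void main(String[] args) {
        //page 3 with 20 rows per page -> "... LIMIT 40,20 ;"
        System.out.println(pagedSql("SELECT ... FROM crawler_dataset a GROUP BY a.scheme_id, a.scheme_version_id", 20, 3));
    }
}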

+ 381 - 0
hos-admin/src/main/java/com/yihu/hos/crawler/service/DataCollectDispatcher.java

@ -0,0 +1,381 @@
package com.yihu.hos.crawler.service;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.yihu.hos.crawler.format.DataSetTransformer;
import com.yihu.hos.crawler.model.adapter.AdapterDataSet;
import com.yihu.hos.crawler.model.adapter.AdapterMetaData;
import com.yihu.hos.crawler.model.config.SysConfig;
import com.yihu.hos.crawler.model.patient.Patient;
import com.yihu.hos.crawler.model.patient.PatientIdentity;
import com.yihu.hos.crawler.model.transform.EhrCondition;
import com.yihu.hos.crawler.model.transform.LogicValues;
import com.yihu.ehr.framework.constrant.Constants;
import com.yihu.ehr.framework.util.file.FileUtil;
import com.yihu.ehr.framework.util.operator.DateUtil;
import com.yihu.ehr.framework.util.operator.StringUtil;
import com.yihu.hos.standard.model.adapter.AdapterMetadataModel;
import org.apache.http.NameValuePair;
import org.apache.http.message.BasicNameValuePair;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.*;
public class DataCollectDispatcher {
    private static DataCollectDispatcher ourInstance = new DataCollectDispatcher();
    private static Logger logger = LogManager.getLogger(DataCollectDispatcher.class);
    private String token;
    private DataCollectDispatcher() {
    }
    public static DataCollectDispatcher getInstance() {
        return ourInstance;
    }
    public void finalize() throws Throwable {
    }
    public Boolean getToken() {
        try {
            token = EsbHttp.getToken();
            if (StringUtil.isEmpty(token)) {
                return false;
            }
            return true;
        } catch (Exception e) {
            logger.error("本次任务执行失败,获取token失败!");
            return false;
        }
    }
    public Boolean getRemoteVersion(String orgCode) {
        try {
            if (StringUtil.isEmpty(SysConfig.getInstance().getVersionMap().get(orgCode))) {
                String stdVersion = EsbHttp.getRemoteVersion(orgCode);
                if (StringUtil.isEmpty(stdVersion)) {
                    return false;
                }
                SysConfig.getInstance().getVersionMap().put(orgCode, stdVersion);
            }
            return true;
        } catch (Exception e) {
            logger.error("本次任务执行失败,获取远程标准版本号失败!", e);
            return false;
        }
    }
    /**
     * Get the patient list.
     *
     * @param condition query conditions, including beginDate and endDate
     * @return List<Patient>
     */
    public List<Patient> getPatientList(Map<String, Object> condition, Map<String, AdapterDataSet> adapterDataSetMap) {
        ArrayList<Patient> patientList = new ArrayList<>();
        SimpleDateFormat df = new SimpleDateFormat(DateUtil.DEFAULT_YMDHMSDATE_FORMAT);
        for (String key : adapterDataSetMap.keySet()) {
            PatientIdentity patientIdentity = SysConfig.getInstance().getPatientIdentity(key);
            if (patientIdentity == null) {
                continue;
            }
            try {
                //validate the fields required by the patient-list data set
                Map<String, String> propertyMap = getItemList(adapterDataSetMap.get(key));
                if (propertyMap == null) {
                    return patientList;
                }
                //request parameters
                Date beginDate = (Date) condition.get("beginDate");
                String beginTime = df.format(beginDate);
                Date endDate = (Date) condition.get("endDate");
                String endTime = df.format(endDate);
                List<EhrCondition> queryParams = new ArrayList<>();
                queryParams.add(new EhrCondition(" > ", patientIdentity.getRefTimeCode(), beginTime));
                queryParams.add(new EhrCondition(" < ", patientIdentity.getRefTimeCode(), endTime));
                //REST request to the gateway
                String rootStr = EsbHttp.getPatientList(adapterDataSetMap.get(key), queryParams);
                if (StringUtil.isEmpty(rootStr)) {
                    return null;
                }
                ObjectMapper mapper = new ObjectMapper();
                JsonNode resultNode = mapper.readValue(rootStr, JsonNode.class);
                JsonNode patientNode = resultNode.path("detailModelList");
                boolean isArr = patientNode.isArray();
                if (isArr) {
                    Iterator<JsonNode> array = patientNode.iterator();
                    while (array.hasNext()) {
                        JsonNode node = array.next();
                        Patient patient = new Patient();
                        String patientId = node.path(propertyMap.get(Constants.PATIENT_ID)).asText();
                        String eventNo = node.path(propertyMap.get(Constants.EVENT_NO)).asText();
                        String refTime = node.path(propertyMap.get(Constants.EVENT_TIME)).asText();
                        String orgCode = node.path(Constants.ORG_CODE.toUpperCase()).asText();
                        patient.setPatientId(patientId);
                        patient.setEventNo(eventNo);
                        patient.setReferenceTime(refTime);
                        patient.setOrgCode(orgCode);
                        patientList.add(patient);
                    }
                }
            } catch (Exception e) {
                logger.error("采集病人失败", e);
            }
        }
        return patientList;
    }
    public Map<String, String> getItemList(AdapterDataSet adapterDataSet) throws Exception {
        Map<String, String> propertyMap = new HashMap<>();
        PatientIdentity patientIdentity = SysConfig.getInstance().getPatientIdentity(adapterDataSet.getAdapterDataSetT().getStdDatasetCode());
        if (adapterDataSet.isHavePatientID()) {
            AdapterMetaData adapterMetaData = adapterDataSet.getAdapterMetaDataMap().get(PatientIdentity.getPatientIDCode());
            propertyMap.put(Constants.PATIENT_ID, adapterMetaData.getAdapterMetadataModel().getStdMetadataCode().toUpperCase());
        } else {
            logger.error("采集病人列表数据集必须有patient_id.");
            return null;
        }
        if (adapterDataSet.isHaveEventNo()) {
            AdapterMetaData adapterMetaData = adapterDataSet.getAdapterMetaDataMap().get(patientIdentity.getEventNoCode());
            propertyMap.put(Constants.EVENT_NO,  adapterMetaData.getAdapterMetadataModel().getStdMetadataCode().toUpperCase());
        } else {
            logger.error("采集病人列表数据集必须有event_no.");
            return null;
        }
        AdapterMetaData adapterRefMetaData = adapterDataSet.getAdapterMetaDataMap().get(patientIdentity.getRefTimeCode());
        if (adapterRefMetaData == null) {
            logger.error("采集病人列表数据集必须有采集时间.");
            return null;
        }
        propertyMap.put(Constants.EVENT_TIME, adapterRefMetaData.getAdapterMetadataModel().getStdMetadataCode().toUpperCase());
        return propertyMap;
    }
    public String fecthData(Patient patient, AdapterDataSet adapterDataSet, List<EhrCondition> queryParams) {
        try {
            ObjectMapper mapper = new ObjectMapper();
            ObjectNode paramsNode = mapper.createObjectNode();
            paramsNode.put("tableCode", adapterDataSet.getAdapterDataSetT().getStdDatasetCode());
            paramsNode.put("condition", mapper.writeValueAsString(queryParams));
            List<NameValuePair> formParams = new ArrayList<>();
//            formParams.add(new BasicNameValuePair("secret", secret));
            formParams.add(new BasicNameValuePair("api", "collectionData"));
            formParams.add(new BasicNameValuePair("param", mapper.writeValueAsString(paramsNode)));
            //call the resource service gateway
            String rootStr = EsbHttp.getFecthData(formParams);
            JsonNode resultNode = mapper.readValue(rootStr, JsonNode.class);
            JsonNode result = resultNode.path("detailModelList");
            JsonNode data = matchAdapterData(result, adapterDataSet);
            ObjectNode jsonObject = mapper.createObjectNode();
            if (data != null && data.size() > 0) {
                jsonObject.set("data", data);
                jsonObject.put("code", adapterDataSet.getAdapterDataSetT().getStdDatasetCode());
                jsonObject.put("patient_id", patient.getPatientId());
                jsonObject.put("event_no", patient.getEventNo());
                String agencyCode = patient.getOrgCode();
                jsonObject.put("org_code", agencyCode);
                jsonObject.put("inner_version", SysConfig.getInstance().getVersionMap().get(patient.getOrgCode()));
                jsonObject.put("create_date", DateUtil.toString(new Date(), DateUtil.DEFAULT_YMDHMSDATE_FORMAT));
                jsonObject.put("event_time", patient.getReferenceTime());//DateUtil.toString(patient.getReferenceTime(), DateUtil.DEFAULT_YMDHMSDATE_FORMAT)
                if (StringUtil.isEmpty(patient.getReUploadFlg())) {
                    jsonObject.put("reUploadFlg", LogicValues.LOGIC_FALSE);
                } else {
                    jsonObject.put("reUploadFlg", patient.getReUploadFlg());
                }
                return jsonObject.toString();
            } else {
                return Constants.EMPTY;
            }
        } catch (Exception e) {
            logger.error("采集病人数据失败", e);
            return Constants.EMPTY;
        }
    }
    public String fecthData(Patient patient, AdapterDataSet adapterDataSet, Map<String, String> relationValueMap) {
        List<EhrCondition> queryParams = new ArrayList<>();
        for (String key : relationValueMap.keySet()) {
            queryParams.add(new EhrCondition(" = ", key, relationValueMap.get(key)));
        }
        return fecthData(patient, adapterDataSet, queryParams);
    }
    /**
     * Collect data for an orchestration (flow) task.
     *
     * @param patient
     * @param adapterDataSet
     * @return collected data as JSON, or null when nothing was fetched
     */
    public JsonNode fecthData(Patient patient, AdapterDataSet adapterDataSet) {
        try {
            ObjectMapper mapper = new ObjectMapper();
            List<EhrCondition> queryParams = new ArrayList<>();
            boolean patientId = true;
            if (adapterDataSet.isHavePatientID()) {
                AdapterMetaData adapterMetaData = adapterDataSet.getAdapterMetaDataMap().get(PatientIdentity.getPatientIDCode());
                queryParams.add(new EhrCondition(" = ", adapterMetaData.getAdapterMetadataModel().getStdMetadataCode(), patient.getPatientId()));
            } else {
                patientId = false;
            }
            boolean eventNo = true;
            if (adapterDataSet.isHaveEventNo()) {
                AdapterMetaData adapterMetaData = adapterDataSet.getAdapterMetaDataMap().get(adapterDataSet.getEventNoCode());
                queryParams.add(new EhrCondition(" = ", adapterMetaData.getAdapterMetadataModel().getStdMetadataCode(), patient.getEventNo()));
            } else {
                eventNo = false;
            }
            if (!patientId && !eventNo) {
                logger.error("采集病人数据集至少需要一项病人标识.数据集名:" + adapterDataSet.getAdapterDataSetT().getStdDatasetCode());
                return null;
            }
            ObjectNode paramsNode = mapper.createObjectNode();
            paramsNode.put("tableCode", adapterDataSet.getAdapterDataSetT().getStdDatasetCode());
            paramsNode.put("condition", mapper.writeValueAsString(queryParams));
            List<NameValuePair> formParams = new ArrayList<>();
//            formParams.add(new BasicNameValuePair("secret", secret));
            formParams.add(new BasicNameValuePair("api", "collectionData"));
            formParams.add(new BasicNameValuePair("param", mapper.writeValueAsString(paramsNode)));
            //call the resource service gateway
            String rootStr = EsbHttp.getFecthData(formParams);
            JsonNode resultNode = mapper.readValue(rootStr, JsonNode.class);
            JsonNode result = resultNode.path("detailModelList");
            JsonNode data = matchAdapterData(result, adapterDataSet);
            ObjectNode jsonObject = mapper.createObjectNode();
            if (data != null && data.size() > 0) {
                jsonObject.set("data", data);
                jsonObject.put("code", adapterDataSet.getAdapterDataSetT().getStdDatasetCode());
                jsonObject.put("patient_id", patient.getPatientId());
                jsonObject.put("event_no", patient.getEventNo());
                String agencyCode = patient.getOrgCode();
                jsonObject.put("org_code", agencyCode);
                jsonObject.put("inner_version", SysConfig.getInstance().getVersionMap().get(patient.getOrgCode()));
                jsonObject.put("create_date", DateUtil.toString(new Date(), DateUtil.DEFAULT_YMDHMSDATE_FORMAT));
                jsonObject.put("event_time", patient.getReferenceTime());//DateUtil.toString(patient.getReferenceTime(), DateUtil.DEFAULT_YMDHMSDATE_FORMAT)
                if (StringUtil.isEmpty(patient.getReUploadFlg())) {
                    jsonObject.put("reUploadFlg", LogicValues.LOGIC_FALSE);
                } else {
                    jsonObject.put("reUploadFlg", patient.getReUploadFlg());
                }
                return jsonObject;
            } else {
                return null;
            }
        } catch (Exception e) {
            logger.error("采集病人数据失败", e);
        }
        return null;
    }
    public JsonNode matchAdapterData(JsonNode data, AdapterDataSet adapterDataSet) {
        ObjectMapper mapper = new ObjectMapper();
        ArrayNode result = mapper.createArrayNode();
        Iterator<JsonNode> array = data.iterator();
        while (array.hasNext()) {
            JsonNode dataNode = array.next();
            ObjectNode jsonNode = mapper.createObjectNode();
            for (AdapterMetaData adapterMetaData : adapterDataSet.getAdapterMetaDataList()) {
                AdapterMetadataModel adapterMetadataModel = adapterMetaData.getAdapterMetadataModel();
                String orgMetaDataCode = adapterMetadataModel.getAdapterMetadataCode();
                String stdMetaDataCode = adapterMetadataModel.getStdMetadataCode();
                if (!StringUtil.isEmpty(orgMetaDataCode)) {
                    jsonNode.put(orgMetaDataCode, dataNode.path(stdMetaDataCode).asText());
                }
            }
            result.add(jsonNode);
        }
        return result;
    }
    public Boolean register(Patient patient, String data) {
        return EsbHttp.register(patient, data, token);
    }
    public Boolean upload(Map<String, JsonNode> dataMap, Patient patient, Map<String, AdapterDataSet> dataSetMap) {
        Boolean result = true;
        try {
            DataSetTransformer dataTransformer = new DataSetTransformer();
            for (String key : dataMap.keySet()) {
                dataTransformer.setData(dataMap.get(key));
                if (!toFile(dataTransformer, patient, "origin")) {
                    logger.info("存储原始文件失败:patient_id=" + patient.getPatientId()
                            + "event_no=" + patient.getEventNo());
                    result = false;
                    break;
                }
                dataTransformer.transfer(dataSetMap);
                if (!toFile(dataTransformer, patient, "standard")) {
                    logger.info("存储标准文件失败:patient_id=" + patient.getPatientId()
                            + "event_no=" + patient.getEventNo());
                    result = false;
                    break;
                }
            }
            PatientCDAUpload patientCDAUpload = new PatientCDAUpload();
            if (!patientCDAUpload.upload(patient, token)) {
                result = false;
            }
        } catch (Exception e) {
            result = false;
        }
        return result;
    }
    public boolean toFile(DataSetTransformer dataTransformer, Patient patient, String fileName) {
        JsonNode jsonObject = dataTransformer.getJsonObject();
        PatientCDAIndex patientCDAIndex = new PatientCDAIndex(patient);
        String filePath = patientCDAIndex.createDataIndex(fileName, PatientCDAIndex.FileType.JSON);
        boolean writeFile = false;
        try {
            writeFile = FileUtil.writeFile(filePath, jsonObject.toString(), "UTF-8");
        } catch (IOException e) {
            logger.info("存储临时文件失败.");
            logger.error("", e);
        }
        return writeFile;
    }
    /**
     * Parse the token response body.
     *
     * @param responToken raw token response JSON
     * @return the "result" map when the response code is Constants.OK, otherwise null
     */
    public Map<String, Object> parseToken(String responToken) {
        ObjectMapper mapper = new ObjectMapper();
        Map<String, Object> tokenMap = null;
        try {
            Map<String, Object> map = mapper.readValue(responToken, Map.class);
            String code = (String) map.get("code");
            if (Constants.OK.equals(code)) {
                tokenMap = (Map<String, Object>) map.get("result");
            }
        } catch (IOException e) {
            logger.error("解析token内容失败", e);
        }
        return tokenMap;
    }
}//end DataCollectDispatcher
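A hedged sketch of how a collection job might drive DataCollectDispatcher end to end; the job class itself and the way condition and adapterDataSetMap are assembled are assumptions, only the dispatcher calls come from the class above.

package com.yihu.hos.crawler.service;

import com.fasterxml.jackson.databind.JsonNode;
import com.yihu.hos.crawler.model.adapter.AdapterDataSet;
import com.yihu.hos.crawler.model.patient.Patient;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class CollectJobSketch {

    public void run(Map<String, Object> condition, Map<String, AdapterDataSet> adapterDataSetMap, String orgCode) {
        DataCollectDispatcher dispatcher = DataCollectDispatcher.getInstance();
        //token and remote standard version are prerequisites for every run
        if (!dispatcher.getToken() || !dispatcher.getRemoteVersion(orgCode)) {
            return;
        }
        List<Patient> patients = dispatcher.getPatientList(condition, adapterDataSetMap);
        if (patients == null) {
            return;
        }
        for (Patient patient : patients) {
            Map<String, JsonNode> dataMap = new HashMap<>();
            for (Map.Entry<String, AdapterDataSet> entry : adapterDataSetMap.entrySet()) {
                JsonNode data = dispatcher.fecthData(patient, entry.getValue());
                if (data != null) {
                    dataMap.put(entry.getKey(), data);
                }
            }
            //writes the origin/standard JSON files and uploads the zipped archive
            dispatcher.upload(dataMap, patient, adapterDataSetMap);
        }
    }
}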

+ 379 - 0
hos-admin/src/main/java/com/yihu/hos/crawler/service/EsbHttp.java

@ -0,0 +1,379 @@
package com.yihu.hos.crawler.service;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.yihu.hos.crawler.model.adapter.AdapterDataSet;
import com.yihu.hos.crawler.model.config.SysConfig;
import com.yihu.hos.crawler.model.patient.Patient;
import com.yihu.hos.crawler.model.transform.EhrCondition;
import com.yihu.ehr.framework.constrant.Constants;
import com.yihu.ehr.framework.util.encrypt.MD5;
import com.yihu.ehr.framework.util.httpclient.HttpClientUtil;
import com.yihu.ehr.framework.util.httpclient.HttpHelper;
import com.yihu.ehr.framework.util.httpclient.HttpResponse;
import com.yihu.ehr.framework.util.operator.StringUtil;
import org.apache.http.HttpStatus;
import org.apache.http.NameValuePair;
import org.apache.http.message.BasicNameValuePair;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.json.JSONObject;
import java.util.Base64;
import java.io.File;
import java.util.*;
/**
 * Created by hzp on 2016/3/10.
 */
public class EsbHttp {
    private static Logger logger = LogManager.getLogger(EsbHttp.class);
    /***************************** User APIs *********************************************/
    /**
     * Authenticate a user login.
     */
    public static HttpResponse loginAction(String user, String password) throws Exception {
        String loginAction = HttpHelper.defaultHttpUrl + "/authorizations/users/" + user;
        Map<String, Object> header = new HashMap<>();
        String auth = Base64.getEncoder().encodeToString((user + ":" + password).getBytes());
        header.put("Authorization", "Basic " + auth);
        return HttpHelper.put(loginAction, null, header);
    }
    /**
     * Get user information.
     */
    public static HttpResponse getUserInfo(String user, String token) {
        String url = HttpHelper.defaultHttpUrl + "/users/" + user;
        Map<String, Object> params = new HashMap<>();
        params.put("token", token);
        params.put("user", user);
        return HttpHelper.get(url, params);
    }
    /***************************** Client APIs *********************************************/
    /**
     * Get the local machine fingerprint.
     * @return
     */
    private static String GetFingerprint(){
        //UUID.randomUUID() does not throw, so no extra error handling is needed
        return UUID.randomUUID().toString();
    }
    /**
     * Authenticate the client application and obtain an access token.
     */
    public static String getToken(){
        try {
            String loginAction = HttpHelper.defaultHttpUrl + "/authorizations/clients/" + HttpHelper.clientId;
            Map<String, Object> header = new HashMap<>();
            header.put("Authorization", "Basic " + HttpHelper.clientKey);
            //local machine fingerprint
            Map<String, Object> params = new HashMap<String, Object>();
            params.put("info", "{\"fingerprint\": \"" + GetFingerprint() + "\"}");
            HttpResponse response = HttpHelper.put(loginAction, params, header);
            if (response != null && response.getStatusCode() == HttpStatus.SC_OK) {
                JSONObject obj = new JSONObject(response.getBody());
                //check whether the response contains a token
                if (obj.has("token")) {
                    return obj.getString("token");
                } else {
                    logger.info("返回未包含token。");
                    return null;
                }
            } else {
                String msg = "获取Token失败。";
                if (response != null) {
                    msg += "(错误代码:" + response.getStatusCode() + ",错误信息:" + response.getBody() + ")";
                }
                logger.info(msg);
                return null;
            }
        }
        catch (Exception ex)
        {
            logger.info("获取Token失败," + ex.getMessage());
            return null;
        }
    }
    /**
     * Get the patient list through the integration platform gateway.
     */
    public static String getPatientList(AdapterDataSet adapterDataSet, List<EhrCondition> queryParams){
        try {
            ObjectMapper mapper = new ObjectMapper();
            ObjectNode paramsNode = mapper.createObjectNode();
            paramsNode.put("tableCode", adapterDataSet.getAdapterDataSetT().getStdDatasetCode());
            paramsNode.put("condition", mapper.writeValueAsString(queryParams));
            List<NameValuePair> formParams = new ArrayList<>();
            formParams.add(new BasicNameValuePair("api", "collectionData"));
            String params = mapper.writeValueAsString(paramsNode);
            formParams.add(new BasicNameValuePair("param", params));
            HttpResponse response = HttpClientUtil.postForm(HttpHelper.httpGateway, formParams);
            if (response == null || response.getStatusCode() != 200) {
                logger.error("获取病人列表错误,请求HTTP错误,请检查配置或HTTP是否可用.");
                return "";
            }
            JsonNode responseNode = mapper.readValue(response.getBody(), JsonNode.class);
            String code = responseNode.path("responseCode").asText();
            if (StringUtil.isEmpty(code) || !code.equals("10000")) {
                logger.error("获取病人列表错误,请求HTTP错误,请检查集成平台网关是否可用.");
                return "";
            }
            String rootStr = responseNode.path("responseResult").asText();
            if ("".equals(rootStr)) {
                logger.error("获取病人列表错误,集成平台获取病人列表失败.");
                return "";
            }
            return rootStr;
        } catch (Exception e) {
            logger.error("获取病人列表失败!", e);
            return "";
        }
    }
    public static String getFecthData(List<NameValuePair> formParams) {
        try {
            HttpResponse response = HttpClientUtil.postForm(HttpHelper.httpGateway, formParams);
            if (response == null || response.getStatusCode() != 200) {
                logger.info("获取病人数据错误,请求HTTP错误,请检查配置或HTTP是否可用.");
                return Constants.EMPTY;
            }
            ObjectMapper mapper = new ObjectMapper();
            JsonNode responseNode = mapper.readValue(response.getBody(), JsonNode.class);
            String code = responseNode.path("responseCode").asText();
            if (StringUtil.isEmpty(code) || !code.equals("10000")) {
                logger.info("获取病人数据错误,请求HTTP错误,请检查集成平台网关是否可用.");
                return Constants.EMPTY;
            }
            String rootStr = responseNode.path("responseResult").asText();
            if (Constants.EMPTY.equals(rootStr)) {
                logger.info("获取病人数据错误,集成平台获取病人数据失败.");
                return Constants.EMPTY;
            }
            return rootStr;
        } catch (Exception e) {
            logger.error("获取病人数据失败.", e);
            return Constants.EMPTY;
        }
    }
    /**
     * Get the public key of an organization.
     */
    public static String getPublicKey(String orgCode){
        try {
            String token = getToken();
            if (!StringUtil.isEmpty(SysConfig.getInstance().getPublicKeyMap().get(orgCode))) {
                return SysConfig.getInstance().getPublicKeyMap().get(orgCode);
            }
            Map<String, Object> header = new HashMap<>();
            header.put("Authorization", "Basic " + HttpHelper.clientKey);
            Map<String, Object> paramMap = new HashMap<>();
            paramMap.put("org_code", orgCode);
            paramMap.put("token", token);
            String publicKeyMethod = HttpHelper.defaultHttpUrl + "/organizations/"+orgCode+"/key";
            HttpResponse response = HttpHelper.get(publicKeyMethod, paramMap, header);
            if (response != null && response.getStatusCode() == HttpStatus.SC_OK) {
                JSONObject json = new JSONObject(response.getBody());
                if(json.has("publicKey"))
                {
                    String publicKey = json.getString("publicKey");
                    SysConfig.getInstance().getPublicKeyMap().put(orgCode, publicKey);
                    return publicKey;
                }
                else{
                    logger.info("获取公钥失败,返回未包含publicKey。");
                    return null;
                }
            }
            else{
                String msg = "获取公钥失败。";
                if (response != null)
                {
                    msg +="(错误代码:"+ response.getStatusCode() + ",错误信息:"+response.getBody()+")";
                }
                logger.info(msg);
                return null;
            }
        } catch (Exception e) {
            logger.info(e.getMessage());
            return null;
        }
    }
    /**
     * Get the standard version number from the health cloud platform.
     */
    public static String getRemoteVersion(String orgCode) {
        try {
            String token = getToken();
            String versionMethod = HttpHelper.defaultHttpUrl + "/adaptions/org_plan/version";
            Map<String, Object> header = new HashMap<>();
            header.put("Authorization", "Basic " + HttpHelper.clientKey);
            Map<String, Object> params = new HashMap<>();
            params.put("org_code", orgCode);
            params.put("token", token);
            HttpResponse response = HttpHelper.get(versionMethod, params, header);
            if (response != null && response.getStatusCode() == HttpStatus.SC_OK) {
                return response.getBody();
            }
            else{
                String msg = "获取健康云平台标准版本号失败";
                if (response != null)
                {
                    msg +="(错误代码:"+ response.getStatusCode() + ",错误信息:"+response.getBody()+")";
                }
                logger.info(msg);
                return null;
            }
        } catch (Exception e) {
            logger.info("获取远程版本号异常");
            logger.error(e.getCause().toString());
            return null;
        }
    }
    /**
     * Register a patient.
     */
    public static Boolean register(Patient patient, String data, String token) {
        try {
            JSONObject json = new JSONObject(data);
            String colName = SysConfig.registerIdCardNo;
            Map<String, Object> header = new HashMap<>();
            header.put("Authorization", "Basic " + HttpHelper.clientKey);
            header.put("User-Agent", "client " + HttpHelper.clientId);
            if(json!=null && json.has("data")) {
                JSONObject p = (JSONObject)json.getJSONArray("data").get(0);
                if(!p.has(colName) || StringUtil.isEmpty(p.get(colName))) {
                    logger.info("注册病人信息请求失败:身份证号码为空,patient_id=" + patient.getPatientId() + ", event_no=" + patient.getEventNo());
                    return false;
                }
                else{
                    String idCord = p.getString(colName);
                    String registerMethod = HttpHelper.defaultHttpUrl + "/patients/"+idCord;
                    if (StringUtil.isEmpty(data)) {
                        logger.info("注册病人信息请求失败:无具体病人信息,patient_id=" + patient.getPatientId() + ", event_no=" + patient.getEventNo());
                        return false;
                    }
                    Map<String, Object> paramMap = new HashMap<>();
                    paramMap.put("demographic_id", idCord);
                    paramMap.put("json", data);
                    paramMap.put("token", token);
                    HttpResponse response = HttpHelper.post(registerMethod, paramMap, header);
                    if (response != null && response.getStatusCode() == HttpStatus.SC_OK) {
                        logger.info("注册病人信息成功。patient_id:" + patient.getPatientId() + ", event_no:" + patient.getEventNo());
                        return true;
                    }
                    else{
                        String msg = "注册病人信息请求失败。patient_id:" + patient.getPatientId() + ", event_no:" + patient.getEventNo();
                        if(response != null)
                        {
                            msg +="(错误代码:"+ response.getStatusCode() + ",错误信息:"+response.getBody()+")";
                        }
                        logger.info(msg);
                        return false;
                    }
                }
            }
            else{
                logger.info("注册病人信息请求失败:传入数据无效,patient_id=" + patient.getPatientId() + ", event_no=" + patient.getEventNo());
                return false;
            }
        }
        catch (Exception e)
        {
            logger.info("注册病人信息请求失败." + e.getMessage());
            return false;
        }
    }
    /**
     * Upload a patient archive package.
     */
    public static boolean upload(Patient patient, File file, String encryptPwd, String token) {
        try {
            String uploadMethod = HttpHelper.defaultHttpUrl + "/packages";
            String fileMd5= MD5.getMd5ByFile(file);
            List<NameValuePair> formParams = new ArrayList<>();
            formParams.add(new BasicNameValuePair("md5", fileMd5));
            formParams.add(new BasicNameValuePair("package_crypto", encryptPwd));
            formParams.add(new BasicNameValuePair("org_code", patient.getOrgCode()));
            formParams.add(new BasicNameValuePair("token", token));
            Map<String, Object> header = new HashMap<>();
            header.put("Authorization", "Basic " + HttpHelper.clientKey);
            header.put("User-Agent", "client " + HttpHelper.clientId);
            HttpResponse response = HttpHelper.postFile(uploadMethod, formParams, file.getAbsolutePath(), header);
            if (response != null && response.getStatusCode() == HttpStatus.SC_OK) {
                logger.info("上传病人档案成功,patient_id:" + patient.getPatientId() + ",event_no:" + patient.getEventNo());
                return true;
            }
            else {
                String msg = "上传病人档案请求失败,patient_id:" + patient.getPatientId() + ",event_no:" + patient.getEventNo();
                if (response != null)
                {
                    msg +="(错误代码:"+ response.getStatusCode() + ",错误信息:"+response.getBody()+")";
                }
                logger.info(msg);
                return false;
            }
        }
        catch (Exception e) {
            logger.info("上传病人档案异常,patient_id:" + patient.getPatientId() + ",event_no:" + patient.getEventNo());
            logger.error(e.getCause().toString());
            return false;
        }
    }
    /**
     * Download the standard package.
     */
    public static HttpResponse download(String remoteVersion, String orgCode) {
        try {
            String token = getToken();
            String downLoadMethod = HttpHelper.defaultHttpUrl + "/adaptions/"+orgCode+"/source";
            Map<String, Object> params = new HashMap<>();
            params.put("version_code", remoteVersion);
            params.put("org_code", orgCode);
            params.put("token", token);
            Map<String, Object> header = new HashMap<>();
            header.put("Authorization", "Basic " + HttpHelper.clientKey);
            HttpResponse response = HttpHelper.get(downLoadMethod, params, header);
            return response;
        } catch (Exception e) {
            logger.info("下载标准包异常:");
            logger.error(e.getCause().toString());
            return null;
        }
    }
}
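A small illustrative sketch chaining three of the static helpers above (token, remote standard version, standard-package download); the wrapper class name is an assumption, the EsbHttp calls are as defined in this file.

package com.yihu.hos.crawler.service;

import com.yihu.ehr.framework.util.httpclient.HttpResponse;
import com.yihu.ehr.framework.util.operator.StringUtil;

public class EsbHttpSketch {

    public static HttpResponse fetchStandardPackage(String orgCode) {
        String token = EsbHttp.getToken();
        if (StringUtil.isEmpty(token)) {
            return null;    //failures are already logged inside getToken()
        }
        String remoteVersion = EsbHttp.getRemoteVersion(orgCode);
        if (StringUtil.isEmpty(remoteVersion)) {
            return null;
        }
        //download(...) fetches its own token internally, as in the class above
        return EsbHttp.download(remoteVersion, orgCode);
    }
}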

+ 149 - 0
hos-admin/src/main/java/com/yihu/hos/crawler/service/OldPatientCDAUpload.java

@ -0,0 +1,149 @@
package com.yihu.hos.crawler.service;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.yihu.hos.crawler.model.config.SysConfig;
import com.yihu.hos.crawler.model.patient.Patient;
import com.yihu.ehr.framework.util.compress.Zipper;
import com.yihu.ehr.framework.util.encrypt.MD5;
import com.yihu.ehr.framework.util.encrypt.RSA;
import com.yihu.ehr.framework.util.file.FileUtil;
import com.yihu.ehr.framework.util.httpclient.HttpHelper;
import com.yihu.ehr.framework.util.httpclient.HttpResponse;
import org.apache.http.NameValuePair;
import org.apache.http.message.BasicNameValuePair;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.io.File;
import java.security.Key;
import java.util.*;
/**
 * Archive upload.
 *
 * @author Air
 * @version 1.0
 * @created 2015.07.06 15:58
 */
public class OldPatientCDAUpload {
    private static Logger logger = LogManager.getLogger(OldPatientCDAUpload.class);
    public static String uploadMethod;
    /**
     * @param patient
     * @return
     * @modify 2015.09.15 airhead revised temporary directory deletion
     * @modify 2015.09.19 airhead fixed the missing-document issue and error messages
     */
    public boolean upload(Patient patient, String token) {
        ZipFile zipFile = zip(patient);
        try {
            if (zipFile == null || zipFile.file == null) {
                logger.info("压缩病人档案失败,病人文档未生成,patient_id:" + patient.getPatientId() + ",event_no:" + patient.getEventNo());
                return false;
            }
            boolean result = upload(patient, zipFile,token);
            if (!result) {
                logger.info("上传病人档案失败,patient_id:" + patient.getPatientId() + ",event_no:" + patient.getEventNo());
                FileUtil.deleteDirectory(new File(zipFile.directory));
                return false;
            }
            result = FileUtil.deleteDirectory(new File(zipFile.directory));
            if (!result) {
                logger.info("删除临时文件失败,patient_id:" + patient.getPatientId() + ",event_no:" + patient.getEventNo());
            }
        } catch (Exception e) {
            FileUtil.deleteDirectory(new File(zipFile.directory));
        }
        return true;
    }
    /**
     * @param patient
     * @return
     * @modify generate the zip data from the data directory
     */
    public ZipFile zip(Patient patient) {
        try {
            PatientCDAIndex patientCDAIndex = new PatientCDAIndex(patient);
            String dataDirectory = patientCDAIndex.getDataDirectory();
            String filePath = patientCDAIndex.createIndex(PatientCDAIndex.IndexType.ZIP, PatientCDAIndex.FileType.ZIP);
            UUID uuidPwd = UUID.randomUUID();
            String pwd = uuidPwd.toString();
            Key key = RSA.genPublicKey(SysConfig.getInstance().getPublicKeyMap().get(patient.getOrgCode()));
            if (key == null) {
                logger.info("压缩文件错误,无公钥信息.");
                FileUtil.deleteDirectory(new File( patientCDAIndex.getDirectory()));
                return null;
            }
            ZipFile zipFile = new ZipFile();
            zipFile.encryptPwd = RSA.encrypt(pwd, key);
            Zipper zipper = new Zipper();
            zipFile.file = zipper.zipFile(new File(dataDirectory), filePath, pwd);
            zipFile.dataDirectory = dataDirectory;
            zipFile.directory = patientCDAIndex.getDirectory();
            return zipFile;
        } catch (Exception e) {
            logger.error("从data目录生成zip数据时,压缩文件异常", e);
        }
        return null;
    }
    private boolean upload(Patient patient,  ZipFile zipFile, String token) {
        try {
            String uploadMethod = HttpHelper.defaultHttpUrl + "/packages";
            String fileMd5= MD5.getMd5ByFile(zipFile.file);
            List<NameValuePair> formParams = new ArrayList<>();
            formParams.add(new BasicNameValuePair("md5", fileMd5));
            formParams.add(new BasicNameValuePair("package_crypto", zipFile.encryptPwd));
            formParams.add(new BasicNameValuePair("org_code", patient.getOrgCode()));
            formParams.add(new BasicNameValuePair("token", token));
            Map<String, Object> header = new HashMap<>();
            header.put("Authorization", "Basic " + HttpHelper.clientKey);
            HttpResponse response = HttpHelper.postFile(uploadMethod, formParams, zipFile.file.getAbsolutePath(), header);
            if (response == null) {
                logger.info( "上传病人档案请求失败,patient_id:" + patient.getPatientId() + ",event_no:" + patient.getEventNo());
                return false;
            }
            if (response.getStatusCode() != 200) {
                logger.info( "上传病人档案请求失败,错误代码:" + response.getStatusCode() + ",patient_id:" + patient.getPatientId() + ",event_no:" + patient.getEventNo());
                return false;
            }
            ObjectMapper mapper = new ObjectMapper();
            JsonNode rootNode = mapper.readValue(response.getBody(), JsonNode.class);
            JsonNode codeNode = rootNode.get("code");
            String result = codeNode.asText();
            if (!result.equals("0")) {
                logger.info("上传病人档案失败,错误代码:" + result + ",patient_id:" + patient.getPatientId() + ",event_no:" + patient.getEventNo());
                return false;
            } else {
                logger.info( "上传病人档案成功,patient_id:" + patient.getPatientId() + ",event_no:" + patient.getEventNo());
                return true;
            }
        } catch (Exception e) {
            e.printStackTrace();
            logger.info( "上传病人档案异常,patient_id:" + patient.getPatientId() + ",event_no:" + patient.getEventNo());
            return false;
        }
    }
    private class ZipFile {
        public File file;
        public String encryptPwd;
        public String directory;
        public String dataDirectory;
    }
}

+ 83 - 0
hos-admin/src/main/java/com/yihu/hos/crawler/service/PatientCDAIndex.java

@ -0,0 +1,83 @@
package com.yihu.hos.crawler.service;
import com.yihu.hos.crawler.model.config.SysConfig;
import com.yihu.hos.crawler.model.patient.Patient;
import java.util.UUID;
/**
 * Patient file index class used to generate file paths; it does not guarantee that the paths exist.
 *
 * @author Air
 * @version 1.0
 * @created 2015.07.01 18:06
 */
public class PatientCDAIndex {
    private Patient patient;
    public PatientCDAIndex(Patient patient) {
        this.patient = patient;
    }
    public String getDirectory() {
        String dir = SysConfig.getInstance().getTempFile();
        return dir + "/" + patient.getOrgCode() + "/" + patient.getOrgCode() + "-" + patient.getPatientId() + "-" + patient.getEventNo();
    }
    /**
     * Generate the patient archive data directory:
     * orgCode-patientId-eventNo/data
     *
     * @return
     */
    public String getDataDirectory() {
        return getDirectory() + "/" + IndexType.DATA;
    }
    public String createIndex(String indexType, String fileType) {
        UUID uuid = UUID.randomUUID();
        String index = uuid.toString();
        String dir = getDirectory() + "/" + indexType;
        return dir + "/" + index + fileType;
    }
    public String createDataSetIndex(String indexType, String fileType) {
        String index = "dataset_index";
        String dir = getDirectory() + "/" + IndexType.DATA + "/" + indexType;
        return dir + "/" + index + fileType;
    }
    /**
     * Generate the final patient archive directories:
     * data/cda
     * data/origin
     * data/standard
     *
     * @param indexType
     * @param fileType
     * @return
     */
    public String createDataIndex(String indexType, String fileType) {
        return createIndex(IndexType.DATA + "/" + indexType, fileType);
    }
    public class FileType {
        public final static String XML = ".xml";
        public final static String JSON = ".json";
        public final static String ZIP = ".zip";
    }
    public class IndexType {
        public final static String DATA = "data";           //patient archive data directory
        public final static String CDA = "cda";             //patient CDA archive directory
        public final static String STANDARD = "standard";   //patient standardized archive directory
        public final static String ORIGIN = "origin";       //patient original archive directory
        public final static String ZIP = "zip";             //patient zip package directory
        public final static String DOCUMENT = "document";
    }
}
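A runnable sketch of the paths PatientCDAIndex produces; the patient values are made up and the base directory comes from SysConfig.getInstance().getTempFile(), which is assumed to be configured.

package com.yihu.hos.crawler.service;

import com.yihu.hos.crawler.model.patient.Patient;

public class PatientCDAIndexSketch {

    public static void main(String[] args) {
        Patient patient = new Patient();
        patient.setOrgCode("ORG01");
        patient.setPatientId("P123");
        patient.setEventNo("E456");

        PatientCDAIndex index = new PatientCDAIndex(patient);
        // <tempFile>/ORG01/ORG01-P123-E456
        System.out.println(index.getDirectory());
        // <tempFile>/ORG01/ORG01-P123-E456/data
        System.out.println(index.getDataDirectory());
        // <tempFile>/ORG01/ORG01-P123-E456/data/origin/<random uuid>.json
        System.out.println(index.createDataIndex(PatientCDAIndex.IndexType.ORIGIN, PatientCDAIndex.FileType.JSON));
    }
}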

+ 113 - 0
hos-admin/src/main/java/com/yihu/hos/crawler/service/PatientCDAUpload.java

@ -0,0 +1,113 @@
package com.yihu.hos.crawler.service;
import com.yihu.hos.crawler.model.config.SysConfig;
import com.yihu.hos.crawler.model.patient.Patient;
import com.yihu.ehr.framework.util.compress.Zipper;
import com.yihu.ehr.framework.util.encrypt.RSA;
import com.yihu.ehr.framework.util.file.FileUtil;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.io.File;
import java.security.Key;
import java.util.UUID;
/**
 * Archive upload.
 *
 * @author Air
 * @version 1.0
 * @created 2015.07.06 15:58
 */
public class PatientCDAUpload {
    private static Logger logger = LogManager.getLogger(PatientCDAUpload.class);
    public static String uploadMethod;
    /**
     * @param patient
     * @return
     * @modify 2015.09.15 airhead revised temporary directory deletion
     * @modify 2015.09.19 airhead fixed the missing-document issue and error messages
     */
    public Boolean upload(Patient patient, String token) {
        ZipFile zipFile = zip(patient);
        try {
            if (zipFile == null || zipFile.file == null) {
                logger.info("压缩病人档案失败,病人文档未生成,patient_id:" + patient.getPatientId() + ",event_no:" + patient.getEventNo());
                return false;
            }
            boolean result = upload(patient, zipFile, token);
            if (!result) {
                logger.info("上传病人档案失败,patient_id:" + patient.getPatientId() + ",event_no:" + patient.getEventNo());
                return result;
            }
            logger.trace(zipFile.directory);
            result = FileUtil.deleteDirectory(new File(zipFile.directory));
            if (!result) {
                logger.info("删除临时文件失败,patient_id:" + patient.getPatientId() + ",event_no:" + patient.getEventNo());
                return result;
            }
        } catch (Exception e) {
            FileUtil.deleteDirectory(new File(zipFile.directory));
            return false;
        }
        return true;
    }
    /**
     * @param patient
     * @return
     * @modify generate the zip data from the data directory
     */
    public ZipFile zip(Patient patient) {
        try {
            PatientCDAIndex patientCDAIndex = new PatientCDAIndex(patient);
            String dataDirectory = patientCDAIndex.getDataDirectory();
            String filePath = patientCDAIndex.createIndex(PatientCDAIndex.IndexType.ZIP, PatientCDAIndex.FileType.ZIP);
            UUID uuidPwd = UUID.randomUUID();
            String pwd = uuidPwd.toString();
            String publicKey = SysConfig.getInstance().getPublicKeyMap().get(patient.getOrgCode());
            if (publicKey == null || publicKey.length() == 0) {
                publicKey = EsbHttp.getPublicKey(patient.getOrgCode());
                SysConfig.getInstance().getPublicKeyMap().put(patient.getOrgCode(), publicKey);
            }
            Key key = RSA.genPublicKey(publicKey);
            if (key == null) {
                logger.info("压缩文件错误,获取公钥错误.");
                return null;
            }
            ZipFile zipFile = new ZipFile();
            zipFile.encryptPwd = RSA.encrypt(pwd, key);
            Zipper zipper = new Zipper();
            zipFile.file = zipper.zipFileForAll(new File(dataDirectory), filePath, pwd);
            zipFile.dataDirectory = dataDirectory;
            zipFile.directory = patientCDAIndex.getDirectory();
            return zipFile;
        } catch (Exception e) {
            logger.error("从data目录生成zip数据时,压缩文件异常");
            logger.error(e.getCause().toString());
        }
        return null;
    }
    private boolean upload(Patient patient, ZipFile zipFile, String token) {
        return EsbHttp.upload(patient, zipFile.file, zipFile.encryptPwd, token);
    }
    private class ZipFile {
        public File file;
        public String encryptPwd;
        public String directory;
        public String dataDirectory;
    }
}
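A usage sketch for the upload flow above, for one patient whose data directory has already been generated; the caller class is hypothetical, the token comes from EsbHttp.getToken() and the upload itself is the method defined in this file.

package com.yihu.hos.crawler.service;

import com.yihu.hos.crawler.model.patient.Patient;

public class PatientUploadSketch {

    public static boolean uploadOne(Patient patient) {
        String token = EsbHttp.getToken();
        if (token == null) {
            return false;
        }
        //zips <temp>/<org>/<org>-<patientId>-<eventNo>/data, encrypts the zip password with the
        //organization public key and posts the archive through EsbHttp.upload(...)
        return new PatientCDAUpload().upload(patient, token);
    }
}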

+ 37 - 0
hos-admin/src/main/java/com/yihu/hos/crawler/storage/DataSetStorage.java

@ -0,0 +1,37 @@
package com.yihu.hos.crawler.storage;
import com.yihu.hos.crawler.format.IDataTransformer;
import com.yihu.hos.crawler.model.patient.Patient;
import com.yihu.hos.crawler.format.AdapterScheme;
/**
 * Created by Administrator on 2015/10/15.
 */
public class DataSetStorage extends MongodbStorage {
    public static final String KEY_CODE = "code";
    public DataSetStorage(AdapterScheme adapterScheme, String dbName) {
        super(adapterScheme, dbName);
    }
    @Override
    public boolean save(IDataTransformer dataTransformer) {
        return true;
    }
    /**
     * @param patient
     * @return
     * @modify generate the archive into the data directory
     */
    @Override
    public boolean toFile(Patient patient) {
        return true;
    }
    @Override
    public String getKey(){
        return KEY_CODE;
    }
}

+ 232 - 0
hos-admin/src/main/java/com/yihu/hos/crawler/storage/DocumentStorage.java

@ -0,0 +1,232 @@
package com.yihu.hos.crawler.storage;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.mongodb.client.FindIterable;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoCursor;
import com.mongodb.client.model.UpdateOptions;
//import com.yihu.common.util.log.DebugLogger;
import com.yihu.hos.common.mongo.MongoDB;
import com.yihu.hos.crawler.format.IDataTransformer;
import com.yihu.hos.crawler.model.config.SysConfig;
import com.yihu.hos.crawler.model.patient.Patient;
import com.yihu.hos.crawler.format.AdapterScheme;
import com.yihu.hos.crawler.format.DocumentTransformer;
import com.yihu.hos.crawler.service.EsbHttp;
import com.yihu.hos.crawler.service.PatientCDAIndex;
import com.yihu.ehr.framework.util.encode.Base64;
import com.yihu.ehr.framework.util.file.FileUtil;
import com.yihu.ehr.framework.util.operator.DateUtil;
import org.bson.Document;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Iterator;
import static com.mongodb.client.model.Filters.and;
import static com.mongodb.client.model.Filters.eq;
import static com.mongodb.client.model.Projections.excludeId;
/**
 * Created by Administrator on 2015/10/15.
 */
public class DocumentStorage extends MongodbStorage {
    public static final String KEY_CODE = "catalog";
    public DocumentStorage(AdapterScheme adapterScheme, String dbName) {
        super(adapterScheme, dbName);
    }
    @Override
    public boolean save(IDataTransformer dataTransformer) {
        DocumentTransformer documentTransformer = (DocumentTransformer) dataTransformer;
        JsonNode jsonObject = documentTransformer.getJsonObject();
        if (jsonObject != null) {
            String dataSetCode = jsonObject.get(getKey()).asText();
            String patientId = jsonObject.get(PATIENT_ID).asText();
            String eventNo = jsonObject.get(EVENT_NO).asText();
            try {
                MongoCollection<Document> collection = MongoDB.db(dbName).getCollection(dataSetCode);
                createIndex(collection);    // create the indexes
                Document filter = new Document();
                filter.append(PATIENT_ID, patientId);
                filter.append(EVENT_NO, eventNo);
                collection.deleteMany(filter);
                UpdateOptions updateOptions = new UpdateOptions();
                updateOptions.upsert(true);
                collection.replaceOne(filter, Document.parse(jsonObject.toString()), updateOptions);
                String url = createUrl(dataSetCode, patientId, eventNo);
                Date expiryDate = DateUtil.setDateTime(new Date(), getExpireDays().intValue());
                SimpleDateFormat sdf = new SimpleDateFormat(DateUtil.DEFAULT_YMDHMSDATE_FORMAT);
                String date = sdf.format(expiryDate);
                Document updateDoc = new Document(CREATE_AT, new Date());
                updateDoc.put("resource.url", url);
                updateDoc.put("resource.expiry_date", date);
                collection.updateMany(filter, new Document("$set", updateDoc));
            } catch (Exception e) {
                //DebugLogger.fatal("保存病人档案信息至MongoDB异常:", e);
                return false;
            }
            return true;
        }
        return false;
    }
    /**
     * @param patient the patient whose archive is exported
     * @return true only if every matched document was written successfully
     * @modify generate the archive into the data directory
     */
    @Override
    public boolean toFile(Patient patient) {
        boolean result = true;
        PatientCDAIndex patientCDAIndex = new PatientCDAIndex(patient);
        ObjectMapper mapper = new ObjectMapper();
        ArrayNode arrayNode=mapper.createArrayNode();
        ObjectNode resultNode=mapper.createObjectNode();
        try {
            for (String name : MongoDB.db(dbName).listCollectionNames()) {
                MongoCollection<Document> collection = MongoDB.db(dbName).getCollection(name);
                FindIterable<Document> documents = collection.find(and(eq("patient_id", patient.getPatientId()), eq(EVENT_NO, patient.getEventNo()))).projection(excludeId());
                try (MongoCursor<Document> cursor = documents.iterator()) {
                    while (cursor.hasNext()) {
                        try {
                            String document = cursor.next().toJson();
                            ObjectNode rootNode = mapper.readValue(document, ObjectNode.class);
                            JsonNode jsonNode = rootNode.get("data");
                            boolean array = jsonNode.isArray();
                            if (!array) {
                                continue;
                            }
                            arrayNode=genunStructureData(jsonNode,patientCDAIndex);
                        } catch (IOException e) {
                            e.printStackTrace();
                            //DebugLogger.fatal("存储临时文件失败.");
                            result = false;
                        }
                    }
                } catch (Exception e) {
                    //DebugLogger.fatal("", e);
                    result = false;
                }
            }
            String innerVersion =  EsbHttp.getRemoteVersion(patient.getOrgCode());
            for (int i = 0; i != arrayNode.size(); ++i) {
                JsonNode keyWordsNode = arrayNode.get(i).path("key_words");
                ObjectNode newNode=mapper.createObjectNode();
                JsonNode jsonNode= transformKeyWords(keyWordsNode, newNode);
                ((ObjectNode) arrayNode.get(i)).set("key_words", jsonNode);
            }
            resultNode.set("data", arrayNode);
            resultNode.put("patient_id", patient.getPatientId());
            resultNode.put("event_no",patient.getEventNo());
            resultNode.put("org_code",patient.getOrgCode());
            resultNode.put("event_time",patient.getReferenceTime());
            resultNode.put("inner_version",innerVersion);
            String indexPath = patientCDAIndex.getDataDirectory()+"/"+"meta.json";
            boolean writeFile = FileUtil.writeFile(indexPath, mapper.writeValueAsString(resultNode), "UTF-8");
        } catch (Exception e) {
            //DebugLogger.fatal("", e);
            result = false;
        }
        return result;
    }
    @Override
    public String getDataSet(Patient patient, String dataSetCode) {
        return null;
    }
    @Override
    public String getKey(){
        return KEY_CODE;
    }
    /**
     * Generate the unstructured data for the meta.json file
     * @param jsonNode the "data" array of the stored document
     * @param patientCDAIndex index used to build the output file paths
     * @return the document array with file contents written out to disk
     * @throws IOException
     */
    public ArrayNode genunStructureData(JsonNode jsonNode,PatientCDAIndex patientCDAIndex) throws IOException {
        ObjectMapper mapper=new ObjectMapper();
        ArrayNode arrayNode=mapper.createArrayNode();
        for (int i = 0; i != jsonNode.size(); ++i) {
            JsonNode documentNode = jsonNode.get(i);
            JsonNode contentNode=documentNode.path("content");
            if (contentNode.isArray()){
                for (int j = 0; j< contentNode.size(); j++) {
                    JsonNode fileArr = contentNode.get(j);
//                    String mimeType = fileArr.path("mime_type").asText();    // file type
                    String names = fileArr.path("name").asText();
                    String fileType = names.substring(names.lastIndexOf("."));    // file extension
                    JsonNode file = fileArr.path("file_content");    // file content
                    Iterator<String> fileNames = file.fieldNames();
                    StringBuilder stringBuilder=new StringBuilder();
                    while (fileNames.hasNext()){
                        String key=fileNames.next();
                        String content =file.path(key).asText();
                        String filePath = patientCDAIndex.createDataIndex(dbName, fileType);
                        String fileName = filePath.substring(filePath.lastIndexOf("/")+1);
                        byte[]  fileContent = Base64.decode(content);
                        boolean writeFile = FileUtil.writeFile(filePath, fileContent, "UTF-8");
                        if (!writeFile) {
                            //DebugLogger.fatal("存储临时文件失败.");
                        } else {
                            stringBuilder.append(fileName).append(",");
                        }
                    }
                    if (file.isObject()) {
                        ((ObjectNode) fileArr).put("name", stringBuilder.toString());
                        ((ObjectNode) fileArr).remove("file_content");
                    }
                }
            }
            arrayNode.add(documentNode);
        }
        return arrayNode;
    }
    /**
     * Convert the "-" characters contained in key_words keys into "." (a usage sketch follows this class)
     * @param keyWordsNode the original key_words node
     * @param newObjectNode the node that receives the rewritten keys
     * @return newObjectNode populated with the rewritten keys
     */
    public ObjectNode transformKeyWords(JsonNode keyWordsNode, ObjectNode newObjectNode){
        Iterator<String> iterator = keyWordsNode.fieldNames();
        while (iterator.hasNext()){
            String key=iterator.next();
            String value =keyWordsNode.path(key).asText();
            String newKey=key.replaceAll("-",".");
            newObjectNode.put(newKey,value);
        }
        return newObjectNode;
    }
}
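
A minimal, self-contained sketch of the key_words rewrite performed by transformKeyWords above. The class name and sample key are hypothetical; only the "-" to "." replacement itself is taken from the source.

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import java.util.Iterator;

public class KeyWordsTransformDemo {
    public static void main(String[] args) {
        ObjectMapper mapper = new ObjectMapper();
        ObjectNode keyWords = mapper.createObjectNode();
        keyWords.put("JDBD-DM-01", "sample value");   // hypothetical key
        ObjectNode transformed = mapper.createObjectNode();
        Iterator<String> it = keyWords.fieldNames();
        while (it.hasNext()) {
            String key = it.next();
            // replaceAll takes a regex; "-" here is a literal character
            transformed.put(key.replaceAll("-", "."), keyWords.path(key).asText());
        }
        System.out.println(transformed);   // prints {"JDBD.DM.01":"sample value"}
    }
}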

+ 36 - 0
hos-admin/src/main/java/com/yihu/hos/crawler/storage/IDataStorage.java

@ -0,0 +1,36 @@
package com.yihu.hos.crawler.storage;
import com.yihu.hos.crawler.format.IDataTransformer;
import com.yihu.hos.crawler.model.patient.Patient;
import java.util.Map;
/**
 * Storage interface; it should only be concerned with saving and retrieving data.
 * The current definition still has issues to be revised:
 * 1. There is no dedicated retrieval contract.
 * 2. toFile goes beyond the interface's responsibility.
 * A hypothetical caller sketch follows this interface.
 *
 * @author Airhead
 * @version 1.0
 * @created 22-5-2015 11:24:26
 */
public interface IDataStorage {
    boolean save(IDataTransformer dataTransformer);
    boolean toFile(Patient patient);
    String getDataSet(Patient patient, String dataSetCode);
    String getArchive(String dataSetCode, Map<String, Object> params);
    Boolean isStored(String orgCode, String patientID, String eventNo);
    enum StorageType {
        MYSQL_DB,
        MONGODB,
        FILE_SYSTEM
    }
}
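
A hypothetical caller sketch for this interface, assuming the surrounding hos-admin classes (MongodbStorage, Patient, AdapterScheme, IDataTransformer) are on the classpath; the database name and the call order are illustrative only, not a definitive workflow.

package com.yihu.hos.crawler.storage;

import com.yihu.hos.crawler.format.AdapterScheme;
import com.yihu.hos.crawler.format.IDataTransformer;
import com.yihu.hos.crawler.model.patient.Patient;

public class StorageUsageSketch {
    public boolean store(AdapterScheme scheme, Patient patient, IDataTransformer transformer) {
        IDataStorage storage = new MongodbStorage(scheme, "HealthArchive");   // dbName is a placeholder
        if (Boolean.TRUE.equals(storage.isStored(patient.getOrgCode(), patient.getPatientId(), patient.getEventNo()))) {
            return true;   // this patient event has already been stored
        }
        boolean saved = storage.save(transformer);   // persist the transformed data set
        return saved && storage.toFile(patient);     // export the archive to the data directory
    }
}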

+ 390 - 0
hos-admin/src/main/java/com/yihu/hos/crawler/storage/MongodbStorage.java

@ -0,0 +1,390 @@
package com.yihu.hos.crawler.storage;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.mongodb.BasicDBObject;
import com.mongodb.client.FindIterable;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoCursor;
import com.mongodb.client.model.IndexOptions;
import com.mongodb.client.model.UpdateOptions;
//import com.yihu.common.util.log.DebugLogger;
import com.yihu.hos.common.mongo.MongoDB;
import com.yihu.hos.crawler.format.DataSetTransformer;
import com.yihu.hos.crawler.format.IDataTransformer;
import com.yihu.hos.crawler.model.config.SysConfig;
import com.yihu.hos.crawler.model.patient.Patient;
import com.yihu.hos.crawler.model.patient.PatientIdentity;
import com.yihu.hos.crawler.model.patient.PatientIndex;
import com.yihu.hos.crawler.format.AdapterScheme;
import com.yihu.hos.crawler.service.PatientCDAIndex;
import com.yihu.ehr.framework.util.file.ConfigureUtil;
import com.yihu.ehr.framework.util.file.FileUtil;
import com.yihu.ehr.framework.util.operator.DateUtil;
import com.yihu.ehr.framework.util.operator.NumberUtil;
import org.bson.Document;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.concurrent.TimeUnit;
import static com.mongodb.client.model.Filters.and;
import static com.mongodb.client.model.Filters.eq;
import static com.mongodb.client.model.Projections.excludeId;
/**
 * Archive data is stored only in MongoDB.
 * At this stage there are only two kinds of data:
 * 1. Structured: data sets
 * 2. Unstructured: documents (Pictures, Word, PDF, Video etc.)
 *
 * @author Air
 * @version 1.0
 * @created 2015.07.06 10:38
 */
public class MongodbStorage implements IDataStorage {
    public static final String KEY = "code";
    public static final String PATIENT_ID = "patient_id";
    public static final String EVENT_NO = "event_no";
    public static final String CREATE_AT = "create_at";
    public static final String CREATE_TIME = "create_time";
    public static final String ORG_CODE = "org_code";
    public static final String TTL_INDEX = "ceate_at_1";   // TTL (expiry) index name; a standalone sketch of this TTL pattern follows the class
    public static final String TTL_INDEX_EXPIRED = "create_time_1"; // legacy TTL index name, obsolete; kept only so the old index can be dropped
    public static final String INNER_VERSION = "inner_version";
    public static final String EVENT_TIME = "event_time";
    protected String dbName;
    protected AdapterScheme adapterScheme;
    public MongodbStorage(AdapterScheme adapterScheme, String dbName) {
        this.adapterScheme = adapterScheme;
        this.dbName = dbName;
    }
    @Override
    public boolean save(IDataTransformer dataTransformer) {
        DataSetTransformer dataSetTransformer = (DataSetTransformer) dataTransformer;
        ObjectNode jsonObject = (ObjectNode) dataSetTransformer.getJsonObject();
        if (jsonObject != null) {
            String dataSetCode = jsonObject.get(getKey()).asText();
            String patientId = jsonObject.get(PATIENT_ID).asText();
            String eventNo = jsonObject.get(EVENT_NO).asText();
            try {
                MongoCollection<Document> collection = MongoDB.db(dbName).getCollection(dataSetCode);
                createIndex(collection);    // create the indexes
                Document filter = new Document();
                filter.append(PATIENT_ID, patientId);
                filter.append(EVENT_NO, eventNo);
                collection.deleteMany(filter);
                UpdateOptions updateOptions = new UpdateOptions();
                updateOptions.upsert(true);
                collection.replaceOne(filter, Document.parse(jsonObject.toString()), updateOptions);
                String url = createUrl(dataSetCode, patientId, eventNo);
                Date expiryDate = DateUtil.setDateTime(new Date(), getExpireDays().intValue());
                SimpleDateFormat sdf = new SimpleDateFormat(DateUtil.DEFAULT_YMDHMSDATE_FORMAT);
                String date = sdf.format(expiryDate);
                Document updateDoc = new Document(CREATE_AT, new Date());
                updateDoc.put("resource.url", url);
                updateDoc.put("resource.expiry_date", date);
                collection.updateMany(filter, new Document("$set", updateDoc));
            } catch (Exception e) {
                //DebugLogger.fatal("保存病人档案信息至MongoDB异常:", e);
                return false;
            }
            return true;
        }
        return false;
    }
    /**
     * @param patient the patient whose archive is exported
     * @return true only if every data set file was written successfully
     * @modify generate the archive into the data directory
     */
    @Override
    public boolean toFile(Patient patient) {
        boolean result = true;
        PatientCDAIndex patientCDAIndex = new PatientCDAIndex(patient);
        Document datasetDoc = new Document();
        Document resultDoc = new Document();
        try {
            // generate the files; in lightweight mode the data directory must be cleared first
            for (String name : MongoDB.db(dbName).listCollectionNames()) {
                MongoCollection<Document> collection = MongoDB.db(dbName).getCollection(name);
                FindIterable<Document> documents = collection.find(and(eq(PATIENT_ID, patient.getPatientId()), eq(EVENT_NO, patient.getEventNo()))).projection(excludeId());
                try (MongoCursor<Document> cursor = documents.iterator()) {
                    while (cursor.hasNext()) {
                        String filePath = patientCDAIndex.createDataIndex(dbName, PatientCDAIndex.FileType.JSON);
                        try {
                            Document doc = cursor.next();
                            if ("HDSC01_02".equals(name) || "HDSC02_09".equals(name)) {
                                resultDoc.put(PATIENT_ID, doc.get(PATIENT_ID));
                                resultDoc.put(EVENT_NO, doc.get(EVENT_NO));
                                resultDoc.put(ORG_CODE, doc.get(ORG_CODE));
                                resultDoc.put(INNER_VERSION, doc.get(INNER_VERSION));
                                resultDoc.put(EVENT_TIME, doc.get(EVENT_TIME));
                                if ("HDSC01_02".equals(name)) {
                                    resultDoc.put("visit_type", "1");
                                } else {
                                    resultDoc.put("visit_type", "2");  // temporary convention; to be read from the dictionary later
                                }
                            }
                            Map<String, String> resource = (Map<String, String>) doc.get("resource");
                            datasetDoc.put(name, "");
                            resultDoc.put("expiry_date", resource.get("expiry_date"));
                            boolean writeFile = FileUtil.writeFile(filePath, doc.toJson(), "UTF-8");
                            if (!writeFile) {
                                //DebugLogger.fatal("存储临时文件失败:" + cursor.next().toJson());
                                result = false;
                            }
                        } catch (IOException e) {
                            //DebugLogger.fatal("存储临时文件失败.", e);
                            result = false;
                        }
                    }
                } catch (Exception e) {
                    //DebugLogger.fatal("", e);
                }
            }
            // summary generation (the block below is currently disabled)
//            Document indexData = genPatientIndexData(patient);
//            if (indexData != null) {
//                resultDoc.put("dataset", datasetDoc);
//                resultDoc.put("sumary", indexData);
//                String indexPath = patientCDAIndex.createDataSetIndex("index", PatientCDAIndex.FileType.JSON);
//                boolean writeFile = FileUtil.writeFile(indexPath, resultDoc.toJson(), "UTF-8");
//                if (!writeFile) {
//                    //DebugLogger.fatal("存储索引临时文件失败:" + resultDoc.toJson());
//                    result = false;
//                }
//            }
        } catch (Exception e) {
            //DebugLogger.fatal("", e);
            result = false;
        }
        return result;
    }
    @Override
    public String getDataSet(Patient patient, String dataSetCode) {
        try {
            MongoCollection<Document> collection = MongoDB.db(dbName).getCollection(dataSetCode);
            FindIterable<Document> documents = collection.find(and(eq(PATIENT_ID, patient.getPatientId()), eq(EVENT_NO, patient.getEventNo()), eq(ORG_CODE, patient.getOrgCode()))).projection(excludeId());
            Document document = documents.first();
            if (document != null) {
                return document.toJson();
            }
        } catch (Exception e) {
            //DebugLogger.fatal("", e);
        }
        return null;
    }
    /**
     * Get data set records by condition
     *
     * @param dataSetCode data set code
     * @param params      map of query conditions
     * @return the matched record as JSON, or null if none
     */
    @Override
    public String getArchive(String dataSetCode, Map<String, Object> params) {
        String data = null;
        try {
            MongoCollection<Document> collection = MongoDB.db(dbName).getCollection(dataSetCode);
            BasicDBObject basicDBObject = new BasicDBObject();
            for (Map.Entry<String, Object> entry : params.entrySet()) {
                basicDBObject.put(entry.getKey(), entry.getValue());
            }
            FindIterable<Document> documents = collection.find(basicDBObject);
            try (MongoCursor<Document> cursor = documents.iterator()) {
                while (cursor.hasNext()) {
                    data = cursor.next().toJson();
                    //DebugLogger.fatal("存储临时文 :" + cursor.next().toJson());
                }
            } catch (Exception e) {
                //DebugLogger.fatal("", e);
            }
        } catch (Exception e) {
            //DebugLogger.fatal("", e);
        }
        return data;
    }
    @Override
    public Boolean isStored(String orgCode, String patientID, String eventNo) {
        HashMap<String, PatientIdentity> patientIdentityHashMap = SysConfig.getInstance().getPatientIdentityHashMap();
        Set<Map.Entry<String, PatientIdentity>> entries = patientIdentityHashMap.entrySet();
        Iterator<Map.Entry<String, PatientIdentity>> iterator = entries.iterator();
        try {
            while (iterator.hasNext()) {
                Map.Entry<String, PatientIdentity> next = iterator.next();
                String datasetCode = next.getKey();
                MongoCollection<Document> collection = MongoDB.db(dbName).getCollection(datasetCode);
                Document document = new Document();
                document.append(ORG_CODE, orgCode);
                document.append(PATIENT_ID, patientID);
                document.append(EVENT_NO, eventNo);
                Document findDoc = collection.find(document).first();
                if (findDoc != null) {
                    return true;
                }
            }
        } catch (Exception e) {
            //DebugLogger.fatal("", e);
        }
        return false;
    }
    protected void createIndex(MongoCollection<Document> collection) {
        for (final Document index : collection.listIndexes()) {
            if (index.get("name").equals(TTL_INDEX_EXPIRED)) {
                collection.dropIndex(TTL_INDEX_EXPIRED);  // drop the legacy TTL index
            } else if (index.get("name").equals(TTL_INDEX)) {
                return;
            }
        }
        Document createTimeIndex = new Document(CREATE_AT, 1);
        IndexOptions indexOptions = new IndexOptions();
        indexOptions.expireAfter(getExpireDays(), TimeUnit.DAYS);
        indexOptions.name(TTL_INDEX);
        collection.createIndex(createTimeIndex, indexOptions);
        Document patientIndex = new Document();
        patientIndex.append(PATIENT_ID, 1);
        patientIndex.append(EVENT_NO, 1);
        collection.createIndex(patientIndex);
    }
    /**
     * Build the archive request URL
     *
     * @param dataSetCode data set code
     * @param patientId   patient ID
     * @param eventNo     event number
     * @return
     */
    protected String createUrl(String dataSetCode, String patientId, String eventNo) {
        String requestPath = ConfigureUtil.getProValue("archive.properties", "hos.archives.request.url");
        return requestPath + dataSetCode + "/" + patientId + "/" + eventNo;
    }
    protected String getKey() {
        return KEY;
    }
    protected Long getExpireDays() {
        final Long expireDay = 30L;
        String value = ConfigureUtil.getProValue("archive.properties","hos.archives.expiry.days");
        Long days = NumberUtil.toLong(value);
        return days == null ? expireDay : days;
    }
    /**
     * Generate the patient summary data.
     * Reads the required summary fields from the configuration in the sys.config file.
     *
     * @param patient
     * @return
     */
    protected Document genPatientIndexData(Patient patient) {
        Map<String, PatientIndex> patientIndexMap = SysConfig.getInstance().getPatientIndexMap();
        PatientIndex patientIndex = null;
        List<Document> arrayNode = null;
        Document objectNode = null;
        Document result = new Document();
        MongoCursor<Document> cursor = null;
        MongoCursor<Document> diagCursor = null;
        try {
            for (Map.Entry<String, PatientIndex> entry : patientIndexMap.entrySet()) {
                String dataSetCode = entry.getKey();
                patientIndex = entry.getValue();
                arrayNode = new ArrayList<>();
                MongoCollection<Document> collection = MongoDB.db(dbName).getCollection(dataSetCode);
                FindIterable<Document> documents = collection.find(and(eq(KEY, dataSetCode), eq(PATIENT_ID, patient.getPatientId()), eq(EVENT_NO, patient.getEventNo()))).projection(excludeId());
                cursor = documents.iterator();
                if (cursor.hasNext()) {
                    while (cursor.hasNext()) {
                        Document document = cursor.next();
                        List<Document> list = document.get("data", List.class);
                        for (Document doc : list) {
                            objectNode = new Document();
                            objectNode.put(patientIndex.getPatientId(), patient.getPatientId());
                            objectNode.put(patientIndex.getEventNoCode(), patient.getEventNo());
                            objectNode.put(patientIndex.getRefTimeCode(), doc.get(patientIndex.getRefTimeCode()) == null ? null : (String) doc.get(patientIndex.getRefTimeCode()));
                            objectNode.put("orgCode", patient.getOrgCode());
                            objectNode.put(patientIndex.getOfficeCode(), doc.get(patientIndex.getOfficeCode()) == null ? null : (String) doc.get(patientIndex.getOfficeCode()));
                            objectNode.put(patientIndex.getOfficeName(), doc.get(patientIndex.getOfficeName()) == null ? null : (String) doc.get(patientIndex.getOfficeName()));
                            if ("HDSC02_09".equals(dataSetCode)) {
                                objectNode.put(patientIndex.getLeaveTime(), doc.get(patientIndex.getLeaveTime()) == null ? null : (String) doc.get(patientIndex.getLeaveTime()));
                            }
                            arrayNode.add(objectNode);
                        }
                    }
                    if (arrayNode != null && arrayNode.size() > 0) {
                        result.put(dataSetCode, arrayNode);
                    } else {
                        continue;
                    }
                    String diagDataSet = patientIndex.getDiagDataSet();
                    MongoCollection<Document> diagCollection = MongoDB.db(dbName).getCollection(diagDataSet);
                    FindIterable<Document> diags = diagCollection.find(and(eq(KEY, diagDataSet), eq(PATIENT_ID, patient.getPatientId()), eq(EVENT_NO, patient.getEventNo()))).projection(excludeId());
                    diagCursor = diags.iterator();
                    arrayNode = new ArrayList<>();
                    while (diagCursor.hasNext()) {
                        Document document = diagCursor.next();
                        List<Document> list = document.get("data", List.class);
                        for (Document doc : list) {
                            objectNode = new Document();
                            objectNode.put(patientIndex.getDiagType(), doc.get(patientIndex.getDiagType()) == null ? null : (String) doc.get(patientIndex.getDiagType()));
                            objectNode.put(patientIndex.getDiagCode(), doc.get(patientIndex.getDiagCode()) == null ? null : (String) doc.get(patientIndex.getDiagCode()));
                            objectNode.put(patientIndex.getDiagName(), doc.get(patientIndex.getDiagName()) == null ? null : (String) doc.get(patientIndex.getDiagName()));
                            arrayNode.add(objectNode);
                        }
                    }
                    if (arrayNode != null && arrayNode.size() > 0) {
                        result.put(diagDataSet, arrayNode);
                    }
                } else {
                    continue;
                }
            }
            return result;
        } catch (Exception e) {
            //DebugLogger.fatal("", e);
        } finally {
            if (cursor != null) {
                cursor.close();
            }
            if (diagCursor != null) {
                diagCursor.close();
            }
        }
        return null;
    }
}
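
The createIndex method above relies on MongoDB's TTL mechanism to expire archive documents. The following standalone sketch reproduces the same pattern with the plain MongoDB Java driver; the host, database and collection names are placeholders, and the 30-day expiry mirrors the default in getExpireDays.

import com.mongodb.MongoClient;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.model.IndexOptions;
import org.bson.Document;
import java.util.concurrent.TimeUnit;

public class TtlIndexSketch {
    public static void main(String[] args) {
        MongoClient client = new MongoClient("localhost", 27017);   // placeholder host
        try {
            MongoCollection<Document> collection =
                    client.getDatabase("HealthArchive").getCollection("HDSC01_02");
            // Documents expire this many days after the timestamp stored in create_at.
            collection.createIndex(new Document("create_at", 1),
                    new IndexOptions().expireAfter(30L, TimeUnit.DAYS));
            // Compound index backing the patient/event lookups used by save() and toFile().
            collection.createIndex(new Document("patient_id", 1).append("event_no", 1));
        } finally {
            client.close();
        }
    }
}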

+ 474 - 0
hos-admin/src/main/java/com/yihu/hos/datacollect/controller/DataCollectController.java

@ -0,0 +1,474 @@
package com.yihu.hos.datacollect.controller;
import com.yihu.hos.common.Services;
import com.yihu.hos.datacollect.model.RsJobConfig;
import com.yihu.hos.datacollect.service.intf.IDatacollectManager;
import com.yihu.hos.datacollect.service.intf.IDatacollectService;
import com.yihu.ehr.framework.constrant.DateConvert;
import com.yihu.ehr.framework.model.ActionResult;
import com.yihu.ehr.framework.model.Result;
import com.yihu.ehr.framework.util.controller.BaseController;
import com.yihu.ehr.framework.util.operator.CollectionUtil;
import com.yihu.hos.resource.service.IStdService;
import com.yihu.hos.standard.service.adapter.AdapterSchemeService;
import com.yihu.hos.system.service.intf.IDatasourceManager;
import net.sf.json.JSONArray;
import org.apache.commons.beanutils.BeanUtils;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
import javax.annotation.Resource;
import javax.servlet.http.HttpServletRequest;
import java.lang.reflect.Method;
import java.text.SimpleDateFormat;
import java.util.*;
/**
 * Data collection configuration pages
 * Created by hzp on 2015/8/12.
 */
@RequestMapping("/datacollect")
@Controller("dataCollectController")
public class DataCollectController extends BaseController {
    @Resource(name = Services.Datacollect)
    IDatacollectManager datacollect;
    @Resource(name = Services.DatacollectService)
    IDatacollectService datacollectService;
    @Resource(name = Services.StdService)
    IStdService stdService;
    @Resource(name = Services.Datasource)
    IDatasourceManager datasource;
    @Resource(name = AdapterSchemeService.BEAN_ID)
    private AdapterSchemeService adapterSchemeService;
    /****************************
     * Job management
     ************************************************/
    /*
    Job configuration page
     */
    @RequestMapping("configJob")
    public String configJob(Model model) {
        model.addAttribute("contentPage", "/datacollect/configJob");
        return "partView";
    }
    /*
    Job create/edit page
     */
    @RequestMapping("editorJob")
    public String editorJob(Model model, String jobId) {
        try {
            // editing an existing job?
            if (jobId != null && jobId.length() > 0) {
                // load the job details
                RsJobConfig job = datacollect.getJobById(jobId);
                model.addAttribute("model", job);
                String cron = datacollect.getCronByJobId(jobId);
                model.addAttribute("cronExpression", cron);
            }
            // load the adapter scheme list
            List data = stdService.getSchemeVersion();
            JSONArray jsonArray = JSONArray.fromObject(data);
            model.addAttribute("schemeList", "{\"detailModelList\":" + (CollectionUtil.isEmpty(data) ? "[]" : jsonArray) + "}");
            model.addAttribute("contentPage", "/datacollect/editorJob");
            return "pageView";
        } catch (Exception ex) {
            model.addAttribute("contentPage", "/datacollect/editorJob");
            return "pageView";
        }
    }
    /*
    Get the job list
    */
    @RequestMapping("getJob")
    @ResponseBody
    public Result getJob(String name, int page, int rows) {
        try {
            Map<String, Object> map = new HashMap<>();
            map.put("name", name);
            return datacollect.getJobList(map, page, rows);
        } catch (Exception ex) {
            return Result.error(ex.getMessage());
        }
    }
    @RequestMapping("compareServeTime")
    @ResponseBody
    public Result compareServeTime(String time) {
        try {
            SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
            if(df.parse(time).before(new Date())) {
                return Result.success("beforeServeTime");
            }
            else
                return Result.error("afterServeTime");
        }catch (Exception ex){
            return Result.error(ex.getMessage());
        }
    }
    /**
     * Get the data set list for an adapter scheme
     */
    @RequestMapping("getSchemeDataset")
    @ResponseBody
    public Result getSchemeDataset(String schemeId, String schemeVersion, String jobId) {
        try {
            return datacollect.getSchemeDataset(schemeId, schemeVersion, jobId);
        } catch (Exception ex) {
            return Result.error(ex.getMessage());
        }
    }
    /**
     * Get the field list for an adapter scheme + data set
     */
    @RequestMapping("getSchemeDatasetCol")
    @ResponseBody
    public Result getSchemeDatasetCol(String schemeId, String schemeVersion, String datasetId) {
        try {
            return datacollect.getSchemeDatasetCol(schemeId, schemeVersion, datasetId);
        } catch (Exception ex) {
            return Result.error(ex.getMessage());
        }
    }
    /*
       Create a job
    */
    @RequestMapping("addJob")
    @ResponseBody
    public Result addJob(HttpServletRequest request) {
        try {
            RsJobConfig obj = new RsJobConfig();
            BeanUtils.populate(obj, request.getParameterMap());
            obj.setValid("1");
            String time=request.getParameter("jobNextTime");
            SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
            if(df.parse(time).before(new Date())) {
                return Result.error("任务开始时间不能小于当前时间");
            }
            String cron = request.getParameter("cronExpression");
            String jobDataset = request.getParameter("jobDataset");
            return datacollect.addJob(obj, cron, jobDataset);
        } catch (Exception ex) {
            return Result.error("新增任务失败!");
        }
    }
    /*
       Update a job
    */
    @RequestMapping("updateJob")
    @ResponseBody
    public Result updateJob(HttpServletRequest request) {
        try {
            RsJobConfig obj = new RsJobConfig();
            BeanUtils.populate(obj, request.getParameterMap());
            String time=request.getParameter("jobNextTime");
            SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
            if(df.parse(time).before(new Date())) {
                return Result.error("任务开始时间不能小于当前时间");
            }
            String cron = request.getParameter("cronExpression");
            String jobDataset = request.getParameter("jobDataset");
            return datacollect.updateJob(obj, cron, jobDataset);
        } catch (Exception ex) {
            return Result.error("修改任务失败!");
        }
    }
    /*
      Update the job status (enable/disable)
    */
    @RequestMapping("validJob")
    @ResponseBody
    public Result validJob(String jobId, String valid) {
        try {
            return datacollect.validJob(jobId, valid);
        } catch (Exception ex) {
            return Result.error(ex.getMessage());
        }
    }
    /*
      Delete a job
    */
    @RequestMapping("deleteJob")
    @ResponseBody
    public Result deleteJob(String jobId) {
        try {
            return datacollect.deleteJob(jobId);
        } catch (Exception ex) {
            return Result.error(ex.getMessage());
        }
    }
    /**
     * Get job information
     *
     * @return
     */
    @RequestMapping("getJobInfo")
    @ResponseBody
    public Result getJobInfo(String jobId) {
        try {
            RsJobConfig job = datacollect.getJobById(jobId);
            if (job != null) {
                ActionResult re = new ActionResult(true, "");
                re.setData(job.getValid());
                return re;
            } else {
                return new ActionResult(false, "非法操作!");
            }
        } catch (Exception ex) {
            return Result.error(ex.getMessage());
        }
    }
    /**
     * Get the data set drop-down data for the given job id
     *
     * @return
     */
    @RequestMapping("getJobDatasetByJobId")
    @ResponseBody
    public Result getJobDatasetByJobId(String jobId) {
        try {
            return datacollect.getJobDatasetByJobId(jobId);
        } catch (Exception ex) {
            return Result.error(ex.getMessage());
        }
    }
    /*************************
     * Data set -- data source management
     ******************************************/
    /*
    Data set configuration page
     */
    @RequestMapping("configDataset")
    public String configDataset(Model model) {
        try {
            model.addAttribute("stdVersion", "569e19522e3d");
            // load the data source list
            model.addAttribute("datasourceList", datasource.getDatasourceByOrg("").toJson());
            model.addAttribute("contentPage", "/datacollect/configDataset");
            return "partView";
        } catch (Exception ex) {
            model.addAttribute("contentPage", "error");
            return "partView";
        }
    }
    /**
     * Data set / data source management list (includes all data sets)
     *
     * @return
     */
    @RequestMapping("getDatasetSource")
    @ResponseBody
    public Result getDatasetSource(String stdVersion) {
        try {
            return datacollect.getDatasetSource(stdVersion);
        } catch (Exception ex) {
            return Result.error(ex.getMessage());
        }
    }
    /**
     * Update the data sources of the data sets
     *
     * @return
     */
    @RequestMapping("saveDatasetSource")
    @ResponseBody
    public Result saveDatasetSource(String stdVersion, String json) {
        try {
            return datacollect.saveDatasetSource(json);
        } catch (Exception ex) {
            return Result.error(ex.getMessage());
        }
    }
    /*************************** Job execution ************************************************/
    /**
     * Execute a job immediately. The job class named in RsJobConfig.jobContent is loaded
     * reflectively and its execute(String) method is invoked (see the skeleton after this class).
     *
     * @return
     */
    @RequestMapping("executeJob")
    @ResponseBody
    public Result executeJob(String jobId) {
        try {
            // load the job details
            RsJobConfig job = datacollect.getJobById(jobId);
            String content = job.getJobContent();
            Class<?> classType = Class.forName(content);
            Method method = classType.getMethod("execute", new Class[]{String.class});
            method.invoke(classType.newInstance(), jobId);
            return Result.success("处理完成!");
        } catch (Exception ex) {
            return Result.error(ex.getMessage());
        }
    }
    /*
    Job re-collection page
     */
    @RequestMapping("repeatDatacollect")
    public String repeatDatacollect(Model model, String jobId, String jobDatasetId, String jobStatus, String jobTimeFrom, String jobTimeTo) {
        try {
            model.addAttribute("jobId", jobId == null ? "" : jobId);
            model.addAttribute("jobDatasetId", jobDatasetId == null ? "" : jobDatasetId);
            model.addAttribute("jobStatus", jobStatus == null ? "" : jobStatus);
            if (jobTimeFrom != null && jobTimeFrom.length() > 0) {
                Date timeFrom = DateConvert.toDate(jobTimeFrom);
                model.addAttribute("jobTimeFrom", DateConvert.toString(timeFrom));
            } else {
                GregorianCalendar gc = new GregorianCalendar();
                gc.setTime(new Date());
                gc.add(5, -1);
                model.addAttribute("jobTimeFrom", DateConvert.toString(gc.getTime()));
            }
            if (jobTimeTo != null && jobTimeTo.length() > 0) {
                Date timeTo = DateConvert.toDate(jobTimeTo);
                model.addAttribute("jobTimeTo", DateConvert.toString(timeTo));
            } else {
                model.addAttribute("jobTimeTo", DateConvert.toString(new Date()));
            }
            model.addAttribute("contentPage", "/datacollect/repeatDatacollect");
            return "partView";
        } catch (Exception ex) {
            model.addAttribute("contentPage", "error");
            return "partView";
        }
    }
    /**
     * Detailed job log list
     *
     * @return
     */
    @RequestMapping("getJobLogDetail")
    @ResponseBody
    public Result getJobLogDetail(String jobId, String jobDatasetId, String jobStatus, String jobTimeFrom, String jobTimeTo, int page, int rows) {
        try {
            Map<String, Object> conditionMap = new HashMap<String, Object>();
            conditionMap.put("jobId", jobId);
            conditionMap.put("jobDatasetId", jobDatasetId);
            conditionMap.put("jobStatus", jobStatus);
            conditionMap.put("jobTimeFrom", jobTimeFrom);
            conditionMap.put("jobTimeTo", jobTimeTo);
            return datacollect.getJobLogDetail(conditionMap, page, rows);
        } catch (Exception ex) {
            return Result.error(ex.getMessage());
        }
    }
    /**
     * Re-collect data
     *
     * @return
     */
    @RequestMapping("repeat")
    @ResponseBody
    public Result repeat(String ids) {
        try {
            if (ids.length() > 0) {
                String[] logId = ids.split(",");
                if (logId.length == 1) {
                    return datacollectService.repeatJob(ids);
                } else {
                    StringBuilder str = new StringBuilder();
                    for (String id : logId) {
                        ActionResult re = datacollectService.repeatJob(id);
                        if (re.isSuccessFlg()) {
                            str.append(id + "补采成功!\n");
                        } else {
                            str.append(id + "补采失败!\n");
                        }
                    }
                    return Result.success(str.toString());
                }
            } else {
                return Result.error("非法操作!");
            }
        } catch (Exception ex) {
            return Result.error(ex.getMessage());
        }
    }
    /*************************** Job tracking ***********************************/
    /**
     * Job tracking page
     *
     * @return
     */
    @RequestMapping("trackJob")
    public String trackJob(Model model, String jobId) {
        try {
            model.addAttribute("contentPage", "/datacollect/trackJob");
            return "pageView";
        } catch (Exception ex) {
            model.addAttribute("contentPage", "error");
            return "partView";
        }
    }
    /**
     * Job log list
     *
     * @return
     */
    @RequestMapping("getJobLog")
    @ResponseBody
    public Result getJobLog(String jobId, int page, int rows) {
        try {
            Map<String, Object> conditionMap = new HashMap<String, Object>();
            conditionMap.put("jobId", jobId);
            return datacollect.getJobLog(conditionMap, page, rows);
        } catch (Exception ex) {
            return Result.error(ex.getMessage());
        }
    }
    /**
     * Job details grouped by data set
     *
     * @return
     */
    @RequestMapping("getJobLogDataset")
    @ResponseBody
    public Result getJobLogDataset(String logId) {
        try {
            return datacollect.getJobLogDataset(logId);
        } catch (Exception ex) {
            return Result.error(ex.getMessage());
        }
    }
}
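
executeJob above instantiates the class named in RsJobConfig.jobContent via Class.forName(...).newInstance() and then calls execute(jobId) reflectively, so a job class must provide a public no-argument constructor and a public execute(String) method. A hypothetical skeleton satisfying that contract (package, class name and body are illustrative only):

package com.yihu.hos.datacollect.job;   // hypothetical package

public class SampleCollectJob {

    public SampleCollectJob() {
        // public no-arg constructor required by newInstance()
    }

    public void execute(String jobId) {
        // a real job would collect and store data for the given job id
        System.out.println("executing job " + jobId);
    }
}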

+ 41 - 0
hos-admin/src/main/java/com/yihu/hos/datacollect/controller/DataPushController.java

@ -0,0 +1,41 @@
package com.yihu.hos.datacollect.controller;
import com.yihu.hos.datacollect.service.intf.IDatapushService;
import com.yihu.ehr.framework.model.Result;
import com.yihu.ehr.framework.util.controller.BaseController;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
/**
 * Data push controller
 * Created by hzp on 2015/8/12.
 */
@RequestMapping("/datapush")
@Controller("dataPushController")
public class DataPushController extends BaseController {
    @Autowired
    IDatapushService datapushService;
    /**************************** Data push ************************************************/
    /*
    Push data through the full collection pipeline (a client-side call sketch follows this class)
     */
    @RequestMapping("datapush")
    @ResponseBody
    public Result datapush(String dataset, String data, String orgCode) {
        try {
            return datapushService.pushData(dataset, data, orgCode);
        } catch (Exception ex) {
            return Result.error(ex.getMessage());
        }
    }
}
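
A minimal client-side sketch of calling the /datapush/datapush endpoint above with standard JDK classes. The base URL and the parameter values are placeholders; only the parameter names (dataset, data, orgCode) come from the controller signature.

import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLEncoder;

public class DataPushClientSketch {
    public static void main(String[] args) throws Exception {
        URL url = new URL("http://localhost:8080/datapush/datapush");   // placeholder base URL
        String body = "dataset=" + URLEncoder.encode("HDSC01_02", "UTF-8")
                + "&data=" + URLEncoder.encode("[{\"patient_id\":\"P001\"}]", "UTF-8")
                + "&orgCode=" + URLEncoder.encode("ORG001", "UTF-8");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("POST");
        conn.setDoOutput(true);
        conn.setRequestProperty("Content-Type", "application/x-www-form-urlencoded");
        try (OutputStream out = conn.getOutputStream()) {
            out.write(body.getBytes("UTF-8"));
        }
        System.out.println("HTTP " + conn.getResponseCode());   // the body would carry the Result JSON
        conn.disconnect();
    }
}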

+ 280 - 0
hos-admin/src/main/java/com/yihu/hos/datacollect/dao/DatacollectDao.java

@ -0,0 +1,280 @@
package com.yihu.hos.datacollect.dao;
import com.yihu.hos.datacollect.dao.intf.IDatacollectDao;
import com.yihu.hos.datacollect.model.DtoJobDataset;
import com.yihu.hos.datacollect.model.RsJobDataset;
import com.yihu.ehr.framework.common.dao.SQLGeneralDAO;
import com.yihu.ehr.framework.constrant.DateConvert;
import com.yihu.ehr.framework.model.DataGridResult;
import com.yihu.ehr.framework.model.SimpleChartItem;
import com.yihu.hos.resource.model.RsDatasourceDataset;
import org.springframework.stereotype.Repository;
import org.springframework.util.StringUtils;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.List;
import java.util.Map;
/**
 * Created by hzp on 2016/1/11.
 */
@Repository("DatacollectDao")
public class DatacollectDao extends SQLGeneralDAO implements IDatacollectDao {
    /**
     * Get the adapter version by organization code
     */
    @Override
    public String getVersionByQLC(String orgCode) throws Exception
    {
        try{
            String sql = "select version from adapter_scheme_version v "+
                         " left join system_organization o on o.qlc_adapter_version=v.id "+
                         " where o.qlc_org_code ='"+orgCode+"'";
            return super.scalarBySql(sql, String.class);
        }
        catch (Exception e)
        {
            e.printStackTrace();
            return "";
        }
    }
    /**
     * Get the cron expression by job id (a parameterized JDBC variant is sketched after this class)
     */
    @Override
    public String getCronByJobId(String jobId)
    {
        try{
            String sql = "select CRON_EXPRESSION from qrtz_cron_triggers where TRIGGER_NAME='"+jobId+"'";
            return super.scalarBySql(sql, String.class);
        }
        catch (Exception e)
        {
            return "";
        }
    }
    /**
     * Get the job list
     * @return
     */
    @Override
    public DataGridResult getJobList(Map<String, Object> conditionMap, Integer page, Integer pageSize) throws Exception
    {
        StringBuilder sb = new StringBuilder();
        sb.append("from RsJobConfig t where 1=1 ");
        if (!StringUtils.isEmpty(conditionMap.get("jobName")))
        {
            sb.append(" and t.jobName like '%" + conditionMap.get("jobName") + "%'");
        }
        return getDataGridResult(sb.toString(),page,pageSize);
    }
    /**
     * Get the data sets of a job
     * @return
     * @throws Exception
     */
    @Override
    public List<RsJobDataset> getJobDataset(String jobId) throws Exception
    {
        String sql = "select * from rs_job_dataset where job_id='"+jobId+"'";
        return super.queryListBySql(sql, RsJobDataset.class);
    }
    /**
     * Get the scheme and version display name
     */
    @Override
    public String getSchemeAndVersion(String schemeVersionId) throws Exception
    {
        String sql = "select IFNULL( (select  CONCAT_WS(' -- ',s.name,v.name)  from adapter_scheme_version v " +
                "left join adapter_scheme s on s.id = v.scheme_id " +
                "where v.version='"+schemeVersionId+"'),'')";
        return super.scalarBySql(sql, String.class);
    }
    /**
     * Update the job status (valid flag)
     */
    @Override
    public void validJob(String id,String valid) throws Exception
    {
        String sql = "update rs_job_config set valid='"+valid+"' where Id='"+id+"'";
        super.execute(sql);
    }
    /**
     * Delete the data sets associated with a job
     */
    public void deleteJobDatasetByJobId(String jobId) throws Exception
    {
        List<RsJobDataset> list = super.queryListBySql("select * from rs_job_dataset where job_id='"+jobId+"'",RsJobDataset.class);
        // only delete when associated rows exist
        if(list!=null && list.size()>0)
        {
            for(RsJobDataset obj :list)
            {
                super.deleteEntity(obj);
            }
        }
    }
    /**
     * Get the configured data set -- data source mappings for a standard version
     * @throws Exception
     */
    public List<RsDatasourceDataset> getDatasourceDataset(String stdVersion) throws Exception
    {
        String sql = "select * from rs_datasource_dataset where std_version='"+stdVersion+"'";
        return super.queryListBySql(sql, RsDatasourceDataset.class);
    }
    /**
     * Delete a data set / data source association
     */
    public void deleteDatasourceDatasetById(String id) throws Exception
    {
        String sql = "delete from rs_datasource_dataset where id='"+id+"'";
        super.execute(sql);
    }
    /**
     * Get the related data sets by job ID
     */
    public List<DtoJobDataset> getDatacollectDataset(String jobId) throws Exception
    {
        // get the organization's standard version via the job id
        String sqlVersion ="select a.adapter_std_version\n" +
                "from rs_job_config c,(SELECT s.adapter_std_version,v.version from adapter_scheme s,adapter_scheme_version v where s.id=v.scheme_id) a\n" +
                "where a.version = c.scheme_version\n" +
                "and c.id='"+jobId+"'";
        String version = super.scalarBySql(sqlVersion,String.class);
        String sql = "select d.*,\n" +
                "t.datasource_id,t.datasource_name,t.config,t.type,t.org_code\n" +
                "from rs_job_dataset d\n" +
                "LEFT JOIN (select s.dataset_id,s.datasource_id,ss.name as datasource_name,ss.config,ss.type,ss.org_code \n" +
                "\tfrom rs_datasource_dataset s,(select ds.id,ds.name,ds.config,ds.type,o.code as org_code from system_datasource ds,system_organization o where ds.org_id=o.id) ss where s.datasource_id=ss.id and s.Std_version='"+version+"') t \n" +
                "on d.job_dataset_id = t.dataset_id \n" +
                "where d.job_id='"+jobId+"'\n" +
                "order by d.sort";
        return super.queryListBySql(sql,DtoJobDataset.class);
    }
    /**
     * Get the detailed job log list
     */
    @Override
    public DataGridResult getJobLogDetail(Map<String, Object> conditionMap, Integer page, Integer pageSize) throws Exception
    {
        StringBuilder sb = new StringBuilder();
        sb.append("from RsJobLogDetail where 1=1 ");
        if (!StringUtils.isEmpty(conditionMap.get("jobId"))) {
            sb.append(" and job_id ='"+conditionMap.get("jobId")+"'");
        }
        if (!StringUtils.isEmpty(conditionMap.get("jobDatasetId"))) {
            sb.append(" and job_dataset_id ='"+conditionMap.get("jobDatasetId")+"'");
        }
        if (!StringUtils.isEmpty(conditionMap.get("jobStatus"))) {
            sb.append(" and jobStatus ='"+conditionMap.get("jobStatus")+"'");
        }
        if (!StringUtils.isEmpty(conditionMap.get("jobTimeFrom"))) {
            Date time = DateConvert.toDate(conditionMap.get("jobTimeFrom").toString());
            time.setHours(0);
            time.setMinutes(0);
            time.setSeconds(0);
            sb.append(" and startTime >= '"+DateConvert.toString(time)+"'");
        }
        if (!StringUtils.isEmpty(conditionMap.get("jobTimeTo"))) {
            Date time = DateConvert.toDate(conditionMap.get("jobTimeTo").toString());
            time.setHours(0);
            time.setMinutes(0);
            time.setSeconds(0);
            GregorianCalendar gc=new GregorianCalendar();
            gc.setTime(time);
            gc.add(5, 1);
            sb.append(" and startTime < '"+DateConvert.toString(gc.getTime())+"'");
        }
        sb.append(" order by startTime desc");
        return getDataGridResult(sb.toString(),page,pageSize);
    }
    /**
     * Get the job log list
     */
    @Override
    public DataGridResult getJobLog(Map<String, Object> conditionMap, Integer page, Integer pageSize) throws Exception
    {
        StringBuilder sb = new StringBuilder();
        sb.append("from RsJobLog where 1=1 ");
        if (!StringUtils.isEmpty(conditionMap.get("jobId"))) {
            sb.append(" and jobId ='"+conditionMap.get("jobId")+"'");
        }
        sb.append(" order by jobStartTime desc");
        return getDataGridResult(sb.toString(),page,pageSize);
    }
    /**
     * Get collection log statistics
     */
    public List<Map<String, Object>> getJobLogCount(String logId) throws Exception
    {
        String sql ="select d.job_log_id,sum(d.Job_dataset_rows) as count,\n" +
                "sum(case d.job_status\n" +
                "    when '1' THEN\n" +
                "         d.Job_dataset_rows\n" +
                "    ELSE\n" +
                "         0\n" +
                "    END) as success,\n" +
                "sum(case d.job_status\n" +
                "    when '3' THEN\n" +
                "         d.Job_dataset_rows\n" +
                "    ELSE\n" +
                "         0\n" +
                "    END) as repeat_num\n" +
                "from rs_job_log_detail d\n";
        if(logId!=null && logId.length()>0)
        {
            sql += "where d.job_log_id ='"+logId+"'\n";
        }
        sql+="group by d.job_log_id ";
        return super.queryListBySql(sql);
    }
    /**
     * Job details grouped by data set
     */
    public List<SimpleChartItem> getJobLogDataset(String logId) throws Exception
    {
        String sql ="select d.job_dataset_name as x,sum(d.Job_dataset_rows) as y1,\n" +
                "                sum(case d.job_status\n" +
                "                    when '1' THEN\n" +
                "                         d.Job_dataset_rows\n" +
                "                    ELSE\n" +
                "                         0\n" +
                "                    END) as y2,\n" +
                "                sum(case d.job_status\n" +
                "                    when '3' THEN\n" +
                "                         d.Job_dataset_rows\n" +
                "                    ELSE\n" +
                "                         0\n" +
                "                    END) as y3\n" +
                "                from rs_job_log_detail d\n" +
                "where d.job_log_id = '"+logId+"'\n" +
                "GROUP BY d.Job_dataset_name";
        return super.queryListBySql(sql,SimpleChartItem.class);
    }
}
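
The queries above interpolate parameters directly into SQL strings. Purely as an illustration of the parameterized alternative (SQLGeneralDAO's own binding API is not shown in this diff, so plain JDBC is used and the DataSource wiring is assumed), the cron lookup could be written as:

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import javax.sql.DataSource;

public class CronLookupSketch {
    private final DataSource dataSource;   // assumed to be provided by the surrounding framework

    public CronLookupSketch(DataSource dataSource) {
        this.dataSource = dataSource;
    }

    /** Same lookup as DatacollectDao.getCronByJobId, but with a bound parameter. */
    public String getCronByJobId(String jobId) throws Exception {
        String sql = "select CRON_EXPRESSION from qrtz_cron_triggers where TRIGGER_NAME = ?";
        try (Connection conn = dataSource.getConnection();
             PreparedStatement ps = conn.prepareStatement(sql)) {
            ps.setString(1, jobId);
            try (ResultSet rs = ps.executeQuery()) {
                return rs.next() ? rs.getString(1) : "";
            }
        }
    }
}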

+ 86 - 0
hos-admin/src/main/java/com/yihu/hos/datacollect/dao/DatacollectLogDao.java

@ -0,0 +1,86 @@
package com.yihu.hos.datacollect.dao;
import com.yihu.hos.datacollect.dao.intf.IDatacollectLogDao;
import com.yihu.hos.datacollect.model.RsDatapushLog;
import com.yihu.ehr.framework.common.dao.SQLGeneralDAO;
import com.yihu.ehr.framework.constrant.DateConvert;
import com.yihu.ehr.framework.model.DataGridResult;
import org.springframework.stereotype.Repository;
import org.springframework.util.StringUtils;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.Map;
/**
 * Created by hzp on 2016/1/11.
 */
@Repository("DatacollectLogDao")
public class DatacollectLogDao extends SQLGeneralDAO implements IDatacollectLogDao {
    /**
     * Update the polling key value of a job data set
     */
    @Override
    public void updateJobDatasetKeyvalue(String id,String jobDatasetKeyvalue) throws Exception
    {
        String sql = "update rs_job_dataset set job_dataset_keyvalue='"+jobDatasetKeyvalue+"' where id='"+id+"'";
        super.execute(sql);
    }
    /******************************** Push-mode logs ***************************************/
    /**
     * Add a data-push log entry
     */
    @Override
    public void addDataPushLog(String type,String success,String content) throws Exception
    {
        RsDatapushLog log = new RsDatapushLog();
        log.setType(type);
        log.setSuccess(success);
        log.setContent(content);
        log.setDatetime(new Date());
        this.saveEntity(log);
    }
    /**
     * Get the data-push log list
     */
    @Override
    public DataGridResult queryDataPushLog(Map<String, Object> conditionMap, Integer page, Integer pageSize) throws Exception
    {
        StringBuilder sb = new StringBuilder();
        sb.append("from RsDatapushLog where 1=1 ");
        if (!StringUtils.isEmpty(conditionMap.get("type"))) {
            sb.append(" and type ='"+conditionMap.get("type")+"'");
        }
        if (!StringUtils.isEmpty(conditionMap.get("datetimeFrom"))) {
            Date time = DateConvert.toDate(conditionMap.get("datetimeFrom").toString());
            time.setHours(0);
            time.setMinutes(0);
            time.setSeconds(0);
            sb.append(" and datetime >= '"+DateConvert.toString(time)+"'");
        }
        if (!StringUtils.isEmpty(conditionMap.get("datetimeTo"))) {
            Date time = DateConvert.toDate(conditionMap.get("datetimeTo").toString());
            time.setHours(0);
            time.setMinutes(0);
            time.setSeconds(0);
            GregorianCalendar gc=new GregorianCalendar();
            gc.setTime(time);
            gc.add(5, 1);
            sb.append(" and datetime < '"+DateConvert.toString(gc.getTime())+"'");
        }
        sb.append(" order by datetime desc");
        return getDataGridResult(sb.toString(),page,pageSize);
    }
}

+ 92 - 0
hos-admin/src/main/java/com/yihu/hos/datacollect/dao/intf/IDatacollectDao.java

@ -0,0 +1,92 @@
package com.yihu.hos.datacollect.dao.intf;
import com.yihu.hos.datacollect.model.DtoJobDataset;
import com.yihu.hos.datacollect.model.RsJobDataset;
import com.yihu.ehr.framework.common.dao.XSQLGeneralDAO;
import com.yihu.ehr.framework.model.DataGridResult;
import com.yihu.ehr.framework.model.SimpleChartItem;
import com.yihu.hos.resource.model.RsDatasourceDataset;
import java.util.List;
import java.util.Map;
/**
 * Created by hzp on 2016/1/11.
 */
public interface IDatacollectDao extends XSQLGeneralDAO {
    /**
     * Get the adapter version by organization code
     */
    public String getVersionByQLC(String orgCode) throws Exception;
    /**
     * Get the cron expression by job id
     */
    public String getCronByJobId(String jobId);
    /**
     * Get the job list
     * @return
     */
    public DataGridResult getJobList(Map<String, Object> conditionMap, Integer page, Integer pageSize) throws Exception;
    /**
     * Get the data sets of a job
     * @return
     * @throws Exception
     */
    public List<RsJobDataset> getJobDataset(String jobId) throws Exception;
    /**
     * Get the scheme and version display name
     */
    public String getSchemeAndVersion(String schemeVersionId) throws Exception;
    /**
     * Update the job status (valid flag)
     */
    public void validJob(String id, String valid) throws Exception;
    /**
     * Delete the data sets associated with a job
     */
    public void deleteJobDatasetByJobId(String jobId) throws Exception;
    /**
     * Get the configured data set -- data source mappings for a standard version
     * @throws Exception
     */
    public List<RsDatasourceDataset> getDatasourceDataset(String stdVersion) throws Exception;
    /**
     * Delete a data set / data source association
     */
    public void deleteDatasourceDatasetById(String id) throws Exception;
    /**
     * Get the related data sets by job ID
     */
    public List<DtoJobDataset> getDatacollectDataset(String jobId) throws Exception;
    /**
     * Get the detailed job log list
     */
    public DataGridResult getJobLogDetail(Map<String, Object> conditionMap, Integer page, Integer pageSize) throws Exception;
    /**
     * Get the job log list
     */
    public DataGridResult getJobLog(Map<String, Object> conditionMap, Integer page, Integer pageSize) throws Exception;
    /**
     * Get collection log statistics
     */
    public List<Map<String, Object>> getJobLogCount(String logId) throws Exception;
    /**
     * Job details grouped by data set
     */
    public List<SimpleChartItem> getJobLogDataset(String logId) throws Exception;
}

+ 33 - 0
hos-admin/src/main/java/com/yihu/hos/datacollect/dao/intf/IDatacollectLogDao.java

@ -0,0 +1,33 @@
package com.yihu.hos.datacollect.dao.intf;
import com.yihu.hos.datacollect.model.DtoJobDataset;
import com.yihu.hos.datacollect.model.RsJobDataset;
import com.yihu.ehr.framework.common.dao.XSQLGeneralDAO;
import com.yihu.ehr.framework.model.DataGridResult;
import com.yihu.hos.resource.model.RsDatasourceDataset;
import java.util.List;
import java.util.Map;
/**
 * Data collection logs
 * Created by hzp on 2016/1/26.
 */
public interface IDatacollectLogDao extends XSQLGeneralDAO {
    /**
     * Update the incremental key value of a job dataset
     */
    void updateJobDatasetKeyvalue(String id, String jobDatasetKeyvalue) throws Exception;
    /**
     * Add a data push log entry
     */
    void addDataPushLog(String type,String success,String content) throws Exception;
    /**
     * Get the data push log list
     */
    DataGridResult queryDataPushLog(Map<String, Object> conditionMap, Integer page, Integer pageSize) throws Exception;
}

+ 48 - 0
hos-admin/src/main/java/com/yihu/hos/datacollect/model/DtoDatasetCol.java

@ -0,0 +1,48 @@
package com.yihu.hos.datacollect.model;
import org.json.JSONArray;
/**
 * Dataset column
 */
public class DtoDatasetCol implements java.io.Serializable {
	private String code;
	private String text;
	private String type;
	private String dict;
	public String getCode() {
		return code;
	}
	public void setCode(String code) {
		this.code = code;
	}
	public String getText() {
		return text;
	}
	public void setText(String text) {
		this.text = text;
	}
	public String getType() {
		return type;
	}
	public void setType(String type) {
		this.type = type;
	}
	public String getDict() {
		return dict;
	}
	public void setDict(String dict) {
		this.dict = dict;
	}
}

+ 48 - 0
hos-admin/src/main/java/com/yihu/hos/datacollect/model/DtoDictCol.java

@ -0,0 +1,48 @@
package com.yihu.hos.datacollect.model;
import org.json.JSONArray;
/**
 * Data source dictionary mapping
 */
public class DtoDictCol implements java.io.Serializable {
	private String stdMetadataCode;
	private String adapterDataType;
	private String stdDictId;
	private JSONArray dictList;
	public String getStdMetadataCode() {
		return stdMetadataCode;
	}
	public void setStdMetadataCode(String stdMetadataCode) {
		this.stdMetadataCode = stdMetadataCode;
	}
	public String getAdapterDataType() {
		return adapterDataType;
	}
	public void setAdapterDataType(String adapterDataType) {
		this.adapterDataType = adapterDataType;
	}
	public String getStdDictId() {
		return stdDictId;
	}
	public void setStdDictId(String stdDictId) {
		this.stdDictId = stdDictId;
	}
	public JSONArray getDictList() {
		return dictList;
	}
	public void setDictList(JSONArray dictList) {
		this.dictList = dictList;
	}
}

+ 37 - 0
hos-admin/src/main/java/com/yihu/hos/datacollect/model/DtoJobConfig.java

@ -0,0 +1,37 @@
package com.yihu.hos.datacollect.model;
import java.sql.Timestamp;
/**
 * DtoJobConfig entity (extends RsJobConfig). @author MyEclipse Persistence Tools
 */
public class DtoJobConfig extends RsJobConfig implements java.io.Serializable {
	private String schemeAndVersion;
	private String jobDataset;
	private String jobPlan;
	public String getSchemeAndVersion() {
		return schemeAndVersion;
	}
	public void setSchemeAndVersion(String schemeAndVersion) {
		this.schemeAndVersion = schemeAndVersion;
	}
	public String getJobDataset() {
		return jobDataset;
	}
	public void setJobDataset(String jobDataset) {
		this.jobDataset = jobDataset;
	}
	public String getJobPlan() {
		return jobPlan;
	}
	public void setJobPlan(String jobPlan) {
		this.jobPlan = jobPlan;
	}
}

+ 74 - 0
hos-admin/src/main/java/com/yihu/hos/datacollect/model/DtoJobDataset.java

@ -0,0 +1,74 @@
package com.yihu.hos.datacollect.model;
/**
 * DtoJobDataset entity (extends RsJobDataset). @author MyEclipse Persistence Tools
 */
public class DtoJobDataset extends RsJobDataset implements java.io.Serializable {
	private String checked;
	private String jobDatasetCode;
	/** Data source related fields **/
	private String orgCode;
	private String datasourceId;
	private String datasourceName;
	private String config;
	private String type;
	public String getOrgCode() {
		return orgCode;
	}
	public void setOrgCode(String orgCode) {
		this.orgCode = orgCode;
	}
	public String getChecked() {
		return checked;
	}
	public void setChecked(String checked) {
		this.checked = checked;
	}
	public String getJobDatasetCode() {
		return jobDatasetCode;
	}
	public void setJobDatasetCode(String jobDatasetCode) {
		this.jobDatasetCode = jobDatasetCode;
	}
	public String getDatasourceId() {
		return datasourceId;
	}
	public void setDatasourceId(String datasourceId) {
		this.datasourceId = datasourceId;
	}
	public String getDatasourceName() {
		return datasourceName;
	}
	public void setDatasourceName(String datasourceName) {
		this.datasourceName = datasourceName;
	}
	public String getConfig() {
		return config;
	}
	public void setConfig(String config) {
		this.config = config;
	}
	public String getType() {
		return type;
	}
	public void setType(String type) {
		this.type = type;
	}
}

+ 36 - 0
hos-admin/src/main/java/com/yihu/hos/datacollect/model/DtoJobLog.java

@ -0,0 +1,36 @@
package com.yihu.hos.datacollect.model;
/**
 * Added by hzp on 2016-02-04
 */
public class DtoJobLog extends RsJobLog implements java.io.Serializable {
	private Integer count;
	private Integer success;
	private Integer repeatNum;
	public Integer getCount() {
		return count;
	}
	public void setCount(Integer count) {
		this.count = count;
	}
	public Integer getSuccess() {
		return success;
	}
	public void setSuccess(Integer success) {
		this.success = success;
	}
	public Integer getRepeatNum() {
		return repeatNum;
	}
	public void setRepeatNum(Integer repeatNum) {
		this.repeatNum = repeatNum;
	}
}

+ 58 - 0
hos-admin/src/main/java/com/yihu/hos/datacollect/model/RsDatapushLog.java

@ -0,0 +1,58 @@
package com.yihu.hos.datacollect.model;
import java.util.Date;
/**
 * RsDatapushLog entity. @author MyEclipse Persistence Tools
 */
public class RsDatapushLog implements java.io.Serializable {
	// Fields
	private String id;
	private String type;
	private String success;
	private String content;
	private Date datetime;
	public String getId() {
		return id;
	}
	public void setId(String id) {
		this.id = id;
	}
	public String getType() {
		return type;
	}
	public void setType(String type) {
		this.type = type;
	}
	public String getSuccess() {
		return success;
	}
	public void setSuccess(String success) {
		this.success = success;
	}
	public String getContent() {
		return content;
	}
	public void setContent(String content) {
		this.content = content;
	}
	public Date getDatetime() {
		return datetime;
	}
	public void setDatetime(Date datetime) {
		this.datetime = datetime;
	}
}

+ 155 - 0
hos-admin/src/main/java/com/yihu/hos/datacollect/model/RsJobConfig.java

@ -0,0 +1,155 @@
package com.yihu.hos.datacollect.model;
import java.util.Date;
/**
 * RsJobConfig entity. @author MyEclipse Persistence Tools
 */
public class RsJobConfig implements java.io.Serializable {
	// Fields
	private String id;
	private String jobName;
	private String jobInfo;
	private String jobType;
	private String schemeId;
	private String schemeVersion;
	private Date jobNextTime;
	private String valid;
	private String jobContent;
	private String jobContentType;
	private Date repeatStartTime;
	private Date repeatEndTime;
	private Integer delayTime;
	// Constructors
	/** default constructor */
	public RsJobConfig() {
	}
	public String getJobContent() {
		return jobContent;
	}
	public void setJobContent(String jobContent) {
		this.jobContent = jobContent;
	}
	public String getJobContentType() {
		return jobContentType;
	}
	public void setJobContentType(String jobContentType) {
		this.jobContentType = jobContentType;
	}
	/** minimal constructor */
	public RsJobConfig(String jobName, String valid) {
		this.jobName = jobName;
		this.valid = valid;
	}
	/** full constructor */
	public RsJobConfig(String jobName, String jobInfo, String jobType, String schemeId,String schemeVersion, String jobTimeInterval, Date jobNextTime, String valid) {
		this.jobName = jobName;
		this.jobInfo = jobInfo;
		this.jobType = jobType;
		this.schemeId = schemeId;
		this.schemeVersion = schemeVersion;
		this.jobNextTime = jobNextTime;
		this.valid = valid;
	}
	// Property accessors
	public String getId() {
		return this.id;
	}
	public void setId(String id) {
		this.id = id;
	}
	public String getJobName() {
		return this.jobName;
	}
	public void setJobName(String jobName) {
		this.jobName = jobName;
	}
	public String getJobInfo() {
		return this.jobInfo;
	}
	public void setJobInfo(String jobInfo) {
		this.jobInfo = jobInfo;
	}
	public String getJobType() {
		return this.jobType;
	}
	public void setJobType(String jobType) {
		this.jobType = jobType;
	}
	public String getSchemeId() {
		return schemeId;
	}
	public void setSchemeId(String schemeId) {
		this.schemeId = schemeId;
	}
	public String getSchemeVersion() {
		return schemeVersion;
	}
	public void setSchemeVersion(String schemeVersion) {
		this.schemeVersion = schemeVersion;
	}
	public Date getJobNextTime() {
		return this.jobNextTime;
	}
	public void setJobNextTime(Date jobNextTime) {
		this.jobNextTime = jobNextTime;
	}
	public String getValid() {
		return this.valid;
	}
	public void setValid(String valid) {
		this.valid = valid;
	}
	public Date getRepeatStartTime() {
		return repeatStartTime;
	}
	public void setRepeatStartTime(Date repeatStartTime) {
		this.repeatStartTime = repeatStartTime;
	}
	public Date getRepeatEndTime() {
		return repeatEndTime;
	}
	public void setRepeatEndTime(Date repeatEndTime) {
		this.repeatEndTime = repeatEndTime;
	}
	public Integer getDelayTime() {
		return delayTime;
	}
	public void setDelayTime(Integer delayTime) {
		this.delayTime = delayTime;
	}
}

+ 117 - 0
hos-admin/src/main/java/com/yihu/hos/datacollect/model/RsJobDataset.java

@ -0,0 +1,117 @@
package com.yihu.hos.datacollect.model;
/**
 * RsJobDataset entity. @author MyEclipse Persistence Tools
 */
public class RsJobDataset implements java.io.Serializable {
	// Fields
	private String id;
	private String jobId;
	private String jobDatasetId;
	private String jobDatasetName;
	private String jobDatasetKey;
	private String jobDatasetKeytype;
	private String jobDatasetKeyvalue;
	private String jobDatasetCondition;
	private Integer sort;
	// Constructors
	/** default constructor */
	public RsJobDataset() {
	}
	public String getJobDatasetKey() {
		return jobDatasetKey;
	}
	public void setJobDatasetKey(String jobDatasetKey) {
		this.jobDatasetKey = jobDatasetKey;
	}
	public String getJobDatasetKeytype() {
		return jobDatasetKeytype;
	}
	public void setJobDatasetKeytype(String jobDatasetKeytype) {
		this.jobDatasetKeytype = jobDatasetKeytype;
	}
	/** minimal constructor */
	public RsJobDataset(String jobId, String jobDatasetId) {
		this.jobId = jobId;
		this.jobDatasetId = jobDatasetId;
	}
	/** full constructor */
	public RsJobDataset(String jobId, String jobDatasetId, String jobDatasetName, String jobDatasetKeyvalue, String jobDatasetCondition, Integer sort) {
		this.jobId = jobId;
		this.jobDatasetId = jobDatasetId;
		this.jobDatasetName = jobDatasetName;
		this.jobDatasetKeyvalue = jobDatasetKeyvalue;
		this.jobDatasetCondition = jobDatasetCondition;
		this.sort = sort;
	}
	// Property accessors
	public String getId() {
		return this.id;
	}
	public void setId(String id) {
		this.id = id;
	}
	public String getJobId() {
		return this.jobId;
	}
	public void setJobId(String jobId) {
		this.jobId = jobId;
	}
	public String getJobDatasetId() {
		return this.jobDatasetId;
	}
	public void setJobDatasetId(String jobDatasetId) {
		this.jobDatasetId = jobDatasetId;
	}
	public String getJobDatasetName() {
		return this.jobDatasetName;
	}
	public void setJobDatasetName(String jobDatasetName) {
		this.jobDatasetName = jobDatasetName;
	}
	public String getJobDatasetKeyvalue() {
		return this.jobDatasetKeyvalue;
	}
	public void setJobDatasetKeyvalue(String jobDatasetKeyvalue) {
		this.jobDatasetKeyvalue = jobDatasetKeyvalue;
	}
	public String getJobDatasetCondition() {
		return this.jobDatasetCondition;
	}
	public void setJobDatasetCondition(String jobDatasetCondition) {
		this.jobDatasetCondition = jobDatasetCondition;
	}
	public Integer getSort() {
		return this.sort;
	}
	public void setSort(Integer sort) {
		this.sort = sort;
	}
}

+ 87 - 0
hos-admin/src/main/java/com/yihu/hos/datacollect/model/RsJobLog.java

@ -0,0 +1,87 @@
package com.yihu.hos.datacollect.model;
import java.sql.Timestamp;
import java.util.Date;
/**
 * RsJobLog entity. @author MyEclipse Persistence Tools
 */
public class RsJobLog implements java.io.Serializable {
	// Fields
	private String id;
	private Date jobStartTime;
	private Date jobEndTime;
	private String jobId;
	private String jobContent;
	private Integer jobDatasetCount;
	private Integer jobDatasetSuccess;
	// Constructors
	/** default constructor */
	public RsJobLog() {
	}
	// Property accessors
	public String getId() {
		return this.id;
	}
	public void setId(String id) {
		this.id = id;
	}
	public Date getJobStartTime() {
		return jobStartTime;
	}
	public void setJobStartTime(Date jobStartTime) {
		this.jobStartTime = jobStartTime;
	}
	public Date getJobEndTime() {
		return jobEndTime;
	}
	public void setJobEndTime(Date jobEndTime) {
		this.jobEndTime = jobEndTime;
	}
	public Integer getJobDatasetCount() {
		return jobDatasetCount;
	}
	public void setJobDatasetCount(Integer jobDatasetCount) {
		this.jobDatasetCount = jobDatasetCount;
	}
	public Integer getJobDatasetSuccess() {
		return jobDatasetSuccess;
	}
	public void setJobDatasetSuccess(Integer jobDatasetSuccess) {
		this.jobDatasetSuccess = jobDatasetSuccess;
	}
	public String getJobId() {
		return this.jobId;
	}
	public void setJobId(String jobId) {
		this.jobId = jobId;
	}
	public String getJobContent() {
		return this.jobContent;
	}
	public void setJobContent(String jobContent) {
		this.jobContent = jobContent;
	}
}

+ 191 - 0
hos-admin/src/main/java/com/yihu/hos/datacollect/model/RsJobLogDetail.java

@ -0,0 +1,191 @@
package com.yihu.hos.datacollect.model;
import java.sql.Timestamp;
import java.util.Date;
/**
 * RsJobLogDetail entity. @author MyEclipse Persistence Tools
 */
public class RsJobLogDetail implements java.io.Serializable {
	// Fields
	private String id;
	private Date startTime;
	private Date endTime;
	private String jobLogId;
	private String jobStatus;
	private String jobContent;
	private String datasourceId;
	private String config;
	private String stdDatasetCode;
	private String jobDatasetId;
	private String jobId;
	private String jobDatasetName;
	private String jobSql;
	private Integer jobNum;
	private Date repeatStartTime;
	private Date repeatEndTime;
	private String repeatJobContent;
	private String schemeVersion;
	private Integer jobDatasetRows;
	// Constructors
	/** default constructor */
	public RsJobLogDetail() {
	}
	// Property accessors
	public String getId() {
		return this.id;
	}
	public void setId(String id) {
		this.id = id;
	}
	public Date getStartTime() {
		return this.startTime;
	}
	public void setStartTime(Date startTime) {
		this.startTime = startTime;
	}
	public Date getEndTime() {
		return this.endTime;
	}
	public void setEndTime(Date endTime) {
		this.endTime = endTime;
	}
	public String getJobLogId() {
		return jobLogId;
	}
	public void setJobLogId(String jobLogId) {
		this.jobLogId = jobLogId;
	}
	public String getJobStatus() {
		return jobStatus;
	}
	public void setJobStatus(String jobStatus) {
		this.jobStatus = jobStatus;
	}
	public String getJobContent() {
		return this.jobContent;
	}
	public void setJobContent(String jobContent) {
		this.jobContent = jobContent;
	}
	public String getDatasourceId() {
		return datasourceId;
	}
	public void setDatasourceId(String datasourceId) {
		this.datasourceId = datasourceId;
	}
	public String getConfig() {
		return config;
	}
	public void setConfig(String config) {
		this.config = config;
	}
	public String getStdDatasetCode() {
		return stdDatasetCode;
	}
	public void setStdDatasetCode(String stdDatasetCode) {
		this.stdDatasetCode = stdDatasetCode;
	}
	public String getJobDatasetId() {
		return jobDatasetId;
	}
	public void setJobDatasetId(String jobDatasetId) {
		this.jobDatasetId = jobDatasetId;
	}
	public String getJobId() {
		return jobId;
	}
	public void setJobId(String jobId) {
		this.jobId = jobId;
	}
	public String getJobDatasetName() {
		return jobDatasetName;
	}
	public void setJobDatasetName(String jobDatasetName) {
		this.jobDatasetName = jobDatasetName;
	}
	public String getJobSql() {
		return jobSql;
	}
	public void setJobSql(String jobSql) {
		this.jobSql = jobSql;
	}
	public Integer getJobNum() {
		return jobNum;
	}
	public void setJobNum(Integer jobNum) {
		this.jobNum = jobNum;
	}
	public Date getRepeatStartTime() {
		return repeatStartTime;
	}
	public void setRepeatStartTime(Date repeatStartTime) {
		this.repeatStartTime = repeatStartTime;
	}
	public Date getRepeatEndTime() {
		return repeatEndTime;
	}
	public void setRepeatEndTime(Date repeatEndTime) {
		this.repeatEndTime = repeatEndTime;
	}
	public String getRepeatJobContent() {
		return repeatJobContent;
	}
	public void setRepeatJobContent(String repeatJobContent) {
		this.repeatJobContent = repeatJobContent;
	}
	public String getSchemeVersion() {
		return schemeVersion;
	}
	public void setSchemeVersion(String schemeVersion) {
		this.schemeVersion = schemeVersion;
	}
	public Integer getJobDatasetRows() {
		return jobDatasetRows;
	}
	public void setJobDatasetRows(Integer jobDatasetRows) {
		this.jobDatasetRows = jobDatasetRows;
	}
}

+ 575 - 0
hos-admin/src/main/java/com/yihu/hos/datacollect/service/DatacollectManager.java

@ -0,0 +1,575 @@
package com.yihu.hos.datacollect.service;
import com.yihu.hos.datacollect.dao.intf.IDatacollectDao;
import com.yihu.hos.datacollect.model.*;
import com.yihu.hos.datacollect.service.intf.IDatacollectManager;
import com.yihu.ehr.framework.model.ActionResult;
import com.yihu.hos.common.Services;
import com.yihu.ehr.framework.model.DataGridResult;
import com.yihu.ehr.framework.model.DictItem;
import com.yihu.ehr.framework.model.SimpleChartItem;
import com.yihu.ehr.framework.util.quartz.QuartzManager;
import com.yihu.hos.resource.model.RsDatasourceDataset;
import com.yihu.hos.resource.service.IStdService;
import com.yihu.hos.standard.model.adapter.AdapterDatasetModel;
import com.yihu.hos.standard.model.standard.StdDataSetModel;
import net.sf.json.JSONArray;
import net.sf.json.JSONObject;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import javax.annotation.Resource;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
@Service(Services.Datacollect)
public class DatacollectManager implements IDatacollectManager {
    @Resource(name = "DatacollectDao")
    private IDatacollectDao datacollectDao;
    @Resource(name = Services.StdService)
    private IStdService stdManager;
    @Autowired
    private QuartzManager quartzManager;
    /*********************** Job management ****************************************/
    /**
     * Get the cron expression by job id
     */
    @Override
    public String getCronByJobId(String jobId) throws Exception
    {
        return datacollectDao.getCronByJobId(jobId);
    }
    /**
     * Get job detail by id
     * @return
     */
    @Override
    public RsJobConfig getJobById(String id) throws Exception
    {
        return datacollectDao.getEntity(RsJobConfig.class, id);
    }
    /**
     * Translate a cron expression into a human-readable execution plan
     * @return
     */
    private String translationCron(String cron){
        try {
            if (cron != null && cron.length() > 0) {
                String re = "";
                String[] items = cron.split(" ");
                if (!items[5].equals("?")) // day-of-week field
                {
                    String[] weekDay = items[5].split(",");
                    for(String day :weekDay)
                    {
                        String WeekDay = day;
                        if(day.equals("1"))
                        {
                            WeekDay = "日";
                        }
                        else if(day.equals("2"))
                        {
                            WeekDay = "一";
                        }
                        else if(day.equals("3"))
                        {
                            WeekDay = "二";
                        }
                        else if(day.equals("4"))
                        {
                            WeekDay = "三";
                        }
                        else if(day.equals("5"))
                        {
                            WeekDay = "四";
                        }
                        else if(day.equals("6"))
                        {
                            WeekDay = "五";
                        }
                        else if(day.equals("7"))
                        {
                            WeekDay = "六";
                        }
                        re+="星期" + WeekDay +",";
                    }
                    re = re.substring(0,re.length()-1);
                }
                else{
                    if (!items[3].equals("*"))
                    {
                        String v = items[3];
                        if(v.indexOf('/')>0) // every N days
                        {
                            String[] varry = v.split("/");
                            re = "每隔" + varry[1] + "天";
                        }
                        else{// day of month
                            if(v.equals("1"))
                            {
                                re = "每月第一天";
                            }
                            else if(v.equals("L")){
                                re = "每月最后一天";
                            }
                            else{
                                re = "每月第"+v+"天";
                            }
                        }
                    }
                    else{
                        String v1 = items[1];
                        String v2 = items[2];
                        if(v1.indexOf('/')>0) // minute interval
                        {
                            String[] varry = v1.split("/");
                            re =  "每隔" + varry[1] + "分";
                        }
                        else{ // hour interval
                            String[] varry = v2.split("/");
                            re =  "每隔" + varry[1] + "时";
                        }
                    }
                }
                return re+"执行";
            }
            return cron;
        }
        catch (Exception ex)
        {
            return cron;
        }
    }
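    // Illustrative results of translationCron (the sample cron values are assumptions, not taken from real job data):
    //   "0 0/30 * * * ?" -> "每隔30分执行"   (every 30 minutes)
    //   "0 0 0/2 * * ?"  -> "每隔2时执行"    (every 2 hours)
    //   "0 0 1 * * 2"    -> "星期一执行"     (every Monday)
    //   "0 0 1 1 * ?"    -> "每月第一天执行" (first day of every month)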
    /**
     * Get the job list
     */
    @Override
    public DataGridResult getJobList(Map<String, Object> conditionMap,int page, int rows) throws Exception
    {
        DataGridResult re = datacollectDao.getJobList(conditionMap,page,rows);
        // get the job list
        List<RsJobConfig> list = re.getDetailModelList();
        if(list!=null && list.size()>0)
        {
            List<DtoJobConfig> dtoList = new ArrayList<>();
            for(RsJobConfig job :list)
            {
                DtoJobConfig dto = new DtoJobConfig();
                dto.setId(job.getId());
                dto.setJobContent(job.getJobContent());
                dto.setJobContentType(job.getJobContentType());
                dto.setJobInfo(job.getJobInfo());
                dto.setJobName(job.getJobName());
                dto.setJobNextTime(job.getJobNextTime());
                //dto.setJobTimeInterval(job.getJobTimeInterval());
                dto.setJobType(job.getJobType());
                dto.setSchemeId(job.getSchemeId());
                dto.setValid(job.getValid());
                // get the scheme and version name
                String schemeAndVersion = datacollectDao.getSchemeAndVersion(job.getSchemeVersion());
                dto.setSchemeAndVersion(schemeAndVersion);
                // get associated datasets
                List<RsJobDataset> datasetList = datacollectDao.getJobDataset(job.getId());
                if(datasetList!=null && datasetList.size()>0)
                {
                    String ds = "";
                    for(RsJobDataset dataset :datasetList)
                    {
                        ds += dataset.getJobDatasetName() + ",";
                    }
                    ds= ds.substring(0,ds.length()-1);
                    dto.setJobDataset(ds);
                }
                String cron = datacollectDao.getCronByJobId(job.getId());
                // translate the execution plan
                dto.setJobPlan(translationCron(cron));
                dtoList.add(dto);
            }
            re.setDetailModelList(dtoList);
        }
        return re;
    }
    /**
     * Get the dataset list by adapter scheme
     */
    @Override
    public DataGridResult getSchemeDataset(String schemeId,String schemeVersion,String jobId) throws Exception
    {
        // get all adapted datasets
        List<AdapterDatasetModel> datasetString = stdManager.getDatasetByScheme(schemeVersion);
        JSONArray jsonArray =  JSONArray.fromObject(datasetString);
        // get the job's datasets
        List<RsJobDataset> jobDataset = datacollectDao.getJobDataset(jobId);
        DataGridResult re = new DataGridResult();
        List<DtoJobDataset> list = new ArrayList<>();
        for(Object item : jsonArray)
        {
            JSONObject jsonItem = JSONObject.fromObject(item);
            String datasetId= jsonItem.getString("adapterDatasetId");
            // only show datasets whose adapter configuration is complete
            if(datasetId.length()>0 && jsonItem.getString("adapterDatasetName").length()>0 && !"null".equals(jsonItem.getString("adapterDatasetName")) && jsonItem.getString("adapterDatasetCode").length()>0 && !"null".equals(jsonItem.getString("adapterDatasetCode")))
            {
                DtoJobDataset obj = new DtoJobDataset();
                obj.setJobDatasetName(jsonItem.getString("adapterDatasetName"));
                obj.setJobDatasetCode(jsonItem.getString("adapterDatasetCode"));
                obj.setJobDatasetId(datasetId);
                // whether it is already associated with the job
                if(jobDataset!=null&&jobDataset.size()>0)
                {
                    for(RsJobDataset jd :jobDataset)
                    {
                        if(jd.getJobDatasetId().equals(datasetId))
                        {
                            obj.setId(jd.getId());
                            obj.setJobId(jobId);
                            obj.setJobDatasetKeyvalue(jd.getJobDatasetKeyvalue());
                            obj.setJobDatasetKey(jd.getJobDatasetKey());
                            obj.setJobDatasetKeytype(jd.getJobDatasetKeytype());
                            obj.setChecked("1");
                            obj.setJobDatasetCondition(jd.getJobDatasetCondition());
                            break;
                        }
                    }
                }
                list.add(obj);
            }
        }
        re.setDetailModelList(list);
        return re;
    }
    /**
     * Get the column list by adapter scheme
     */
    @Override
    public DataGridResult getSchemeDatasetCol(String schemeId,String schemeVersion,String datasetId) throws Exception
    {
        // get adapted columns
        List datacolString = stdManager.getDatacolByScheme(schemeVersion, datasetId);
        JSONArray datacolList = JSONArray.fromObject(datacolString);
        DataGridResult re = new DataGridResult();
        List<DtoDatasetCol> list = new ArrayList<>();
        for(Object item : datacolList)
        {
            JSONObject jsonItem = JSONObject.fromObject(item);
            DtoDatasetCol obj = new DtoDatasetCol();
            obj.setCode(jsonItem.getString("adapterMetadataCode"));
            obj.setText(jsonItem.getString("adapterMetadataName"));
            obj.setType(jsonItem.getString("adapterMetadataType"));
            obj.setDict(jsonItem.getString("stdDictId"));
            list.add(obj);
        }
        re.setDetailModelList(list);
        return re;
    }
    /**
     * Get dropdown data of datasets related to a job id
     */
    @Override
    public DataGridResult getJobDatasetByJobId(String jobId) throws Exception
    {
        // get the job's datasets
        List<RsJobDataset> jobDataset = datacollectDao.getJobDataset(jobId);
        DataGridResult re = new DataGridResult();
        List<DictItem> list = new ArrayList<>();
        for(RsJobDataset obj :jobDataset)
        {
            DictItem item = new DictItem();
            item.setValue(obj.getJobDatasetName());
            item.setCode(obj.getJobDatasetId());
            list.add(item);
        }
        re.setDetailModelList(list);
        return re;
    }
    /**
     * Save job-dataset associations
     */
    private void saveJobDataset(String jobId,String jobDataset) throws Exception{
        if(jobDataset!=null&&jobDataset.length()>0) {
            // clear existing job-dataset associations
            datacollectDao.deleteJobDatasetByJobId(jobId);
            JSONArray array = JSONArray.fromObject(jobDataset);
            if (array != null && array.size() > 0) {
                for (Object item : array) {
                    JSONObject obj = JSONObject.fromObject(item);
                    RsJobDataset rs = new RsJobDataset();
                    if(obj.get("jobDatasetCondition")!=null && !"null".equals(obj.getString("jobDatasetCondition")))
                    {
                        rs.setJobDatasetCondition(obj.getString("jobDatasetCondition"));
                    }
                    if(obj.get("jobDatasetId")!=null && !"null".equals(obj.getString("jobDatasetId"))) {
                        rs.setJobDatasetId(obj.getString("jobDatasetId"));
                    }
                    if(obj.get("jobDatasetKey")!=null && !"null".equals(obj.getString("jobDatasetKey"))) {
                        rs.setJobDatasetKey(obj.getString("jobDatasetKey"));
                    }
                    if(obj.get("jobDatasetKeytype")!=null && !"null".equals(obj.getString("jobDatasetKeytype"))) {
                        rs.setJobDatasetKeytype(obj.getString("jobDatasetKeytype"));
                    }
                    if(obj.get("jobDatasetKeyvalue")!=null && !"null".equals(obj.getString("jobDatasetKeyvalue"))) {
                        rs.setJobDatasetKeyvalue(obj.getString("jobDatasetKeyvalue"));
                    }
                    if(obj.get("jobDatasetName")!=null && !"null".equals(obj.getString("jobDatasetName"))) {
                        rs.setJobDatasetName(obj.getString("jobDatasetName"));
                    }
                    }
                    rs.setJobId(jobId);
                    datacollectDao.saveEntity(rs);
                }
            }
        }
        else{
            return;
        }
    }
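    // Expected shape of the jobDataset parameter handled by saveJobDataset above; the concrete values here
    // are placeholders for illustration only (the field names come from the parsing code):
    //   [{"jobDatasetId":"...","jobDatasetName":"...","jobDatasetKey":"...","jobDatasetKeytype":"DATE",
    //     "jobDatasetKeyvalue":"...","jobDatasetCondition":"..."}]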
    /**
     * Add a job
     */
    @Override
    @Transactional
    public ActionResult addJob(RsJobConfig obj,String cron,String jobDataset) throws Exception
    {
        datacollectDao.saveEntity(obj);
        saveJobDataset(obj.getId(),jobDataset);
        // register the job with quartz
        quartzManager.addJob(obj.getId(),obj.getJobContentType(),obj.getJobContent(),obj.getJobNextTime(),cron);
        return new ActionResult(true,"新增成功!");
    }
    /**
     * Update a job
     */
    @Override
    @Transactional
    public ActionResult updateJob(RsJobConfig obj,String cron,String jobDataset) throws Exception
    {
        datacollectDao.updateEntity(obj);
        saveJobDataset(obj.getId(),jobDataset);
        // update the quartz cron expression
        quartzManager.modifyJob(obj.getId(),obj.getJobContentType(),obj.getJobContent(),obj.getJobNextTime(),cron);
        return new ActionResult(true,"修改成功!");
    }
    /**
     * Update a job (entity only, without rescheduling)
     */
    @Override
    @Transactional
    public ActionResult updateJob(RsJobConfig obj) throws Exception
    {
        datacollectDao.updateEntity(obj);
        return new ActionResult(true,"修改成功!");
    }
    /**
     * Update the job status (enable/disable)
     */
    @Transactional
    public ActionResult validJob(String jobId,String valid) throws Exception
    {
        datacollectDao.validJob(jobId, valid);
        // pause the quartz job
        if(valid.equals("0"))
        {
            quartzManager.pauseJob(jobId);
        }
        else{ // resume the quartz job
            quartzManager.resumeJob(jobId);
        }
        return new ActionResult(true,"状态修改成功!");
    }
    /**
     * Delete a job
     */
    @Transactional
    public ActionResult deleteJob(String jobId) throws Exception {
        // clear job-dataset associations
        datacollectDao.deleteJobDatasetByJobId(jobId);
        datacollectDao.deleteEntity(RsJobConfig.class, jobId);
        // remove the quartz job
        quartzManager.removeJob(jobId);
        return new ActionResult(true, "删除成功!");
    }
    /************************* Dataset - data source management ***************************************************/
    /**
     * Dataset - data source management list (includes all datasets of the version)
     */
    @Override
    public DataGridResult getDatasetSource(String stdVersion) throws Exception
    {
        // get all datasets under the version
        List<StdDataSetModel> stdDataSetModelList = stdManager.getDatasetByVersion(stdVersion);
        JSONArray datasetList = JSONArray.fromObject(stdDataSetModelList);
        // get datasets that already have a data source configured
        List<RsDatasourceDataset> jobDataset = datacollectDao.getDatasourceDataset(stdVersion);
        DataGridResult re = new DataGridResult();
        List<JSONObject> list = new ArrayList<>();
        for(Object item : datasetList)
        {
            JSONObject obj = JSONObject.fromObject(item);
            JSONObject dd = new JSONObject();
            dd.put("datasetId",obj.getString("id"));
            dd.put("datasetCode",obj.getString("code"));
            dd.put("datasetName", obj.getString("name"));
            dd.put("stdVersion", stdVersion);
            if(jobDataset!=null&&jobDataset.size()>0)
            {
                for(RsDatasourceDataset rdd:jobDataset)
                {
                    if(rdd.getDatasetId().equals(obj.getString("id"))) {
                        dd.put("orgId", rdd.getOrgId());
                        dd.put("datasourceId", rdd.getDatasourceId());
                        dd.put("id", rdd.getId());
                        break;
                    }
                }
            }
            list.add(dd);
        }
        re.setDetailModelList(list);
        return re;
    }
    /**
     * Save dataset - data source configuration
     */
    @Override
    @Transactional
    public ActionResult saveDatasetSource(String json) throws Exception
    {
        JSONArray jsonList = JSONArray.fromObject(json);
        for(Object item : jsonList)
        {
            JSONObject obj = JSONObject.fromObject(item);
            if(obj.containsKey("id") && obj.getString("id").length()>0)
            {
                String id = obj.getString("id");
                // update
                RsDatasourceDataset dd = (RsDatasourceDataset)JSONObject.toBean(obj,RsDatasourceDataset.class);
                datacollectDao.updateEntity(dd);
//                if(obj.containsKey("datasourceId") && obj.getString("datasourceId").length()>0)
//                {
//                    RsDatasourceDataset dd = (RsDatasourceDataset)JSONObject.toBean(obj,RsDatasourceDataset.class);
//                    datacollectDao.updateEntity(dd);
//                }
//                //删除
//                else{
//                    datacollectDao.deleteEntity(RsDatasourceDataset.class,id);
//                }
            }
            else{
                // insert
                RsDatasourceDataset dd = (RsDatasourceDataset)JSONObject.toBean(obj,RsDatasourceDataset.class);
                datacollectDao.saveEntity(dd);
            }
        }
        return new ActionResult(true,"保存成功!");
    }
    /********************** Job log management *******************************************/
    /**
     * Get the job log detail list
     */
    @Override
    public DataGridResult getJobLogDetail(Map<String, Object> conditionMap,int page, int rows) throws Exception
    {
        return datacollectDao.getJobLogDetail(conditionMap, page,rows);
    }
    /**
     * Get the job log list
     */
    @Override
    public DataGridResult getJobLog(Map<String, Object> conditionMap,int page, int rows) throws Exception
    {
        DataGridResult re = datacollectDao.getJobLog(conditionMap, page, rows);
        List<RsJobLog> logList = re.getDetailModelList();
        List<DtoJobLog> list = new ArrayList<>();
        for(RsJobLog log:logList)
        {
            DtoJobLog dto = new DtoJobLog();
            BeanUtils.copyProperties(log,dto);
            List<Map<String,Object>> maps = datacollectDao.getJobLogCount(log.getId());
            if(maps!=null && maps.size()>0)
            {
                int count = Integer.parseInt(String.valueOf(maps.get(0).get("count")));
                int success =  Integer.parseInt(String.valueOf(maps.get(0).get("success")));
                int repeat_num =  Integer.parseInt(String.valueOf(maps.get(0).get("repeat_num")));
                dto.setCount(count);
                dto.setSuccess(success);
                dto.setRepeatNum(repeat_num);
            }
            else{
                dto.setCount(0);
                dto.setSuccess(0);
                dto.setRepeatNum(0);
            }
            list.add(dto);
        }
        re.setDetailModelList(list);
        return re;
    }
    /**
     * Group job log details by dataset
     * @return
     */
    @Override
    public DataGridResult getJobLogDataset(String logId) throws Exception{
        DataGridResult re = new DataGridResult();
        List<SimpleChartItem> maps = datacollectDao.getJobLogDataset(logId);
        re.setDetailModelList(maps);
        return re;
    }
}

+ 1043 - 0
hos-admin/src/main/java/com/yihu/hos/datacollect/service/DatacollectService.java

@ -0,0 +1,1043 @@
package com.yihu.hos.datacollect.service;
import com.fasterxml.jackson.databind.JavaType;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.mongodb.client.MongoCollection;
import com.yihu.hos.common.Services;
import com.yihu.hos.crawler.model.config.SysConfig;
import com.yihu.hos.crawler.model.patient.PatientIdentity;
import com.yihu.hos.datacollect.dao.intf.IDatacollectDao;
import com.yihu.hos.datacollect.dao.intf.IDatacollectLogDao;
import com.yihu.hos.datacollect.model.*;
import com.yihu.hos.datacollect.service.intf.IDatacollectManager;
import com.yihu.hos.datacollect.service.intf.IDatacollectService;
import com.yihu.ehr.dbhelper.common.QueryCondition;
import com.yihu.ehr.dbhelper.common.enums.DBType;
import com.yihu.ehr.dbhelper.common.sqlparser.*;
import com.yihu.ehr.dbhelper.jdbc.DBHelper;
import com.yihu.ehr.dbhelper.mongodb.MongodbFactory;
import com.yihu.ehr.dbhelper.mongodb.MongodbHelper;
import com.yihu.ehr.framework.constrant.Constants;
import com.yihu.ehr.framework.constrant.DateConvert;
import com.yihu.ehr.framework.model.ActionResult;
import com.yihu.ehr.framework.util.httpclient.HttpHelper;
import com.yihu.ehr.framework.util.httpclient.HttpResponse;
import com.yihu.ehr.framework.util.log.LogService;
import com.yihu.ehr.framework.util.operator.CollectionUtil;
import com.yihu.ehr.framework.util.webservice.WebserviceUtil;
import com.yihu.hos.resource.service.IStdService;
import org.apache.axis.client.Call;
import org.apache.cxf.endpoint.Client;
import org.apache.cxf.jaxws.endpoint.dynamic.JaxWsDynamicClientFactory;
import org.dom4j.Document;
import org.dom4j.DocumentHelper;
import org.dom4j.Element;
import org.dom4j.io.SAXReader;
import org.json.JSONObject;
import org.json.JSONArray;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.xml.sax.InputSource;
import javax.annotation.Resource;
import javax.xml.namespace.QName;
import java.io.ByteArrayInputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.*;
/**
 * Data collection execution service
 */
@Service(Services.DatacollectService)
public class DatacollectService implements IDatacollectService {
    @Resource(name = Services.Datacollect)
    private IDatacollectManager datacollect;
    @Resource(name = Services.StdService)
    private IStdService stdService;
    @Resource(name = "DatacollectDao")
    private IDatacollectDao datacollectDao;
    @Resource(name = "DatacollectLogDao")
    private IDatacollectLogDao datacollectLogDao;
    MongodbHelper mongoOrigin = new MongodbHelper("origin");
    MongodbHelper mongo = new MongodbHelper();
    String dateFormat = "yyyy-MM-dd HH:mm:ss"; // default datetime string format
    int maxNum = 1000; // maximum rows per query (page size)
    /**
     * Derive the database type from the JDBC connection string
     */
    private static DBType getDbType(String uri) {
        if (uri.startsWith("jdbc:mysql")) {
            return DBType.Mysql;
        } else if (uri.startsWith("jdbc:oracle")) {
            return DBType.Oracle;
        } else if (uri.startsWith("jdbc:hive2")) {
            return DBType.Hive;
        } else if (uri.startsWith("jdbc:microsoft:sqlserver")) {
            return DBType.Sqlserver;
        } else {
            return DBType.Mysql;
        }
    }
    /**
     * Build a datetime SQL fragment for the given database type
     * @return
     */
    private String getDateSqlByDBType(DBType dbType,Date date) throws Exception
    {
        String val = DateConvert.toString(date, dateFormat);
        if(dbType.equals(DBType.Mysql))
        {
            return "date_format(\'" + val + "\',\'" + dateFormat + "\')";
        }
        else if(dbType.equals(DBType.Oracle))
        {
            return "to_date(\'" + val + "\',\'" + dateFormat + "\')";
        }
        else
        {
            return val;
        }
    }
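    // Example output of getDateSqlByDBType above for an assumed date of 2016-01-01 00:00:00:
    //   MySQL : date_format('2016-01-01 00:00:00','yyyy-MM-dd HH:mm:ss')
    //   Oracle: to_date('2016-01-01 00:00:00','yyyy-MM-dd HH:mm:ss')
    //   other : 2016-01-01 00:00:00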
    /**
     * Build a numeric-conversion SQL fragment for the given database type
     */
    private String getToNumberSqlByDBType(DBType dbType,String key)  throws Exception
    {
        if(dbType.equals(DBType.Mysql))
        {
            return "cast("+key+" as signed integer)";
        }
        else if(dbType.equals(DBType.Oracle))
        {
            return "to_number(" + key + ")";
        }
        else
        {
            return key;
        }
    }
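    // Example output of getToNumberSqlByDBType above for an assumed key column "EVENT_NO":
    //   MySQL : cast(EVENT_NO as signed integer)
    //   Oracle: to_number(EVENT_NO)
    //   other : EVENT_NO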
    /**
     * Build a paging SQL statement for the given database type
     * @return
     */
    private String getPageSqlByDBType(DBType dbType,String sql,int start,int rows) throws Exception
    {
        if(dbType.equals(DBType.Mysql))
        {
            return sql + " LIMIT " + start + "," + rows;
        }
        else if(dbType.equals(DBType.Oracle))
        {
            return " select * from (select t.*,ROWNUM RSCOM_RN from (" + sql + ") t where ROWNUM<" + (start+rows+1) + ") where RSCOM_RN>= " + (start+1);
        }
        else
        {
            return sql;
        }
    }
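    // Example output of getPageSqlByDBType above for an assumed query "select * from T" with start=0, rows=1000:
    //   MySQL : select * from T LIMIT 0,1000
    //   Oracle: select * from (select t.*,ROWNUM RSCOM_RN from (select * from T) t where ROWNUM<1001) where RSCOM_RN>= 1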
    /**
     * Translate all dictionary-coded columns into Chinese display values
     */
    private List<JSONObject> translateDictCN(List<JSONObject> list,JSONArray colList,String schemeVersion) throws Exception
    {
        // build the list of dictionary columns
        List<DtoDictCol> dictColList = new ArrayList<>();
        for(int i=0; i< colList.length();i++)
        {
            JSONObject col = colList.getJSONObject(i);
            String dictId = col.optString("adapterDictId");
            if(dictId!=null && dictId.length()>0)
            {
                String dictType = col.optString("adapterDataType");
                String stdMetadataCode = col.optString("stdMetadataCode");
                DtoDictCol dictCol = new DtoDictCol();
                dictCol.setStdMetadataCode(stdMetadataCode);
                dictCol.setStdDictId(dictId);
                dictCol.setAdapterDataType(dictType.length() > 0 ? dictType : "1");// default: translate dictionaries by code
                // fetch dictionary entries
                List dictString = stdService.getDictByScheme(schemeVersion,dictId);
                JSONArray dictAdapterArray = new JSONArray(dictString);
                dictCol.setDictList(dictAdapterArray);
                dictColList.add(dictCol);
            }
        }
        // translate every row
        for(JSONObject data :list)
        {
            // iterate dictionary columns
            for (DtoDictCol col : dictColList) {
                String colName = col.getStdMetadataCode();
                String oldValue = data.optString(colName);
                String newValue = translateDictValueCN(oldValue,col.getAdapterDataType(),col.getDictList());
                if(newValue!=null && newValue.length()>0)
                {
                    data.put(colName,newValue);
                }
            }
        }
        return list;
    }
    /**
     * Translate a single dictionary code into its Chinese display name
     * @return
     */
    private String translateDictValueCN(String oldValue,String type,JSONArray dictAdapterList) throws Exception
    {
        if(type.equals("0")) //原本就是值
        {
            return oldValue;
        }
        // iterate dictionary entries (code -> name)
        for(int i=0; i< dictAdapterList.length();i++)
        {
            JSONObject dictItem = dictAdapterList.getJSONObject(i);
            if(oldValue!=null && dictItem.has("stdEntryCode")) {
                if (oldValue.equals(dictItem.getString("stdEntryCode"))) {
                    String newValue = dictItem.getString("stdEntryValue"); //名称
                    return newValue;
                }
            }
        }
        return oldValue;
    }
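    // Example for translateDictValueCN above, assuming a dictionary entry {"stdEntryCode":"1","stdEntryValue":"男"}:
    //   translateDictValueCN("1", "1", dict) -> "男"   (code translated to its display name)
    //   translateDictValueCN("男", "0", dict) -> "男"  (type "0": already a display value, returned unchanged)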
    /**
     * Dictionary translation (adapter values -> standard codes)
     * @param list
     * @param colList
     * @return
     * @throws Exception
     */
    private List<JSONObject> translateDict(List<JSONObject> list,JSONArray colList,String schemeVersion) throws Exception
    {
        // build the list of dictionary columns
        List<DtoDictCol> dictColList = new ArrayList<>();
        for(int i=0; i< colList.length();i++)
        {
            JSONObject col = colList.getJSONObject(i);
            String dictId = col.optString("adapterDictId");
            if(dictId!=null && dictId.length()>0)
            {
                String dictType = col.optString("adapterDataType");
                String stdMetadataCode = col.optString("stdMetadataCode");
                DtoDictCol dictCol = new DtoDictCol();
                dictCol.setStdMetadataCode(stdMetadataCode);
                dictCol.setStdDictId(dictId);
                dictCol.setAdapterDataType(dictType.length()>0?dictType:"1");// default: translate dictionaries by code
                // fetch dictionary entries
                List dictString = stdService.getDictByScheme(schemeVersion,dictId);
                JSONArray dictAdapterArray = new JSONArray(dictString);
                dictCol.setDictList(dictAdapterArray);
                dictColList.add(dictCol);
            }
        }
        // translate every row
        for(JSONObject data :list)
        {
            // iterate dictionary columns
            for (DtoDictCol col : dictColList) {
                String colName = col.getStdMetadataCode();
                String oldValue = data.optString(colName);
                String newValue = translateDictValue(oldValue,col.getAdapterDataType(),col.getDictList());
                if(newValue!=null && newValue.length()>0)
                {
                    data.put(colName,newValue);
                }
            }
        }
        return list;
    }
    /**
     * Translate a single adapter dictionary value into the standard code
     * @return
     */
    private String translateDictValue(String oldValue,String type,JSONArray dictAdapterList) throws Exception
    {
        // field of the adapter (application) standard used for matching
        String colName = "adapterEntryCode";
        if(type.equals("0")) // translate by name
        {
            colName = "adapterEntryValue";
        }
        // iterate dictionary entries
        for(int i=0; i< dictAdapterList.length();i++)
        {
            JSONObject dictItem = dictAdapterList.getJSONObject(i);
            if(oldValue!=null && dictItem.has(colName)) {
                if (oldValue.equals(dictItem.getString(colName))) {
                    String newValue = dictItem.getString("stdEntryCode");
                    return newValue;
                }
            }
        }
        // return an empty string if no matching adapter dictionary entry is found
        return "";
    }
    /**
     * Build the filter condition from the JSON condition string
     * @return
     */
    private String getCondition(DBType dbType,String conditionString){
        JSONArray array = new JSONArray(conditionString);
        if(array!=null && array.length()>0)
        {
            List<QueryCondition> conditions = new ArrayList<>();
            for(Object item : array)
            {
                JSONObject obj = (JSONObject)item;
                String logical = obj.getString("andOr");
                String operation= obj.getString("condition");
                String field= obj.getString("field");
                String keyword = obj.getString("value");
                conditions.add(new QueryCondition(logical, operation, field, keyword));
            }
            // convert conditions into a SQL fragment
            ParserSql ps;
            switch (dbType)
            {
                case Oracle:
                    ps = new ParserOracle();
                    break;
                case Sqlserver:
                    ps = new ParserSqlserver();
                    break;
                default:
                    ps = new ParserMysql();
            }
            return ps.getConditionSql(conditions);
        }
        return "";
    }
    /**
     * Build the condition SQL fragment
     * @param dbType
     * @param conditionString
     * @return
     * @throws ParseException
     */
    private String getConditionSql(DBType dbType,String conditionString) throws ParseException {
        String conditionSql = "";
        JSONArray conditions = new JSONArray(conditionString);
        Iterator iterator = conditions.iterator();
        while (iterator.hasNext())
        {
            JSONObject condition = (JSONObject)iterator.next();
            String logic = condition.getString("condition");
            String andOr = condition.getString("andOr");
            String field = condition.getString("field");
            String value = condition.getString("value");
            String fieldType = condition.getString("type");
            String keys = "";
            if(andOr.equals(" AND "))
            {
                conditionSql = conditionSql + " and ";
            }
            else
            {
                conditionSql = conditionSql + " or ";
            }
            if(logic.equals(" IN ") || logic.equals(" NOT IN "))
            {
                String[] keywords = value.split(",");
                for(String key : keywords)
                {
                    keys += "'" + key + "',";
                }
                keys = " (" + keys.substring(0,keys.length() - 1) + ") ";
            }
            else if(logic.equals(" LIKE "))
            {
                keys += " '%" + value + "%' ";
            }
            else
            {
                if(fieldType.equals("DATE"))
                {
                    keys  += getDateFormatSql(dbType,value);
                }
                else
                {
                    keys += " '" + value + "' ";
                }
            }
            conditionSql += field + logic + keys;
        }
        return conditionSql;
    }
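    // Example for getConditionSql above, assuming a condition JSON of
    //   [{"andOr":" AND ","condition":" LIKE ","field":"NAME","value":"张","type":"VARCHAR"}]
    // the returned fragment is roughly " and NAME LIKE  '%张%' ", which the caller appends after "where 1=1".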
    /**
     * Get the database-specific datetime literal for a date string
     * @param dbType
     * @param key
     * @return
     * @throws ParseException
     */
    private String getDateFormatSql(DBType dbType,String key) throws ParseException {
        String dateFormat = "yyyy-MM-dd HH:mm:ss";
        SimpleDateFormat formatDate = new SimpleDateFormat("yyyy-MM-dd");
        Date d = formatDate.parse(key);
        SimpleDateFormat format = new SimpleDateFormat(dateFormat);
        switch (dbType)
        {
            case Oracle:
                key = "to_date(\'" + format.format(d) + "\',\'YYYY-MM-DD HH24:MI:SS\')";
                break;
            case Sqlserver:
                break;
            default:
                key = "date_format(\'" + format.format(d) + "\',\'%y-%m-%d %T\')";
        }
        return key;
    }
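    // Example for getDateFormatSql above with an assumed key of "2016-01-01":
    //   Oracle   : to_date('2016-01-01 00:00:00','YYYY-MM-DD HH24:MI:SS')
    //   MySQL    : date_format('2016-01-01 00:00:00','%y-%m-%d %T')
    //   Sqlserver: the key is returned unchanged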
    /**
     * Store collected rows into MongoDB
     * @return
     */
    private String intoMongodb(List<JSONObject> list,String schemeVersion,String stdDatasetCode,JSONArray colList)
    {
        String patientIdCode = Constants.PATIENT_ID.toUpperCase();
        String eventNoCode = Constants.EVENT_NO.toUpperCase();
        PatientIdentity patientIdentity = SysConfig.getInstance().getPatientIdentity(stdDatasetCode);
        if (patientIdentity != null) {
            patientIdCode = patientIdentity.getPatientIDCode();
            eventNoCode = patientIdentity.getEventNoCode();
        }
        try{
            if(!mongo.createIndex(stdDatasetCode, "patientIndex", patientIdCode, eventNoCode)) {
                return "Mongodb索引创建失败!(表:"+stdDatasetCode+")";
            }
            if(list!=null && list.size()>0)
            {
                // save to the origin store before standard-code conversion (dictionary values rendered as Chinese text)
                boolean b = mongoOrigin.insert(stdDatasetCode,translateDictCN(list, colList,schemeVersion));
                // dictionary conversion to standard codes
                list = translateDict(list, colList,schemeVersion);
                // save to the standard mongodb store
                b = mongo.insert(stdDatasetCode,list);
                if(!b)
                {
                    if(mongo.errorMessage!=null && mongo.errorMessage.length()>0)
                    {
                        System.out.print(mongo.errorMessage);
                        return mongo.errorMessage;
                    }
                    else {
                        return "Mongodb保存失败!(表:"+stdDatasetCode+")";
                    }
                }
            }
        }
        catch (Exception e)
        {
            return e.getMessage();
        }
        return "";
    }
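    // intoMongodb above writes each batch twice: once to the "origin" store with dictionary values rendered
    // as Chinese display text (translateDictCN), and once to the standard store after translating adapter
    // values to standard codes (translateDict); it returns an empty string on success or an error message.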
    /**
     * Collect data from a database table
     * @return
     */
    private String collectTable(DtoJobDataset ds,String schemeVersion,String logId) throws Exception
    {
        String message = "";
        String datasetId = ds.getJobDatasetId();
        String jobDatasetName = ds.getJobDatasetName();
        String condition=ds.getJobDatasetCondition();
        String key=ds.getJobDatasetKey();
        String keytype=ds.getJobDatasetKeytype();
        String keyvalue=ds.getJobDatasetKeyvalue();
        String orgCode = ds.getOrgCode();
        String datasourceId = ds.getDatasourceId();
        String config = ds.getConfig(); // database connection configuration
        DBHelper db = new DBHelper(datasourceId,config);
        DBType dbType = db.dbType;
        // get the dataset mapping
        List datasetString = stdService.getDatasetByScheme(schemeVersion, datasetId);
        JSONArray datasetList = new JSONArray(datasetString);
        if(datasetList!=null &&datasetList.length()>0)
        {
            String stdTableName = datasetList.getJSONObject(0).optString("stdDatasetCode");
            String adapterTableName = datasetList.getJSONObject(0).optString("adapterDatasetCode");
            // get the dataset column mapping structure
            List colString = stdService.getDatacolByScheme(schemeVersion,datasetId);
            JSONArray colList = new JSONArray(colString);
            if(colList!=null && colList.length()>0)
            {
                // build the query sql
                String strSql = "Select '" + orgCode +"' as RSCOM_ORG_CODE";
                for(int i=0; i< colList.length();i++)
                {
                    JSONObject col = colList.getJSONObject(i);
                    String adapterMetadataCode = col.optString("adapterMetadataCode");
                    if(adapterMetadataCode.length()>0)
                    {
                        strSql+= ","+adapterMetadataCode +" as " + col.optString("stdMetadataCode") ;
                    }
                }
                strSql += " from " +adapterTableName;
                String strWhere = " where 1=1";
                // collection scope (filter conditions)
                if(condition!=null && condition.length()>0)
                {
                    strWhere += getConditionSql(dbType,condition);
                }
                // incremental collection by key
                String maxKey = "0";
                if(key!=null && key.length()>0)
                {
                    maxKey = key;
                    if(keytype.toUpperCase().equals("DATE")) //时间类型
                    {
                        if(keyvalue!=null && keyvalue.length()>0) {
                            Date keyDate = new Date();
                            // parse the string into a date
                            keyDate = DateConvert.toDate(keyvalue);
                            // build the datetime sql for the database type
                            strWhere += " and "+ maxKey + ">'"+getDateSqlByDBType(dbType,keyDate)+"'";
                        }
                    }
                    else if(keytype.toUpperCase().equals("VARCHAR")) //字符串类型
                    {
                        maxKey = getToNumberSqlByDBType(dbType,key);
                        if(keyvalue!=null && keyvalue.length()>0) {
                            strWhere += " and "+ maxKey + ">'" + keyvalue + "'";
                        }
                    }
                    else{
                        if(keyvalue!=null && keyvalue.length()>0) {
                            strWhere += " and "+ maxKey + ">'" + keyvalue + "'";
                        }
                    }
                    strWhere += " order by " + maxKey;
                }
                strSql += strWhere;
                // total row count
                String sqlCount = "select count(1) as COUNT from (" + strSql+")";
                String sqlMax = "select max(" + maxKey + ") as MAX_KEYVALUE from " + adapterTableName + strWhere;
                JSONObject objCount = db.load(sqlCount);
                if(objCount==null)
                {
                    if(db.errorMessage.length()>0)
                    {
                        throw new Exception(db.errorMessage);
                    }
                    else{
                        throw new Exception("查询异常:"+sqlCount);
                    }
                }
                else{
                    int count = objCount.getInt("COUNT");
                    if(count==0) // 0 records, nothing to collect
                    {
                        message = "0条记录,无需采集。";
                    }
                    else
                    {
                        //fetch the maximum key value
                        JSONObject objMax = db.load(sqlMax);
                        int successCount = 0;
                        String maxKeyvalue = objMax.optString("MAX_KEYVALUE");
                        //update the stored maximum key value (incremental bookmark)
                        if(maxKeyvalue!=null&& maxKeyvalue.length()>0)
                        {
                            datacollectLogDao.updateJobDatasetKeyvalue(ds.getId(),maxKeyvalue);
                            LogService.getLogger().info("修改任务数据集最大值为"+maxKeyvalue+"。"); //text log
                        }
                        int countPage = 1;
                        if(count > maxNum) //paged collection
                        {
                            //round up so an exact multiple of maxNum does not produce an empty extra page
                            countPage = (count + maxNum - 1)/maxNum;
                        }
                        for(int i=0;i<countPage;i++)
                        {
                            int rows = maxNum;
                            if(i+1==countPage){
                                rows = count-i*maxNum;
                            }
                            String sql = getPageSqlByDBType(dbType,strSql,i*maxNum,rows); //build the paging SQL for this database type
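                            // NOTE: getPageSqlByDBType is assumed to wrap strSql in a DB-specific pagination
                            // clause (e.g. ROWNUM for Oracle, LIMIT/OFFSET for MySQL); its implementation is
                            // not shown in this file.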
                            RsJobLogDetail detail = new RsJobLogDetail();
                            detail.setStartTime(new Date());
                            detail.setJobLogId(logId);
                            detail.setDatasourceId(datasourceId);
                            detail.setConfig(config);
                            detail.setStdDatasetCode(stdTableName);
                            detail.setJobDatasetId(datasetId);
                            detail.setJobDatasetName(ds.getJobDatasetName());
                            detail.setJobId(ds.getJobId());
                            detail.setJobSql(sql);
                            detail.setJobNum(i+1);
                            detail.setJobDatasetRows(rows);
                            detail.setSchemeVersion(schemeVersion);
                            List<JSONObject> list = db.query(sql);
                            String msg = "";
                            if(list!=null)
                            {
                                msg = intoMongodb(list,schemeVersion,stdTableName,colList); //returns an error message, empty on success
                            }
                            else{
                                if(db.errorMessage.length()>0)
                                {
                                    msg = db.errorMessage;
                                }
                                else{
                                    msg = "查询数据为空!";
                                }
                            }
                            if(msg.length()>0)
                            {
                                //record the failure on the job log detail row
                                detail.setJobStatus("0");
                                detail.setJobContent(msg);
                                LogService.getLogger().info(msg); //text log
                            }
                            else{
                                detail.setJobStatus("1");
                                detail.setJobContent("采集成功!");
                                successCount += rows;
                            }
                            detail.setEndTime(new Date());
                            datacollectLogDao.saveEntity(detail);
                        }
                        message = jobDatasetName + "采集成功"+successCount+"条数据,总条数"+count+"条。";
                    }
                }
            }
            else
            {
                throw new Exception(jobDatasetName + "数据集字段映射为空!");
            }
        }
        else{
            throw new Exception(jobDatasetName + "数据集映射为空!");
        }
        LogService.getLogger().info(message);
        return message;
    }
    /**
     * Convert an XML payload into a list of JSONObjects
     * @return one JSONObject per <Data> element, with element names upper-cased as keys
     */
    private List<JSONObject> getListFromXml(String xml) throws Exception
    {
        SAXReader reader = new SAXReader();
        Document doc = reader.read(new ByteArrayInputStream(xml.getBytes("UTF-8")));
        Element root = doc.getRootElement();
        List<JSONObject> re = new ArrayList<>();
        //iterate the <Data> rows in the XML
        Iterator iter = root.elementIterator("Data");
        while (iter.hasNext())
        {
            JSONObject obj = new JSONObject();
            Element el =(Element)iter.next();
            Iterator cols = el.elementIterator();
            while (cols.hasNext())
            {
                Element col =(Element)cols.next();
                obj.put(col.getName().toUpperCase(),col.getStringValue());
            }
            re.add(obj);
        }
        return re;
    }
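    // Example (assumed) payload accepted by getListFromXml -- a root element with repeated
    // <Data> rows whose child element names become upper-cased JSON keys:
    //   <Response><Data><ID>1</ID><NAME>test</NAME></Data></Response>  =>  [{"ID":"1","NAME":"test"}]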
    /**
     * Collect a dataset through the web service channel
     * @return a summary message for the job log
     */
    private String collectWebservice(DtoJobDataset ds,String schemeVersion,String logId) throws Exception
    {
        String message = "";
        String datasetId = ds.getJobDatasetId();
        String jobDatasetName = ds.getJobDatasetName();
        String condition=ds.getJobDatasetCondition();
        String key=ds.getJobDatasetKey();
        String keytype=ds.getJobDatasetKeytype();
        String keyvalue=ds.getJobDatasetKeyvalue();
        String orgCode = ds.getOrgCode();
        String datasourceId = ds.getDatasourceId();
        String config = ds.getConfig(); //database connection config
        DBType dbType = DBType.Oracle;//********** hard-coded to Oracle for now ****************************
        //web service address
        ObjectMapper objectMapper = new ObjectMapper();
        Map<String,String> mapConfig = objectMapper.readValue(config,Map.class);
        if(mapConfig.containsKey("protocol") && mapConfig.containsKey("url")) {
            String url = mapConfig.get("protocol") + "://" + mapConfig.get("url");
            //get the dataset mapping
            List datasetString = stdService.getDatasetByScheme(schemeVersion, datasetId);
            JSONArray datasetList = new JSONArray(datasetString);
            if (datasetList != null && datasetList.length() > 0) {
                String stdTableName = datasetList.getJSONObject(0).optString("stdDatasetCode");
                String adapterTableName = datasetList.getJSONObject(0).optString("adapterDatasetCode");
                //get the dataset field (metadata) mapping structure
                List colString = stdService.getDatacolByScheme(schemeVersion, datasetId);
                JSONArray colList = new JSONArray(colString);
                if (colList != null && colList.length() > 0) {
                    //build the query SQL
                    String strSql = "Select '" + orgCode + "' as RSCOM_ORG_CODE";
                    for (int i = 0; i < colList.length(); i++) {
                        JSONObject col = colList.getJSONObject(i);
                        String adapterMetadataCode = col.optString("adapterMetadataCode");
                        if (adapterMetadataCode.length() > 0) {
                            strSql += "," + adapterMetadataCode + " as " + col.optString("stdMetadataCode");
                        }
                    }
                    strSql += " from " + adapterTableName;
                    String strWhere = " where 1=1";
                    //collection scope condition
                    if (condition != null && condition.length() > 0) {
                        strWhere += getConditionSql(dbType, condition);
                    }
                    //incremental collection
                    String maxKey = "0";
                    if (key != null && key.length() > 0) {
                        maxKey = key;
                        if (keytype.toUpperCase().equals("DATE")) //date key
                        {
                            if (keyvalue != null && keyvalue.length() > 0) {
                                //parse the stored key value into a Date
                                Date keyDate = DateConvert.toDate(keyvalue);
                                //build the DB-specific date literal for the comparison
                                strWhere += " and " + key + ">'" + getDateSqlByDBType(dbType, keyDate) + "'";
                                strWhere += " order by " + key;
                            }
                        } else if (keytype.toUpperCase().equals("VARCHAR")) //varchar key
                        {
                            maxKey = getToNumberSqlByDBType(dbType, key);
                            if (keyvalue != null && keyvalue.length() > 0) {
                                strWhere += " and " + maxKey + ">'" + keyvalue + "'";
                                strWhere += " order by " + maxKey;
                            }
                        } else {
                            if (keyvalue != null && keyvalue.length() > 0) {
                                strWhere += " and " + key + ">'" + keyvalue + "'";
                                strWhere += " order by " + key;
                            }
                        }
                    }
                    strSql += strWhere;
                    //row count and max-key queries
                    String sqlCount = "select count(1) as COUNT from (" + strSql+")";
                    String sqlMax = "select max(" + maxKey + ") as MAX_KEYVALUE from " + adapterTableName + strWhere;
                    //get the total row count through the web service
                    String strCount = WebserviceUtil.request(url,"ExcuteSQL",new Object[]{"",sqlCount});
                    List<JSONObject> dataCount = getListFromXml(strCount);
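                    // WebserviceUtil.request is assumed to invoke the remote "ExcuteSQL" operation with the
                    // given arguments and return an XML result that getListFromXml converts into JSON rows
                    // (see the main() method at the bottom of this class for a manual call example).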
                    if (dataCount!=null &&dataCount.size()>0) {
                        Integer count = Integer.parseInt(dataCount.get(0).getString("COUNT"));
                        if (count == 0) //no rows to collect
                        {
                            message = "0条记录,无需采集。";
                        }
                        else {
                            //get the maximum key value through the web service
                            String strMax = WebserviceUtil.request(url,"ExcuteSQL",new Object[]{"",sqlMax});
                            List<JSONObject> dataMax = getListFromXml(strMax);
                            int successCount = 0;
                            String maxKeyvalue = dataMax.get(0).getString("MAX_KEYVALUE");
                            //update the stored maximum key value (incremental bookmark)
                            if (maxKeyvalue != null && maxKeyvalue.length() > 0) {
                                datacollectLogDao.updateJobDatasetKeyvalue(ds.getId(), maxKeyvalue);
                                LogService.getLogger().info("修改任务数据集最大值为" + maxKeyvalue + "。"); //text log
                            }
                            int countPage = 1;
                            if (count > maxNum) //paged collection
                            {
                                //round up so an exact multiple of maxNum does not produce an empty extra page
                                countPage = (count + maxNum - 1) / maxNum;
                            }
                            for (int i = 0; i < countPage; i++) {
                                int rows = maxNum;
                                if (i + 1 == countPage) {
                                    rows = count - i * maxNum;
                                }
                                String sql = getPageSqlByDBType(dbType, strSql, i * maxNum, rows); //build the paging SQL for this database type
                                RsJobLogDetail detail = new RsJobLogDetail();
                                detail.setStartTime(new Date());
                                detail.setJobLogId(logId);
                                detail.setDatasourceId(datasourceId);
                                detail.setConfig(config);
                                detail.setStdDatasetCode(stdTableName);
                                detail.setJobDatasetId(datasetId);
                                detail.setJobDatasetName(ds.getJobDatasetName());
                                detail.setJobId(ds.getJobId());
                                detail.setJobSql(sql);
                                detail.setJobNum(i + 1);
                                detail.setJobDatasetRows(rows);
                                detail.setSchemeVersion(schemeVersion);
                                String msg = "";
                                try {
                                    //fetch this page of data through the web service
                                    String strList = WebserviceUtil.request(url, "ExcuteSQL", new Object[]{"", sql});
                                    List<JSONObject> list = getListFromXml(strList);
                                    if (list != null) {
                                        msg = intoMongodb(list, schemeVersion, stdTableName, colList); //returns an error message, empty on success
                                    } else {
                                        msg = "查询数据为空!";
                                    }
                                    if (msg.length() > 0) {
                                        //record the failure on the job log detail row
                                        detail.setJobStatus("0");
                                        detail.setJobContent(msg);
                                        LogService.getLogger().info(msg); //text log
                                    } else {
                                        detail.setJobStatus("1");
                                        detail.setJobContent("采集成功!");
                                        successCount += rows;
                                    }
                                }
                                catch (Exception ex)
                                {
                                    msg=ex.getMessage();
                                }
                                detail.setEndTime(new Date());
                                datacollectLogDao.saveEntity(detail);
                            }
                            message = jobDatasetName + "采集成功" + successCount + "条数据,总条数" + count + "条。";
                        }
                    }
                } else {
                    throw new Exception(jobDatasetName + "数据集字段映射为空!");
                }
            } else {
                throw new Exception(jobDatasetName + "数据集映射为空!");
            }
        }
        else{
            throw new Exception("非法webservice路径!");
        }
        LogService.getLogger().info(message);
        return message;
    }
    /**
     * Execute a collection job
     */
    @Override
    public void executeJob(String jobId) throws Exception{
        //load the job configuration
        RsJobConfig job = datacollect.getJobById(jobId);
        RsJobLog log = new RsJobLog();
        log.setJobId(jobId);
        log.setJobStartTime(new Date());
        datacollectLogDao.saveEntity(log);
        String logId = log.getId();
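        // Saving the master log first generates its id, which is then attached to every
        // per-dataset RsJobLogDetail row written during this run.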
        LogService.getLogger().info("任务"+jobId+"开始采集,新增日志"+logId+"。");
        StringBuilder logStr = new StringBuilder();
        int count = 0;
        int success = 0;
        try {
            String schemeVersion = job.getSchemeVersion();
            //load the datasets attached to this job
            List<DtoJobDataset> list = datacollectDao.getDatacollectDataset(jobId);
            LogService.getLogger().info("获取任务相关数据集,数量"+list.size()+"。");
            if (list != null && list.size() > 0) {
                count = list.size();
                logStr.append("/*********** 开始采集 *******************/\n");
                //iterate over the datasets
                for (DtoJobDataset ds : list) {
                    try {
                        String type = ds.getType();
                        String message = "";
                        logStr.append(DateConvert.toString(new Date(), dateFormat) + " " + ds.getJobDatasetName());
                        if (type != null) {
                            if (type.equals("1")) //Web Service
                            {
                                message = collectWebservice(ds, schemeVersion, logId) + "\n";
                            } else if (type.equals("2"))//file system
                            {
                                message = "文件系统采集。\n";
                            } else { //database
                                message = collectTable(ds, schemeVersion, logId) + "\n";
                            }
                        } else {
                            message = ds.getJobDatasetName() + "未关联数据源!\n";
                        }
                        LogService.getLogger().info(message); //text log
                        logStr.append(message);
                        success++;
                    }
                    catch (Exception ex)
                    {
                        LogService.getLogger().info("异常:" + ex.getMessage());
                        logStr.append(ex.getMessage() + "\n");
                    }
                }
                logStr.append("/*********** 结束采集 *******************/\n");
            }
        } catch (Exception ex) {
            ex.printStackTrace();
            LogService.getLogger().info("异常:" + ex.getMessage());
            logStr.append(ex.getMessage() + "\n");
            logStr.append("/*********** 出现异常,中断采集 *******************/\n");
        }
        //finalize the master job log
        String jobContent = logStr.toString().replace("\"", "\\\"");
        if(jobContent.length()>4000)
        {
            jobContent = jobContent.substring(0,4000);
        }
        log.setJobContent(jobContent);
        log.setJobEndTime(new Date());
        log.setJobDatasetCount(count);
        log.setJobDatasetSuccess(success);
        LogService.getLogger().info("任务结束," + count + "个数据集成功采集" + success + "个。");
        datacollectLogDao.updateEntity(log);
    }
    /**
     * Re-collect data for a failed job log detail row
     */
    @Override
    @Transactional
    public ActionResult repeatJob(String id) throws Exception
    {
        RsJobLogDetail log = datacollectLogDao.getEntity(RsJobLogDetail.class, id);
        if(log.getJobStatus().equals("2")) {
            return new ActionResult(false,"数据补采中!");
        }
        if(!log.getJobStatus().equals("0")){
            return new ActionResult(false,"数据无需补采!");
        }
        try {
            log.setRepeatStartTime(new Date());
            log.setJobStatus("2"); //mark as re-collecting
            datacollectLogDao.updateEntity(log);
        }
        catch (Exception e){
            return new ActionResult(false,"补采失败!");
        }
        log.setJobStatus("0");
        datacollectLogDao.updateEntity(log);
        String stdDatasetCode = log.getStdDatasetCode();
        String sql = log.getJobSql();
        //database connection
        String datasourceId = log.getDatasourceId();
        String config = log.getConfig();
        DBHelper db = new DBHelper(datasourceId,config);
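        // Re-run the exact SQL recorded for the failed page; DBHelper is assumed to open a
        // connection from the stored datasource id and connection config.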
        //get the dataset field (metadata) mapping structure
        String schemeVersion = log.getSchemeVersion();
        String datasetId = log.getJobDatasetId();
        List colString = stdService.getDatacolByScheme(schemeVersion,datasetId);
        JSONArray colList = new JSONArray(colString);
        List<JSONObject> list = db.query(sql);
        String message = intoMongodb(list,schemeVersion,stdDatasetCode,colList);
        if(message.length()>0 || db.errorMessage.length()>0)
        {
            log.setJobStatus("0");
            log.setRepeatEndTime(new Date());
            if(message.length()>0)
            {
                log.setRepeatJobContent(message);
            }
            else{
                log.setRepeatJobContent(db.errorMessage);
            }
            datacollectLogDao.updateEntity(log);
            return new ActionResult(false,"补采失败!");
        }
        else{
            log.setJobStatus("3");
            log.setRepeatEndTime(new Date());
            log.setRepeatJobContent("补采成功!");
            datacollectLogDao.updateEntity(log);
            return new ActionResult(true,"补采成功!");
        }
    }
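    // Ad-hoc smoke test for the web service channel against a hard-coded WSDL endpoint;
    // not used by the scheduled jobs above.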
    public static void main(String[] args) throws Exception{
        //namespace is the service namespace, methodName is the operation name
        String sql = "select count(1) as COUNT,max(to_number(HDSD03_01_031)) as MAX_KEYVALUE from HDSC01_02 where 1=1 order by to_number(HDSD03_01_031)";
        //call the web service and print the result
        System.out.println(WebserviceUtil.request("http://172.19.103.71:8080/service/sql?wsdl", "ExcuteSQL", new Object[]{"", sql}));
    }
}

+ 168 - 0
hos-admin/src/main/java/com/yihu/hos/datacollect/service/DatapushService.java

@ -0,0 +1,168 @@
package com.yihu.hos.datacollect.service;
import com.yihu.hos.common.Services;
import com.yihu.hos.datacollect.dao.intf.IDatacollectDao;
import com.yihu.hos.datacollect.dao.intf.IDatacollectLogDao;
import com.yihu.hos.datacollect.model.*;
import com.yihu.hos.datacollect.service.intf.IDatapushService;
import com.yihu.ehr.dbhelper.mongodb.MongodbHelper;
import com.yihu.ehr.framework.model.ActionResult;
import com.yihu.ehr.framework.model.Result;
import com.yihu.hos.resource.service.IStdService;
import com.yihu.hos.standard.model.adapter.AdapterDatasetModel;
import com.yihu.hos.standard.model.adapter.resultModel.AdapterMetadataResultDetailModel;
import org.json.JSONArray;
import org.json.JSONObject;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.util.ArrayList;
import java.util.List;
/**
 * Data push service
 */
@Service(Services.DatapushService)
public class DatapushService implements IDatapushService {
    @Autowired
    private IDatacollectDao datacollect;
    @Autowired
    private IDatacollectLogDao datacollectLog;
    @Autowired
    private IStdService stdService;
    MongodbHelper mongo = new MongodbHelper();
    /**
     * Translate a dictionary value from the adapter (source) coding to the standard coding
     */
    private String translateDictValue(String oldValue,String type,JSONArray dictAdapterList) throws Exception
    {
        //adapter-side column used for matching
        String colName = "adapterEntryCode";
        if(type.equals("0")) //translate by entry value (name) instead of code
        {
            colName = "adapterEntryValue";
        }
        //walk the dictionary mapping entries
        for(int i=0; i< dictAdapterList.length();i++)
        {
            JSONObject dictItem = dictAdapterList.getJSONObject(i);
            if(oldValue.equals(dictItem.getString(colName)))
            {
                String newValue = dictItem.getString("stdEntryCode");
                return newValue;
            }
        }
        return oldValue;
    }
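    // Example (assumed) adapter dictionary entry consumed above:
    //   {"adapterEntryCode":"1","adapterEntryValue":"男","stdEntryCode":"01"}
    // translateDictValue("1", "1", dict) would return "01"; unmatched values are returned unchanged.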
    /*****************************************************************************************************/
    /**
     * Persist pushed data
     */
    @Override
    @Transactional
    public Result pushData(String datasetCode,String dataString,String orgCode) throws Exception
    {
        //standard (adaptation) version for this organization
        String version = datacollect.getVersionByQLC(orgCode);
        //look up the adapter mapping by the standard dataset code
        AdapterDatasetModel dataset = stdService.getDatasetByCode(version,datasetCode);
        String msg = "";
        if(dataset!=null)
        {
            String datasetId = dataset.getAdapterDatasetId().toString();
            List<AdapterMetadataResultDetailModel> list = stdService.getDatacolByScheme(version,datasetId);
            if(list!=null && list.size()>0)
            {
                JSONArray array = new JSONArray(dataString);
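                // dataString is expected to be a JSON array of row objects keyed by the adapter
                // (source) metadata codes; each row is remapped to the standard codes below.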
                List<JSONObject> dataList = new ArrayList<>();
                if(array!=null && array.length()>0)
                {
                    for (int i=0;i<array.length();i++)
                    {
                        JSONObject obj = new JSONObject();
                        JSONObject data = (JSONObject)array.get(i);
                        //map adapter columns to standard columns
                        for(AdapterMetadataResultDetailModel metadata : list)
                        {
                            String stdColName = metadata.getStdMetadataCode();
                            String adapterColName = metadata.getAdapterMetadataCode();
                            if(data.has(adapterColName))
                            {
                                String val = data.optString(adapterColName);
                                String newValue =val;
                                //dictionary-coded field? translate its value
                                if(metadata.getStdDictId()!=null&&metadata.getStdDictId()!=0)
                                {
                                    //load the dictionary mapping entries
                                    List dictString = stdService.getDictByScheme(version,metadata.getStdDictId().toString());
                                    JSONArray dictAdapterArray = new JSONArray(dictString);
                                    String type = "";
                                    if(metadata.getAdapterDataType()!=null)
                                    {
                                        type = metadata.getAdapterDataType().toString();
                                    }
                                    newValue = translateDictValue(val,type,dictAdapterArray);
                                }
                                obj.put(stdColName,newValue);
                            }
                        }
                        //org_code column
                        obj.put("RSCOM_ORG_CODE",orgCode);
                        dataList.add(obj);
                    }
                }
                //insert into MongoDB
                boolean b = mongo.insert(datasetCode,dataList);
                if(!b)
                {
                    if(mongo.errorMessage!=null && mongo.errorMessage.length()>0)
                    {
                        System.out.print(mongo.errorMessage);
                        msg = "Mongodb保存失败!(表:"+datasetCode+",数据:"+dataString+")"+mongo.errorMessage;
                    }
                    else {
                        msg ="Mongodb保存失败!(表:"+datasetCode+",数据:"+dataString+")";
                    }
                }
            }
        }
        else{
            msg ="适配标准不完善!";
        }
        //success or failure?
        if(msg.length()>0)
        {
            //log the failure
            datacollectLog.addDataPushLog("1","0",msg);
            return ActionResult.error(msg);
        }
        else{
            msg = "数据入库成功!(表:"+datasetCode+")";
            //log the success
            datacollectLog.addDataPushLog("1","1",msg);
            return ActionResult.success(msg);
        }
    }
}

+ 96 - 0
hos-admin/src/main/java/com/yihu/hos/datacollect/service/intf/IDatacollectManager.java

@ -0,0 +1,96 @@
package com.yihu.hos.datacollect.service.intf;
import com.yihu.ehr.framework.model.ActionResult;
import com.yihu.hos.datacollect.model.RsJobConfig;
import com.yihu.ehr.framework.model.DataGridResult;
import java.util.Map;
/**
 * Created by hzp on 2015/12/25.
 */
public interface IDatacollectManager {
    /**
     * Get the cron expression for a job id
     */
    public String getCronByJobId(String jobId) throws Exception;
    /**
     * Get the job configuration by id
     * @return the job configuration entity
     */
    public RsJobConfig getJobById(String id) throws Exception;
    /**
     * Get the job list
     */
    public DataGridResult getJobList(Map<String, Object> conditionMap, int page, int rows) throws Exception;
    /**
     * Get the dataset list for an adaptation scheme
     */
    public DataGridResult getSchemeDataset(String schemeId, String schemeVersion, String jobId) throws Exception;
    /**
     * Get the field list for an adaptation scheme
     */
    public DataGridResult getSchemeDatasetCol(String schemeId, String schemeVersion, String datasetId) throws Exception;
    /**
     * Get the dataset dropdown data for a job id
     */
    public DataGridResult getJobDatasetByJobId(String jobId) throws Exception;
    /**
     * Add a job
     */
    public ActionResult addJob(RsJobConfig obj, String cron, String jobDataset) throws Exception;
    /**
     * Update a job (with cron and datasets)
     */
    public ActionResult updateJob(RsJobConfig obj, String cron, String jobDataset) throws Exception;
    /**
     * Update a job (configuration only)
     */
    public ActionResult updateJob(RsJobConfig obj) throws Exception;
    /**
     * Delete a job
     */
    public ActionResult deleteJob(String id) throws Exception;
    /**
     * Enable or disable a job
     */
    public ActionResult validJob(String jobId, String valid) throws Exception;
    /**
     * Dataset datasource management list (includes all datasets)
     */
    public DataGridResult getDatasetSource(String stdVersion) throws Exception;
    /**
     * Save dataset datasource configuration
     */
    public ActionResult saveDatasetSource(String json) throws Exception;
    /**
     * Get the job log detail list
     */
    public DataGridResult getJobLogDetail(Map<String, Object> conditionMap, int page, int rows) throws Exception;
    /**
     * Get the job log list
     */
    public DataGridResult getJobLog(Map<String, Object> conditionMap, int page, int rows) throws Exception;
    /**
     * Job log details grouped by dataset
     */
    public DataGridResult getJobLogDataset(String logId) throws Exception;
}

+ 24 - 0
hos-admin/src/main/java/com/yihu/hos/datacollect/service/intf/IDatacollectService.java

@ -0,0 +1,24 @@
package com.yihu.hos.datacollect.service.intf;
import com.yihu.hos.datacollect.model.RsJobConfig;
import com.yihu.ehr.framework.model.ActionResult;
import com.yihu.ehr.framework.model.DataGridResult;
import java.util.Map;
/**
 * Created by hzp on 2015/12/25.
 */
public interface IDatacollectService {
    /**
     * Execute a collection job
     */
    public void executeJob(String jobId) throws Exception;
    /**
     * Re-collect data for a failed job log detail row
     */
    public ActionResult repeatJob(String id) throws Exception;
}

+ 15 - 0
hos-admin/src/main/java/com/yihu/hos/datacollect/service/intf/IDatapushService.java

@ -0,0 +1,15 @@
package com.yihu.hos.datacollect.service.intf;
import com.yihu.ehr.framework.model.Result;
/**
 * Created by hzp on 2016/4/14.
 */
public interface IDatapushService {
    /**
     * Persist pushed data
     */
    Result pushData(String dataset,String data,String orgCode) throws Exception;
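    // Example (assumed) call -- the second argument is a JSON array of source-format rows:
    //   pushData("HDSC01_02", "[{\"HDSD03_01_031\":\"1\"}]", "ORG001");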
}

+ 143 - 0
hos-admin/src/main/java/com/yihu/hos/resource/controller/RsCategoryController.java

@ -0,0 +1,143 @@
package com.yihu.hos.resource.controller;
import com.yihu.ehr.framework.model.Result;
import com.yihu.ehr.framework.util.controller.BaseController;
import com.yihu.hos.resource.model.RsResourceCategory;
import com.yihu.hos.resource.service.IRsCategoryService;
import org.apache.commons.beanutils.BeanUtils;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
import javax.annotation.Resource;
import javax.servlet.http.HttpServletRequest;
import java.util.HashMap;
import java.util.Map;
/**
 * Created by chenweida on 2015/12/15.
 */
@Controller
@RequestMapping("/resource/rsCategory")
public class RsCategoryController extends BaseController {
    @Resource(name = "categoryService")
    private IRsCategoryService rsCategoryService;
    //go to the list page
    @RequestMapping("initial")
    public String initial(Model model) {
        model.addAttribute("contentPage", "/resource/category/rsCategoryManage");
        return "partView";
    }
    @RequestMapping("/rsCategoryInfo")
    public String rsCategoryInfo(Model model, String id, String mode) {
        RsResourceCategory rrrd;
        try {
            if ("view".equals(mode)) {
                rrrd = rsCategoryService.getCategoryById(id);
            } else if ("modify".equals(mode)) {
                rrrd = rsCategoryService.getCategoryById(id);
            } else {
                rrrd = new RsResourceCategory();
            }
            model.addAttribute("resourceId", id);
            model.addAttribute("mode", mode);
            String s = rrrd.getRemark();
            StringBuilder s1 = new StringBuilder();
            if (s != null) {
                for (int i = 0; i < s.length(); i++) {
                    if (s.charAt(i) == '\'' || s.charAt(i) == '\"') {
                        s1.append("\\");
                        s1.append(s.charAt(i));
                    } else
                        s1.append(s.charAt(i));
                }
            }
           // rrrd.setRemark(s1.toString());
            model.addAttribute("model", rrrd);
            model.addAttribute("contentPage", "resource/category/rsCategoryInfoDialog");
        } catch (Exception e) {
            e.printStackTrace();
        }
        return "pageView";
    }
    @RequestMapping("/searchRsCategory")
    @ResponseBody
    public Result getList(String searchNm, String parentid, String status, int page, int rows) {
        Map<String, Object> conditionMap = new HashMap<>();
        conditionMap.put("name", searchNm);
        conditionMap.put("page", page);
        conditionMap.put("rows", rows);
        Result result = null;
        try {
            result = rsCategoryService.getList(conditionMap);
        } catch (Exception e) {
            e.printStackTrace();
            result = Result.error(e.getMessage());
        }
        return result;
    }
    @RequestMapping("/createCategory")
    @ResponseBody
    public Result createCategory(HttpServletRequest request) {
        try {
            RsResourceCategory rrc = new RsResourceCategory();
            BeanUtils.populate(rrc, request.getParameterMap());
            return rsCategoryService.createCategory(rrc);
        } catch (Exception e) {
            e.printStackTrace();
            return Result.error("新增失败");
        }
    }
    @RequestMapping("/updateCategory")
    @ResponseBody
    public Result updateCategory(HttpServletRequest request) {
        try {
            RsResourceCategory rrc = new RsResourceCategory();
            BeanUtils.populate(rrc, request.getParameterMap());
            return rsCategoryService.updateCategory(rrc);
        } catch (Exception e) {
            e.printStackTrace();
            return Result.error("更新失败");
        }
    }
    @RequestMapping("/deleteCategory")
    @ResponseBody
    public Result deleteCategory(HttpServletRequest request) {
        try {
            String id = request.getParameter("id");
            return rsCategoryService.deleteCategory(id);
        } catch (Exception e) {
            e.printStackTrace();
            return Result.error("删除失败");
        }
    }
    @RequestMapping("/getCategoryWithOutId")
    @ResponseBody
    public Result getCategoryWithOutId(HttpServletRequest request) {
        try {
            String id = request.getParameter("id");
            return rsCategoryService.getCategoryWithOutId(id);
        } catch (Exception e) {
            e.printStackTrace();
            return Result.error("失败");
        }
    }
}

+ 283 - 0
hos-admin/src/main/java/com/yihu/hos/resource/controller/RsDimensionController.java

@ -0,0 +1,283 @@
package com.yihu.hos.resource.controller;
import com.yihu.ehr.framework.model.Result;
import com.yihu.hos.resource.model.RsDemensionCategory;
import com.yihu.hos.resource.model.RsDimension;
import com.yihu.hos.resource.service.IRsDimensionService;
import org.apache.commons.beanutils.BeanUtils;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.util.StringUtils;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.ResponseBody;
import javax.annotation.Resource;
import javax.servlet.http.HttpServletRequest;
import java.net.URLDecoder;
import java.util.HashMap;
import java.util.Map;
/**
 * Created by chenweida on 2016/1/26.
 */
@Controller
@RequestMapping("/dimension")
public class RsDimensionController {
    @Resource(name = "rsDimensionService")
    private IRsDimensionService rsDimensionService;
    /**
     * @param model
     * @param mode
     * @return
     */
    @RequestMapping("/dimension")
    public String dimension(Model model, String mode) {
        model.addAttribute("mode", mode);
        model.addAttribute("contentPage", "dimension/dimension/dimension");
        return "partView";
    }
    /**
     * @param model
     * @param mode
     * @return
     */
    @RequestMapping("/dimensioncatetory")
    public String dimensioncatetory(Model model, String mode) {
        model.addAttribute("mode", mode);
        model.addAttribute("contentPage", "dimension/dimensioncatetory/dimensioncatetory");
        return "partView";
    }
    /**
     * Go to the dimension edit page
     *
     * @param model
     * @param id
     * @return
     */
    @RequestMapping("/editorDimension")
    public String editorDimension(Model model, String id, String category) {
        try {
            if (!StringUtils.isEmpty(id)) {
                model.addAttribute("model", rsDimensionService.getDimensionById(id));
            }
            if (category != null) {
                model.addAttribute("category", category);
            }
        } catch (Exception e) {
        }
        model.addAttribute("contentPage", "dimension/dimension/editorDimension");
        return "pageView";
    }
    /**
     * 跳转到编辑页面
     *
     * @param model
     * @param id
     * @return
     */
    @RequestMapping("/editorDimensionCatetory")
    public String editorDimensionCatetory(Model model, String id) {
        if (!StringUtils.isEmpty(id)) {
            try {
                model.addAttribute("model", rsDimensionService.getDimensionCatetoryById(id));
            } catch (Exception e) {
            }
        }
        model.addAttribute("contentPage", "dimension/dimensioncatetory/editorDimensionCatetory");
        return "pageView";
    }
    /**
     * Dimension category list
     *
     * @param request
     * @return
     */
    @RequestMapping("/getDimensionCategoryList")
    @ResponseBody
    public Result getDimensionCategoryList(HttpServletRequest request) {
        try {
            return rsDimensionService.getDimensionCategoryList();
        } catch (Exception e) {
            e.printStackTrace();
            return Result.error(e.getMessage());
        }
    }
    /**
     * Dimension list
     *
     * @param request
     * @return
     */
    @RequestMapping("/getDimensionList")
    @ResponseBody
    public Result getDimensionList(HttpServletRequest request) {
        try {
            Map<String, Object> conditionMap = new HashMap<String, Object>();
            conditionMap.put("category", request.getParameter("dimensionCategoryId"));
            conditionMap.put("name", request.getParameter("name"));
            String page = request.getParameter("page");
            String rows = request.getParameter("rows");
            conditionMap.put("page", StringUtils.isEmpty(page) ? 1 : Integer.valueOf(page));
            conditionMap.put("rows", StringUtils.isEmpty(rows) ? 10 : Integer.valueOf(rows));
            return rsDimensionService.getDimensionList(conditionMap);
        } catch (Exception e) {
            e.printStackTrace();
            return Result.error(e.getMessage());
        }
    }
    /**
     * Dimension category list (paged)
     *
     * @param request
     * @return
     */
    @RequestMapping("/getDimensionCatecoryList")
    @ResponseBody
    public Result getDimensionCatecoryList(HttpServletRequest request) {
        try {
            Map<String, Object> conditionMap = new HashMap<String, Object>();
            conditionMap.put("name", request.getParameter("name"));
            String page = request.getParameter("page");
            String rows = request.getParameter("rows");
            conditionMap.put("page", StringUtils.isEmpty(page) ? 1 : Integer.valueOf(page));
            conditionMap.put("rows", StringUtils.isEmpty(rows) ? 10 : Integer.valueOf(rows));
            return rsDimensionService.getDimensionCatecoryList(conditionMap);
        } catch (Exception e) {
            e.printStackTrace();
            return Result.error(e.getMessage());
        }
    }
    /**
     * Update a dimension
     *
     * @param request
     * @return
     */
    @RequestMapping("/updateDimension")
    @ResponseBody
    public Result updateDimension(HttpServletRequest request) {
        try {
            RsDimension rsDimension = new RsDimension();
            BeanUtils.populate(rsDimension, request.getParameterMap());
            return rsDimensionService.updateDimension(rsDimension);
        } catch (Exception e) {
            e.printStackTrace();
            return Result.error(e.getMessage());
        }
    }
    /**
     * Add a dimension
     *
     * @param request
     * @return
     */
    @RequestMapping("/addDimension")
    @ResponseBody
    public Result addDimension(HttpServletRequest request) {
        try {
            RsDimension rsDimension = new RsDimension();
            BeanUtils.populate(rsDimension, request.getParameterMap());
            return rsDimensionService.addDimension(rsDimension);
        } catch (Exception e) {
            e.printStackTrace();
            return Result.error(e.getMessage());
        }
    }
    /**
     * Delete a dimension
     *
     * @param request
     * @return
     */
    @RequestMapping("/deleteDimension")
    @ResponseBody
    public Result deleteDimension(HttpServletRequest request) {
        try {
            String id = request.getParameter("id");
            return rsDimensionService.deleteDimensionById(id);
        } catch (Exception e) {
            e.printStackTrace();
            return Result.error(e.getMessage());
        }
    }
    /**
     * Delete a dimension category
     *
     * @param request
     * @return
     */
    @RequestMapping("/deleteDimensionCatetory")
    @ResponseBody
    public Result deleteDimensionCatetory(HttpServletRequest request) {
        try {
            String id = request.getParameter("id");
            return rsDimensionService.deleteDimensionCatetory(id);
        } catch (Exception e) {
            e.printStackTrace();
            return Result.error(e.getMessage());
        }
    }
    /**
     * Add a dimension category
     *
     * @param request
     * @return
     */
    @RequestMapping("/addDimensionCatetroy")
    @ResponseBody
    public Result addDimensionCatetroy(HttpServletRequest request) {
        try {
            RsDemensionCategory r = new RsDemensionCategory();
            BeanUtils.populate(r, request.getParameterMap());
            return rsDimensionService.addDimensionCatetroy(r);
        } catch (Exception e) {
            e.printStackTrace();
            return Result.error(e.getMessage());
        }
    }
    /**
     * Update a dimension category
     *
     * @param request
     * @return
     */
    @RequestMapping("/updateDimensionCatetroy")
    @ResponseBody
    public Result updateDimensionCatetroy(HttpServletRequest request) {
        try {
            RsDemensionCategory r = new RsDemensionCategory();
            BeanUtils.populate(r, request.getParameterMap());
            return rsDimensionService.updateDimensionCatetroy(r);
        } catch (Exception e) {
            e.printStackTrace();
            return Result.error(e.getMessage());
        }
    }
}

+ 826 - 0
hos-admin/src/main/java/com/yihu/hos/resource/controller/RsResourceController.java

@ -0,0 +1,826 @@
package com.yihu.hos.resource.controller;
import com.yihu.hos.common.JXLUtil;
import com.yihu.ehr.framework.model.ActionResult;
import com.yihu.ehr.framework.model.DataGridResult;
import com.yihu.ehr.framework.model.Result;
import com.yihu.ehr.framework.util.controller.BaseController;
import com.yihu.hos.resource.model.*;
import com.yihu.hos.resource.service.IRsResourceRestService;
import com.yihu.hos.resource.service.IRsResourceService;
import com.yihu.hos.resource.viewresult.RsResourceDeatilModel;
import net.sf.json.JSONArray;
import net.sf.json.JSONObject;
import org.apache.commons.beanutils.BeanUtils;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.util.StringUtils;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
import javax.annotation.Resource;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Created by chenweida on 2015/12/15.
 */
@RequestMapping("/resource")
@Controller
public class RsResourceController extends BaseController {
    @Resource(name = "resourceService")
    private IRsResourceService resourceService;
    @Resource(name = "resourceRestService")
    private IRsResourceRestService resourceRestService;
    /**
     * Open the resourcebrowse/resource.jsp page
     *
     * @param model
     * @param mode
     * @return
     */
    @RequestMapping("/resourcePage")
    public String resourcePage(Model model, String mode) {
        model.addAttribute("mode", mode);
        model.addAttribute("contentPage", "resource/resourcebrowse/resource");
        return "partView";
    }
    @RequestMapping("/resourceBrowse")
    public String resourceBrowse(Model model, String id) {
        try {
            if (!StringUtils.isEmpty(id))
                model.addAttribute("resourceModel", resourceService.getRsResourceDeatilModel(id));
            model.addAttribute("contentPage", "resource/resourceregister/resourceBrowse");
        } catch (Exception e) {
            e.printStackTrace();
        }
        return "partView";
    }
    /**
     * Get the resource tree list
     *
     * @return
     */
    @RequestMapping("/resourceTreeList")
    @ResponseBody
    public Result resourceTreeList(String name) {
        try {
            ActionResult actionResult = new ActionResult();
            actionResult.setData(resourceService.getResourceTreeList(name));
            return actionResult;
        } catch (Exception e) {
            e.printStackTrace();
            return Result.error(e.getMessage());
        }
    }
    @RequestMapping("/resourceCategoryTreeList")
    @ResponseBody
    public Result resourceCategoryTreeList(String name) {
        try {
            ActionResult actionResult = new ActionResult();
            actionResult.setData(resourceService.resourceCategoryTreeList(name));
            return actionResult;
        } catch (Exception e) {
            e.printStackTrace();
            return Result.error(e.getMessage());
        }
    }
    //go to the list page
    @RequestMapping("/resource/initial")
    public String initial(Model model) {
        model.addAttribute("contentPage", "/resource/resourceregister/rsResourceManage");
        return "partView";
    }
    //go to the resource configuration page
    @RequestMapping("/resource/configInitial")
    public String resourceConfigInitial(String id, Model model) {
        try {
            model.addAttribute("resourceModel", resourceService.getRsResourceDeatilModel(id));
            model.addAttribute("model", model);
            model.addAttribute("contentPage", "/resource/resourceregister/rsResourceConfig");
        } catch (Exception e) {
            e.printStackTrace();
        }
        return "partView";
    }
    //go to the resource authorization page
    @RequestMapping("/resource/authorizeInitial")
    public String rsResourceAuthorizeInitial(String id, Model model) {
        try {
            model.addAttribute("resourceModel", resourceService.getRsResourceDeatilModel(id));
            model.addAttribute("contentPage", "/resource/resourceregister/rsResourceAuthorize");
        } catch (Exception e) {
            e.printStackTrace();
        }
        return "partView";
    }
    /**
     * Open the resource authorization dialog
     *
     * @param model
     * @param id
     * @param mode
     * @return
     */
    @RequestMapping("/resource/rsResourceAuthorize")
    public String rsResourceAuthorize(Model model, String id, String mode) {
        try {
            model.addAttribute("resourceId", id);
            model.addAttribute("mode", mode);
            model.addAttribute("contentPage", "/resource/resourceregister/rsResourceAuthorizeDialog");
        } catch (Exception e) {
            e.printStackTrace();
        }
        return "pageView";
    }
    /**
     * Open the import-dataset dialog
     *
     * @param model
     * @param mode
     * @return
     */
    @RequestMapping("/resource/rsResourceImportDataset")
    public String rsResourceImportDataset(Model model, String mode, String resourceId, String datasetId) {
        try {
            model.addAttribute("mode", mode);
            model.addAttribute("resourceId", resourceId);
            RsResourceDeatilModel rrsdm = resourceService.getRsResourceDeatilModel(resourceId);
            model.addAttribute("datasetId", rrsdm.getDatasetCode());
            if (!StringUtils.isEmpty(datasetId)) {
                RsResourceDataset ds = resourceService.getLocalDataSetById(datasetId);
                model.addAttribute("datasetCode", ds.getCode());
            }
            model.addAttribute("contentPage", "/resource/resourceregister/rsResourceImportDatasetDialog");
        } catch (Exception e) {
            e.printStackTrace();
        }
        return "pageView";
    }
    /**
     * Open the dialog for adding or editing a resource field
     *
     * @param model
     * @param id
     * @param mode
     * @return
     */
    @RequestMapping("/resource/rsResourceFieldInfo")
    public String rsResourceFieldInfo(Model model, String id, String mode, String datasetId, String resourceId) {
        try {
            RsResourceMetadata rsResourceMetadata = null;
            //mode is one of: new, modify, view
            if (mode.equals("view")) {
                rsResourceMetadata = resourceService.getRsResourceMetadata(id);
            } else if (mode.equals("modify")) {
                rsResourceMetadata = resourceService.getRsResourceMetadata(id);
            } else {
                rsResourceMetadata = new RsResourceMetadata();
            }
            model.addAttribute("entity", rsResourceMetadata);
            model.addAttribute("mode", mode);
            model.addAttribute("datasetId", datasetId);
            model.addAttribute("resourceId", resourceId);
            model.addAttribute("contentPage", "/resource/resourceregister/rsResourceFieldInfoDialog");
        } catch (Exception e) {
            e.printStackTrace();
        }
        return "pageView";
    }
    /**
     * Open the resource dimension configuration dialog
     *
     * @param model
     * @param id
     * @param resourceId
     * @return
     */
    @RequestMapping("/resource/rsResourceDimension")
    public String rsResourceDimension(Model model, String id, String resourceId) {
        try {
            RsAppResourceDetail rard = resourceService.getAppResourceDetailId(id);
            model.addAttribute("appResourceDetail", rard);
            model.addAttribute("metadataName", resourceService.getRsResourceMetadata(rard.getMetadataId()));
            model.addAttribute("resultSelect", JSONArray.fromObject(resourceService.getDimensionByResourceId(resourceId)).toString());
            model.addAttribute("resourceId", resourceId);
            model.addAttribute("contentPage", "/resource/resourceregister/rsResourceDimensionDialog");
        } catch (Exception e) {
            e.printStackTrace();
        }
        return "pageView";
    }
    @RequestMapping("/resource/searchRsResource")
    @ResponseBody
    public Result getResourceList(HttpServletRequest request) {
        try {
            String page = request.getParameter("page");
            String rows = request.getParameter("rows");
            String categoryId = request.getParameter("categoryId");
            String resourceName = request.getParameter("resourceName");
            DataGridResult result = new DataGridResult(Integer.valueOf(page), Integer.valueOf(rows));
            Map<String, Object> conditionMap = new HashMap<>();
            conditionMap.put("categoryId", categoryId);
            conditionMap.put("resourceName", resourceName);
            result = resourceService.getResourceList(conditionMap, Integer.valueOf(page), Integer.valueOf(rows));
            return result;
        } catch (Exception e) {
            return Result.error(e.getMessage());
        }
    }
    /**
     * Resource browse page: fetch the datagrid rows
     *
     * @param request
     * @return
     */
    @RequestMapping("/resource/searchRomoteResourceList")
    @ResponseBody
    public Result searchRomoteResourceList(HttpServletRequest request) {
        DataGridResult result = new DataGridResult();
        String id = request.getParameter("id");//resource id
        if (StringUtils.isEmpty(id)) {
            return result;
        }
        try {
            //currently exports at most fifty rows
            String pageSize = StringUtils.isEmpty(request.getParameter("rows")) ? "10" : request.getParameter("rows");
            String currPage = StringUtils.isEmpty(request.getParameter("page")) ? "1" : request.getParameter("page");
            String queryParams = request.getParameter("queryParams");
            //load the resource
            RsResourceDeatilModel resource = resourceService.getRsResourceDeatilModel(id);
            //build the request parameters
            Map<String, Object> params = new HashMap<String, Object>();
            params.put("rows", Integer.valueOf(pageSize));
            params.put("page", Integer.valueOf(currPage));
            if (!StringUtils.isEmpty(resource.getDatasetCode())) {
                RsResourceRestDetail rrd = resourceRestService.getResourceRestDetailByCode(resource.getCode());
                if ("resource.getMongoDBData".equals(rrd.getNamespace())) {
                    //MongoDB-backed resource: add the collection (table) code parameter
                    String datasetId = resource.getDatasetCode();
                    if (!StringUtils.isEmpty(datasetId)) {
                        RsResourceDataset rd = resourceService.getLocalDataSetById(datasetId);
                        params.put("tableCode", rd.getCode());
                    } else {
                        return result;
                    }
                }
            }
            if (!StringUtils.isEmpty(queryParams)) {
                params.put("condition", queryParams);
            } else {
                params.put("condition", "{}");
            }
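            // "condition" is assumed to be a JSON filter string understood by the remote
            // resource service; an empty filter ("{}") returns all rows for the page.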
            //call the service to fetch the data
            String response_params = resourceService.searchRomoteResourceList(resource.getCode(), params);
            if (response_params.startsWith("[")) {
                //response is a bare JSON array
                Integer count = JSONArray.fromObject(response_params).size();//total count
                //fetch metadata from the service
                //populate the dynamic datagrid values
                result.setDetailModelList(JSONArray.fromObject(response_params.toString().replace(":null", ":\"\"")));
                result.setTotalCount(count);
                result.setCurrPage(Integer.valueOf(currPage));
                result.setPageSize(count);
                result.setSuccessFlg(true);
            } else if (response_params.startsWith("{")) {
                Integer count = (Integer) JSONObject.fromObject(response_params).get("count");//total count
                //fetch metadata from the service
                //populate the dynamic datagrid values
                String s = JSONObject.fromObject(response_params).get("detailModelList").toString().replace(":null", ":\"\"");
                result.setDetailModelList(JSONArray.fromObject(s));
                result.setTotalCount(count);
                result.setCurrPage(Integer.valueOf(currPage));
                result.setPageSize(Integer.valueOf(pageSize));
                result.setSuccessFlg(true);
            } else {
                result.setSuccessFlg(false);
                result.setMessage("获取数据失败");
            }
        } catch (Exception e) {
            e.printStackTrace();
            result.setSuccessFlg(false);
            result.setMessage("获取数据失败");
        }
        return result;
    }
    /**
     * Resource browse page: fetch the datagrid columns
     *
     * @param request
     * @return
     */
    @RequestMapping("/resource/searchResourceDatagridColunm")
    @ResponseBody
    public String searchResourceDatagridColunm(HttpServletRequest request, HttpServletResponse response) {
        Map<String, Object> mapParam = new HashMap<String, Object>();
        String id = request.getParameter("id");//resource id
        try {
            //load the resource
            RsResourceDeatilModel resource = resourceService.getRsResourceDeatilModel(id);
            //get the column metadata
            List<RsResourceMetadata> rsResourceMetadatas = resourceService.getResourceMetadataListColumn(resource.getId());
            List<String> colunmName = new ArrayList<String>();
            List<String> colunmCode = new ArrayList<String>();
            List<String> colunmType = new ArrayList<String>();
            List<String> colunmDict = new ArrayList<String>();
            for (RsResourceMetadata r : rsResourceMetadatas) {
                colunmName.add(r.getName());
                colunmCode.add(r.getCode());
                colunmType.add(r.getColumnType());
                colunmDict.add(r.getDictId() + "");
            }
            //set the dynamic datagrid column values
            mapParam.put("colunmName", colunmName);
            mapParam.put("colunmCode", colunmCode);
            mapParam.put("colunmDict", colunmDict);
            mapParam.put("colunmType", colunmType);
        } catch (Exception e) {
            e.printStackTrace();
        }
        return JSONObject.fromObject(mapParam).toString();
    }
    /**
     * Get the resource field list
     *
     * @param request
     * @return
     */
    @RequestMapping("/resource/searchResourceFieldList")
    @ResponseBody
    public Result searchResourceFieldList(HttpServletRequest request) {
        try {
            String resourceId = request.getParameter("resourceId");
            String page = request.getParameter("page");
            String rows = request.getParameter("rows");
            Map<String, Object> conditionMap = new HashMap<>();
            conditionMap.put("resourceId", resourceId);
            return resourceService.getResourceFiled(conditionMap, Integer.valueOf(page), Integer.valueOf(rows));
        } catch (Exception e) {
            e.printStackTrace();
            return Result.error(e.getMessage());
        }
    }
    /**
     * Navigate to the page for creating, modifying, or viewing a resource
     *
     * @param model
     * @param id
     * @param mode
     * @return
     */
    @RequestMapping("/resource/rsResourceInfo")
    public String rsResourceInfoTemplate(Model model, String id, String mode, String categoryId) {
        try {
            RsResourceDeatilModel rsResource;
            // mode is one of three values: new (create), modify, view
            if ("view".equals(mode) || "modify".equals(mode)) {
                rsResource = resourceService.getRsResourceDeatilModel(id);
            } else {
                rsResource = new RsResourceDeatilModel();
            }
            model.addAttribute("entity", rsResource);
            model.addAttribute("categoryId", categoryId);
            model.addAttribute("mode", mode);
            model.addAttribute("contentPage", "/resource/resourceregister/rsResourceInfoDialog");
        } catch (Exception e) {
            e.printStackTrace();
        }
        return "pageView";
    }
    /**
     * Update a resource
     *
     * @param request
     * @return
     */
    @RequestMapping("/resource/updateRsResource")
    @ResponseBody
    public Result updateRsResource(HttpServletRequest request) {
        try {
            String category = request.getParameter("category");
            String rate = request.getParameter("rate");
            String code = request.getParameter("code");
            String name = request.getParameter("name");
            String supportType = request.getParameter("inp_resource_support_type");
            String type = request.getParameter("inp_resource_type");
            String id = request.getParameter("id");
            RsResource rs = new RsResource();
            rs.setId(id);
            rs.setCode(code);
            rs.setType(type);
            rs.setName(name);
            rs.setServiceRate(rate);
            rs.setCategory(category);
            rs.setDataSupportType(supportType);
            resourceService.updateRsResource(rs);
            return Result.success("修改成功!");
        } catch (Exception e) {
            e.printStackTrace();
            return Result.error("修改失败!");
        }
    }
    /**
     * Create a resource
     *
     * @param request
     * @return
     */
    @RequestMapping("/resource/createRsResource")
    @ResponseBody
    public Result createRsResource(HttpServletRequest request) {
        try {
            String category = request.getParameter("category");
            String rate = request.getParameter("rate");
            String code = request.getParameter("code");
            String name = request.getParameter("name");
            String supportType = request.getParameter("inp_resource_support_type");
            String type = request.getParameter("inp_resource_type");
            RsResource rs = new RsResource();
            rs.setCode(code);
            rs.setType(type);
            rs.setName(name);
            rs.setServiceRate(rate);
            rs.setCategory(category);
            rs.setDataSupportType(supportType);
            resourceService.createRsResource(rs);
            return Result.success("新增成功!");
        } catch (Exception e) {
            e.printStackTrace();
            return Result.error("新增失败!");
        }
    }
    @RequestMapping("/resource/deleteResource")
    @ResponseBody
    public Result deleteResource(HttpServletRequest request) {
        try {
            String id = request.getParameter("id");
            resourceService.deleteResource(id);
            return Result.success("删除成功!");
        } catch (Exception e) {
            e.printStackTrace();
            return Result.error("删除失败!");
        }
    }
    @RequestMapping("/resource/getDataSet")
    @ResponseBody
    public Result getDataSet(HttpServletRequest request) {
        try {
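            // Collect the dataset filter and paging parameters and delegate to the resource service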
            Map<String, Object> params = new HashMap<String, Object>();
            String datasetId = request.getParameter("datasetId");
            String name = request.getParameter("name");
            String page = request.getParameter("page");
            String rsResourceImportDatasetDialogName = request.getParameter("rsResourceImportDatasetDialogName");
            String rows = request.getParameter("rows");
            params.put("datasetId", datasetId);
            params.put("name", name);
            params.put("rsResourceImportDatasetDialogName", rsResourceImportDatasetDialogName);
            params.put("page", page);
            params.put("rows", rows);
            return resourceService.getDataSet(params);
        } catch (Exception e) {
            e.printStackTrace();
            return Result.error("删除失败!");
        }
    }
    @RequestMapping("/resource/getMetaData")
    @ResponseBody
    public Result getMetaData(HttpServletRequest request) {
        try {
            Map<String, Object> params = new HashMap<String, Object>();
            String id = request.getParameter("id");
            params.put("id", id);
            return resourceService.getMetaData(params);
        } catch (Exception e) {
            e.printStackTrace();
            return Result.error("删除失败!");
        }
    }
    /**
     * Import all metadata of a dataset into the resource
     *
     * @param request
     * @return
     */
    @RequestMapping("/resource/improtAllMetadata")
    @ResponseBody
    public Result improtAllMetadata(HttpServletRequest request) {
        try {
            String reourceId = request.getParameter("reourceId");// resource ID
            String datasetId = request.getParameter("datasetId");// dataset ID
            RsResourceDataset dataset = new RsResourceDataset();
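            // Populate the dataset bean from the request parameter map (Apache Commons BeanUtils)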
            BeanUtils.populate(dataset, request.getParameterMap());
            return resourceService.improtAllMetadata(dataset, reourceId);
        } catch (Exception e) {
            e.printStackTrace();
            return Result.error(e.getMessage());
        }
    }
    /**
     * Import the selected metadata of a dataset into the resource
     *
     * @param request
     * @return
     */
    @RequestMapping("/resource/improtSameMetadata")
    @ResponseBody
    public Result improtSameMetadata(HttpServletRequest request) {
        try {
            String dataset = request.getParameter("dataset");// dataset
            String metaData = request.getParameter("selecrMedatata");// selected metadata
            String reourceId = request.getParameter("reourceId");// resource ID
            String datasetId = request.getParameter("datasetId");// dataset ID
            return resourceService.improtSameMetadata(reourceId, metaData, dataset, datasetId);
        } catch (Exception e) {
            e.printStackTrace();
            return Result.error(e.getMessage());
        }
    }
    /**
     * Delete a metadata entry
     *
     * @param request
     * @return
     */
    @RequestMapping("/resource/deleteMetadata")
    @ResponseBody
    public Result deleteMetadata(HttpServletRequest request) {
        try {
            String id = request.getParameter("id");// metadata ID
            return resourceService.deleteMetadata(id);
        } catch (Exception e) {
            e.printStackTrace();
            return Result.error(e.getMessage());
        }
    }
    @RequestMapping("/resource/createMetaData")
    @ResponseBody
    public Result createMetaData(HttpServletRequest request) {
        try {
            if (StringUtils.isEmpty(request.getParameter("inp_field_is_PK"))) {
                throw new Exception("请选择是否为主键");
            }
            if (StringUtils.isEmpty(request.getParameter("inp_field_is_null"))) {
                throw new Exception("请选择是否为空");
            }
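            // Build the metadata record from the submitted form fields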
            RsResourceMetadata rmd = new RsResourceMetadata();
            rmd.setCode(request.getParameter("columnCode"));
            rmd.setColumnName(request.getParameter("columnCode"));
            rmd.setColumnLength(request.getParameter("columnLength"));
            rmd.setColumnType(request.getParameter("columnType"));
            rmd.setDefinition(request.getParameter("definition"));
            rmd.setNullable(Integer.valueOf(request.getParameter("inp_field_is_null")));
            rmd.setPrimaryKey(Integer.valueOf(request.getParameter("inp_field_is_PK")));
            rmd.setDataSupportType(2);
            rmd.setLogicDataSource(request.getParameter("logicDataSource"));
            rmd.setResourceId(request.getParameter("resourceId"));
            rmd.setName(request.getParameter("name"));
            return resourceService.createMetaData(rmd);
        } catch (Exception e) {
            e.printStackTrace();
            return Result.error(e.getMessage());
        }
    }
    @RequestMapping("/resource/updateMetaData")
    @ResponseBody
    public Result updateMetaData(HttpServletRequest request) {
        try {
            RsResourceMetadata rmd = new RsResourceMetadata();
            rmd.setId(request.getParameter("id"));
            rmd.setCode(request.getParameter("columnCode"));
            rmd.setColumnName(request.getParameter("columnCode"));
            rmd.setColumnLength(request.getParameter("columnLength"));
            rmd.setColumnType(request.getParameter("columnType"));
            rmd.setDefinition(request.getParameter("definition"));
            if (StringUtils.isEmpty(request.getParameter("inp_field_is_PK"))) {
                throw new Exception("请选择是否为主键");
            }
            if (StringUtils.isEmpty(request.getParameter("inp_field_is_null"))) {
                throw new Exception("请选择是否为空");
            }
            rmd.setNullable(Integer.valueOf(request.getParameter("inp_field_is_null")));
            rmd.setPrimaryKey(Integer.valueOf(request.getParameter("inp_field_is_PK")));
            rmd.setDataSupportType(2);
            rmd.setLogicDataSource(request.getParameter("logicDataSource"));
            rmd.setResourceId(request.getParameter("resourceId"));
            rmd.setName(request.getParameter("name"));
            return resourceService.updateMetaData(rmd);
        } catch (Exception e) {
            e.printStackTrace();
            return Result.error(e.getMessage());
        }
    }
    @RequestMapping("/resource/getMetaDataByDatasetId")
    @ResponseBody
    public Result getMetaDataByDatasetId(HttpServletRequest request) {
        try {
            String datasetCode = request.getParameter("datasetCode");
            String datasetId = request.getParameter("datasetId");
            return resourceService.getMetaDataByDatasetId(datasetCode, datasetId);
        } catch (Exception e) {
            e.printStackTrace();
            return Result.error(e.getMessage());
        }
    }
    @RequestMapping("/resource/getResourceAuthorize")
    @ResponseBody
    public Result getResourceAuthorize(HttpServletRequest request) {
        try {
            String resourceId = request.getParameter("resourceId");
            String appId = request.getParameter("appId");
            if (StringUtils.isEmpty(resourceId) || StringUtils.isEmpty(appId)) {
                return new DataGridResult();
            }
            return resourceService.getResourceAuthorize(resourceId, appId);
        } catch (Exception e) {
            e.printStackTrace();
            return Result.error(e.getMessage());
        }
    }
    /**
     * Resource authorization page: when opening app authorization, query both authorized and unauthorized apps
     *
     * @param request
     * @return
     */
    @RequestMapping("/resource/getResourceApp")
    @ResponseBody
    public Result getResourceApp(HttpServletRequest request) {
        try {
            String id = request.getParameter("id");
            return resourceService.getResourceApp(id);
        } catch (Exception e) {
            e.printStackTrace();
            return Result.error(e.getMessage());
        }
    }
    /**
     * Update the apps authorized for a resource
     *
     * @param request
     * @return
     */
    @RequestMapping("/resource/updateResourceApp")
    @ResponseBody
    public Result updateResourceApp(HttpServletRequest request) {
        try {
            String ids = request.getParameter("ids");
            String resourceId = request.getParameter("resourceId");
            return resourceService.updateResourceApp(ids, resourceId);
        } catch (Exception e) {
            e.printStackTrace();
            return Result.error(e.getMessage());
        }
    }
    /**
     * Query the list of apps authorized for a resource
     *
     * @param request
     * @return
     */
    @RequestMapping("/resource/getResourceAppList")
    @ResponseBody
    public Result getResourceAppList(HttpServletRequest request) {
        try {
            String resourceId = request.getParameter("resourceId");
            return resourceService.getResourceAppList(resourceId);
        } catch (Exception e) {
            e.printStackTrace();
            return Result.error(e.getMessage());
        }
    }
    /**
     * flag values: 1 = deny all, 2 = allow all, 3 = deny selected, 4 = allow selected
     *
     * @param request
     * @return
     */
    @RequestMapping("/resource/changeAppResource")
    @ResponseBody
    public Result changeAppResource(HttpServletRequest request) {
        try {
            String resourceId = request.getParameter("resourceId");
            String appId = request.getParameter("appId");
            String ids = request.getParameter("ids");
            String flag = request.getParameter("flag");
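            // e.g. flag=2 grants the app access to the whole resource, while flag=3 revokes it for the selected ids only (per the flag values documented above)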
            return resourceService.changeAppResource(flag, appId, ids, resourceId);
        } catch (Exception e) {
            e.printStackTrace();
            return Result.error(e.getMessage());
        }
    }
    @RequestMapping("/resource/saveResourceDimension")
    @ResponseBody
    public Result saveResourceDimension(HttpServletRequest request) {
        try {
            String appResourceDeatilId = request.getParameter("appResourceDeatilId");
            String value = request.getParameter("value");
            String dimensionId = request.getParameter("dimensionId");
            RsAppResourceDetail appResourceDetail = new RsAppResourceDetail();
            appResourceDetail.setId(appResourceDeatilId);
            appResourceDetail.setDimensionValue(value);
            appResourceDetail.setDimensionId(dimensionId);
            return resourceService.saveResourceDimension(appResourceDetail);
        } catch (Exception e) {
            e.printStackTrace();
            return Result.error(e.getMessage());
        }
    }
    @RequestMapping("/exportAllExcel")
    public void exportAllExcel(HttpServletRequest request, HttpServletResponse response) {
        try {
            String field = request.getParameter("field");
            String name = request.getParameter("name");
            Map<String, String> fieldMap = new HashMap<String, String>();
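            // "field" is a JSON array of {key, value} pairs, presumably mapping each export column to its header text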
            for (Object o : JSONArray.fromObject(field)) {
                JSONObject jo = ((JSONObject) o);
                fieldMap.put(String.valueOf(jo.get("key")), String.valueOf(jo.get("value")));
            }
            Result listResult = this.searchRomoteResourceList(request);
            JXLUtil.listMapToExcel(((DataGridResult) listResult).getDetailModelList(), fieldMap, name, response);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
    @RequestMapping("/exportSomeExcel")
    public void exportSomeExcel(HttpServletRequest request, HttpServletResponse response) {
        try {
            String field = request.getParameter("field");
            String data = request.getParameter("data");
            String name = request.getParameter("name");
            Map<String, String> fieldMap = new HashMap<String, String>();
            for (Object o : JSONArray.fromObject(field)) {
                JSONObject jo = ((JSONObject) o);
                fieldMap.put(String.valueOf(jo.get("key")), String.valueOf(jo.get("value")));
            }
            JXLUtil.listMapToExcel(JSONArray.fromObject(data), fieldMap, name, response);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}

+ 200 - 0
hos-admin/src/main/java/com/yihu/hos/resource/controller/RsResourceRestController.java

@ -0,0 +1,200 @@
package com.yihu.hos.resource.controller;
import com.yihu.ehr.framework.model.DataGridResult;
import com.yihu.ehr.framework.model.Result;
import com.yihu.hos.resource.model.RsResourceRest;
import com.yihu.hos.resource.model.RsResourceRestDetail;
import com.yihu.hos.resource.service.IRsResourceRestService;
import org.apache.commons.beanutils.BeanUtils;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.util.StringUtils;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
import javax.annotation.Resource;
import javax.servlet.http.HttpServletRequest;
import java.util.HashMap;
import java.util.Map;
/**
 * Created by chenweida on 2016/1/30.
 */
@RequestMapping("/resourceRest")
@Controller
public class RsResourceRestController {
    @Resource(name = "resourceRestService")
    private IRsResourceRestService resourceRestService;
    @RequestMapping("/initial")
    public String initial(Model model) {
        model.addAttribute("contentPage", "resource/resourcerest/resourceRest");
        return "partView";
    }
    @RequestMapping("/resourceRestDialog")
    public String resourceRestDialog(Model model, String id, String mode) {
        RsResourceRest rrr;
        try {
            if ("view".equals(mode)) {
                rrr = resourceRestService.getResourceRestById(id);
            } else if ("modify".equals(mode)) {
                rrr = resourceRestService.getResourceRestById(id);
            } else {
                rrr = new RsResourceRest();
            }
            model.addAttribute("resourceId", id);
            model.addAttribute("mode", mode);
            model.addAttribute("model", rrr);
            model.addAttribute("contentPage", "resource/resourcerest/resourceRestDialog");
        } catch (Exception e) {
            e.printStackTrace();
        }
        return "pageView";
    }
    @RequestMapping("/resourceRestDetailDialog")
    public String resourceRestDetailDialog(Model model, String id, String mode) {
        RsResourceRestDetail rrrd;
        try {
            if ("view".equals(mode)) {
                rrrd = resourceRestService.getResourceRestDetail(id);
            } else if ("modify".equals(mode)) {
                rrrd = resourceRestService.getResourceRestDetail(id);
            } else {
                rrrd = new RsResourceRestDetail();
            }
            model.addAttribute("resourceId", id);
            model.addAttribute("mode", mode);
            model.addAttribute("model", rrrd);
            model.addAttribute("contentPage", "resource/resourcerest/resourceRestDetailDialog");
        } catch (Exception e) {
            e.printStackTrace();
        }
        return "pageView";
    }
    @RequestMapping("/getResourceRestDetilList")
    @ResponseBody
    public Result getResourceRestDetilList(HttpServletRequest request) {
        try {
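            // Fall back to page 1 and 10 rows per page when the paging parameters are absent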
            String page = StringUtils.isEmpty(request.getParameter("page")) ? "1" : request.getParameter("page");
            String rows = StringUtils.isEmpty(request.getParameter("rows")) ? "10" : request.getParameter("rows");
            String resourceRestId = request.getParameter("resourceRestId");
            String id = request.getParameter("id");
            String resourceRestName = request.getParameter("resourceRestName");
            Map<String, Object> conditionMap = new HashMap<>();
            conditionMap.put("resourceRestId", resourceRestId);
            conditionMap.put("id", id);
            conditionMap.put("resourceRestName", resourceRestName);
            return resourceRestService.getResourceRestDetilList(conditionMap, Integer.valueOf(page), Integer.valueOf(rows));
        } catch (Exception e) {
            e.printStackTrace();
            return Result.error(e.getMessage());
        }
    }
    @RequestMapping("/getResourceRestList")
    @ResponseBody
    public Result getResourceRestList(HttpServletRequest request) {
        try {
            String page = request.getParameter("page");
            String rows = request.getParameter("rows");
            String categoryId = request.getParameter("categoryId");
            Map<String, Object> conditionMap = new HashMap<>();
            conditionMap.put("categoryId", categoryId);
            return resourceRestService.getResourceRestList(conditionMap, Integer.valueOf(page), Integer.valueOf(rows));
        } catch (Exception e) {
            e.printStackTrace();
            return Result.error(e.getMessage());
        }
    }
    @RequestMapping("/deleteResourceRest")
    @ResponseBody
    public Result deleteResourceRest(HttpServletRequest request) {
        try {
            String id = request.getParameter("id");
            return resourceRestService.deleteResourceRestById(id);
        } catch (Exception e) {
            e.printStackTrace();
            return Result.error("删除失败");
        }
    }
    @RequestMapping("/deleteResourceRestDetail")
    @ResponseBody
    public Result deleteResourceRestDetail(HttpServletRequest request) {
        try {
            String id = request.getParameter("id");
            return resourceRestService.deleteResourceRestDetail(id);
        } catch (Exception e) {
            e.printStackTrace();
            return Result.error("删除失败");
        }
    }
    @RequestMapping("/addResourceRest")
    @ResponseBody
    public Result addResourceRest(HttpServletRequest request) {
        try {
            RsResourceRest rr = new RsResourceRest();
            BeanUtils.populate(rr, request.getParameterMap());
            return resourceRestService.addResourceRest(rr);
        } catch (Exception e) {
            e.printStackTrace();
            return Result.error("新增失败");
        }
    }
    @RequestMapping("/updateResourceRest")
    @ResponseBody
    public Result updateResourceRest(HttpServletRequest request) {
        try {
            RsResourceRest rr = new RsResourceRest();
            BeanUtils.populate(rr, request.getParameterMap());
            return resourceRestService.updateResourceRest(rr);
        } catch (Exception e) {
            e.printStackTrace();
            return Result.error("更新失败");
        }
    }
    @RequestMapping("/addResourceRestDetail")
    @ResponseBody
    public Result addResourceRestDetail(HttpServletRequest request) {
        try {
            RsResourceRestDetail rrd = new RsResourceRestDetail();
            BeanUtils.populate(rrd, request.getParameterMap());
            return resourceRestService.addResourceRestDetail(rrd);
        } catch (Exception e) {
            e.printStackTrace();
            return Result.error("新增失败");
        }
    }
    @RequestMapping("/updateResourceRestDetail")
    @ResponseBody
    public Result updateResourceRestDetail(HttpServletRequest request) {
        try {
            RsResourceRestDetail rrd = new RsResourceRestDetail();
            BeanUtils.populate(rrd, request.getParameterMap());
            return resourceRestService.updateResourceRestDetail(rrd);
        } catch (Exception e) {
            e.printStackTrace();
            return Result.error("更新失败");
        }
    }
}

+ 45 - 0
hos-admin/src/main/java/com/yihu/hos/resource/controller/StdController.java

@ -0,0 +1,45 @@
package com.yihu.hos.resource.controller;
import com.yihu.hos.common.Services;
import com.yihu.ehr.framework.model.Result;
import com.yihu.ehr.framework.util.controller.BaseController;
import com.yihu.hos.resource.service.IStdService;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
import javax.annotation.Resource;
/**
 * Data collection configuration page
 * Created by hzp on 2015/8/12.
 */
@RequestMapping("/std")
@Controller("stdController")
public class StdController extends BaseController {
    @Resource(name = Services.StdService)
    IStdService stdService;
    /**************************** Standard dictionaries ************************************************/
    /**
     * Get the dictionary entries by dictionary ID
     * @return
     */
    @RequestMapping("getDictByScheme")
    @ResponseBody
    public Result getDictByScheme(String version, String dictId){
        try {
            return stdService.getDictResultByVersion(version, dictId);
        }
        catch (Exception ex)
        {
            return Result.error(ex.getMessage());
        }
    }
}

+ 21 - 0
hos-admin/src/main/java/com/yihu/hos/resource/dao/IRsAppResourceDao.java

@ -0,0 +1,21 @@
package com.yihu.hos.resource.dao;
import com.yihu.ehr.framework.common.dao.XSQLGeneralDAO;
import com.yihu.hos.resource.model.RsAppResource;
import java.util.List;
/**
 * Created by chenweida on 2016/1/25.
 */
public interface IRsAppResourceDao extends XSQLGeneralDAO {
    List<RsAppResource> getAppResourceByResourceId(String resourceid) throws Exception;
    void deleteByResourceId(String resourceId)throws Exception;
    RsAppResource getAppResourceByResourceIdAndAppId(String resourceId, String appId)throws Exception;
    void deleteEntityList(List<RsAppResource> deleteList)throws Exception;
    void saveEntityList(List<RsAppResource> newList)throws Exception;
}

+ 17 - 0
hos-admin/src/main/java/com/yihu/hos/resource/dao/IRsAppResourceDetailDao.java

@ -0,0 +1,17 @@
package com.yihu.hos.resource.dao;
import com.yihu.ehr.framework.common.dao.XSQLGeneralDAO;
import com.yihu.hos.resource.model.RsAppResourceDetail;
import java.util.List;
/**
 * Created by chenweida on 2016/1/25.
 */
public interface IRsAppResourceDetailDao extends XSQLGeneralDAO {
    List<RsAppResourceDetail> getAppResourceDetailByAppResouceId(String id) throws Exception;
    void deleteByAppResourceId(String id) throws Exception;
}

+ 27 - 0
hos-admin/src/main/java/com/yihu/hos/resource/dao/IRsCategoryDao.java

@ -0,0 +1,27 @@
package com.yihu.hos.resource.dao;
import com.yihu.ehr.framework.common.dao.XSQLGeneralDAO;
import com.yihu.ehr.framework.model.Result;
import com.yihu.hos.resource.model.RsDemensionCategory;
import com.yihu.hos.resource.model.RsResourceCategory;
import java.util.List;
import java.util.Map;
/**
 * Created by chenweida on 2015/12/15.
 */
public interface IRsCategoryDao extends XSQLGeneralDAO {
    RsDemensionCategory[] getAllCategory() throws Exception;
    Result getList(Map<String, Object> conditionMap) throws Exception;
    RsResourceCategory getCategoryByNameAndPid(String name, String pid) throws Exception;
    RsResourceCategory getCategoryByNameAndPidWithOutThis(String name, String pid, String id) throws Exception;
    List<RsResourceCategory> getCategoryByPid(String id) throws Exception;
    List<RsResourceCategory> getCategoryWithOutId(String id) throws Exception;
}

+ 17 - 0
hos-admin/src/main/java/com/yihu/hos/resource/dao/IRsDemensionCategoryDao.java

@ -0,0 +1,17 @@
package com.yihu.hos.resource.dao;
import com.yihu.ehr.framework.common.dao.XSQLGeneralDAO;
import com.yihu.ehr.framework.model.Result;
import com.yihu.hos.resource.model.RsDemensionCategory;
import java.util.List;
import java.util.Map;
/**
 * Created by chenweida on 2016/1/26.
 */
public interface IRsDemensionCategoryDao extends XSQLGeneralDAO {
    List<RsDemensionCategory> getDimensionCategoryList() throws Exception;
    Result getDimensionCatecoryList(Map<String, Object> conditionMap) throws Exception;
}

+ 10 - 0
hos-admin/src/main/java/com/yihu/hos/resource/dao/IRsDimensionDao.java

@ -0,0 +1,10 @@
package com.yihu.hos.resource.dao;
import com.yihu.ehr.framework.common.dao.XSQLGeneralDAO;
/**
 * Created by chenweida on 2016/1/26.
 */
public interface IRsDimensionDao extends XSQLGeneralDAO {
}

+ 0 - 0
hos-admin/src/main/java/com/yihu/hos/resource/dao/IRsResourceCategoryDao.java


Some files were not shown because too many files changed in this diff