wangzhinan 1 year ago
Parent
Commit
7e54723624
100 changed files with 9786 additions and 1185 deletions
  1. 2 2
      business/base-service/src/main/java/com/yihu/jw/hospital/prescription/service/entrance/YkyyEntranceService.java
  2. 27 0
      common/common-request-mapping/src/main/java/com/yihu/jw/rm/base/BaseRequestMapping.java
  3. 0 1
      common/common-rest-model/src/main/java/com/yihu/jw/restmodel/emergency/EmergencyOrderVO.java
  4. 5 5
      common/commons-cat/pom.xml
  5. 0 26
      common/commons-cat/src/main/java/com/yihu/jw/cat/CatContext.java
  6. 0 16
      common/commons-cat/src/main/java/com/yihu/jw/cat/CatErrorConstants.java
  7. 0 62
      common/commons-cat/src/main/java/com/yihu/jw/cat/CatHeaderRequestWrapper.java
  8. 0 45
      common/commons-cat/src/main/java/com/yihu/jw/cat/CatOkHttpInterceptor.java
  9. 0 108
      common/commons-cat/src/main/java/com/yihu/jw/cat/CatServletFilter.java
  10. 0 26
      common/commons-cat/src/main/java/com/yihu/jw/cat/config/CatFilterConfig.java
  11. 0 34
      common/commons-cat/src/main/java/com/yihu/jw/cat/config/FeignOkHttpConfig.java
  12. 26 0
      common/commons-cat/src/main/java/com/yihu/jw/ehr/cat/CatContext.java
  13. 16 0
      common/commons-cat/src/main/java/com/yihu/jw/ehr/cat/CatErrorConstants.java
  14. 62 0
      common/commons-cat/src/main/java/com/yihu/jw/ehr/cat/CatHeaderRequestWrapper.java
  15. 45 0
      common/commons-cat/src/main/java/com/yihu/jw/ehr/cat/CatOkHttpInterceptor.java
  16. 108 0
      common/commons-cat/src/main/java/com/yihu/jw/ehr/cat/CatServletFilter.java
  17. 26 0
      common/commons-cat/src/main/java/com/yihu/jw/ehr/cat/config/CatFilterConfig.java
  18. 31 0
      common/commons-cat/src/main/java/com/yihu/jw/ehr/cat/config/FeignOkHttpConfig.java
  19. 57 0
      common/commons-data-elasticsearch/pom.xml
  20. 78 0
      common/commons-data-elasticsearch/src/main/java/com/yihu/jw/ehr/elasticsearch/ElasticSearchPool.java
  21. 781 0
      common/commons-data-elasticsearch/src/main/java/com/yihu/jw/ehr/elasticsearch/ElasticSearchUtil.java
  22. 45 0
      common/commons-data-elasticsearch/src/main/java/com/yihu/jw/ehr/elasticsearch/config/ElasticSearchConfig.java
  23. 46 0
      common/commons-data-fastdfs/pom.xml
  24. 53 0
      common/commons-data-fastdfs/src/main/java/com/yihu/jw/ehr/fastdfs/FastDFSPoolEhr.java
  25. 357 0
      common/commons-data-fastdfs/src/main/java/com/yihu/jw/ehr/fastdfs/FastDFSUtilEhr.java
  26. 192 0
      common/commons-data-fastdfs/src/main/java/com/yihu/jw/ehr/fastdfs/config/FastDFSConfigEhr.java
  27. 57 0
      common/commons-data-hbase/pom.xml
  28. 32 0
      common/commons-data-hbase/src/main/java/com/yihu/jw/hbase/AbstractHBaseClient.java
  29. 213 0
      common/commons-data-hbase/src/main/java/com/yihu/jw/hbase/HBaseAdmin.java
  30. 395 0
      common/commons-data-hbase/src/main/java/com/yihu/jw/hbase/HBaseDao.java
  31. 158 0
      common/commons-data-hbase/src/main/java/com/yihu/jw/hbase/TableBundle.java
  32. 86 0
      common/commons-data-hbase/src/main/java/com/yihu/jw/hbase/config/HbaseConfig.java
  33. 13 0
      common/commons-data-hbase/src/main/resources/hbase-site.xml
  34. 163 0
      common/commons-data-hbase/src/main/resources/hbase/core-site.xml
  35. 243 0
      common/commons-data-hbase/src/main/resources/hbase/hbase-site.xml
  36. 348 0
      common/commons-data-hbase/src/main/resources/hbase/hdfs-site.xml
  37. 66 0
      common/commons-data-mysql/pom.xml
  38. 219 0
      common/commons-data-mysql/src/main/java/com/yihu/jw/ehr/parm/PageModel.java
  39. 244 0
      common/commons-data-mysql/src/main/java/com/yihu/jw/query/BaseJpaService.java
  40. 198 0
      common/commons-data-mysql/src/main/java/com/yihu/jw/query/FieldCondition.java
  41. 28 0
      common/commons-data-mysql/src/main/java/com/yihu/jw/query/ReturnIdPstCreator.java
  42. 303 0
      common/commons-data-mysql/src/main/java/com/yihu/jw/query/URLHqlQueryParser.java
  43. 267 0
      common/commons-data-mysql/src/main/java/com/yihu/jw/query/URLQueryParser.java
  44. 63 0
      common/commons-data-mysql/src/main/java/com/yihu/jw/query/UpdatePstCallback.java
  45. 28 0
      common/commons-data-mysql/src/test/com/yihu/ehr/query/URLQueryBuilderTest.java
  46. 63 0
      common/commons-data-query/pom.xml
  47. 24 0
      common/commons-data-query/src/main/java/com/yihu/jw/query/common/enums/SolrIndexEnum.java
  48. 83 0
      common/commons-data-query/src/main/java/com/yihu/jw/query/common/model/SolrGroupEntity.java
  49. 76 0
      common/commons-data-query/src/main/java/com/yihu/jw/query/common/model/SolrJoinEntity.java
  50. 401 0
      common/commons-data-query/src/main/java/com/yihu/jw/query/services/HbaseQuery.java
  51. 1164 0
      common/commons-data-query/src/main/java/com/yihu/jw/query/services/SolrQuery.java
  52. 23 0
      common/commons-data-redis/src/main/java/com/yihu/jw/ehr/redis/annotation/Mapping.java
  53. 17 0
      common/commons-data-redis/src/main/java/com/yihu/jw/ehr/redis/annotation/Table.java
  54. 190 0
      common/commons-data-redis/src/main/java/com/yihu/jw/ehr/redis/client/RedisClient.java
  55. 39 0
      common/commons-data-redis/src/main/java/com/yihu/jw/ehr/redis/config/RedisContext.java
  56. 37 0
      common/commons-data-redis/src/main/java/com/yihu/jw/ehr/redis/schema/AddressDictSchema.java
  57. 20 0
      common/commons-data-redis/src/main/java/com/yihu/jw/ehr/redis/schema/HealthArchiveSchema.java
  58. 15 0
      common/commons-data-redis/src/main/java/com/yihu/jw/ehr/redis/schema/HealthProblemDictKeySchema.java
  59. 66 0
      common/commons-data-redis/src/main/java/com/yihu/jw/ehr/redis/schema/Icd10KeySchema.java
  60. 17 0
      common/commons-data-redis/src/main/java/com/yihu/jw/ehr/redis/schema/IndicatorsDictKeySchema.java
  61. 92 0
      common/commons-data-redis/src/main/java/com/yihu/jw/ehr/redis/schema/KeySchema.java
  62. 83 0
      common/commons-data-redis/src/main/java/com/yihu/jw/ehr/redis/schema/OrgKeySchema.java
  63. 27 0
      common/commons-data-redis/src/main/java/com/yihu/jw/ehr/redis/schema/RsAdapterMetaKeySchema.java
  64. 16 0
      common/commons-data-redis/src/main/java/com/yihu/jw/ehr/redis/schema/RsMetadataKeySchema.java
  65. 15 0
      common/commons-data-redis/src/main/java/com/yihu/jw/ehr/redis/schema/StdCdaVersionKeySchema.java
  66. 68 0
      common/commons-data-redis/src/main/java/com/yihu/jw/ehr/redis/schema/StdDataSetKeySchema.java
  67. 143 0
      common/commons-data-redis/src/main/java/com/yihu/jw/ehr/redis/schema/StdMetaDataKeySchema.java
  68. 0 23
      common/commons-data-redis/src/main/java/com/yihu/jw/redis/annotation/Mapping.java
  69. 0 17
      common/commons-data-redis/src/main/java/com/yihu/jw/redis/annotation/Table.java
  70. 0 163
      common/commons-data-redis/src/main/java/com/yihu/jw/redis/client/RedisClient.java
  71. 0 39
      common/commons-data-redis/src/main/java/com/yihu/jw/redis/config/RedisContext.java
  72. 0 37
      common/commons-data-redis/src/main/java/com/yihu/jw/redis/schema/AddressDictSchema.java
  73. 0 20
      common/commons-data-redis/src/main/java/com/yihu/jw/redis/schema/HealthArchiveSchema.java
  74. 0 15
      common/commons-data-redis/src/main/java/com/yihu/jw/redis/schema/HealthProblemDictKeySchema.java
  75. 0 66
      common/commons-data-redis/src/main/java/com/yihu/jw/redis/schema/Icd10KeySchema.java
  76. 0 17
      common/commons-data-redis/src/main/java/com/yihu/jw/redis/schema/IndicatorsDictKeySchema.java
  77. 0 92
      common/commons-data-redis/src/main/java/com/yihu/jw/redis/schema/KeySchema.java
  78. 0 83
      common/commons-data-redis/src/main/java/com/yihu/jw/redis/schema/OrgKeySchema.java
  79. 0 27
      common/commons-data-redis/src/main/java/com/yihu/jw/redis/schema/RsAdapterMetaKeySchema.java
  80. 0 16
      common/commons-data-redis/src/main/java/com/yihu/jw/redis/schema/RsMetadataKeySchema.java
  81. 0 15
      common/commons-data-redis/src/main/java/com/yihu/jw/redis/schema/StdCdaVersionKeySchema.java
  82. 0 63
      common/commons-data-redis/src/main/java/com/yihu/jw/redis/schema/StdDataSetKeySchema.java
  83. 0 140
      common/commons-data-redis/src/main/java/com/yihu/jw/redis/schema/StdMetaDataKeySchema.java
  84. 47 0
      common/commons-data-solr/pom.xml
  85. 115 0
      common/commons-data-solr/src/main/java/com/yihu/jw/solr/SolrAdmin.java
  86. 53 0
      common/commons-data-solr/src/main/java/com/yihu/jw/solr/SolrPool.java
  87. 668 0
      common/commons-data-solr/src/main/java/com/yihu/jw/solr/SolrUtil.java
  88. 40 0
      common/commons-data-solr/src/main/java/com/yihu/jw/solr/config/SolrConfig.java
  89. 106 27
      common/commons-profile-core/src/main/java/com/yihu/jw/profile/family/ResourceCells.java
  90. 47 0
      common/commons-rest-model/pom.xml
  91. 100 0
      common/commons-rest-model/src/main/java/com/yihu/jw/ehr/model/adaption/MAdapterDataSet.java
  92. 177 0
      common/commons-rest-model/src/main/java/com/yihu/jw/ehr/model/adaption/MAdapterDataVo.java
  93. 92 0
      common/commons-rest-model/src/main/java/com/yihu/jw/ehr/model/adaption/MAdapterDict.java
  94. 160 0
      common/commons-rest-model/src/main/java/com/yihu/jw/ehr/model/adaption/MAdapterDictVo.java
  95. 76 0
      common/commons-rest-model/src/main/java/com/yihu/jw/ehr/model/adaption/MAdapterOrg.java
  96. 90 0
      common/commons-rest-model/src/main/java/com/yihu/jw/ehr/model/adaption/MAdapterPlan.java
  97. 39 0
      common/commons-rest-model/src/main/java/com/yihu/jw/ehr/model/adaption/MAdapterRelationship.java
  98. 108 0
      common/commons-rest-model/src/main/java/com/yihu/jw/ehr/model/adaption/MOrgDataSet.java
  99. 108 0
      common/commons-rest-model/src/main/java/com/yihu/jw/ehr/model/adaption/MOrgDict.java
  100. 0 0
      common/commons-rest-model/src/main/java/com/yihu/jw/ehr/model/adaption/MOrgDictItem.java

+ 2 - 2
business/base-service/src/main/java/com/yihu/jw/hospital/prescription/service/entrance/YkyyEntranceService.java

@@ -443,7 +443,7 @@ public class YkyyEntranceService {
    /**
     * Examination information list
     * Lab test information list
     * @param patient
     * @param page
     * @param size
@@ -789,7 +789,7 @@ public class YkyyEntranceService {
    /**
     * HIS patient visit history query
     * HIS patient visit examination history query
     * @throws Exception
     */
    public JSONArray findV_ZKSG_JCCX(String brid,String conNo,boolean demoFlag) throws Exception{

+ 27 - 0
common/common-request-mapping/src/main/java/com/yihu/jw/rm/base/BaseRequestMapping.java

@@ -935,4 +935,31 @@ public class BaseRequestMapping {
        public static final String SHOWTYPE = "/showType";
    }
    public static class Resource extends Basic{
        public static final String PREFIX = "/resources";
        public static final String DictList = "/dict/list";
        public static final String DictBatch = "/dict/batch";
        public static final String Dict = "/dict/{id}";
        public static final String DictCode = "/dict/code";
        public static final String DictExistence = "/dict/existence";
        public static final String DictEntryBatch = "/dict/entry/batch";
        public static final String DictCodesExistence = "/dict/codes/existence";
        public static final String DictEntries = "/dict_entries";
        public static final String NoPageDictEntries = "/noPage_dict_entries";
        public static final String DictEntry = "/dict_entries/{id}";
        public static final String DictEntriesByDictCode = "/dict/code/dict_entries";
        public static final String DictEntriesExistence = "/dict_entries/existence";
        public static final String CategoryUpdate = "/resources/category/update";
        public static final String Category = "/resources/category/{id}";
        public static final String CategoriesByPid = "/resources/categories/pid";
        public static final String CategoriesByCodeAndPid = "/resources/categories/codeAndPid";
        public static final String CategoryTree = "/resources/categories/tree";
        public static final String CategoryExitSelfAndParent = "/resources/categories/selfAndParent";
        public static final String CategoriesAll = "/resources/categories/all";
        public static final String CategoriesSearch = "/resources/categories/search";
    }
}
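
The constants above are plain path fragments. Assuming they are consumed the usual Spring MVC way (an assumption; no controller is part of this diff), a minimal sketch of how the Resource paths would typically be wired up:

import java.util.*;
import org.springframework.web.bind.annotation.*;

@RestController
@RequestMapping(BaseRequestMapping.Resource.PREFIX) // "/resources"
public class ResourceDictEndpoint { // hypothetical controller, illustration only
    // resolves to GET /resources/dict/list
    @GetMapping(BaseRequestMapping.Resource.DictList)
    public List<Map<String, Object>> dictList() {
        return Collections.emptyList(); // placeholder body
    }
    // resolves to GET /resources/dict/{id}
    @GetMapping(BaseRequestMapping.Resource.Dict)
    public Map<String, Object> dict(@PathVariable("id") String id) {
        return Collections.emptyMap(); // placeholder body
    }
}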

+ 0 - 1
common/common-rest-model/src/main/java/com/yihu/jw/restmodel/emergency/EmergencyOrderVO.java

@@ -2,7 +2,6 @@ package com.yihu.jw.restmodel.emergency;
import com.alibaba.fastjson.annotation.JSONField;
import com.fasterxml.jackson.annotation.JsonFormat;
import com.yihu.jw.entity.care.contacts.PatientSosContactsDO;
import com.yihu.jw.entity.care.securitymonitoring.BaseEmergencyWarnLogDO;
import com.yihu.jw.entity.care.securitymonitoring.EmergencyWarnConclusionDO;

+ 5 - 5
common/commons-cat/pom.xml

@@ -2,13 +2,13 @@
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <artifactId>commons</artifactId>
        <groupId>com.yihu.ehr</groupId>
        <groupId>com.yihu.jw</groupId>
        <artifactId>wlyy-parent-pom</artifactId>
        <version>2.4.0</version>
        <relativePath>../../wlyy-parent-pom/pom.xml</relativePath>
        <relativePath>../../wlyy-lib-parent-pom/pom.xml</relativePath>
    </parent>
    <modelVersion>4.0.0</modelVersion>
    <artifactId>commons-cat</artifactId>
    <packaging>jar</packaging>
@@ -44,7 +44,7 @@
            <groupId>com.fasterxml.jackson.core</groupId>
            <artifactId>jackson-databind</artifactId>
        </dependency>
      	<dependency>
        <dependency>
            <groupId>io.netty</groupId>
            <artifactId>netty-all</artifactId>
        </dependency>

+ 0 - 26
common/commons-cat/src/main/java/com/yihu/jw/cat/CatContext.java

@@ -1,26 +0,0 @@
package com.yihu.jw.cat;
import com.dianping.cat.Cat;
import java.util.HashMap;
import java.util.Map;
/**
 * Context - CAT context
 * Created by progr1mmer on 2018/9/4.
 */
public class CatContext implements Cat.Context {
    private Map<String, String> properties = new HashMap<>();
    @Override
    public void addProperty(String key, String value) {
        properties.put(key, value);
    }
    @Override
    public String getProperty(String key) {
        return properties.get(key);
    }
}

+ 0 - 16
common/commons-cat/src/main/java/com/yihu/jw/cat/CatErrorConstants.java

@@ -1,16 +0,0 @@
package com.yihu.jw.cat;
/**
 * Constants - constant definitions
 * Created by progr1mmer on 2018/9/5.
 */
public class CatErrorConstants {
    /*
     * When an exception is caught in the filter chain, it can be passed
     * on to the Transaction via
     * request.setAttribute(ERROR_FOR_CAT, e)
     */
    public static final String ERROR_FOR_CAT = "EFC";
}

+ 0 - 62
common/commons-cat/src/main/java/com/yihu/jw/cat/CatHeaderRequestWrapper.java

@@ -1,62 +0,0 @@
package com.yihu.jw.cat;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletRequestWrapper;
import java.util.*;
/**
 * Wrapper - adds custom request headers
 * Created by progr1mmer on 2018/9/4.
 */
public class CatHeaderRequestWrapper extends HttpServletRequestWrapper {
    private final Map<String, String> customHeaders;
    CatHeaderRequestWrapper(HttpServletRequest request) {
        super(request);
        this.customHeaders = new HashMap<>();
    }
    void putHeader(String name, String value) {
        this.customHeaders.put(name, value);
    }
    @Override
    public String getHeader(String name) {
        // check the custom headers first
        String headerValue = customHeaders.get(name);
        if (headerValue != null){
            return headerValue;
        }
        // otherwise fall back to the original wrapped request object
        return ((HttpServletRequest) getRequest()).getHeader(name);
    }
    @Override
    public Enumeration<String> getHeaders(String name) {
        if (customHeaders.containsKey(name)) {
            Set<String> set = new HashSet<>();
            set.add(getHeader(name));
            return Collections.enumeration(set);
        }
        return super.getHeaders(name);
    }
    @Override
    public Enumeration<String> getHeaderNames() {
        // create a set of the custom header names
        Set<String> set = new HashSet<>(customHeaders.keySet());
        // now add the headers from the wrapped request object
        Enumeration<String> e = ((HttpServletRequest) getRequest()).getHeaderNames();
        while (e.hasMoreElements()) {
            // add the names of the request headers into the list
            String n = e.nextElement();
            set.add(n);
        }
        // create an enumeration from the set and return
        return Collections.enumeration(set);
    }
}

+ 0 - 45
common/commons-cat/src/main/java/com/yihu/jw/cat/CatOkHttpInterceptor.java

@@ -1,45 +0,0 @@
package com.yihu.jw.cat;
import com.dianping.cat.Cat;
import com.dianping.cat.CatConstants;
import com.dianping.cat.message.Transaction;
import okhttp3.Interceptor;
import okhttp3.Request;
import okhttp3.Response;
import java.io.IOException;
/**
 * Interceptor - intercepts HTTP calls
 * Created by progr1mmer on 2018/9/5.
 */
public class CatOkHttpInterceptor implements Interceptor {
    @Override
    public Response intercept(Chain chain) throws IOException {
        Request request = chain.request();
        Transaction t = Cat.newTransaction(CatConstants.TYPE_REMOTE_CALL, request.url().toString());
        try {
            // Save and propagate the CAT call-chain context
            CatContext ctx = new CatContext();
            Cat.logRemoteCallClient(ctx);
            Request.Builder builder = request.newBuilder();
            builder.header(Cat.Context.ROOT, ctx.getProperty(Cat.Context.ROOT));
            builder.header(Cat.Context.PARENT, ctx.getProperty(Cat.Context.PARENT));
            builder.header(Cat.Context.CHILD, ctx.getProperty(Cat.Context.CHILD));
            request = builder.build();
            // Execute the request
            Response response = chain.proceed(request);
            t.setStatus(Transaction.SUCCESS);
            return response;
        } catch (Exception e) {
            // Log the exception
            t.setStatus(e);
            Cat.getProducer().logError(e);
            throw e;
        } finally {
            // The current Transaction must complete
            t.complete();
        }
    }
}

+ 0 - 108
common/commons-cat/src/main/java/com/yihu/jw/cat/CatServletFilter.java

@@ -1,108 +0,0 @@
package com.yihu.jw.cat;
import com.dianping.cat.Cat;
import com.dianping.cat.CatConstants;
import com.dianping.cat.message.Transaction;
import com.fasterxml.jackson.databind.ObjectMapper;
import javax.servlet.*;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
/**
 * http://tx.cat.weimob.com/cat/doc.html - partial documentation
 * Filter - basic CAT filter
 * Created by progr1mmer on 2018/9/4.
 */
public class CatServletFilter implements Filter {
    private final ObjectMapper objectMapper = new ObjectMapper();
    @Override
    public void init(FilterConfig filterConfig) throws ServletException {
    }
    @Override
    public void doFilter(ServletRequest servletRequest, ServletResponse servletResponse, FilterChain filterChain) throws IOException, ServletException {
        HttpServletRequest request = (HttpServletRequest) servletRequest;
        CatHeaderRequestWrapper headerRequestWrapper = new CatHeaderRequestWrapper(request);
        if (isRoot(request)) {
            Transaction t = Cat.newTransaction(CatConstants.TYPE_REMOTE_CALL, request.getRequestURL().toString());
            try {
                Cat.logEvent("Request.remoteHost", request.getRemoteHost());
                Cat.logEvent("Request.params", objectMapper.writeValueAsString(request.getParameterMap()));
                CatContext catContext = new CatContext();
                Cat.logRemoteCallClient(catContext);
                headerRequestWrapper.putHeader(Cat.Context.ROOT, catContext.getProperty(Cat.Context.ROOT));
                headerRequestWrapper.putHeader(Cat.Context.PARENT, catContext.getProperty(Cat.Context.PARENT));
                headerRequestWrapper.putHeader(Cat.Context.CHILD, catContext.getProperty(Cat.Context.CHILD));
                filterChain.doFilter(headerRequestWrapper, servletResponse);
                if (null == request.getAttribute(CatErrorConstants.ERROR_FOR_CAT)) {
                    Integer status = ((HttpServletResponse) servletResponse).getStatus();
                    if (status != 500) {
                        t.setStatus(Transaction.SUCCESS);
                    } else {
                        Cat.logError(new IllegalStateException(status.toString()));
                        t.setStatus(new IllegalStateException(status.toString()));
                    }
                } else {
                    Cat.logError((Exception)request.getAttribute(CatErrorConstants.ERROR_FOR_CAT));
                    t.setStatus((Exception)request.getAttribute(CatErrorConstants.ERROR_FOR_CAT));
                }
            } catch (Exception e) {
                Cat.logError(e);
                t.setStatus(e);
                throw e;
            } finally {
                t.complete();
            }
        } else {
            CatContext catContext = new CatContext();
            catContext.addProperty(Cat.Context.ROOT, request.getHeader(Cat.Context.ROOT));
            catContext.addProperty(Cat.Context.PARENT, request.getHeader(Cat.Context.PARENT));
            catContext.addProperty(Cat.Context.CHILD, request.getHeader(Cat.Context.CHILD));
            Cat.logRemoteCallServer(catContext);
            Transaction t = Cat.newTransaction(CatConstants.TYPE_SERVICE, request.getRequestURL().toString());
            try {
                Cat.logEvent("Request.params", objectMapper.writeValueAsString(request.getParameterMap()));
                filterChain.doFilter(headerRequestWrapper, servletResponse);
                if (null == request.getAttribute(CatErrorConstants.ERROR_FOR_CAT)) {
                    Integer status = ((HttpServletResponse) servletResponse).getStatus();
                    if (status != 500) {
                        t.setStatus(Transaction.SUCCESS);
                    } else {
                        Cat.logError(new IllegalStateException(status.toString()));
                        t.setStatus(new IllegalStateException(status.toString()));
                    }
                } else {
                    Cat.logError((Exception)request.getAttribute(CatErrorConstants.ERROR_FOR_CAT));
                    t.setStatus((Exception)request.getAttribute(CatErrorConstants.ERROR_FOR_CAT));
                }
            } catch (Exception e) {
                Cat.logError(e);
                t.setStatus(e);
                throw e;
            } finally {
                t.complete();
            }
        }
    }
    @Override
    public void destroy() {
    }
    private boolean isRoot(HttpServletRequest request) {
        /*return request.getHeader(CatHttpConstants.CAT_HTTP_HEADER_ROOT_MESSAGE_ID) != null &&
                request.getHeader(CatHttpConstants.CAT_HTTP_HEADER_PARENT_MESSAGE_ID) != null &&
                request.getHeader(CatHttpConstants.CAT_HTTP_HEADER_CHILD_MESSAGE_ID) != null;*/
        return request.getHeader(Cat.Context.ROOT) == null &&
                request.getHeader(Cat.Context.PARENT) == null &&
                request.getHeader(Cat.Context.CHILD) == null;
    }
}

+ 0 - 26
common/commons-cat/src/main/java/com/yihu/jw/cat/config/CatFilterConfig.java

@@ -1,26 +0,0 @@
package com.yihu.jw.cat.config;
import com.yihu.jw.cat.CatServletFilter;
import org.springframework.boot.web.servlet.FilterRegistrationBean;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
/**
 * Config - registers the CAT filter path
 * Created by progr1mmer on 2018/9/4.
 */
@Configuration
public class CatFilterConfig {
    @Bean
    public FilterRegistrationBean catFilter() {
        FilterRegistrationBean registration = new FilterRegistrationBean();
        CatServletFilter filter = new CatServletFilter();
        registration.setFilter(filter);
        registration.addUrlPatterns("/*");
        registration.setName("catFilter");
        registration.setOrder(1);
        return registration;
    }
}

+ 0 - 34
common/commons-cat/src/main/java/com/yihu/jw/cat/config/FeignOkHttpConfig.java

@@ -1,34 +0,0 @@
package com.yihu.jw.cat.config;
import com.yihu.jw.cat.CatOkHttpInterceptor;
import feign.Feign;
import okhttp3.ConnectionPool;
import org.springframework.boot.autoconfigure.AutoConfigureBefore;
import org.springframework.boot.autoconfigure.condition.ConditionalOnClass;
import org.springframework.cloud.netflix.feign.FeignAutoConfiguration;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import java.util.concurrent.TimeUnit;
/**
 * Config - custom OkHttpClient with the CAT interceptor added
 * Created by progr1mmer on 2018/9/5.
 */
@Configuration
@ConditionalOnClass(Feign.class)
@AutoConfigureBefore(FeignAutoConfiguration.class)
public class FeignOkHttpConfig {
    @Bean
    public okhttp3.OkHttpClient okHttpClient(){
        return new okhttp3.OkHttpClient.Builder()
                .readTimeout(10000, TimeUnit.SECONDS)
                .connectTimeout(10000, TimeUnit.SECONDS)
                .writeTimeout(10000, TimeUnit.SECONDS)
                .connectionPool(new ConnectionPool())
                .addInterceptor(new CatOkHttpInterceptor())
                .build();
    }
}

+ 26 - 0
common/commons-cat/src/main/java/com/yihu/jw/ehr/cat/CatContext.java

@@ -0,0 +1,26 @@
package com.yihu.jw.ehr.cat;
import com.dianping.cat.Cat;
import java.util.HashMap;
import java.util.Map;
/**
 * Context - CAT context
 * Created by progr1mmer on 2018/9/4.
 */
public class CatContext implements Cat.Context {
    private Map<String, String> properties = new HashMap<>();
    @Override
    public void addProperty(String key, String value) {
        properties.put(key, value);
    }
    @Override
    public String getProperty(String key) {
        return properties.get(key);
    }
}

+ 16 - 0
common/commons-cat/src/main/java/com/yihu/jw/ehr/cat/CatErrorConstants.java

@@ -0,0 +1,16 @@
package com.yihu.jw.ehr.cat;
/**
 * Constants - constant definitions
 * Created by progr1mmer on 2018/9/5.
 */
public class CatErrorConstants {
    /*
     * When an exception is caught in the filter chain, it can be passed
     * on to the Transaction via
     * request.setAttribute(ERROR_FOR_CAT, e)
     */
    public static final String ERROR_FOR_CAT = "EFC";
}
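
As the comment says, code downstream in the filter chain hands a caught exception to CAT by stashing it on the request, where CatServletFilter picks it up and fails the Transaction. A minimal sketch of such a handler (hypothetical filter, not part of this commit):

import javax.servlet.*;
import java.io.IOException;

public class ErrorCapturingFilter implements Filter { // illustration only
    @Override
    public void doFilter(ServletRequest req, ServletResponse res, FilterChain chain)
            throws IOException, ServletException {
        try {
            chain.doFilter(req, res);
        } catch (Exception e) {
            // CatServletFilter reads this attribute and marks the Transaction as failed
            req.setAttribute(CatErrorConstants.ERROR_FOR_CAT, e);
            throw e;
        }
    }
    @Override
    public void init(FilterConfig config) { }
    @Override
    public void destroy() { }
}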

+ 62 - 0
common/commons-cat/src/main/java/com/yihu/jw/ehr/cat/CatHeaderRequestWrapper.java

@@ -0,0 +1,62 @@
package com.yihu.jw.ehr.cat;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletRequestWrapper;
import java.util.*;
/**
 * Wrapper - adds custom request headers
 * Created by progr1mmer on 2018/9/4.
 */
public class CatHeaderRequestWrapper extends HttpServletRequestWrapper {
    private final Map<String, String> customHeaders;
    CatHeaderRequestWrapper(HttpServletRequest request) {
        super(request);
        this.customHeaders = new HashMap<>();
    }
    void putHeader(String name, String value) {
        this.customHeaders.put(name, value);
    }
    @Override
    public String getHeader(String name) {
        // check the custom headers first
        String headerValue = customHeaders.get(name);
        if (headerValue != null){
            return headerValue;
        }
        // otherwise fall back to the original wrapped request object
        return ((HttpServletRequest) getRequest()).getHeader(name);
    }
    @Override
    public Enumeration<String> getHeaders(String name) {
        if (customHeaders.containsKey(name)) {
            Set<String> set = new HashSet<>();
            set.add(getHeader(name));
            return Collections.enumeration(set);
        }
        return super.getHeaders(name);
    }
    @Override
    public Enumeration<String> getHeaderNames() {
        // create a set of the custom header names
        Set<String> set = new HashSet<>(customHeaders.keySet());
        // now add the headers from the wrapped request object
        Enumeration<String> e = ((HttpServletRequest) getRequest()).getHeaderNames();
        while (e.hasMoreElements()) {
            // add the names of the request headers into the list
            String n = e.nextElement();
            set.add(n);
        }
        // create an enumeration from the set and return
        return Collections.enumeration(set);
    }
}

+ 45 - 0
common/commons-cat/src/main/java/com/yihu/jw/ehr/cat/CatOkHttpInterceptor.java

@@ -0,0 +1,45 @@
package com.yihu.jw.ehr.cat;
import com.dianping.cat.Cat;
import com.dianping.cat.CatConstants;
import com.dianping.cat.message.Transaction;
import okhttp3.Interceptor;
import okhttp3.Request;
import okhttp3.Response;
import java.io.IOException;
/**
 * Interceptor - intercepts HTTP calls
 * Created by progr1mmer on 2018/9/5.
 */
public class CatOkHttpInterceptor implements Interceptor {
    @Override
    public Response intercept(Chain chain) throws IOException {
        Request request = chain.request();
        Transaction t = Cat.newTransaction(CatConstants.TYPE_REMOTE_CALL, request.url().toString());
        try {
            // Save and propagate the CAT call-chain context
            CatContext ctx = new CatContext();
            Cat.logRemoteCallClient(ctx);
            Request.Builder builder = request.newBuilder();
            builder.header(Cat.Context.ROOT, ctx.getProperty(Cat.Context.ROOT));
            builder.header(Cat.Context.PARENT, ctx.getProperty(Cat.Context.PARENT));
            builder.header(Cat.Context.CHILD, ctx.getProperty(Cat.Context.CHILD));
            request = builder.build();
            // Execute the request
            Response response = chain.proceed(request);
            t.setStatus(Transaction.SUCCESS);
            return response;
        } catch (Exception e) {
            // Log the exception
            t.setStatus(e);
            Cat.getProducer().logError(e);
            throw e;
        } finally {
            // The current Transaction must complete
            t.complete();
        }
    }
}

+ 108 - 0
common/commons-cat/src/main/java/com/yihu/jw/ehr/cat/CatServletFilter.java

@@ -0,0 +1,108 @@
package com.yihu.jw.ehr.cat;
import com.dianping.cat.Cat;
import com.dianping.cat.CatConstants;
import com.dianping.cat.message.Transaction;
import com.fasterxml.jackson.databind.ObjectMapper;
import javax.servlet.*;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
/**
 * http://tx.cat.weimob.com/cat/doc.html - partial documentation
 * Filter - basic CAT filter
 * Created by progr1mmer on 2018/9/4.
 */
public class CatServletFilter implements Filter {
    private final ObjectMapper objectMapper = new ObjectMapper();
    @Override
    public void init(FilterConfig filterConfig) throws ServletException {
    }
    @Override
    public void doFilter(ServletRequest servletRequest, ServletResponse servletResponse, FilterChain filterChain) throws IOException, ServletException {
        HttpServletRequest request = (HttpServletRequest) servletRequest;
        CatHeaderRequestWrapper headerRequestWrapper = new CatHeaderRequestWrapper(request);
        if (isRoot(request)) {
            Transaction t = Cat.newTransaction(CatConstants.TYPE_REMOTE_CALL, request.getRequestURL().toString());
            try {
                Cat.logEvent("Request.remoteHost", request.getRemoteHost());
                Cat.logEvent("Request.params", objectMapper.writeValueAsString(request.getParameterMap()));
                CatContext catContext = new CatContext();
                Cat.logRemoteCallClient(catContext);
                headerRequestWrapper.putHeader(Cat.Context.ROOT, catContext.getProperty(Cat.Context.ROOT));
                headerRequestWrapper.putHeader(Cat.Context.PARENT, catContext.getProperty(Cat.Context.PARENT));
                headerRequestWrapper.putHeader(Cat.Context.CHILD, catContext.getProperty(Cat.Context.CHILD));
                filterChain.doFilter(headerRequestWrapper, servletResponse);
                if (null == request.getAttribute(CatErrorConstants.ERROR_FOR_CAT)) {
                    Integer status = ((HttpServletResponse) servletResponse).getStatus();
                    if (status != 500) {
                        t.setStatus(Transaction.SUCCESS);
                    } else {
                        Cat.logError(new IllegalStateException(status.toString()));
                        t.setStatus(new IllegalStateException(status.toString()));
                    }
                } else {
                    Cat.logError((Exception)request.getAttribute(CatErrorConstants.ERROR_FOR_CAT));
                    t.setStatus((Exception)request.getAttribute(CatErrorConstants.ERROR_FOR_CAT));
                }
            } catch (Exception e) {
                Cat.logError(e);
                t.setStatus(e);
                throw e;
            } finally {
                t.complete();
            }
        } else {
            CatContext catContext = new CatContext();
            catContext.addProperty(Cat.Context.ROOT, request.getHeader(Cat.Context.ROOT));
            catContext.addProperty(Cat.Context.PARENT, request.getHeader(Cat.Context.PARENT));
            catContext.addProperty(Cat.Context.CHILD, request.getHeader(Cat.Context.CHILD));
            Cat.logRemoteCallServer(catContext);
            Transaction t = Cat.newTransaction(CatConstants.TYPE_SERVICE, request.getRequestURL().toString());
            try {
                Cat.logEvent("Request.params", objectMapper.writeValueAsString(request.getParameterMap()));
                filterChain.doFilter(headerRequestWrapper, servletResponse);
                if (null == request.getAttribute(CatErrorConstants.ERROR_FOR_CAT)) {
                    Integer status = ((HttpServletResponse) servletResponse).getStatus();
                    if (status != 500) {
                        t.setStatus(Transaction.SUCCESS);
                    } else {
                        Cat.logError(new IllegalStateException(status.toString()));
                        t.setStatus(new IllegalStateException(status.toString()));
                    }
                } else {
                    Cat.logError((Exception)request.getAttribute(CatErrorConstants.ERROR_FOR_CAT));
                    t.setStatus((Exception)request.getAttribute(CatErrorConstants.ERROR_FOR_CAT));
                }
            } catch (Exception e) {
                Cat.logError(e);
                t.setStatus(e);
                throw e;
            } finally {
                t.complete();
            }
        }
    }
    @Override
    public void destroy() {
    }
    private boolean isRoot(HttpServletRequest request) {
        /*return request.getHeader(CatHttpConstants.CAT_HTTP_HEADER_ROOT_MESSAGE_ID) != null &&
                request.getHeader(CatHttpConstants.CAT_HTTP_HEADER_PARENT_MESSAGE_ID) != null &&
                request.getHeader(CatHttpConstants.CAT_HTTP_HEADER_CHILD_MESSAGE_ID) != null;*/
        return request.getHeader(Cat.Context.ROOT) == null &&
                request.getHeader(Cat.Context.PARENT) == null &&
                request.getHeader(Cat.Context.CHILD) == null;
    }
}

+ 26 - 0
common/commons-cat/src/main/java/com/yihu/jw/ehr/cat/config/CatFilterConfig.java

@@ -0,0 +1,26 @@
package com.yihu.jw.ehr.cat.config;
import com.yihu.jw.ehr.cat.CatServletFilter;
import org.springframework.boot.web.servlet.FilterRegistrationBean;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
/**
 * Config - registers the CAT filter path
 * Created by progr1mmer on 2018/9/4.
 */
@Configuration
public class CatFilterConfig {
    @Bean
    public FilterRegistrationBean catFilter() {
        FilterRegistrationBean registration = new FilterRegistrationBean();
        CatServletFilter filter = new CatServletFilter();
        registration.setFilter(filter);
        registration.addUrlPatterns("/*");
        registration.setName("catFilter");
        registration.setOrder(1);
        return registration;
    }
}

+ 31 - 0
common/commons-cat/src/main/java/com/yihu/jw/ehr/cat/config/FeignOkHttpConfig.java

@@ -0,0 +1,31 @@
package com.yihu.jw.ehr.cat.config;
import com.yihu.jw.ehr.cat.CatOkHttpInterceptor;
import feign.Feign;
import okhttp3.ConnectionPool;
import org.springframework.boot.autoconfigure.condition.ConditionalOnClass;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import java.util.concurrent.TimeUnit;
/**
 * Config - custom OkHttpClient with the CAT interceptor added
 * Created by progr1mmer on 2018/9/5.
 */
@Configuration
@ConditionalOnClass(Feign.class)
public class FeignOkHttpConfig {
    @Bean
    public okhttp3.OkHttpClient okHttpClient(){
        return new okhttp3.OkHttpClient.Builder()
                .readTimeout(10000, TimeUnit.SECONDS)
                .connectTimeout(10000, TimeUnit.SECONDS)
                .writeTimeout(10000, TimeUnit.SECONDS)
                .connectionPool(new ConnectionPool())
                .addInterceptor(new CatOkHttpInterceptor())
                .build();
    }
}
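
Worth noting when reusing this config: the timeouts are passed with TimeUnit.SECONDS, so 10000 amounts to roughly 2.8 hours. If second-scale timeouts were the intent (an assumption; the commit itself keeps 10000 seconds), the equivalent OkHttp calls would look like:

new okhttp3.OkHttpClient.Builder()
        .readTimeout(10, TimeUnit.SECONDS)
        .connectTimeout(10, TimeUnit.SECONDS)
        .writeTimeout(10, TimeUnit.SECONDS)
        .connectionPool(new ConnectionPool())
        .addInterceptor(new CatOkHttpInterceptor())
        .build();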

+ 57 - 0
common/commons-data-elasticsearch/pom.xml

@@ -0,0 +1,57 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <groupId>com.yihu.jw</groupId>
        <artifactId>wlyy-parent-pom</artifactId>
        <version>2.4.0</version>
        <relativePath>../../wlyy-lib-parent-pom/pom.xml</relativePath>
    </parent>
    <artifactId>commons-data-elasticsearch</artifactId>
    <packaging>jar</packaging>
    <dependencies>
        <!-- true -->
        <dependency>
            <groupId>org.springframework</groupId>
            <artifactId>spring-web</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot</artifactId>
        </dependency>
        <!-- ElasticSearch -->
        <dependency>
            <groupId>org.elasticsearch</groupId>
            <artifactId>elasticsearch</artifactId>
        </dependency>
        <dependency>
            <groupId>org.nlpcn</groupId>
            <artifactId>elasticsearch-sql</artifactId>
            <version>${version.elasticsearch-sql}</version>
        </dependency>
        <!-- ElasticSearch -->
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-api</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.data</groupId>
            <artifactId>spring-data-commons</artifactId>
            <scope>compile</scope>
        </dependency>
    </dependencies>
    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
            </plugin>
        </plugins>
    </build>
</project>

+ 78 - 0
common/commons-data-elasticsearch/src/main/java/com/yihu/jw/ehr/elasticsearch/ElasticSearchPool.java

@@ -0,0 +1,78 @@
package com.yihu.jw.ehr.elasticsearch;
import com.alibaba.druid.pool.DruidDataSource;
import com.alibaba.druid.pool.ElasticSearchDruidDataSourceFactory;
import com.yihu.jw.ehr.elasticsearch.config.ElasticSearchConfig;
import org.elasticsearch.client.transport.TransportClient;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.config.ConfigurableBeanFactory;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;
import java.util.Properties;
/**
 * Created by progr1mmer on 2018/1/4.
 */
@Component
@Scope(ConfigurableBeanFactory.SCOPE_SINGLETON)
public class ElasticSearchPool {
    private static volatile TransportClient transportClient;
    @Autowired
    private ElasticSearchConfig elasticSearchConfig;
    private TransportClient getTransportClient() {
        /*Settings settings = Settings.builder()
                .put("cluster.name", elasticSearchConfig.getClusterName())
                .put("client.transport.sniff", false)
                .build();
        String[] nodeArr = elasticSearchConfig.getClusterNodes().split(",");
        InetSocketTransportAddress[] socketArr = new InetSocketTransportAddress[nodeArr.length];
        for (int i = 0; i < socketArr.length; i++) {
            if (!StringUtils.isEmpty(nodeArr[i])) {
                String[] nodeInfo = nodeArr[i].split(":");
                socketArr[i] = new InetSocketTransportAddress(new InetSocketAddress(nodeInfo[0], new Integer(nodeInfo[1])));
            }
        }
        return TransportClient.builder().settings(settings).build().addTransportAddresses(socketArr);*/
        return null;
    }
    /**
     * 1. TransportClient itself supports multi-threaded data requests
     * 2. Pooling of multiple TransportClients is removed to reduce socket connections
     * 3. Double-checked-locking singleton, balancing safety and efficiency
     * 4. For efficiency, do not call transportClient.close() when you are done
     * @return
     */
    public TransportClient getClient() {
        if (transportClient != null) {
            if (transportClient.connectedNodes().isEmpty()) {
                synchronized (TransportClient.class) {
                    if (transportClient.connectedNodes().isEmpty()) {
                        transportClient = getTransportClient();
                    }
                }
            }
            return transportClient;
        }
        synchronized (TransportClient.class) {
            if (null == transportClient) {
                transportClient = getTransportClient();
            }
        }
        return transportClient;
    }
    public DruidDataSource getDruidDataSource() throws Exception {
        Properties properties = new Properties();
        properties.put("url", "jdbc:elasticsearch://" + elasticSearchConfig.getClusterNodes() + "/");
        DruidDataSource druidDataSource = (DruidDataSource) ElasticSearchDruidDataSourceFactory
                .createDataSource(properties);
        druidDataSource.setInitialSize(1);
        return druidDataSource;
    }
}
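
Given the notes above (shared singleton client, no close()), a typical caller would look roughly like the sketch below. This is an assumption about usage, not code from this commit, and note that getTransportClient() is stubbed to return null in this version, so the sketch only illustrates the intended pattern:

import org.elasticsearch.client.transport.TransportClient;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

@Service
public class ArchiveCounter { // hypothetical caller, illustration only
    @Autowired
    private ElasticSearchPool elasticSearchPool;

    public long countDocs(String index) {
        // Reuse the shared client; per the pool's contract, never close() it here
        TransportClient client = elasticSearchPool.getClient();
        return client.prepareSearch(index)
                .setSize(0)
                .get()
                .getHits()
                .getTotalHits().value;
    }
}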

+ 781 - 0
common/commons-data-elasticsearch/src/main/java/com/yihu/jw/ehr/elasticsearch/ElasticSearchUtil.java

@@ -0,0 +1,781 @@
package com.yihu.jw.ehr.elasticsearch;
import com.alibaba.druid.pool.DruidDataSource;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequestBuilder;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.engine.DocumentMissingException;
import org.elasticsearch.index.query.*;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.aggregations.AbstractAggregationBuilder;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.elasticsearch.search.aggregations.bucket.histogram.ExtendedBounds;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.CardinalityAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.InternalCardinality;
import org.elasticsearch.search.aggregations.metrics.Sum;
import org.elasticsearch.search.aggregations.metrics.SumAggregationBuilder;
import org.elasticsearch.search.sort.FieldSortBuilder;
import org.elasticsearch.search.sort.SortBuilder;
import org.elasticsearch.search.sort.SortOrder;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.config.ConfigurableBeanFactory;
import org.springframework.context.annotation.Scope;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.PageRequest;
import org.springframework.stereotype.Service;
import org.springframework.util.StringUtils;
import java.io.IOException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.text.ParseException;
import java.util.*;
/**
 * Util - ES search service
 * Created by progr1mmer on 2017/12/2.
 */
@Service
@Scope(ConfigurableBeanFactory.SCOPE_SINGLETON)
public class ElasticSearchUtil {
    @Autowired
    private ElasticSearchPool elasticSearchPool;
    /**
     * Create a mapping.
     * Note: if the corresponding field mapping is not created before
     * saving data, search results will be inaccurate.
     * @param index
     * @param type
     * @param source
     * @param setting - configure this setting as needed
     * @throws IOException
     */
    public void mapping (String index, String type, Map<String, Map<String, String>> source, Map<String, Object> setting) throws IOException{
        TransportClient transportClient = elasticSearchPool.getClient();
        XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("properties");
        for (String field : source.keySet()) {
            xContentBuilder.startObject(field);
            Map<String, String> propsMap = source.get(field);
            for (String prop : propsMap.keySet()) {
                xContentBuilder.field(prop, propsMap.get(prop));
            }
            xContentBuilder.endObject();
        }
        xContentBuilder.endObject().endObject();
        CreateIndexRequestBuilder createIndexRequestBuilder = transportClient.admin().indices().prepareCreate(index);
        createIndexRequestBuilder.addMapping(type, xContentBuilder);
        /*Map<String, Object> settingSource = new HashMap<>();
        settingSource.put("index.translog.flush_threshold_size", "1g"); //log文件大小
        settingSource.put("index.translog.flush_threshold_ops", "100000"); //flush触发次数
        settingSource.put("index.translog.durability", "async"); //异步更新
        settingSource.put("index.refresh_interval", "30s"); //刷新间隔
        settingSource.put("index.number_of_replicas", 1); //副本数
        settingSource.put("index.number_of_shards", 3); //分片数
        createIndexRequestBuilder.setSettings(settingSource);*/
        if (setting != null && !setting.isEmpty()) {
            createIndexRequestBuilder.setSettings(setting);
        }
        createIndexRequestBuilder.get();
    }
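    /* Illustrative only, not part of this commit: a typical call that maps a
     * keyword field and a date field before indexing, using the
     * Map-of-property-maps shape this method expects. The index, type and
     * field names here are made up.
     *
     *   Map<String, Map<String, String>> source = new HashMap<>();
     *   source.put("patientId", Collections.singletonMap("type", "keyword"));
     *   source.put("createTime", Collections.singletonMap("type", "date"));
     *   elasticSearchUtil.mapping("health_archive", "doc", source, null);
     */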
    /**
     * Remove an index - removes the entire index
     * @param index
     */
    public void remove (String index){
        TransportClient transportClient = elasticSearchPool.getClient();
        DeleteIndexRequestBuilder deleteIndexRequestBuilder = transportClient.admin().indices().prepareDelete(index);
        deleteIndexRequestBuilder.get();
    }
    /**
     * Add (index) a document
     * @param index
     * @param type
     * @param source
     * @return
     * @throws ParseException
     */
    public Map<String, Object> index (String index, String type, Map<String, Object> source) throws ParseException{
        TransportClient transportClient = elasticSearchPool.getClient();
        String _id = (String) source.remove("_id");
        if (StringUtils.isEmpty(_id)) {
            IndexResponse response = transportClient.prepareIndex(index, type).setSource(source).get();
            source.put("_id", response.getId());
        } else {
            IndexResponse response = transportClient.prepareIndex(index, type, _id).setSource(source).get();
            source.put("_id", response.getId());
        }
        return source;
    }
    /**
     * Bulk add documents - more efficient
     * @param index
     * @param type
     * @param source
     * @throws ParseException
     */
    public void bulkIndex (String index, String type, List<Map<String, Object>> source) throws ParseException{
        if (source.size() > 0) {
            TransportClient transportClient = elasticSearchPool.getClient();
            BulkRequestBuilder bulkRequestBuilder = transportClient.prepareBulk();
            source.forEach(item -> {
                String _id = (String) item.remove("_id");
                if (StringUtils.isEmpty(_id)) {
                    bulkRequestBuilder.add(transportClient.prepareIndex(index, type).setSource(item));
                } else {
                    bulkRequestBuilder.add(transportClient.prepareIndex(index, type, _id).setSource(item));
                }
            });
            bulkRequestBuilder.get();
        }
    }
    /**
     * Delete a document
     * @param index
     * @param type
     * @param id
     */
    public void delete (String index, String type, String id) {
        TransportClient transportClient = elasticSearchPool.getClient();
        transportClient.prepareDelete(index, type, id).get();
    }
    /**
     * Bulk delete documents
     * @param index
     * @param type
     * @param idArr
     */
    public void bulkDelete (String index, String type, String [] idArr) {
        if (idArr.length > 0) {
            TransportClient transportClient = elasticSearchPool.getClient();
            BulkRequestBuilder bulkRequestBuilder = transportClient.prepareBulk();
            for (String id : idArr) {
                bulkRequestBuilder.add(transportClient.prepareDelete(index, type, id));
            }
            bulkRequestBuilder.get();
        }
    }
    /**
     * Bulk delete documents matching a field value
     * @param index
     * @param type
     * @param field
     * @param value
     */
    public void deleteByField(String index, String type, String field, Object value) {
        deleteByFilter(index, type, field + "=" + value);
    }
    /**
     * Bulk delete documents matching a filter
     * @param index
     * @param type
     * @param filters
     */
    public void deleteByFilter(String index, String type, String filters) {
        QueryBuilder queryBuilder = getQueryBuilder(filters);
        deleteByFilter(index, type, queryBuilder);
    }
    /**
     * Bulk delete documents matching a filter
     * @param index
     * @param type
     * @param queryBuilder
     */
    public void deleteByFilter(String index, String type, QueryBuilder queryBuilder) {
        long count = count(index, type, queryBuilder);
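        // getIds returns at most 10,000 ids per call, so delete in roughly count/10000 + 1 passes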
        long page = count/10000 == 0 ? 1 :count/10000 +1;
        for (long i =0;i<page;i++) {
            List<String> idList = getIds(index, type, queryBuilder);
            if (idList.size() > 0) {
                TransportClient transportClient = elasticSearchPool.getClient();
                String[] idArr = new String[idList.size()];
                idArr = idList.toArray(idArr);
                BulkRequestBuilder bulkRequestBuilder = transportClient.prepareBulk();
                for (String id : idArr) {
                    bulkRequestBuilder.add(transportClient.prepareDelete(index, type, id));
                }
                bulkRequestBuilder.get();
            }
        }
    }
    /**
     * Update a document - returns the updated document
     * @param index
     * @param type
     * @param id
     * @param source
     * @return
     * @throws DocumentMissingException
     */
    public Map<String, Object> update(String index, String type, String id, Map<String, Object> source) throws DocumentMissingException {
        TransportClient transportClient = elasticSearchPool.getClient();
        source.remove("_id");
        transportClient.prepareUpdate(index, type, id).setDoc(source).setRetryOnConflict(5).get();
        return findById(index, type, id);
    }
    /**
     * Update a document - does not return the document
     * @param index
     * @param type
     * @param id
     * @param source
     * @throws DocumentMissingException
     */
    public void voidUpdate (String index, String type, String id, Map<String, Object> source) throws DocumentMissingException {
        TransportClient transportClient = elasticSearchPool.getClient();
        source.remove("_id");
        transportClient.prepareUpdate(index, type, id).setDoc(source).setRetryOnConflict(5).get();
    }
    /**
     * Bulk update documents
     * @param index
     * @param type
     * @param source
     * @throws DocumentMissingException
     */
    public void bulkUpdate(String index, String type, List<Map<String, Object>> source) throws DocumentMissingException {
        if (source.size() > 0) {
            TransportClient transportClient = elasticSearchPool.getClient();
            BulkRequestBuilder bulkRequestBuilder = transportClient.prepareBulk();
            source.forEach(item -> {
                String _id = (String)item.remove("_id");
                if (!StringUtils.isEmpty(_id)) {
                    bulkRequestBuilder.add(transportClient.prepareUpdate(index, type, _id).setDoc(item).setRetryOnConflict(5));
                }
            });
            bulkRequestBuilder.get();
        }
    }
    /**
     * Find a document by ID
     * @param index
     * @param type
     * @param id
     * @return
     */
    public Map<String, Object> findById(String index, String type, String id) {
        TransportClient transportClient = elasticSearchPool.getClient();
        GetRequest getRequest = new GetRequest(index, type, id);
        GetResponse response = transportClient.get(getRequest).actionGet();
        Map<String, Object> source = response.getSource();
        if (source != null) {
            source.put("_id", response.getId());
        }
        return source;
    }
    /**
     * Find documents by field value
     * @param index
     * @param type
     * @param field
     * @param value
     * @return
     */
    public List<Map<String, Object>> findByField(String index, String type, String field, Object value) {
        return list(index, type, field + "=" + value);
    }
    /**
     * Get a list of documents
     * @param index
     * @param type
     * @param filters
     * @return
     */
    public List<Map<String, Object>> list(String index, String type, String filters) {
        QueryBuilder queryBuilder = getQueryBuilder(filters);
        return list(index, type, queryBuilder);
    }
    /**
     * Get a list of documents
     * @param index
     * @param type
     * @param queryBuilder
     * @return
     */
    public List<Map<String, Object>> list(String index, String type, QueryBuilder queryBuilder) {
        int size = (int)count(index, type, queryBuilder);
        SearchRequestBuilder builder = searchRequestBuilder(index, type, queryBuilder, null, 0, size);
        SearchResponse response = builder.get();
        SearchHits hits = response.getHits();
        List<Map<String, Object>> resultList = new ArrayList<Map<String, Object>>();
        for (SearchHit hit : hits.getHits()) {
            Map<String, Object> source = hit.getSourceAsMap();
            source.put("_id", hit.getId());
            resultList.add(source);
        }
        return resultList;
    }
    /**
     * Get a page of documents
     * @param index
     * @param type
     * @param filters
     * @param page
     * @param size
     * @return
     */
    public Page<Map<String, Object>> page(String index, String type, String filters, int page, int size) {
        return page(index, type, filters, null, page, size);
    }
    /**
     * Get a page of documents
     * @param index
     * @param type
     * @param filters
     * @param sorts
     * @param page
     * @param size
     * @return
     */
    public Page<Map<String, Object>> pageBySort(String index, String type, String filters, String sorts, int page, int size) {
        return page(index, type, filters, sorts, page, size);
    }
    /**
     * Get a page of documents - with paging support
     * @param index
     * @param type
     * @param filters
     * @param sorts
     * @param page
     * @param size
     * @return
     */
    public Page<Map<String, Object>> page(String index, String type, String filters, String sorts, int page, int size) {
        QueryBuilder queryBuilder = getQueryBuilder(filters);
        List<SortBuilder> sortBuilders = getSortBuilder(sorts);
        return page(index, type, queryBuilder, sortBuilders, page, size);
    }
    /**
     * Get a page of documents - with paging support
     * @param index
     * @param type
     * @param queryBuilder
     * @param sortBuilders
     * @param page
     * @param size
     * @return
     */
    public Page<Map<String, Object>> page(String index, String type, QueryBuilder queryBuilder, List<SortBuilder> sortBuilders, int page, int size) {
        SearchRequestBuilder builder = searchRequestBuilder(index, type, queryBuilder, sortBuilders, (page - 1) * size, size);
        SearchResponse response = builder.get();
        SearchHits hits = response.getHits();
        List<Map<String, Object>> resultList = new ArrayList<>();
        for (SearchHit hit : hits.getHits()) {
            Map<String, Object> source = hit.getSourceAsMap();
            source.put("_id", hit.getId());
            resultList.add(source);
        }
        return new PageImpl<>(resultList, PageRequest.of(page - 1, size), hits.getTotalHits().value);
    }
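    /*
     * Paging sketch (names are hypothetical): page numbers are 1-based and are
     * translated to a from-offset of (page - 1) * size.
     *
     *   Page<Map<String, Object>> result =
     *           elasticSearchUtil.page("patient_index", "patient", "status=1", "-createTime", 1, 20);
     *   long total = result.getTotalElements();
     */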
    /**
     * Get the list of matching document IDs
     * @param index index name
     * @param type document type
     * @param filters filter expression
     * @return matching document IDs
     */
    public List<String> getIds (String index, String type, String filters){
        QueryBuilder queryBuilder = getQueryBuilder(filters);
        return getIds(index, type, queryBuilder);
    }
    /**
     * Get the list of matching document IDs.
     * Capped at 10,000 entries (the default index.max_result_window).
     * @param index index name
     * @param type document type
     * @param queryBuilder query to execute
     * @return matching document IDs
     */
    public List<String> getIds (String index, String type, QueryBuilder queryBuilder) {
        int size = (int)count(index, type, queryBuilder);
        size = size > 10000 ? 10000:size;
        SearchRequestBuilder builder = searchRequestBuilder(index, type, queryBuilder, null, 0, size);
        SearchResponse response = builder.get();
        SearchHits hits = response.getHits();
        List<String> resultList = new ArrayList<>();
        for (SearchHit hit : hits.getHits()) {
            resultList.add(hit.getId());
        }
        return resultList;
    }
    /**
     * Count matching documents
     * @param index index name
     * @param type document type
     * @param filters filter expression
     * @return document count
     */
    public long count(String index, String type, String filters) {
        QueryBuilder queryBuilder = getQueryBuilder(filters);
        return count(index, type, queryBuilder);
    }
    /**
     * Count matching documents
     * @param index index name
     * @param type document type
     * @param queryBuilder query to execute
     * @return document count
     */
    public long count(String index, String type, QueryBuilder queryBuilder) {
        SearchRequestBuilder builder = searchRequestBuilder(index, type, queryBuilder, null, null, null);
        return builder.get().getHits().getTotalHits().value;
    }
    /**
     * Query documents via Elasticsearch SQL
     * @param field column names to read from each result row
     * @param sql SQL statement
     * @return result rows as maps of column name to value
     * @throws Exception
     */
    public List<Map<String, Object>> findBySql(List<String> field, String sql) throws Exception {
        List<Map<String, Object>> list = new ArrayList<>();
        DruidDataSource druidDataSource = null;
        Connection connection = null;
        PreparedStatement preparedStatement = null;
        ResultSet resultSet = null;
        try {
            druidDataSource = elasticSearchPool.getDruidDataSource();
            connection = druidDataSource.getConnection();
            preparedStatement = connection.prepareStatement(sql);
            resultSet = preparedStatement.executeQuery();
            while (resultSet.next()) {
                Map<String, Object> rowData = new HashMap<>();
                for (String _field : field) {
                    rowData.put(_field, resultSet.getObject(_field));
                }
                list.add(rowData);
            }
            return list;
        } catch (Exception e) {
            if (!"Error".equals(e.getMessage())){
                e.printStackTrace();
            }
            return new ArrayList<>();
        } finally {
            if (resultSet != null) {
                resultSet.close();
            }
            if (preparedStatement != null) {
                preparedStatement.close();
            }
            if (connection != null) {
                connection.close();
            }
            if (druidDataSource != null) {
                druidDataSource.close();
            }
        }
    }
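    /*
     * Usage sketch (SQL and field names are hypothetical), assuming the Druid
     * data source is backed by the Elasticsearch SQL endpoint:
     *
     *   List<Map<String, Object>> rows = elasticSearchUtil.findBySql(
     *           java.util.Arrays.asList("name", "age"),
     *           "SELECT name, age FROM patient_index WHERE age > 18");
     */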
    /**
     * Query documents via Elasticsearch SQL
     * @param sql SQL statement
     * @return a disconnected ResultSet holding the query results
     * @throws Exception
     */
    public ResultSet findBySql(String sql) throws Exception {
        DruidDataSource druidDataSource = null;
        Connection connection = null;
        PreparedStatement preparedStatement = null;
        ResultSet resultSet = null;
        try {
            druidDataSource = elasticSearchPool.getDruidDataSource();
            connection = druidDataSource.getConnection();
            preparedStatement = connection.prepareStatement(sql);
            resultSet = preparedStatement.executeQuery();
            // Copy into a disconnected CachedRowSet; the raw ResultSet becomes
            // unusable once the resources below are closed.
            javax.sql.rowset.CachedRowSet rowSet = javax.sql.rowset.RowSetProvider.newFactory().createCachedRowSet();
            rowSet.populate(resultSet);
            return rowSet;
        } finally {
            if (resultSet != null) {
                resultSet.close();
            }
            if (preparedStatement != null) {
                preparedStatement.close();
            }
            if (connection != null) {
                connection.close();
            }
            if (druidDataSource != null) {
                druidDataSource.close();
            }
        }
    }
    /**
     * Date histogram aggregation (document counts bucketed by date)
     * @param index index name
     * @param type document type
     * @param filters filter expression
     * @param start start of the date range (extended bounds)
     * @param end end of the date range (extended bounds)
     * @param field date field to bucket on
     * @param interval bucket interval
     * @param format optional key format for the buckets
     * @return map of bucket key to document count
     */
    public Map<String, Long> dateHistogram(String index, String type, String filters, Date start, Date end, String field, DateHistogramInterval interval, String format) {
        QueryBuilder queryBuilder = getQueryBuilder(filters);
        SearchRequestBuilder builder = searchRequestBuilder(index, type, queryBuilder, null, 0, 0);
        DateHistogramAggregationBuilder dateHistogramBuilder = new DateHistogramAggregationBuilder(index + "-" + field);
        dateHistogramBuilder.field(field);
        dateHistogramBuilder.fixedInterval(interval);
        if (!StringUtils.isEmpty(format)) {
            dateHistogramBuilder.format(format);
        }
        dateHistogramBuilder.minDocCount(0);
        ExtendedBounds extendedBounds = new ExtendedBounds(start.getTime(), end.getTime());
        dateHistogramBuilder.extendedBounds(extendedBounds);
        builder.addAggregation(dateHistogramBuilder);
        SearchResponse response = builder.get();
        Histogram histogram = response.getAggregations().get(index + "-" + field);
        Map<String, Long> temp = new HashMap<>();
        histogram.getBuckets().forEach(item -> temp.put(item.getKeyAsString(), item.getDocCount()));
        return temp;
    }
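    /*
     * Usage sketch (hypothetical values): daily document counts over a date
     * range, keyed as "yyyy-MM-dd"; empty days appear with a count of 0
     * because of minDocCount(0) plus the extended bounds.
     *
     *   Map<String, Long> counts = elasticSearchUtil.dateHistogram("event_index", "event",
     *           "status=1", startDate, endDate, "createTime", DateHistogramInterval.DAY, "yyyy-MM-dd");
     */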
    /**
     * Count distinct values of a field (cardinality aggregation)
     * @param index index name
     * @param type document type
     * @param filters filter expression
     * @param field field to deduplicate on
     * @return approximate distinct-value count
     */
    public int cardinality(String index, String type, String filters, String field){
        QueryBuilder queryBuilder = getQueryBuilder(filters);
        SearchRequestBuilder builder = searchRequestBuilder(index, type, queryBuilder, null, 0, 0);
        CardinalityAggregationBuilder cardinality = AggregationBuilders.cardinality("cardinality").field(field);
        builder.addAggregation(cardinality);
        SearchResponse response = builder.get();
        InternalCardinality internalCard = response.getAggregations().get("cardinality");
        return (int) internalCard.getValue();
    }
    /**
     * Group counts by field (terms aggregation)
     * @param index index name
     * @param type document type
     * @param filters filter expression
     * @param groupField field to group on
     * @return map of group key to document count
     */
     */
    public Map<String, Long> countByGroup(String index, String type, String filters, String groupField) {
        QueryBuilder queryBuilder = getQueryBuilder(filters);
        SearchRequestBuilder builder = searchRequestBuilder(index, type, queryBuilder, null, null, null);
        AbstractAggregationBuilder aggregation = AggregationBuilders.terms("count").field(groupField);
        builder.addAggregation(aggregation);
        SearchResponse response = builder.get();
        Terms terms = response.getAggregations().get("count");
        List<Terms.Bucket> buckets = (List<Terms.Bucket>) terms.getBuckets();
        Map<String, Long> groupMap = new HashMap<>();
        for (Terms.Bucket bucket : buckets) {
            groupMap.put(bucket.getKey().toString(), bucket.getDocCount());
        }
        return groupMap;
    }
    /**
     * Group sums by field (terms aggregation with a sum sub-aggregation)
     * @param index index name
     * @param type document type
     * @param filters filter expression
     * @param sumField numeric field to sum
     * @param groupField field to group on
     * @return map of group key to sum
     */
    public Map<String, Double> sumByGroup(String index, String type, String filters, String sumField, String groupField) {
        QueryBuilder queryBuilder = getQueryBuilder(filters);
        SearchRequestBuilder builder = searchRequestBuilder(index, type, queryBuilder, null, null, null);
        TermsAggregationBuilder aggregation = AggregationBuilders.terms("sum_query").field(groupField);
        SumAggregationBuilder sumBuilder= AggregationBuilders.sum("sum_row").field(sumField);
        aggregation.subAggregation(sumBuilder);
        builder.addAggregation(aggregation);
        SearchResponse response = builder.get();
        Terms terms = response.getAggregations().get("sum_query");
        List<Terms.Bucket> buckets = (List<Terms.Bucket>) terms.getBuckets();
        Map<String, Double> groupMap = new HashMap<>();
        for (Terms.Bucket bucket : buckets){
            Sum sum2 = bucket.getAggregations().get("sum_row");
            groupMap.put(bucket.getKey().toString(), sum2.getValue());
        }
        return groupMap;
    }
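    /*
     * Usage sketch (hypothetical fields): total cost per department code.
     *
     *   Map<String, Double> sums =
     *           elasticSearchUtil.sumByGroup("order_index", "order", "status=1", "cost", "deptCode");
     */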
    /**
     * Build the base search request
     * @param index index name
     * @param type document type
     * @param queryBuilder query to execute
     * @param sortBuilders sort definitions, may be null
     * @param from offset of the first hit, may be null
     * @param size maximum number of hits, may be null
     * @return configured SearchRequestBuilder
     */
    public SearchRequestBuilder searchRequestBuilder(String index, String type, QueryBuilder queryBuilder, List<SortBuilder> sortBuilders, Integer from, Integer size) {
        TransportClient transportClient = elasticSearchPool.getClient();
        SearchRequestBuilder builder = transportClient.prepareSearch(index);
        builder.setTypes(type);
        builder.setSearchType(SearchType.DFS_QUERY_THEN_FETCH);
        builder.setQuery(queryBuilder);
        builder.setExplain(true);
        if (sortBuilders != null) {
            sortBuilders.forEach(item -> builder.addSort(item));
        }
        if (from != null) {
            builder.setFrom(from);
        }
        if (size != null) {
            builder.setSize(size);
        }
        return builder;
    }
    /**
     * Parse a sort expression into SortBuilders.
     * Format: ";"-separated fields, each prefixed with "+" (ascending) or "-" (descending), e.g. "-createTime;+name".
     * @param sorts sort expression
     * @return sort definitions
     */
    public List<SortBuilder> getSortBuilder(String sorts) {
        List<SortBuilder> sortBuilderList = new ArrayList<>();
        if (StringUtils.isEmpty(sorts)) {
            return sortBuilderList;
        }
        String [] sortArr = sorts.split(";");
        for (String sort : sortArr) {
            String operator = sort.substring(0, 1);
            SortBuilder sortBuilder = new FieldSortBuilder(sort.substring(1));
            if ("-".equalsIgnoreCase(operator.trim())) {
                sortBuilder.order(SortOrder.DESC);
            } else if ("+".equalsIgnoreCase(operator.trim())) {
                sortBuilder.order(SortOrder.ASC);
            } else {
                sortBuilder.order(SortOrder.DESC);
            }
            sortBuilderList.add(sortBuilder);
        }
        return sortBuilderList;
    }
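    /*
     * Sort expression sketch: "-createTime;+name" sorts by createTime descending,
     * then name ascending. The first character is always consumed as the
     * operator, so a field without a "+"/"-" prefix loses its first letter;
     * always include the prefix.
     */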
    /**
     * Parse a filter expression into a QueryBuilder.
     * Format: ";"-separated conditions combined with AND. Supported operators:
     * "=" (term / comma-separated terms), "?" (match_phrase), "<>" (must_not),
     * ">=", ">", "<=", "<" (range), and "||" between "field=value" pairs for OR.
     * The literal value "null" is treated as an empty string.
     * @param filters filter expression
     * @return bool query combining all conditions
     */
    public QueryBuilder getQueryBuilder(String filters) {
        BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery();
        if (StringUtils.isEmpty(filters)) {
            return boolQueryBuilder;
        }
        String [] filterArr = filters.split(";");
        for (String filter : filterArr) {
            if (filter.contains("||")){
                String [] fields = filter.split("\\|\\|");
                BoolQueryBuilder queryBuilder = QueryBuilders.boolQuery();
                for (String field : fields) {
                    String [] condition = field.split("=");
                    if ("null".equals(condition[1])) {
                        condition[1] ="";
                    }
                    queryBuilder.should(QueryBuilders.termQuery(condition[0], condition[1]));
                }
                boolQueryBuilder.must(queryBuilder);
            } else if (filter.contains("?")) {
                String [] condition = filter.split("\\?");
                if ("null".equals(condition[1])) {
                    condition[1] ="";
                }
                MatchPhraseQueryBuilder matchQueryBuilder = QueryBuilders.matchPhraseQuery(condition[0], condition[1]);
                boolQueryBuilder.must(matchQueryBuilder);
            } else if (filter.contains("<>")) {
                String [] condition = filter.split("<>");
                if (condition[1].contains(",")) {
                    String [] inCondition = condition[1].split(",");
                    TermsQueryBuilder termsQueryBuilder = QueryBuilders.termsQuery(condition[0], inCondition);
                    boolQueryBuilder.mustNot(termsQueryBuilder);
                } else {
                    if ("null".equals(condition[1])) {
                        condition[1] ="";
                    }
                    TermQueryBuilder termQueryBuilder = QueryBuilders.termQuery(condition[0], condition[1]);
                    boolQueryBuilder.mustNot(termQueryBuilder);
                }
            } else if (filter.contains(">=")) {
                String [] condition = filter.split(">=");
                RangeQueryBuilder rangeQueryBuilder = QueryBuilders.rangeQuery(condition[0]);
                rangeQueryBuilder.gte(condition[1]);
                boolQueryBuilder.must(rangeQueryBuilder);
            } else if (filter.contains(">")) {
                String [] condition = filter.split(">");
                RangeQueryBuilder rangeQueryBuilder = QueryBuilders.rangeQuery(condition[0]);
                rangeQueryBuilder.gt(condition[1]);
                boolQueryBuilder.must(rangeQueryBuilder);
            } else if (filter.contains("<=")) {
                String [] condition = filter.split("<=");
                RangeQueryBuilder rangeQueryBuilder = QueryBuilders.rangeQuery(condition[0]);
                rangeQueryBuilder.lte(condition[1]);
                boolQueryBuilder.must(rangeQueryBuilder);
            } else if (filter.contains("<")) {
                String [] condition = filter.split("<");
                RangeQueryBuilder rangeQueryBuilder = QueryBuilders.rangeQuery(condition[0]);
                rangeQueryBuilder.lt(condition[1]);
                boolQueryBuilder.must(rangeQueryBuilder);
            } else if (filter.contains("=")) {
                String [] condition = filter.split("=");
                if (condition[1].contains(",")) {
                    String [] inCondition = condition[1].split(",");
                    TermsQueryBuilder termsQueryBuilder = QueryBuilders.termsQuery(condition[0], inCondition);
                    boolQueryBuilder.must(termsQueryBuilder);
                } else {
                    if ("null".equals(condition[1])) {
                        condition[1] = "";
                    }
                    TermQueryBuilder termQueryBuilder = QueryBuilders.termQuery(condition[0], condition[1]);
                    boolQueryBuilder.must(termQueryBuilder);
                }
            }
        }
        return boolQueryBuilder;
    }
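    /*
     * Filter expression sketch (field names are hypothetical):
     *
     *   "status=1"                      -> term query status = 1
     *   "status=1,2,3"                  -> terms query status IN (1, 2, 3)
     *   "name?john"                     -> match_phrase query on name
     *   "status<>0"                     -> must_not term status = 0
     *   "createTime>=2020-01-01;age<60" -> two range conditions ANDed together
     *   "type=1||type=2"                -> OR of two term queries
     */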
}

+ 45 - 0
common/commons-data-elasticsearch/src/main/java/com/yihu/jw/ehr/elasticsearch/config/ElasticSearchConfig.java

@@ -0,0 +1,45 @@
package com.yihu.jw.ehr.elasticsearch.config;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Configuration;
import javax.annotation.PostConstruct;
/**
 * Created by progr1mmer on 2017/12/1.
 */
@ConfigurationProperties(prefix = "elasticsearch")
@Configuration
public class ElasticSearchConfig {
    // Cluster name
    private String clusterName;
    // Cluster nodes
    private String clusterNodes;
    public String getClusterName() {
        return clusterName;
    }
    public void setClusterName(String clusterName) {
        this.clusterName = clusterName;
    }
    public String getClusterNodes() {
        return clusterNodes;
    }
    public void setClusterNodes(String clusterNodes) {
        this.clusterNodes = clusterNodes;
    }
    @PostConstruct
    private void configInfo() {
        StringBuilder info = new StringBuilder("{");
        info.append("\n  elasticsearch.cluster-name = " + clusterName);
        info.append("\n  elasticsearch.cluster-nodes = " + clusterNodes);
        info.append("\n}");
        System.out.println("Elasticsearch.configInfo : " + info.toString());
    }
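    /*
     * Matching application.yml sketch (values are hypothetical):
     *
     *   elasticsearch:
     *     cluster-name: es-cluster
     *     cluster-nodes: 172.19.103.45:9300,172.19.103.46:9300
     */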
}

+ 46 - 0
common/commons-data-fastdfs/pom.xml

@@ -0,0 +1,46 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <groupId>com.yihu.jw</groupId>
        <artifactId>wlyy-parent-pom</artifactId>
        <version>2.4.0</version>
        <relativePath>../../wlyy-lib-parent-pom/pom.xml</relativePath>
    </parent>
    <artifactId>commons-data-fastdfs</artifactId>
    <packaging>jar</packaging>
    <dependencies>
        <dependency>
            <groupId>com.fasterxml.jackson.core</groupId>
            <artifactId>jackson-databind</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework</groupId>
            <artifactId>spring-context</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot</artifactId>
        </dependency>
        <dependency>
            <groupId>org.csource</groupId>
            <artifactId>fastdfs-client-java</artifactId>
            <version>${version.fastdfs}</version>
        </dependency>
    </dependencies>
    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
            </plugin>
        </plugins>
    </build>
</project>

+ 53 - 0
common/commons-data-fastdfs/src/main/java/com/yihu/jw/ehr/fastdfs/FastDFSPoolEhr.java

@@ -0,0 +1,53 @@
package com.yihu.jw.ehr.fastdfs;
import com.yihu.jw.ehr.fastdfs.config.FastDFSConfigEhr;
import org.csource.fastdfs.StorageClient;
import org.csource.fastdfs.TrackerClient;
import org.csource.fastdfs.TrackerServer;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.config.ConfigurableBeanFactory;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;
import java.io.IOException;
import java.util.*;
/**
 * Created by szx on 2015/9/19.
 */
@Component
@Scope(ConfigurableBeanFactory.SCOPE_SINGLETON)
public class FastDFSPoolEhr {
    private Map<Integer, TrackerServer> trackerServerMap = new HashMap<>();
    private List<StorageClient> storageClientPool = new ArrayList<>();
    @Autowired
    private FastDFSConfigEhr fastDFSConfigEhr;
    public synchronized StorageClient getStorageClient() throws IOException {
        if (storageClientPool.isEmpty()) {
            TrackerClient tracker = new TrackerClient();
            TrackerServer trackerServer = tracker.getConnection();
            StorageClient storageClient = new StorageClient(trackerServer, null);
            trackerServerMap.put(storageClient.hashCode(), trackerServer);
            return storageClient;
        }
        int lastIndex = storageClientPool.size() - 1;
        return storageClientPool.remove(lastIndex);
    }
    public synchronized void releaseStorageClient(StorageClient storageClient) throws IOException {
        if (storageClient != null) {
            if (storageClientPool.size() > fastDFSConfigEhr.getPool().getMaxSize()) {
                TrackerServer trackerServer = trackerServerMap.remove(storageClient.hashCode());
                if (trackerServer != null) {
                    trackerServer.close();
                }
            } else {
                storageClientPool.add(0, storageClient);
            }
        }
    }
}

+ 357 - 0
common/commons-data-fastdfs/src/main/java/com/yihu/jw/ehr/fastdfs/FastDFSUtilEhr.java

@@ -0,0 +1,357 @@
package com.yihu.jw.ehr.fastdfs;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.yihu.jw.ehr.fastdfs.config.FastDFSConfigEhr;
import org.csource.common.MyException;
import org.csource.common.NameValuePair;
import org.csource.fastdfs.*;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.springframework.util.StringUtils;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * FastDFS client utility.
 *
 * Intended to be used as a Spring bean.
 *
 * @author szx
 * @author Sand
 */
@Component
public class FastDFSUtilEhr {
    public final static String GROUP_NAME = "groupName";
    public final static String REMOTE_FILE_NAME = "remoteFileName";
    public final static String FILE_ID = "fileId";
    public final static String FILE_URL = "fileUrl";
    public final static String FILE_SIZE = "fileSize";
    @Autowired
    private FastDFSPoolEhr pool;
    @Autowired
    private FastDFSConfigEhr fastDFSConfigEhr;
    public ObjectNode upload(InputStream in, String fileExtension, String description) throws IOException, MyException, NoSuchAlgorithmException{
        NameValuePair[] fileMetaData = new NameValuePair[1];
        fileMetaData[0] = new NameValuePair("description", description == null ? "" : description);
        return upload(in, fileExtension, fileMetaData);
    }
    public ObjectNode upload(InputStream in, String fileExtension, NameValuePair[] fileMetaData) throws IOException, MyException, NoSuchAlgorithmException{
        return upload(null, in, fileExtension, fileMetaData);
    }
    /**
     * Upload a file from an input stream.
     * InputStream in = new FileInputStream("C://Desert.jpg");
     * ObjectNode msg = FileUtil.upload(in, "jpg", "desert");
     * in.close();
     *
     * @param _groupName    group name, may be null
     * @param in            input stream to upload
     * @param fileExtension file extension, without the leading "."
     * @param fileMetaData  file metadata as name-value pairs
     * @return a JSON object of the form:
     * {
     * "groupName": "healthArchiveGroup",
     * "remoteFileName": "/M00/00/24/rBFuH1XdQC6AP3CDAAzodQCbVVc052.jpg",
     * "fileId": "group1/M00/00/24/rBFuH1XdQC6AP3CDAAzodQCbVVc052.jpg",
     * "fileUrl": "http://172.19.103.13/healthArchiveGroup/M00/00/24/rBFuH1XdQC6AP3CDAAzodQCbVVc052.jpg"
     * }
     * <p>
     * groupName and remoteFileName can be used to query file information in FastDFS; for simple image display they can be ignored.
     * fileId holds the full path on FastDFS; prefer persisting this value so a future server domain change does not break stored references, and configure the server domain separately.
     * fileUrl holds the full web access path; to stay robust against domain changes, avoid persisting this value directly.
     * To have downloads show the original file name, append an attname parameter to the URL, e.g.:
     * <p>
     * http://host/healthArchiveGroup/M00/00/00/rBFuH1XdIseAUTZZAA1rIuRd3Es062.jpg?attname=a.jpg
     * @throws Exception
     */
    public ObjectNode upload(String _groupName, InputStream in, String fileExtension, NameValuePair[] fileMetaData) throws IOException, MyException, NoSuchAlgorithmException{
        StorageClient client = pool.getStorageClient();
        BufferedInputStream bufferedInputStream = null;
        try {
            ObjectNode message = new ObjectMapper().createObjectNode();
            // in.available() is only an estimate, so read the stream fully in a loop
            bufferedInputStream = new BufferedInputStream(in);
            java.io.ByteArrayOutputStream byteBuffer = new java.io.ByteArrayOutputStream();
            byte[] chunk = new byte[8192];
            for (int n; (n = bufferedInputStream.read(chunk)) != -1; ) {
                byteBuffer.write(chunk, 0, n);
            }
            byte[] fileBuffer = byteBuffer.toByteArray();
            message.put(FILE_SIZE, fileBuffer.length);
            String [] results;
            if (!StringUtils.isEmpty(_groupName)) {
                results = client.upload_file(_groupName, fileBuffer, fileExtension, fileMetaData);
            } else {
                results = client.upload_file(fileBuffer, fileExtension, fileMetaData);
            }
            if (results != null) {
                String groupName = results[0];
                String remoteFile = results[1];
                message.put(GROUP_NAME, groupName);
                message.put(REMOTE_FILE_NAME, remoteFile);
                String fileId = groupName + StorageClient1.SPLIT_GROUP_NAME_AND_FILENAME_SEPERATOR + remoteFile;
                message.put(FILE_ID, fileId);
                String fileURl = fastDFSConfigEhr.getPublicServer() + "/" + fileId;
                if (ClientGlobal.g_anti_steal_token) {
                    int ts = (int) (System.currentTimeMillis() / 1000);
                    String token = ProtoCommon.getToken(fileId, ts, ClientGlobal.g_secret_key);
                    fileURl += "?token=" + token + "&ts=" + ts;
                }
                message.put(FILE_URL, fileURl);
            }
            return message;
        } finally {
            pool.releaseStorageClient(client);
            if (bufferedInputStream != null) {
                bufferedInputStream.close();
            }
        }
    }
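    /*
     * Usage sketch (path and description are hypothetical):
     *
     *   try (InputStream in = new java.io.FileInputStream("C://Desert.jpg")) {
     *       ObjectNode msg = fastDFSUtilEhr.upload(in, "jpg", "desert");
     *       String fileId = msg.get(FILE_ID).asText();
     *   }
     */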
    /**
     * Upload a local file.
     * ObjectNode a = FileUtil.upload("C://Desert.jpg", "desert");
     * System.out.println(a.toString());
     *
     * @param filePath    absolute path of the local file, e.g. C://Desert.jpg
     * @param description file description, may be null
     * @return a JSON object of the form:
     * {
     * "groupName": "healthArchiveGroup",
     * "remoteFileName": "/M00/00/24/rBFuH1XdQC6AP3CDAAzodQCbVVc052.jpg",
     * "fileId": "group1/M00/00/24/rBFuH1XdQC6AP3CDAAzodQCbVVc052.jpg",
     * "fileUrl": "http://172.19.103.13/healthArchiveGroup/M00/00/24/rBFuH1XdQC6AP3CDAAzodQCbVVc052.jpg"
     * }
     * <p>
     * groupName and remoteFileName can be used to query file information in FastDFS; for simple image display they can be ignored.
     * fileId holds the full path on FastDFS; prefer persisting this value so a future server domain change does not break stored references, and configure the server domain separately.
     * fileUrl holds the full web access path; to stay robust against domain changes, avoid persisting this value directly.
     * To have downloads show the original file name, append an attname parameter to the URL, e.g.:
     * <p>
     * http://host/healthArchiveGroup/M00/00/00/rBFuH1XdIseAUTZZAA1rIuRd3Es062.jpg?attname=a.jpg
     * @throws Exception
     */
    public ObjectNode upload(String filePath, String description) throws IOException, MyException, NoSuchAlgorithmException {
        StorageClient client = pool.getStorageClient();
        try {
            NameValuePair[] fileMetaData = new NameValuePair[1];
            fileMetaData[0] = new NameValuePair("description", description == null ? "" : description);
            ObjectNode message = new ObjectMapper().createObjectNode();
            String fileExtension;
            if (filePath.contains(".")) {
                fileExtension = filePath.substring(filePath.lastIndexOf(".") + 1);
            } else {
                throw new RuntimeException("上传失败, 文件缺失扩展名.");
            }
            String[] results = client.upload_file(filePath, fileExtension, fileMetaData);
            if (results != null) {
                String groupName = results[0];
                String remoteFileName = results[1];
                message.put(GROUP_NAME, groupName);
                message.put(REMOTE_FILE_NAME, remoteFileName);
                String fileId = groupName + StorageClient1.SPLIT_GROUP_NAME_AND_FILENAME_SEPERATOR + remoteFileName;
                message.put(FILE_ID, fileId);
                String fileURl = fastDFSConfigEhr.getPublicServer() + "/" + fileId;
                if (ClientGlobal.g_anti_steal_token) {
                    int ts = (int) (System.currentTimeMillis() / 1000);
                    String token = ProtoCommon.getToken(fileId, ts, ClientGlobal.g_secret_key);
                    fileURl += "?token=" + token + "&ts=" + ts;
                }
                message.put(FILE_URL, fileURl);
            }
            return message;
        } finally {
            pool.releaseStorageClient(client);
        }
    }
    /**
     * Upload a file (raw byte-array variant)
     * @param group_name group name
     * @param master_filename master file name (for slave files)
     * @param prefix_name slave file prefix
     * @param file_buff file content
     * @param file_ext_name file extension, without the leading "."
     * @param meta_list file metadata as name-value pairs
     * @return upload result, same shape as upload(String, InputStream, String, NameValuePair[])
     * @throws IOException
     * @throws MyException
     * @throws NoSuchAlgorithmException
     */
    public ObjectNode upload(String group_name, String master_filename, String prefix_name, byte [] file_buff, String file_ext_name, NameValuePair[] meta_list) throws IOException, MyException, NoSuchAlgorithmException{
        StorageClient client = pool.getStorageClient();
        try {
            ObjectNode message = new ObjectMapper().createObjectNode();
            message.put(FILE_SIZE, file_buff.length);
            String [] results = client.upload_file(group_name, master_filename, prefix_name, file_buff, file_ext_name, meta_list);
            if (results != null) {
                String groupName = results[0];
                String remoteFile = results[1];
                message.put(GROUP_NAME, groupName);
                message.put(REMOTE_FILE_NAME, remoteFile);
                String fileId = groupName + StorageClient1.SPLIT_GROUP_NAME_AND_FILENAME_SEPERATOR + remoteFile;
                message.put(FILE_ID, fileId);
                String fileURl = fastDFSConfigEhr.getPublicServer() + "/" + fileId;
                if (ClientGlobal.g_anti_steal_token) {
                    int ts = (int) (System.currentTimeMillis() / 1000);
                    String token = ProtoCommon.getToken(fileId, ts, ClientGlobal.g_secret_key);
                    fileURl += "?token=" + token + "&ts=" + ts;
                }
                message.put(FILE_URL, fileURl);
            }
            return message;
        } finally {
            pool.releaseStorageClient(client);
        }
    }
    /**
     * Get file information
     * @param groupName group name
     * @param remoteFileName remote file path
     * @return file information
     * @throws IOException
     * @throws MyException
     */
    public FileInfo getFileInfo(String groupName, String remoteFileName) throws IOException, MyException{
        StorageClient client = pool.getStorageClient();
        try {
            return client.get_file_info(groupName, remoteFileName);
        } finally {
            pool.releaseStorageClient(client);
        }
    }
    /**
     * Get file metadata
     * @param groupName group name
     * @param remoteFileName remote file path
     * @return metadata name-value pairs
     * @throws IOException
     * @throws MyException
     */
    public NameValuePair[] getMetadata(String groupName, String remoteFileName) throws IOException, MyException{
        StorageClient client = pool.getStorageClient();
        try {
            return client.get_metadata(groupName, remoteFileName);
        } finally {
            pool.releaseStorageClient(client);
        }
    }
    /**
     * Download a file and return its contents as a byte array.
     *
     * @param groupName      group (volume) name on FastDFS
     * @param remoteFileName path on FastDFS
     * @return the file bytes
     * @throws Exception
     */
    public byte [] download(String groupName, String remoteFileName) throws IOException, MyException {
        StorageClient client = pool.getStorageClient();
        try {
            return client.download_file(groupName, remoteFileName);
        } finally {
            pool.releaseStorageClient(client);
        }
    }
    /**
     * Download a file to a local path.
     *
     * @param groupName      group (volume) name on FastDFS
     * @param remoteFileName path on FastDFS
     * @param localPath      local directory prefix
     * @return the local file name the content was written to
     */
    public String download(String groupName, String remoteFileName, String localPath) throws IOException, MyException {
        StorageClient client = pool.getStorageClient();
        try {
            String localFileName = localPath + remoteFileName.replaceAll("/", "_");
            client.download_file(groupName, remoteFileName, 0, 0, localFileName);
            return localFileName;
        } finally {
            pool.releaseStorageClient(client);
        }
    }
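    /*
     * Sketch: a remote file "M00/00/24/xyz.jpg" downloaded with localPath
     * "/tmp/" is written to "/tmp/M00_00_24_xyz.jpg", since slashes in the
     * remote name are replaced with underscores.
     */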
    /**
     * Delete a file.
     *
     * @param groupName group name
     * @param remoteFileName remote file path
     */
    public void delete(String groupName, String remoteFileName) throws IOException, MyException {
        StorageClient client = pool.getStorageClient();
        try {
            client.delete_file(groupName, remoteFileName);
        } finally {
            pool.releaseStorageClient(client);
        }
    }
    /**
     * Get storage group status (capacity and file counts)
     * @return per-group stats plus an "all" summary entry
     * @throws IOException
     */
    public List<Map<String, Object>> status() throws IOException {
        TrackerGroup trackerGroup = ClientGlobal.getG_tracker_group();
        int totalServer = trackerGroup.tracker_servers.length;
        List<Map<String, Object>> resultList = new ArrayList<>(totalServer + 1);
        long totalMb  = 0;
        long freeMb = 0;
        long fileCount = 0;
        TrackerClient trackerClient = new TrackerClient();
        for (int i = 0; i < trackerGroup.tracker_servers.length; i++) {
            TrackerServer trackerServer = null;
            try {
                trackerServer = trackerGroup.getConnection(i);
                StructGroupStat[] structGroupStats = trackerClient.listGroups(trackerServer);
                for (StructGroupStat structGroupStat : structGroupStats) {
                    String groupName = structGroupStat.getGroupName();
                    Map<String, Object> resultMap = new HashMap<>();
                    resultMap.put("server", groupName);
                    StructStorageStat [] structStorageStats = trackerClient.listStorages(trackerServer, groupName);
                    long totalUpload = 0;
                    long totalDelete = 0;
                    for (StructStorageStat structStorageStat : structStorageStats) {
                        totalUpload += structStorageStat.getSuccessUploadCount();
                        totalDelete += structStorageStat.getSuccessDeleteCount();
                    }
                    fileCount += (totalUpload - totalDelete);
                    long singleTotalMb = structGroupStat.getTotalMB();
                    totalMb += singleTotalMb;
                    long singleFreeMb = structGroupStat.getFreeMB();
                    freeMb += singleFreeMb;
                    resultMap.put("total", singleTotalMb / 1024);
                    resultMap.put("free", singleFreeMb / 1024);
                    resultMap.put("fileCount", totalUpload - totalDelete);
                    resultList.add(resultMap);
                }
            } finally {
                if (null != trackerServer) {
                    trackerServer.close();
                }
            }
            // Group stats are cluster-wide, so stats from the first reachable tracker suffice.
            break;
        }
        Map<String, Object> resultMap = new HashMap<>();
        resultMap.put("server", "all");
        resultMap.put("total", totalMb/1024);
        resultMap.put("free", freeMb/1024);
        resultMap.put("fileCount", fileCount);
        resultList.add(resultMap);
        return resultList;
    }
}

+ 192 - 0
common/commons-data-fastdfs/src/main/java/com/yihu/jw/ehr/fastdfs/config/FastDFSConfigEhr.java

@@ -0,0 +1,192 @@
package com.yihu.jw.ehr.fastdfs.config;
import org.csource.common.MyException;
import org.csource.fastdfs.ClientGlobal;
import org.csource.fastdfs.TrackerGroup;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Configuration;
import javax.annotation.PostConstruct;
import java.net.InetSocketAddress;
/**
 * @author Sand
 * @version 1.0
 * @created 2015.11.27 16:08
 * Modified by progr1mmer on 2018/07/26.
 */
@Configuration
@ConfigurationProperties(prefix = "fast-dfs")
public class FastDFSConfigEhr {
    private int connectTimeout;
    private int networkTimeout;
    private String charset;
    private String trackerServer;
    private String publicServer;
    private Pool pool = new Pool();
    private Http http = new Http();
    public int getConnectTimeout() {
        return connectTimeout;
    }
    public void setConnectTimeout(int connectTimeout) {
        this.connectTimeout = connectTimeout;
    }
    public int getNetworkTimeout() {
        return networkTimeout;
    }
    public void setNetworkTimeout(int networkTimeout) {
        this.networkTimeout = networkTimeout;
    }
    public String getCharset() {
        return charset;
    }
    public void setCharset(String charset) {
        this.charset = charset;
    }
    public String getTrackerServer() {
        return trackerServer;
    }
    public void setTrackerServer(String trackerServer) {
        this.trackerServer = trackerServer;
    }
    public String getPublicServer() {
        return publicServer;
    }
    public void setPublicServer(String publicServer) {
        this.publicServer = publicServer;
    }
    public Pool getPool() {
        return pool;
    }
    public void setPool(Pool pool) {
        this.pool = pool;
    }
    public Http getHttp() {
        return http;
    }
    public void setHttp(Http http) {
        this.http = http;
    }
    public class Pool {
        private int initSize;
        private int maxSize;
        private int waitTime;
        public int getInitSize() {
            return initSize;
        }
        public void setInitSize(int initSize) {
            this.initSize = initSize;
        }
        public int getMaxSize() {
            return maxSize;
        }
        public void setMaxSize(int maxSize) {
            this.maxSize = maxSize;
        }
        public int getWaitTime() {
            return waitTime;
        }
        public void setWaitTime(int waitTime) {
            this.waitTime = waitTime;
        }
    }
    public class Http {
        private int trackerHttpPort;
        private boolean antiStealToken;
        private String secretKey;
        public int getTrackerHttpPort() {
            return trackerHttpPort;
        }
        public void setTrackerHttpPort(int trackerHttpPort) {
            this.trackerHttpPort = trackerHttpPort;
        }
        public boolean isAntiStealToken() {
            return antiStealToken;
        }
        public void setAntiStealToken(boolean antiStealToken) {
            this.antiStealToken = antiStealToken;
        }
        public String getSecretKey() {
            return secretKey;
        }
        public void setSecretKey(String secretKey) {
            this.secretKey = secretKey;
        }
    }
    @PostConstruct
    void init() throws Exception{
        // The following is copied from ClientGlobal.init()
        ClientGlobal.g_connect_timeout = connectTimeout;
        if (ClientGlobal.g_connect_timeout < 0) {
            ClientGlobal.g_connect_timeout = 5;
        }
        ClientGlobal.g_connect_timeout *= 1000;
        ClientGlobal.g_network_timeout = networkTimeout;
        if (ClientGlobal.g_network_timeout < 0) {
            ClientGlobal.g_network_timeout = 30;
        }
        ClientGlobal.g_network_timeout *= 1000;
        ClientGlobal.g_charset = charset;
        if (ClientGlobal.g_charset == null || ClientGlobal.g_charset.length() == 0) {
            ClientGlobal.g_charset = "ISO8859-1";
        }
        // String.split never returns null, so validate the raw property instead
        if (trackerServer == null || trackerServer.trim().isEmpty()) {
            throw new MyException("item \"tracker_server\" not found");
        } else {
            String[] szTrackerServers = trackerServer.split(",");
            InetSocketAddress[] tracker_servers = new InetSocketAddress[szTrackerServers.length];
            for (int i = 0; i < szTrackerServers.length; ++i) {
                String[] parts = szTrackerServers[i].split("\\:", 2);
                if (parts.length != 2) {
                    throw new MyException("the value of item \"tracker_server\" is invalid, the correct format is host:port");
                }
                tracker_servers[i] = new InetSocketAddress(parts[0].trim(), Integer.parseInt(parts[1].trim()));
            }
            ClientGlobal.g_tracker_group = new TrackerGroup(tracker_servers);
            ClientGlobal.g_tracker_http_port = http.trackerHttpPort;
            ClientGlobal.g_anti_steal_token = http.antiStealToken;
            if (ClientGlobal.g_anti_steal_token) {
                ClientGlobal.g_secret_key = http.secretKey;
            }
        }
        System.out.println("FastDFS.configInfo() : " + ClientGlobal.configInfo());
    }
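    /*
     * Matching application.yml sketch (values are hypothetical; relaxed binding
     * maps kebab-case keys onto the camelCase fields above):
     *
     *   fast-dfs:
     *     connect-timeout: 10
     *     network-timeout: 60
     *     charset: UTF-8
     *     tracker-server: 172.19.103.13:22122,172.19.103.14:22122
     *     public-server: http://172.19.103.13
     *     pool:
     *       init-size: 5
     *       max-size: 20
     *       wait-time: 500
     *     http:
     *       tracker-http-port: 80
     *       anti-steal-token: false
     *       secret-key: secret
     */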
}

+ 57 - 0
common/commons-data-hbase/pom.xml

@@ -0,0 +1,57 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <groupId>com.yihu.jw</groupId>
        <artifactId>wlyy-parent-pom</artifactId>
        <version>2.4.0</version>
        <relativePath>../../wlyy-lib-parent-pom/pom.xml</relativePath>
    </parent>
    <artifactId>commons-data-hbase</artifactId>
    <packaging>jar</packaging>
    <dependencies>
        <dependency>
            <groupId>org.apache.hbase</groupId>
            <artifactId>hbase-client</artifactId>
            <version>1.1.1</version>
            <exclusions>
                <exclusion>
                    <groupId>org.slf4j</groupId>
                    <artifactId>*</artifactId>
                </exclusion>
            </exclusions>
        </dependency>
        <dependency>
            <groupId>org.springframework.data</groupId>
            <artifactId>spring-data-hadoop-hbase</artifactId>
            <version>${version.spring-data-hadoop}</version>
        </dependency>
        <dependency>
            <groupId>org.springframework</groupId>
            <artifactId>spring-context</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot</artifactId>
        </dependency>
        <dependency>
            <groupId>com.yihu.jw</groupId>
            <artifactId>commons-util</artifactId>
            <version>2.4.0</version>
        </dependency>
    </dependencies>
    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
            </plugin>
        </plugins>
    </build>
</project>

+ 32 - 0
common/commons-data-hbase/src/main/java/com/yihu/jw/hbase/AbstractHBaseClient.java

@@ -0,0 +1,32 @@
package com.yihu.jw.hbase;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.hadoop.hbase.HbaseTemplate;
/**
 * AbstractHBaseClient - base class for HBase clients
 * @author hzp
 * @created 2017.05.03
 */
public abstract class AbstractHBaseClient {
    @Autowired
    protected HbaseTemplate hbaseTemplate;
    /**
     * Create a connection using the injected template's configuration
     */
    protected Connection getConnection() throws Exception {
        return getConnection(hbaseTemplate);
    }
    /**
     * Create a connection from the given template's configuration
     */
    protected Connection getConnection(HbaseTemplate hbaseTemplate) throws Exception {
        return ConnectionFactory.createConnection(hbaseTemplate.getConfiguration());
    }
}

+ 213 - 0
common/commons-data-hbase/src/main/java/com/yihu/jw/hbase/HBaseAdmin.java

@@ -0,0 +1,213 @@
package com.yihu.jw.hbase;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.util.Bytes;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.ArrayList;
import java.util.List;
/**
 * Service - HBase DDL
 * @author hzp && Progr1mmer
 * @created 2017.05.03
 * @modified 2017/11/23
 */
@Service
public class HBaseAdmin extends AbstractHBaseClient {
    @Autowired
    private ObjectMapper objectMapper;
    /**
     * Check whether a table exists
     * @param tableName table name
     * @return true if the table exists
     * @throws Exception
     */
    public boolean isTableExists(String tableName) throws Exception {
        Connection connection = null;
        Admin admin = null;
        try {
            connection = getConnection();
            admin = connection.getAdmin();
            return admin.tableExists(TableName.valueOf(tableName));
        }finally {
            if(admin != null) {
                admin.close();
            }
            if(connection != null) {
                connection.close();
            }
        }
    }
    /**
     * Create a table if it does not already exist
     * @param tableName table name
     * @param columnFamilies column families to create
     * @throws Exception
     */
    public void createTable(String tableName, String... columnFamilies) throws Exception {
        Connection connection = null;
        Admin admin = null;
        try {
            connection = getConnection();
            admin = connection.getAdmin();
            if (!admin.tableExists(TableName.valueOf(tableName))) {
                HTableDescriptor tableDescriptor = new HTableDescriptor(TableName.valueOf(tableName));
                for (String fc : columnFamilies) {
                    tableDescriptor.addFamily(new HColumnDescriptor(fc));
                }
                admin.createTable(tableDescriptor);
            }
        }finally {
            if(admin != null) {
                admin.close();
            }
            if(connection != null) {
                connection.close();
            }
        }
    }
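    /*
     * Usage sketch (table name is hypothetical): create a table with a "basic"
     * and a "d" column family; existing tables are left untouched.
     *
     *   hBaseAdmin.createTable("health_profile", "basic", "d");
     */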
    /**
     * List table names matching a regex
     * @param regex regular expression; null or empty lists all tables
     * @param includeSysTables whether to include system tables
     * @return matching table names
     * @throws Exception
     */
    public List<String> getTableList(String regex, boolean includeSysTables) throws Exception {
        Connection connection = null;
        Admin admin = null;
        List<String> tables = new ArrayList<>();
        try {
            connection = getConnection();
            admin = connection.getAdmin();
            TableName[] tableNames;
            if (regex == null || regex.length() == 0) {
                tableNames = admin.listTableNames();
            } else {
                tableNames = admin.listTableNames(regex, includeSysTables);
            }
            for (TableName tableName : tableNames) {
                tables.add(tableName.getNameAsString());
            }
            return tables;
        }finally {
            if(admin != null) {
                admin.close();
            }
            if(connection != null) {
                connection.close();
            }
        }
    }
    /**
     * Batch-truncate tables (drops each table and recreates it with the same schema)
     * @param tables table names
     * @throws Exception
     */
    public void cleanTable(List<String> tables) throws Exception {
        Connection connection = null;
        Admin admin = null;
        try {
            connection = getConnection();
            admin = connection.getAdmin();
            for (String tableName : tables) {
                TableName tn = TableName.valueOf(tableName);
                if (admin.tableExists(TableName.valueOf(tableName))) {
                    HTableDescriptor descriptor = admin.getTableDescriptor(tn);
                    admin.disableTable(tn);
                    admin.deleteTable(tn);
                    admin.createTable(descriptor);
                }
                else {
                    System.out.print("table " + tableName + " does not exist.\r\n");
                }
            }
        } finally {
            if(admin != null) {
                admin.close();
            }
            if(connection != null) {
                connection.close();
            }
        }
    }
    /**
     * Disable and delete a table
     * @param tableName table name
     * @throws Exception
     */
    public void dropTable(String tableName) throws Exception {
        Connection connection = null;
        Admin admin = null;
        try {
            connection = getConnection();
            admin = connection.getAdmin();
            admin.disableTable(TableName.valueOf(tableName));
            admin.deleteTable(TableName.valueOf(tableName));
        } finally {
            if(admin != null) {
                admin.close();
            }
            if(connection != null) {
                connection.close();
            }
        }
    }
    /**
     * Get the table's column families
     * @param tableName table name
     * @return index-to-family-name mapping, or null if the table does not exist
     * @throws Exception
     */
    public ObjectNode getTableMetaData(String tableName) throws Exception{
        Connection connection = null;
        Admin admin = null;
        try {
            connection = getConnection();
            admin = connection.getAdmin();
            TableName tn = TableName.valueOf(tableName);
            if (admin.tableExists(tn)) {
                ObjectNode objectNode = objectMapper.createObjectNode();
                HTableDescriptor tableDescriptor = admin.getTableDescriptor(tn);
                HColumnDescriptor[] columnDescriptors = tableDescriptor.getColumnFamilies();
                for (int i = 0; i < columnDescriptors.length; ++i) {
                    HColumnDescriptor columnDescriptor = columnDescriptors[i];
                    objectNode.put(Integer.toString(i), Bytes.toString(columnDescriptor.getName()));
                }
                return objectNode;
            }
            return null;
        }finally {
            if(admin != null) {
                admin.close();
            }
            if(connection != null) {
                connection.close();
            }
        }
    }
}

+ 395 - 0
common/commons-data-hbase/src/main/java/com/yihu/jw/hbase/HBaseDao.java

@@ -0,0 +1,395 @@
package com.yihu.jw.hbase;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.filter.CompareFilter;
import org.apache.hadoop.hbase.filter.RegexStringComparator;
import org.apache.hadoop.hbase.filter.RowFilter;
import org.apache.hadoop.hbase.util.Bytes;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.hadoop.hbase.RowMapper;
import org.springframework.data.hadoop.hbase.TableCallback;
import org.springframework.stereotype.Service;
import org.springframework.util.StringUtils;
import java.util.*;
/**
 * HBase - DML
 * @author Progr1mmer
 */
@Service
public class HBaseDao extends AbstractHBaseClient {
    @Autowired
    private ObjectMapper objectMapper;
    /**
     * Insert a row - multiple column families
     * @param tableName table name
     * @param rowKey row key
     * @param family map of family name to qualifier-value pairs
     * @throws Exception
     */
    public void add(String tableName , String rowKey, Map<String, Map<String, String>> family) {
        hbaseTemplate.execute(tableName, new TableCallback<Void>() {
            @Override
            public Void doInTable(HTableInterface table) throws Throwable {
                Put p = new Put(rowKey.getBytes());
                for (String familyName : family.keySet()) {
                    Map<String, String> map = family.get(familyName);
                    for (String qualifier : map.keySet()) {
                        String value = map.get(qualifier);
                        p.addColumn(familyName.getBytes(), qualifier.getBytes(), value.getBytes());
                    }
                }
                table.put(p);
                return null;
            }
        });
    }
    /**
     * Insert a row - single column family
     * @param tableName table name
     * @param rowKey row key
     * @param family column family name
     * @param columns column qualifiers
     * @param values column values, aligned with columns
     */
    public void add(String tableName, String rowKey, String family, Object[] columns, Object[] values) {
        hbaseTemplate.execute(tableName, new TableCallback<Void>() {
            @Override
            public Void doInTable(HTableInterface table) throws Throwable {
                Put put = new Put(Bytes.toBytes(rowKey));
                for (int j = 0; j < columns.length; j++) {
                    // Null values are not stored
                    if (values[j] != null) {
                        String column = String.valueOf(columns[j]);
                        String value = String.valueOf(values[j]);
                        put.addColumn(Bytes.toBytes(family), Bytes.toBytes(column), Bytes.toBytes(value));
                    }
                }
                table.put(put);
                return null;
            }
        });
    }
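    /*
     * Usage sketch (table, row key and columns are hypothetical):
     *
     *   hBaseDao.add("health_profile", "rowkey-001", "basic",
     *           new Object[]{"name", "idCard"}, new Object[]{"john", "350200199001011234"});
     */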
    /**
     * Delete a row
     * @param tableName table name
     * @param rowKey row key
     */
    public void delete(String tableName, String rowKey)  {
        hbaseTemplate.execute(tableName, new TableCallback<Void>() {
            @Override
            public Void doInTable(HTableInterface table) throws Throwable {
                Delete d = new Delete(rowKey.getBytes());
                table.delete(d);
                return null;
            }
        });
    }
    /**
     * Batch-delete rows
     * @param tableName table name
     * @param rowKeys row keys to delete
     * @return per-delete results
     * @throws Exception
     */
    public Object[] deleteBatch(String tableName, String[] rowKeys) {
        return hbaseTemplate.execute(tableName, new TableCallback<Object[]>() {
            @Override
            public Object[] doInTable(HTableInterface table) throws Throwable {
                List<Delete> deletes = new ArrayList<>(rowKeys.length);
                for (String rowKey : rowKeys) {
                    Delete delete = new Delete(Bytes.toBytes(rowKey));
                    deletes.add(delete);
                }
                Object[] results = new Object[deletes.size()];
                table.batch(deletes, results);
                return results;
            }
        });
    }
    /**
     * Delete a column family from a row
     * @param tableName table name
     * @param rowKey row key
     * @param familyName column family
     * @throws Exception
     */
    public void deleteFamily(String tableName, String rowKey, String familyName) throws Exception {
        hbaseTemplate.delete(tableName, rowKey, familyName);
    }
    /**
     * Delete a single column from a row
     * @param tableName table name
     * @param rowKey row key
     * @param familyName column family
     * @param columnName column qualifier
     * @throws Exception
     */
    public void deleteColumn(String tableName, String rowKey, String familyName, String columnName) throws Exception {
        hbaseTemplate.delete(tableName, rowKey, familyName, columnName);
    }
    /**
     * Update the value of a single cell
     */
    public void put(String tableName, String rowKey, String familyName, String qualifier, String value) throws Exception {
        hbaseTemplate.put(tableName, rowKey, familyName, qualifier, value.getBytes());
    }
    /**
     * Find row keys matching a regex (scans the "basic" column family)
     * @param tableName table name
     * @param startRow scan start row (inclusive)
     * @param stopRow scan stop row (exclusive)
     * @param rowKeyRegEx row key regular expression
     * @return matching row keys
     * @throws Exception
     */
    public String[] findRowKeys(String tableName, String startRow, String stopRow, String rowKeyRegEx) throws Exception {
        Scan scan = new Scan();
        scan.addFamily(Bytes.toBytes("basic"));
        scan.setStartRow(startRow.getBytes());
        scan.setStopRow(stopRow.getBytes());
        scan.setFilter(new RowFilter(CompareFilter.CompareOp.EQUAL, new RegexStringComparator(rowKeyRegEx)));
        List<String> list = new LinkedList<>();
        hbaseTemplate.find(tableName, scan, new RowMapper<Void>() {
            @Override
            public Void mapRow(Result result, int rowNum) throws Exception {
                list.add(Bytes.toString(result.getRow()));
                return null;
            }
        });
        return list.toArray(new String[list.size()]);
    }
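    /*
     * Usage sketch (values are hypothetical): row keys in ["2020", "2021")
     * whose key matches the regex; note the scan only reads the "basic" family.
     *
     *   String[] keys = hBaseDao.findRowKeys("health_profile", "2020", "2021", "^2020.*");
     */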
    /**
     * Total row count (scans all row keys of the "basic" family into memory)
     * @param tableName table name
     * @return row count
     * @throws Exception
     */
    public Integer count(String tableName) throws Exception {
        Scan scan = new Scan();
        scan.addFamily(Bytes.toBytes("basic"));
        scan.setFilter(new RowFilter(CompareFilter.CompareOp.EQUAL, new RegexStringComparator("^")));
        List<String> list = new LinkedList<>();
        hbaseTemplate.find(tableName, scan, new RowMapper<Void>() {
            @Override
            public Void mapRow(Result result, int rowNum) throws Exception {
                list.add(Bytes.toString(result.getRow()));
                return null;
            }
        });
        return list.size();
    }
    /**
     * Get one record by row key
     * @param tableName table name
     * @param rowKey row key
     * @return the row as a JSON string, or "" if absent
     */
     */
    public String get(String tableName, String rowKey) {
        return hbaseTemplate.get(tableName, rowKey, new RowMapper<String>() {
            @Override
            public String mapRow(Result result, int rowNum) throws Exception {
                if(!result.isEmpty()) {
                    List<Cell> ceList = result.listCells();
                    Map<String, Object> map = new HashMap<String, Object>();
                    map.put("rowkey", rowKey);
                    for (Cell cell : ceList) {
                        // By default the column family is not prefixed to the map key:
                        // Bytes.toString(cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength()) +"_"
                        map.put(Bytes.toString(cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength()),
                                Bytes.toString(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength()));
                    }
                    return objectMapper.writeValueAsString(map);
                }
                else{
                    return "";
                }
            }
        });
    }
    /**
     * Get a row by rowKey.
     * @param tableName
     * @param rowKey
     * @return Map of column name to value, or null when the row does not exist
     */
    public Map<String, Object> getResultMap(String tableName, String rowKey) {
        return hbaseTemplate.get(tableName, rowKey, new RowMapper<Map<String, Object>>() {
            @Override
            public Map<String, Object> mapRow(Result result, int rowNum) throws Exception {
                if(!result.isEmpty()) {
                    List<Cell> ceList = result.listCells();
                    Map<String, Object> map = new HashMap<String, Object>();
                    map.put("rowkey", rowKey);
                    for (Cell cell : ceList) {
                        // By default the column family is not prefixed to the map key:
                        // Bytes.toString(cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength()) +"_"
                        map.put(Bytes.toString(cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength()),
                                Bytes.toString(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength()));
                    }
                    return map;
                }else {
                    return null;
                }
            }
        });
    }
    /**
     * Get the raw Result for a rowKey.
     * @param tableName
     * @param rowKey
     * @return
     * @throws Exception
     */
    public Result getResult(String tableName, String rowKey) throws Exception {
        return hbaseTemplate.get(tableName, rowKey, new RowMapper<Result>() {
            @Override
            public Result mapRow(Result result, int rowNum) throws Exception {
                return result;
            }
        });
    }
    /**
     * Get all values under the given column family by table name and rowKey.
     * @param tableName table name
     * @param rowKey rowKey
     * @param familyName column family
     * @return
     */
    public Map<String, String> get(String tableName, String rowKey, String familyName) {
        return hbaseTemplate.get(tableName, rowKey, familyName, new RowMapper<Map<String, String>>(){
            @Override
            public Map<String, String> mapRow(Result result, int rowNum) throws Exception {
                Map<String, String> map = new HashMap<>();
                NavigableMap<byte[], byte[]> navigableMaps = result.getFamilyMap(familyName.getBytes());
                if(null != navigableMaps) {
                    for (byte[] key : navigableMaps.keySet()) {
                        String keys = new String(key);
                        String values = new String(navigableMaps.get(key));
                        map.put(keys, values);
                    }
                }
                return map;
            }
        });
    }
    /**
     * Get the value of the given column under a column family by table name and rowKey.
     * @param tableName table name
     * @param rowKey rowKey
     * @param familyName column family
     * @param qualifier column name
     * @return
     */
    public String get(String tableName, String rowKey, String familyName, String qualifier) {
        return hbaseTemplate.get(tableName, rowKey, familyName, qualifier, new RowMapper<String>(){
            @Override
            public String mapRow(Result result, int rowNum) throws Exception {
                Cell cell = result.getColumnLatestCell(familyName.getBytes(), qualifier.getBytes());
                // The cell is null when the column does not exist; guard against an NPE
                return cell == null ? null : new String(CellUtil.cloneValue(cell));
            }
        });
    }
    /**
     * Batch-get multiple rows restricted to the given columns by a list of rowKeys.
     * @param tableName table name
     * @param rowKeys rowKeys
     * @param basicFl comma-separated column names under the "basic" family
     * @param dFl comma-separated column names under the "d" family
     * @return
     * @throws Exception
     */
    public Result[] getResultList(String tableName, List<String> rowKeys, String basicFl, String dFl) {
        return hbaseTemplate.execute(tableName, new TableCallback<Result[]>() {
            @Override
            public Result[] doInTable(HTableInterface table) throws Throwable {
                List<Get> list = new ArrayList<Get>();
                for (String rowKey : rowKeys) {
                    Get get = new Get(Bytes.toBytes(rowKey));
                    if (!StringUtils.isEmpty(basicFl)) {
                        String[] basicArr = basicFl.split(",");
                        for (String basicStr : basicArr) {
                            get.addColumn(Bytes.toBytes("basic"), Bytes.toBytes(basicStr));
                        }
                    }
                    if (!StringUtils.isEmpty(dFl)) {
                        String[] dArr = dFl.split(",");
                        for (String dStr : dArr) {
                            get.addColumn(Bytes.toBytes("d"), Bytes.toBytes(dStr));
                        }
                    }
                    list.add(get);
                }
                return table.get(list);
            }
        });
    }
    /************************************* Prototype-pattern bean operations ***************************************************************/
    /**
     * Save data (prototype pattern).
     */
    public void save(String tableName, TableBundle tableBundle) {
        hbaseTemplate.execute(tableName, new TableCallback<Void>() {
            @Override
            public Void doInTable(HTableInterface table) throws Throwable {
                List<Put> puts = tableBundle.putOperations();
                Object[] results = new Object[puts.size()];
                table.batch(puts, results);
                return null;
            }
        });
    }
    /**
     * Delete data (prototype pattern).
     */
    public void delete(String tableName, TableBundle tableBundle) {
        hbaseTemplate.execute(tableName, new TableCallback<Object[]>() {
            @Override
            public Object[] doInTable(HTableInterface table) throws Throwable {
                List<Delete> deletes = tableBundle.deleteOperations();
                Object[] results = new Object[deletes.size()];
                table.batch(deletes, results);
                return null;
            }
        });
    }
    /**
     * Query data (prototype pattern).
     */
    public Object[] get(String tableName, TableBundle tableBundle) {
        return hbaseTemplate.execute(tableName, new TableCallback<Object[]>() {
            @Override
            public Object[] doInTable(HTableInterface table) throws Throwable {
                List<Get> gets = tableBundle.getOperations();
                Object[] results = new Object[gets.size()];
                table.batch(gets, results);
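                // An empty Result's toString() is "keyvalues=NONE"; if the first result
                // is empty, the whole batch is treated as a miss and null is returned.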
                if (results.length > 0 && results[0].toString().equals("keyvalues=NONE")) {
                    return null;
                }
                return results;
            }
        });
    }
}
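
A minimal usage sketch for the DAO above, assuming an autowired HBaseDao and an existing table with a "basic" family (the table name, row keys and values here are illustrative only):

    public void hbaseDaoDemo(HBaseDao hbaseDao) throws Exception {
        // Write one cell, then read the row back in different shapes
        hbaseDao.put("health_profile", "rk-0001", "basic", "name", "test");
        String json = hbaseDao.get("health_profile", "rk-0001");                        // whole row as JSON
        Map<String, String> basic = hbaseDao.get("health_profile", "rk-0001", "basic"); // one family as a Map
        // Row keys in [rk-0000, rk-9999) whose key matches the pattern
        String[] keys = hbaseDao.findRowKeys("health_profile", "rk-0000", "rk-9999", "^rk-\\d{4}$");
        hbaseDao.deleteColumn("health_profile", "rk-0001", "basic", "name");
    }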

+ 158 - 0
common/commons-data-hbase/src/main/java/com/yihu/jw/hbase/TableBundle.java

@ -0,0 +1,158 @@
package com.yihu.jw.hbase;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;
import java.util.*;
import java.util.stream.Collectors;
/**
 * Bundles HBase rows, column families and columns together, and generates all the
 * required Get/Put operations in one pass.
 * <p>
 * Only single-table operations are supported.
 * <p>
 * Several HBase operation types are supported, but note that an instance may only be
 * used for one operation type at a time: Get, Put and Delete must not be mixed,
 * otherwise the results are unpredictable.
 *
 * @author Sand
 * @created 2016.04.27 14:38
 */
public class TableBundle {
    Map<String, Row> rows = new HashMap<>();
    public void addRows(String... rowKeys) {
        for (String rowKey : rowKeys) {
            rows.put(rowKey, null);
        }
    }
    private Row getRow(String rowKey) {
        Row row = rows.get(rowKey);
        if (row == null) {
            row = new Row();
            rows.put(rowKey, row);
        }
        return row;
    }
    public void addFamily(String rowKey, Object family) {
        Row row = getRow(rowKey);
        row.addFamily(family.toString());
    }
    public void addColumns(String rowKey, Object family, String[] columns) {
        Row row = getRow(rowKey);
        row.addColumns(family.toString(), columns);
    }
    public void addValues(String rowKey, Object family, Map<String, String> values) {
        Row row = getRow(rowKey);
        row.addValues(family.toString(), values);
    }
    public void clear() {
        rows.clear();
    }
    public List<Get> getOperations() {
        List<Get> gets = new ArrayList<>(rows.size());
        for (String rowKey : rows.keySet()) {
            Get get = new Get(Bytes.toBytes(rowKey));
            Row row = rows.get(rowKey);
            if (row != null) {
                for (String family : row.getFamilies()) {
                    Set<Object> columns = row.getCells(family);
                    if (CollectionUtils.isEmpty(columns)) {
                        // Whole-family read: "columns" may be null here, so it must not be iterated
                        get.addFamily(Bytes.toBytes(family));
                    } else {
                        for (Object column : columns) {
                            get.addColumn(Bytes.toBytes(family), Bytes.toBytes((String) column));
                        }
                    }
                }
            }
            gets.add(get);
        }
        return gets;
    }
    public List<Put> putOperations() {
        List<Put> puts = new ArrayList<>(rows.values().size());
        for (String rowKey : rows.keySet()) {
            Put put = new Put(Bytes.toBytes(rowKey));
            Row row = rows.get(rowKey);
            for (String family : row.getFamilies()) {
                Set<Object> columns = row.getCells(family);
                for (Object column : columns) {
                    Pair<String, String> pair = (Pair<String, String>) column;
                    if (StringUtils.isNotEmpty(pair.getRight())) {
                        put.addColumn(Bytes.toBytes(family),
                                Bytes.toBytes(pair.getLeft()),
                                Bytes.toBytes(pair.getRight()));
                    }
                }
            }
            puts.add(put);
        }
        return puts;
    }
    public List<Delete> deleteOperations() {
        List<Delete> deletes = new ArrayList<>(rows.values().size());
        for (String rowkey : rows.keySet()) {
            Delete delete = new Delete(Bytes.toBytes(rowkey));
            deletes.add(delete);
        }
        return deletes;
    }
    /**
     * A single row in HBase.
     */
    public static class Row {
        private Map<String, Set<Object>> cells = new HashMap<>();   // key: column family, value: columns
        public void addFamily(String family) {
            cells.put(family, null);
        }
        public void addColumns(String family, String... columns) {
            Set value = getFamily(family);
            for (String column : columns) {
                value.add(column);
            }
        }
        public void addValues(String family, Map<String, String> values) {
            Set value = getFamily(family);
            value.addAll(values.keySet().stream().map(key -> new ImmutablePair<>(key, values.get(key))).collect(Collectors.toList()));
        }
        public Set<String> getFamilies() {
            return cells.keySet();
        }
        public Set<Object> getCells(String family) {
            return cells.get(family);
        }
        private Set<Object> getFamily(String family) {
            Set value = cells.get(family);
            if (value == null) {
                value = new TreeSet<>();
                cells.put(family, value);
            }
            return value;
        }
    }
}
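
A sketch of the prototype-pattern flow that pairs TableBundle with the HBaseDao methods above (values are illustrative; per the class comment, one bundle instance must carry only one operation type at a time):

    TableBundle bundle = new TableBundle();
    Map<String, String> values = new HashMap<>();
    values.put("name", "test");
    values.put("gender", "F");
    bundle.addValues("rk-0001", "basic", values);   // one row under the "basic" family
    hbaseDao.save("health_profile", bundle);        // one Put per bundled row

    bundle.clear();                                 // reset before switching to Get
    bundle.addRows("rk-0001", "rk-0002");
    Object[] rows = hbaseDao.get("health_profile", bundle);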

+ 86 - 0
common/commons-data-hbase/src/main/java/com/yihu/jw/hbase/config/HbaseConfig.java

@ -0,0 +1,86 @@
package com.yihu.jw.hbase.config;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.hadoop.hbase.HbaseTemplate;
import javax.annotation.PostConstruct;
import java.util.*;
/**
 * @author Sand
 * @version 1.0
 * @created 2015.11.28 16:26
 * @modified by Progr1mmer 2017/11/23 commented out the startup connection code
 */
@Configuration
@ConfigurationProperties(prefix = "hadoop")
public class HbaseConfig{
    private Map<String, String> hbaseProperties = new HashMap<>();
    private User user = new User();
    public Map<String, String> getHbaseProperties(){
        return this.hbaseProperties;
    }
    public void setHbaseProperties(Map<String, String> hbaseProperties) {
        this.hbaseProperties = hbaseProperties;
    }
    public User getUser() {
        return user;
    }
    public void setUser(User user) {
        this.user = user;
    }
    public class User {
        private String name;
        public String getName() {
            return name;
        }
        public void setName(String name) {
            this.name = name;
        }
    }
    @PostConstruct
    private void configInfo() {
        StringBuilder info = new StringBuilder("{");
        hbaseProperties.forEach((key, val) -> info.append("\n  hadoop." + key + " = " + val));
        info.append("\n  hadoop.user.name = " + user.getName());
        info.append("\n}");
        System.out.println("Hbase.configInfo : " + info.toString());
    }
    @Bean
    public org.apache.hadoop.conf.Configuration configuration() {
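        // Keys may carry a numeric ordering prefix, e.g. "1.hbase.zookeeper.quorum"
        // (presumably to keep the externalized map ordered); strip it so only the
        // real Hadoop property name is passed to HBase.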
        Set<String> keys = new HashSet<>(hbaseProperties.keySet());
        for (String key : keys){
            String value = hbaseProperties.remove(key);
            key = key.replaceAll("^\\d{1,2}\\.", "");
            hbaseProperties.put(key, value);
        }
        org.apache.hadoop.conf.Configuration configuration = HBaseConfiguration.create();
        hbaseProperties.keySet().stream().filter(key -> hbaseProperties.get(key) != null).forEach(key -> {
            configuration.set(key, hbaseProperties.get(key));
        });
        return configuration;
    }
    @Bean
    public HbaseTemplate hbaseTemplate(org.apache.hadoop.conf.Configuration configuration){
        System.setProperty("HADOOP_USER_NAME", user.getName() != null ? user.getName() : "root");
        HbaseTemplate hbaseTemplate = new HbaseTemplate();
        hbaseTemplate.setConfiguration(configuration);
        return hbaseTemplate;
    }
}
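
For reference, a configuration fragment matching the binding above might look as follows (property names follow from the "hadoop" prefix and the hbaseProperties/user fields; the quorum value is taken from the bundled hbase-site.xml, the rest are placeholders; on newer Spring Boot versions, dotted map keys must be wrapped in brackets as shown):

    hadoop:
      user:
        name: root
      hbase-properties:
        '[1.hbase.zookeeper.quorum]': node1.hde.h3c.com,node2.hde.h3c.com,node3.hde.h3c.com
        '[2.hbase.zookeeper.property.clientPort]': '2181'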

+ 13 - 0
common/commons-data-hbase/src/main/resources/hbase-site.xml

@ -0,0 +1,13 @@
<configuration>
    <property>
        <name>hbase.client.retries.number</name>
        <value>3</value>
    </property>
    <property>
        <name>hbase.cluster.distributed</name>
        <value>true</value>
    </property>
    
</configuration>

+ 163 - 0
common/commons-data-hbase/src/main/resources/hbase/core-site.xml

@ -0,0 +1,163 @@
  <configuration>
    
    <property>
      <name>fs.defaultFS</name>
      <value>hdfs://dev</value>
    </property>
    
    <property>
      <name>fs.trash.interval</name>
      <value>360</value>
    </property>
    
    <property>
      <name>ha.failover-controller.active-standby-elector.zk.op.retries</name>
      <value>120</value>
    </property>
    
    <property>
      <name>ha.zookeeper.quorum</name>
      <value>node1.hde.h3c.com:2181,node2.hde.h3c.com:2181,node3.hde.h3c.com:2181</value>
    </property>
    
    <property>
      <name>hadoop.http.authentication.simple.anonymous.allowed</name>
      <value>true</value>
    </property>
    
    <property>
      <name>hadoop.proxyuser.hbase.groups</name>
      <value>*</value>
    </property>
    
    <property>
      <name>hadoop.proxyuser.hbase.hosts</name>
      <value>*</value>
    </property>
    
    <property>
      <name>hadoop.proxyuser.hcat.groups</name>
      <value>users</value>
    </property>
    
    <property>
      <name>hadoop.proxyuser.hcat.hosts</name>
      <value>node2.hde.h3c.com</value>
    </property>
    
    <property>
      <name>hadoop.proxyuser.hdfs.groups</name>
      <value>*</value>
    </property>
    
    <property>
      <name>hadoop.proxyuser.hdfs.hosts</name>
      <value>*</value>
    </property>
    
    <property>
      <name>hadoop.proxyuser.hive.groups</name>
      <value>*</value>
    </property>
    
    <property>
      <name>hadoop.proxyuser.hive.hosts</name>
      <value>node2.hde.h3c.com</value>
    </property>
    
    <property>
      <name>hadoop.proxyuser.HTTP.groups</name>
      <value>*</value>
    </property>
    
    <property>
      <name>hadoop.proxyuser.HTTP.hosts</name>
      <value>*</value>
    </property>
    
    <property>
      <name>hadoop.proxyuser.hue.groups</name>
      <value>*</value>
    </property>
    
    <property>
      <name>hadoop.proxyuser.hue.hosts</name>
      <value>*</value>
    </property>
    
    <property>
      <name>hadoop.proxyuser.oozie.groups</name>
      <value>*</value>
    </property>
    
    <property>
      <name>hadoop.proxyuser.oozie.hosts</name>
      <value>*</value>
    </property>
    
    <property>
      <name>hadoop.security.auth_to_local</name>
      <value>DEFAULT</value>
    </property>
    
    <property>
      <name>hadoop.security.authentication</name>
      <value>simple</value>
    </property>
    
    <property>
      <name>hadoop.security.authorization</name>
      <value>false</value>
    </property>
    
    <property>
      <name>hadoop.security.key.provider.path</name>
      <value></value>
    </property>
    
    <property>
      <name>io.compression.codecs</name>
      <value>org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.SnappyCodec</value>
    </property>
    
    <property>
      <name>io.file.buffer.size</name>
      <value>131072</value>
    </property>
    
    <property>
      <name>io.serializations</name>
      <value>org.apache.hadoop.io.serializer.WritableSerialization</value>
    </property>
    
    <property>
      <name>ipc.client.connect.max.retries</name>
      <value>50</value>
    </property>
    
    <property>
      <name>ipc.client.connection.maxidletime</name>
      <value>30000</value>
    </property>
    
    <property>
      <name>ipc.client.idlethreshold</name>
      <value>8000</value>
    </property>
    
    <property>
      <name>ipc.server.tcpnodelay</name>
      <value>true</value>
    </property>
    
    <property>
      <name>mapreduce.jobtracker.webinterface.trusted</name>
      <value>false</value>
    </property>
    
    <property>
      <name>net.topology.script.file.name</name>
      <value>/etc/hadoop/conf/topology_script.py</value>
    </property>
    
  </configuration>

+ 243 - 0
common/commons-data-hbase/src/main/resources/hbase/hbase-site.xml

@ -0,0 +1,243 @@
  <configuration>
    
    <property>
      <name>dfs.domain.socket.path</name>
      <value>/var/lib/hadoop-hdfs/dn_socket</value>
    </property>
    
    <property>
      <name>hbase.bulkload.staging.dir</name>
      <value>/apps/hbase/staging</value>
    </property>
    
    <property>
      <name>hbase.client.keyvalue.maxsize</name>
      <value>1048576</value>
    </property>
    
    <property>
      <name>hbase.client.retries.number</name>
      <value>35</value>
    </property>
    
    <property>
      <name>hbase.client.scanner.caching</name>
      <value>100</value>
    </property>
    
    <property>
      <name>hbase.cluster.distributed</name>
      <value>true</value>
    </property>
    
    <property>
      <name>hbase.coprocessor.master.classes</name>
      <value>org.apache.hadoop.hbase.security.access.AccessController</value>
    </property>
    
    <property>
      <name>hbase.coprocessor.region.classes</name>
      <value>org.apache.hadoop.hbase.security.access.AccessController,org.apache.hadoop.hbase.security.access.SecureBulkLoadEndpoint</value>
    </property>
    
    <property>
      <name>hbase.coprocessor.regionserver.classes</name>
      <value>org.apache.hadoop.hbase.security.access.AccessController</value>
    </property>
    
    <property>
      <name>hbase.defaults.for.version.skip</name>
      <value>true</value>
    </property>
    
    <property>
      <name>hbase.hregion.majorcompaction</name>
      <value>604800000</value>
    </property>
    
    <property>
      <name>hbase.hregion.majorcompaction.jitter</name>
      <value>0.50</value>
    </property>
    
    <property>
      <name>hbase.hregion.max.filesize</name>
      <value>10737418240</value>
    </property>
    
    <property>
      <name>hbase.hregion.memstore.block.multiplier</name>
      <value>4</value>
    </property>
    
    <property>
      <name>hbase.hregion.memstore.flush.size</name>
      <value>134217728</value>
    </property>
    
    <property>
      <name>hbase.hregion.memstore.mslab.enabled</name>
      <value>true</value>
    </property>
    
    <property>
      <name>hbase.hstore.blockingStoreFiles</name>
      <value>10</value>
    </property>
    
    <property>
      <name>hbase.hstore.compaction.max</name>
      <value>10</value>
    </property>
    
    <property>
      <name>hbase.hstore.compactionThreshold</name>
      <value>3</value>
    </property>
    
    <property>
      <name>hbase.local.dir</name>
      <value>${hbase.tmp.dir}/local</value>
    </property>
    
    <property>
      <name>hbase.master.info.bindAddress</name>
      <value>0.0.0.0</value>
    </property>
    
    <property>
      <name>hbase.master.info.port</name>
      <value>16010</value>
    </property>
    
    <property>
      <name>hbase.master.port</name>
      <value>16000</value>
    </property>
    
    <property>
      <name>hbase.regionserver.global.memstore.size</name>
      <value>0.4</value>
    </property>
    
    <property>
      <name>hbase.regionserver.handler.count</name>
      <value>30</value>
    </property>
    
    <property>
      <name>hbase.regionserver.info.port</name>
      <value>16030</value>
    </property>
    
    <property>
      <name>hbase.regionserver.port</name>
      <value>16020</value>
    </property>
    
    <property>
      <name>hbase.regionserver.thrift.http</name>
      <value>true</value>
    </property>
    
    <property>
      <name>hbase.regionserver.wal.codec</name>
      <value>org.apache.hadoop.hbase.regionserver.wal.WALCellCodec</value>
    </property>
    
    <property>
      <name>hbase.replication</name>
      <value>true</value>
    </property>
    
    <property>
      <name>hbase.rootdir</name>
      <value>hdfs://dev/apps/hbase/data</value>
    </property>
    
    <property>
      <name>hbase.rpc.protection</name>
      <value>authentication</value>
    </property>
    
    <property>
      <name>hbase.rpc.timeout</name>
      <value>90000</value>
    </property>
    
    <property>
      <name>hbase.security.authentication</name>
      <value>simple</value>
    </property>
    
    <property>
      <name>hbase.security.authorization</name>
      <value>true</value>
    </property>
    
    <property>
      <name>hbase.superuser</name>
      <value>hbase</value>
    </property>
    
    <property>
      <name>hbase.thrift.support.proxyuser</name>
      <value>true</value>
    </property>
    
    <property>
      <name>hbase.tmp.dir</name>
      <value>/hadoop/hbase</value>
    </property>
    
    <property>
      <name>hbase.zookeeper.property.clientPort</name>
      <value>2181</value>
    </property>
    
    <property>
      <name>hbase.zookeeper.quorum</name>
      <value>node1.hde.h3c.com,node2.hde.h3c.com,node3.hde.h3c.com</value>
    </property>
    
    <property>
      <name>hbase.zookeeper.useMulti</name>
      <value>true</value>
    </property>
    
    <property>
      <name>hfile.block.cache.size</name>
      <value>0.40</value>
    </property>
    
    <property>
      <name>phoenix.query.timeoutMs</name>
      <value>60000</value>
    </property>
    
    <property>
      <name>replication.replicationsource.implementation</name>
      <value>com.ngdata.sep.impl.SepReplicationSource</value>
    </property>
    
    <property>
      <name>replication.source.nb.capacity</name>
      <value>1000</value>
    </property>
    
    <property>
      <name>replication.source.ratio</name>
      <value>1</value>
    </property>
    
    <property>
      <name>zookeeper.session.timeout</name>
      <value>90000</value>
    </property>
    
    <property>
      <name>zookeeper.znode.parent</name>
      <value>/hbase-unsecure</value>
    </property>
    
  </configuration>

+ 348 - 0
common/commons-data-hbase/src/main/resources/hbase/hdfs-site.xml

@ -0,0 +1,348 @@
  <configuration>
    
    <property>
      <name>dfs.block.access.token.enable</name>
      <value>true</value>
    </property>
    
    <property>
      <name>dfs.blockreport.initialDelay</name>
      <value>120</value>
    </property>
    
    <property>
      <name>dfs.blocksize</name>
      <value>134217728</value>
    </property>
    
    <property>
      <name>dfs.client.failover.proxy.provider.dev</name>
      <value>org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider</value>
    </property>
    
    <property>
      <name>dfs.client.read.shortcircuit</name>
      <value>true</value>
    </property>
    
    <property>
      <name>dfs.client.read.shortcircuit.streams.cache.size</name>
      <value>4096</value>
    </property>
    
    <property>
      <name>dfs.client.retry.policy.enabled</name>
      <value>false</value>
    </property>
    
    <property>
      <name>dfs.cluster.administrators</name>
      <value> hdfs</value>
    </property>
    
    <property>
      <name>dfs.content-summary.limit</name>
      <value>5000</value>
    </property>
    
    <property>
      <name>dfs.datanode.address</name>
      <value>0.0.0.0:50010</value>
    </property>
    
    <property>
      <name>dfs.datanode.balance.bandwidthPerSec</name>
      <value>6250000</value>
    </property>
    
    <property>
      <name>dfs.datanode.data.dir</name>
      <value>/opt/hadoop/hdfs/data</value>
    </property>
    
    <property>
      <name>dfs.datanode.data.dir.perm</name>
      <value>750</value>
    </property>
    
    <property>
      <name>dfs.datanode.du.reserved</name>
      <value>1073741824</value>
    </property>
    
    <property>
      <name>dfs.datanode.failed.volumes.tolerated</name>
      <value>0</value>
    </property>
    
    <property>
      <name>dfs.datanode.http.address</name>
      <value>0.0.0.0:50075</value>
    </property>
    
    <property>
      <name>dfs.datanode.https.address</name>
      <value>0.0.0.0:50475</value>
    </property>
    
    <property>
      <name>dfs.datanode.ipc.address</name>
      <value>0.0.0.0:8010</value>
    </property>
    
    <property>
      <name>dfs.datanode.max.transfer.threads</name>
      <value>4096</value>
    </property>
    
    <property>
      <name>dfs.domain.socket.path</name>
      <value>/var/lib/hadoop-hdfs/dn_socket</value>
    </property>
    
    <property>
      <name>dfs.encrypt.data.transfer.cipher.suites</name>
      <value>AES/CTR/NoPadding</value>
    </property>
    
    <property>
      <name>dfs.encryption.key.provider.uri</name>
      <value></value>
    </property>
    
    <property>
      <name>dfs.ha.automatic-failover.enabled</name>
      <value>true</value>
    </property>
    
    <property>
      <name>dfs.ha.fencing.methods</name>
      <value>shell(/bin/true)</value>
    </property>
    
    <property>
      <name>dfs.ha.namenodes.dev</name>
      <value>nn1,nn2</value>
    </property>
    
    <property>
      <name>dfs.heartbeat.interval</name>
      <value>3</value>
    </property>
    
    <property>
      <name>dfs.hosts.exclude</name>
      <value>/etc/hadoop/conf/dfs.exclude</value>
    </property>
    
    <property>
      <name>dfs.http.policy</name>
      <value>HTTP_ONLY</value>
    </property>
    
    <property>
      <name>dfs.https.port</name>
      <value>50470</value>
    </property>
    
    <property>
      <name>dfs.journalnode.edits.dir</name>
      <value>/hadoop/hdfs/journal</value>
    </property>
    
    <property>
      <name>dfs.journalnode.http-address</name>
      <value>0.0.0.0:8480</value>
    </property>
    
    <property>
      <name>dfs.journalnode.https-address</name>
      <value>0.0.0.0:8481</value>
    </property>
    
    <property>
      <name>dfs.namenode.accesstime.precision</name>
      <value>0</value>
    </property>
    
    <property>
      <name>dfs.namenode.audit.log.async</name>
      <value>true</value>
    </property>
    
    <property>
      <name>dfs.namenode.avoid.read.stale.datanode</name>
      <value>true</value>
    </property>
    
    <property>
      <name>dfs.namenode.avoid.write.stale.datanode</name>
      <value>true</value>
    </property>
    
    <property>
      <name>dfs.namenode.checkpoint.dir</name>
      <value>/opt/hadoop/hdfs/namesecondary</value>
    </property>
    
    <property>
      <name>dfs.namenode.checkpoint.edits.dir</name>
      <value>${dfs.namenode.checkpoint.dir}</value>
    </property>
    
    <property>
      <name>dfs.namenode.checkpoint.period</name>
      <value>21600</value>
    </property>
    
    <property>
      <name>dfs.namenode.checkpoint.txns</name>
      <value>1000000</value>
    </property>
    
    <property>
      <name>dfs.namenode.fslock.fair</name>
      <value>false</value>
    </property>
    
    <property>
      <name>dfs.namenode.handler.count</name>
      <value>100</value>
    </property>
    
    <property>
      <name>dfs.namenode.http-address</name>
      <value>node1.hde.h3c.com:50070</value>
    </property>
    
    <property>
      <name>dfs.namenode.http-address.dev.nn1</name>
      <value>node1.hde.h3c.com:50070</value>
    </property>
    
    <property>
      <name>dfs.namenode.http-address.dev.nn2</name>
      <value>node2.hde.h3c.com:50070</value>
    </property>
    
    <property>
      <name>dfs.namenode.https-address</name>
      <value>node1.hde.h3c.com:50470</value>
    </property>
    
    <property>
      <name>dfs.namenode.https-address.dev.nn1</name>
      <value>node1.hde.h3c.com:50470</value>
    </property>
    
    <property>
      <name>dfs.namenode.https-address.dev.nn2</name>
      <value>node2.hde.h3c.com:50470</value>
    </property>
    
    <property>
      <name>dfs.namenode.name.dir</name>
      <value>/opt/hadoop/hdfs/namenode</value>
    </property>
    
    <property>
      <name>dfs.namenode.name.dir.restore</name>
      <value>true</value>
    </property>
    
    <property>
      <name>dfs.namenode.rpc-address</name>
      <value>node1.hde.h3c.com:8020</value>
    </property>
    
    <property>
      <name>dfs.namenode.rpc-address.dev.nn1</name>
      <value>node1.hde.h3c.com:8020</value>
    </property>
    
    <property>
      <name>dfs.namenode.rpc-address.dev.nn2</name>
      <value>node2.hde.h3c.com:8020</value>
    </property>
    
    <property>
      <name>dfs.namenode.safemode.threshold-pct</name>
      <value>0.99</value>
    </property>
    
    <property>
      <name>dfs.namenode.secondary.http-address</name>
      <value>localhost:50090</value>
    </property>
    
    <property>
      <name>dfs.namenode.shared.edits.dir</name>
      <value>qjournal://node1.hde.h3c.com:8485;node2.hde.h3c.com:8485;node3.hde.h3c.com:8485/dev</value>
    </property>
    
    <property>
      <name>dfs.namenode.stale.datanode.interval</name>
      <value>30000</value>
    </property>
    
    <property>
      <name>dfs.namenode.startup.delay.block.deletion.sec</name>
      <value>3600</value>
    </property>
    
    <property>
      <name>dfs.namenode.write.stale.datanode.ratio</name>
      <value>1.0f</value>
    </property>
    
    <property>
      <name>dfs.nameservices</name>
      <value>dev</value>
    </property>
    
    <property>
      <name>dfs.permissions.enabled</name>
      <value>true</value>
    </property>
    
    <property>
      <name>dfs.permissions.superusergroup</name>
      <value>hdfs</value>
    </property>
    
    <property>
      <name>dfs.replication</name>
      <value>3</value>
    </property>
    
    <property>
      <name>dfs.replication.max</name>
      <value>50</value>
    </property>
    
    <property>
      <name>dfs.support.append</name>
      <value>true</value>
    </property>
    
    <property>
      <name>dfs.webhdfs.enabled</name>
      <value>true</value>
    </property>
    
    <property>
      <name>fs.permissions.umask-mode</name>
      <value>022</value>
    </property>
    
    <property>
      <name>nfs.exports.allowed.hosts</name>
      <value>* rw</value>
    </property>
    
    <property>
      <name>nfs.file.dump.dir</name>
      <value>/tmp/.hdfs-nfs</value>
    </property>
    
  </configuration>

+ 66 - 0
common/commons-data-mysql/pom.xml

@ -0,0 +1,66 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <groupId>com.yihu.jw</groupId>
        <artifactId>wlyy-parent-pom</artifactId>
        <version>2.4.0</version>
        <relativePath>../../wlyy-lib-parent-pom/pom.xml</relativePath>
    </parent>
    <artifactId>commons-data-mysql</artifactId>
    <packaging>jar</packaging>
    <dependencies>
        <!-- true -->
        <!-- JdbcTemplate-->
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-jdbc</artifactId>
        </dependency>
        <dependency>
            <groupId>mysql</groupId>
            <artifactId>mysql-connector-java</artifactId>
            <scope>runtime</scope>
        </dependency>
        <dependency>
            <groupId>org.apache.commons</groupId>
            <artifactId>commons-dbcp2</artifactId>
        </dependency>
        <!-- JdbcTemplate-->
        <!-- Jpa -->
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-data-jpa</artifactId>
        </dependency>
        <!-- Jpa -->
        <!-- Hibernate -->
        <dependency>
            <groupId>org.hibernate</groupId>
            <artifactId>hibernate-validator</artifactId>
            <version>${version.hibernate-validator}</version>
        </dependency>
        <!-- Hibernate -->
        <dependency>
            <groupId>com.yihu.jw</groupId>
            <artifactId>commons-ehr-constants</artifactId>
            <version>2.4.0</version>
        </dependency>
        <dependency>
            <groupId>com.yihu.jw</groupId>
            <artifactId>commons-util</artifactId>
            <version>2.4.0</version>
        </dependency>
    </dependencies>
    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
            </plugin>
        </plugins>
    </build>
</project>

+ 219 - 0
common/commons-data-mysql/src/main/java/com/yihu/jw/ehr/parm/PageModel.java

@ -0,0 +1,219 @@
package com.yihu.jw.ehr.parm;
import com.yihu.jw.query.FieldCondition;
import org.springframework.util.StringUtils;
import javax.persistence.Column;
import java.lang.reflect.Method;
import java.util.*;
/**
 * Entity query model. Converts a front-end query into an entity query.
 *
 * @author lincl
 * @version 1.0
 * @created 2016.2.1
 */
public class PageModel {
    private int page;                               // page number
    private int rows;                               // page size
    private String[] order;                         // sort spec, in the form +f1,-f2
    private Map<String, FieldCondition> filters;    // record filters
    private String[] result;                        // fields actually returned
    private Class modelClass;                       // JPA entity class
    public PageModel() {
    }
    public PageModel(int page, int rows) {
        this.page = page;
        this.rows = rows;
    }
    public String format(String modelName, boolean isSql) {
        if (modelClass == null) {
            System.err.print("NullPoint: modelClass");
            return "";
        }
        Map<String, FieldCondition> filters = getFilters();
        if (filters.size() == 0)
            return "";
        Map<String, String> whMap = new HashMap<>();
        FieldCondition fieldCondition;
        String wh = "";
        for (String k : filters.keySet()) {
            fieldCondition = filters.get(k);
            if (!fieldCondition.isValid())
                continue;
            if (fieldCondition.isGroup()) {
                String str = whMap.get(fieldCondition.getGroup());
                if (str == null)
                    str = "(" + fieldCondition.format(modelName, isSql);
                else
                    str += " or " + fieldCondition.format(modelName, isSql);
                whMap.put(fieldCondition.getGroup(), str);
            } else {
                if (wh.equals(""))
                    wh = fieldCondition.format(modelName, isSql);
                else
                    wh += " and " + fieldCondition.format(modelName, isSql);
            }
        }
        for (String k : whMap.keySet()) {
            String groupClause = whMap.get(k) + ") ";
            // Close each OR group; avoid a dangling leading "and" when only grouped conditions exist
            wh = wh.equals("") ? groupClause : wh + " and " + groupClause;
        }
        return wh;
    }
    private String getTableCol(String field) {
        try {
            Method method = modelClass.getMethod("get" + firstLetterToUpper(field));
            Column column = method.getDeclaredAnnotation(Column.class);
            if (column != null) {
                return column.name();
            }
            return null;
        } catch (Exception e) {
            return null;
        }
    }
    public String formatSqlOrder(String modelName) {
        return formatOrder(modelName, true);
    }
    public String formatSqlOrder() {
        return formatSqlOrder("");
    }
    public String formatOrder() {
        return formatOrder("", false);
    }
    public String formatOrder(String modelName, boolean isSql) {
        if (modelClass == null) {
            System.err.print("NullPoint: modelClass");
            return "";
        }
        if (order == null || order.length == 0)
            return "";
        List<String> ls = new ArrayList<>();
        for (String item : order) {
            // Strip the +/- direction prefix (see the "order" field comment) before resolving
            // the column; otherwise no getter matches and the entry is silently dropped.
            boolean desc = item.startsWith("-");
            String field = (desc || item.startsWith("+")) ? item.substring(1) : item;
            String col = getTableCol(field);
            if (!StringUtils.isEmpty(col))
                ls.add((isSql ? col : field) + (desc ? " desc" : ""));
        }
        return arrayJoin(ls, StringUtils.isEmpty(modelName) ? "," : "," + modelName + ".", 1);
    }
    public String arrayJoin(Collection<String> ls, String joinStr, int offer) {
        if (ls == null || ls.size() == 0)
            return "";
        String tmp = "";
        for (String str : ls) {
            tmp += joinStr + str;
        }
        return tmp.substring(offer);
    }
    public String formatWithOrder(String modelName) {
        return format(modelName, false) + " order by " + formatOrder(modelName, false);
    }
    public String formatSqlWithOrder(String modelName) {
        return formatSql(modelName) + " order by " + formatSqlOrder(modelName);
    }
    public String format() {
        return format("", false);
    }
    public String formatSql(String modelName) {
        return format(modelName, true);
    }
    public String formatSql() {
        return formatSql("");
    }
    public Object getFieldVal(String field) {
        return filters.get(field).getVal();
    }
    public void setFieldVal(String field, List val) {
        filters.get(field).setVal(val);
    }
    public int getPage() {
        return page;
    }
    public void setPage(int page) {
        this.page = page;
    }
    public int getRows() {
        return rows;
    }
    public void setRows(int rows) {
        this.rows = rows;
    }
    public Map<String, FieldCondition> getFilters() {
        return filters == null ? new HashMap<>() : filters;
    }
    public void setFilters(Map<String, FieldCondition> filters) {
        this.filters = filters;
    }
    public void addFieldCondition(FieldCondition fieldCondition) {
        if (filters == null)
            filters = new HashMap<>();
        filters.put(fieldCondition.getCol(), fieldCondition);
    }
    public String[] getOrder() {
        return order;
    }
    public void setOrder(String[] order) {
        this.order = order;
    }
    public String[] getResult() {
        return result;
    }
    public void setResult(String[] result) {
        this.result = result;
    }
    public Class getModelClass() {
        return modelClass;
    }
    public void setModelClass(Class modelClass) {
        this.modelClass = modelClass;
        Map<String, FieldCondition> map = getFilters();
        for (String key : map.keySet()) {
            map.get(key).setTableCol(getTableCol(key));
        }
    }
    public static String firstLetterToUpper(String str) {
        if (str == null || "".equals(str.trim())) {
            return "";
        }
        return str.replaceFirst(("" + str.charAt(0)), ("" + str.charAt(0)).toUpperCase());
    }
}
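
A sketch of how PageModel is typically driven (the Demo entity is hypothetical; it needs a getName() getter annotated with @Column(name = "name") for the column lookup to succeed):

    PageModel pageModel = new PageModel(1, 20);
    pageModel.addFieldCondition(new FieldCondition("name", "like", "test"));
    pageModel.setOrder(new String[]{"-name"});
    pageModel.setModelClass(Demo.class);        // resolves the table column of each filter
    String where = pageModel.formatSql();       // " name like :name"
    String orderBy = pageModel.formatSqlOrder();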

+ 244 - 0
common/commons-data-mysql/src/main/java/com/yihu/jw/query/BaseJpaService.java

@ -0,0 +1,244 @@
package com.yihu.jw.query;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.yihu.jw.constants.PageArg;
import com.yihu.jw.ehr.lang.SpringContextEhr;
import org.apache.commons.lang3.StringUtils;
import org.hibernate.Query;
import org.hibernate.Session;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Sort;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.repository.PagingAndSortingRepository;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Predicate;
import javax.persistence.criteria.Root;
import javax.persistence.metamodel.EntityType;
import java.io.Serializable;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.text.ParseException;
import java.util.*;
/**
 * Base service class. It wraps Spring Data JPA (which in turn builds on JPA; the EHR
 * platform uses Hibernate as its JPA provider).
 * Note that some features bypass the JPA interfaces and use Hibernate directly, e.g. the
 * Hibernate Session, because it is more powerful than JPA's EntityManager.
 *
 * @author lincl
 * @author Sand
 * @version 1.0
 * @created 2016.2.3
 */
@Transactional(propagation = Propagation.SUPPORTS)
public class BaseJpaService<T, R> {
    Class<R> repoClass;
    @PersistenceContext
    protected EntityManager entityManager;
    @Autowired
    protected JdbcTemplate jdbcTemplate;
    @Autowired
    protected ObjectMapper objectMapper;
    public BaseJpaService(){
        Type genType = getClass().getGenericSuperclass();
        if ((genType instanceof ParameterizedType)) {
            Type[] params = ((ParameterizedType) genType).getActualTypeArguments();
            if (params.length==2) {
                repoClass = (Class) params[1];
            }
        }
    }
    public T save(T entity) {
        return (T) getRepository().save(entity);
    }
    public T retrieve(Serializable id) {
        // findById returns an Optional in Spring Data 2.x; unwrap it instead of casting the Optional
        return (T) getRepository().findById(id).orElse(null);
    }
    public void delete(Serializable id) {
        getRepository().deleteById(id);
    }
    public void delete(T entity) {
        getRepository().delete(entity);
    }
    public void delete(Iterable ids) {
        Iterable list = getRepository().findAllById(ids);
        getRepository().deleteAll(list);
    }
    public Class<T> getEntityClass() {
        Type genType = this.getClass().getGenericSuperclass();
        Type[] parameters = ((ParameterizedType) genType).getActualTypeArguments();
        return (Class) parameters[0];
    }
    public List search(String fields, String filters, String sorts, Integer page, Integer size) throws ParseException {
        URLQueryParser queryParser = createQueryParser(fields, filters, sorts);
        CriteriaQuery query = queryParser.makeCriteriaQuery();
        if (page == null || page <= 0) page = PageArg.DefaultPage;
        if (size == null || size <= 0 || size > 10000) size = PageArg.DefaultSize;
        return entityManager
                .createQuery(query)
                .setFirstResult((page - 1) * size)
                .setMaxResults(size)
                .getResultList();
    }
    public List search(String filters) throws ParseException {
        URLQueryParser queryParser = createQueryParser("", filters, "");
        CriteriaQuery query = queryParser.makeCriteriaQuery();
        return entityManager
                .createQuery(query)
                .getResultList();
    }
    public List search(String filters,String sorts) throws ParseException {
        URLQueryParser queryParser = createQueryParser("", filters, sorts);
        CriteriaQuery query = queryParser.makeCriteriaQuery();
        return entityManager
                .createQuery(query)
                .getResultList();
    }
    public long getCount(String filters) throws ParseException {
        URLQueryParser queryParser = createQueryParser(filters);
        CriteriaQuery query = queryParser.makeCriteriaCountQuery();
        return (long) entityManager.createQuery(query).getSingleResult();
    }
    protected <T> URLQueryParser createQueryParser(String fields, String filters, String orders) {
        URLQueryParser queryParser = new URLQueryParser<T>(fields, filters, orders)
                .setEntityManager(entityManager)
                .setEntityClass(getEntityClass());
        return queryParser;
    }
    protected <T> URLQueryParser createQueryParser(String filters) {
        URLQueryParser queryParser = new URLQueryParser<T>(filters)
                .setEntityManager(entityManager)
                .setEntityClass(getEntityClass());
        return queryParser;
    }
    protected Sort parseSorts(String sorter){
        if (StringUtils.isNotEmpty(sorter)) {
            String[] orderArray = sorter.split(",");
            List<Sort.Order> orderList = new ArrayList<>(orderArray.length);
            Arrays.stream(orderArray).forEach(
                    elem -> orderList.add(
                            elem.startsWith("+") ? new Sort.Order(Sort.Direction.ASC, elem.substring(1)):
                                    new Sort.Order(Sort.Direction.DESC, elem.substring(1))));
            return Sort.by(orderList);
        }
        return null;
    }
    protected Session currentSession() {
        return entityManager.unwrap(Session.class);
    }
    public PagingAndSortingRepository getRepository() {
        return (PagingAndSortingRepository) SpringContextEhr.getService(repoClass);
    }
    public JpaRepository getJpaRepository(){
        return (JpaRepository) SpringContextEhr.getService(repoClass);
    }
    public List<T> findByField(String field, Object value){
        return findByFields(
                new String[]{field},
                new Object[]{value}
        );
    }
    public List<T> findByFields(String[] fields, Object[] values){
        CriteriaBuilder criteriaBuilder = entityManager.getCriteriaBuilder();
        CriteriaQuery query = criteriaBuilder.createQuery(getEntityClass());
        Root<T> root = query.from(getEntityClass());
        List<Predicate> ls = new ArrayList<>();
        for(int i=0; i< fields.length; i++){
            if(values[i].getClass().isArray())
                ls.add(criteriaBuilder.in(root.get(fields[i]).in((Object[])values[i])));
            else
                ls.add(criteriaBuilder.equal(root.get(fields[i]), values[i]));
        }
        query.where(ls.toArray(new Predicate[ls.size()]));
        return entityManager
                .createQuery(query)
                .getResultList() ;
    }
    public String getClzName(){
        return getEntityClass().getName();
    }
    public String getEntityIdFiled(){
        EntityType entityType = entityManager.getMetamodel().entity(getEntityClass());
        javax.persistence.metamodel.Type type = entityType.getIdType();
        String s = entityType.getId(type.getJavaType()).getName();
        return s;
    }
    public int delete(Object[] ids){
        String hql = " DELETE FROM "+getEntityClass().getName()+" WHERE "+getEntityIdFiled()+" in(:ids)";
        Query query = currentSession().createQuery(hql);
        query.setParameterList("ids", ids);
        return query.executeUpdate();
    }
    public void batchInsert(List list) {
        for (int i = 0; i < list.size(); i++) {
            entityManager.persist(list.get(i));
            if (i % 30 == 0) {
                entityManager.flush();
                entityManager.clear();
            }
        }
    }
    public String getCode() {
        return UUID.randomUUID().toString().replaceAll("-", "");
    }
    /**
     * Get a random alphanumeric string of the given length.
     * @param length
     * @return
     */
    protected String getRandomString(int length) {
        String str = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789";
        StringBuilder buffer = new StringBuilder();
        Random random = new Random();
        for (int i = 0; i < length; i++) {
            int number = random.nextInt(str.length()); // 0~61, so every character can be picked
            buffer.append(str.charAt(number));
        }
        return buffer.toString();
    }
}
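
A minimal subclass sketch (Demo and DemoDao are hypothetical; DemoDao must be a Spring Data repository bean for Demo so that SpringContextEhr can resolve it):

    @Service
    public class DemoService extends BaseJpaService<Demo, DemoDao> {
        public List<Demo> firstPageByName(String name) throws ParseException {
            // filter/sort strings use the URL query syntax parsed by URLQueryParser
            return search("", "name=" + name, "-id", 1, 20);
        }
    }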

+ 198 - 0
common/commons-data-mysql/src/main/java/com/yihu/jw/query/FieldCondition.java

@ -0,0 +1,198 @@
package com.yihu.jw.query;
import org.springframework.util.StringUtils;
import java.util.ArrayList;
import java.util.List;
/**
 * @author lincl
 * @version 1.0
 * @created 2016.2.1
 */
public class FieldCondition {
    private String col;       // filter column; must not be empty
    private String logic;    // filter operator, defaults to "=";   =, sw, ew, like, >, <, between, >=, <=
    private List<Object> val;// filter values; no filtering when empty
    private String group;   // group; conditions sharing the same group are joined with "or"
    private String tableCol;// database column, resolved from the entity during initialization; values set by the user are ignored
    public FieldCondition() {
    }
    public FieldCondition(String col, Object val) {
        this.col = col;
        this.addVal(val);
    }
    public FieldCondition(String col, String logic, Object ... vals) {
        this.col = col;
        this.logic = logic;
        this.addVal(vals);
    }
    public FieldCondition(String col, String logic, List<Object> val, String group) {
        this.col = col;
        this.logic = logic;
        this.val = val;
        this.group = group;
    }
    /**
     * Format the filter condition.
     * @param modelName model (view) name
     * @param isSql true: return the SQL form; false: return the JPA form
     * @return
     */
    public String format(String modelName, boolean isSql){
        if(getCol()==null || getCol().equals("") || getVal()==null || getVal().size()==0)
            return "";
        String val = getValMapping();
        if(val==null)
            return "";
        String rs = (isSql ? getTableCol() : getCol()) + " " + getLogic() + " " + val;
        if(modelName.trim().equals(""))
            return " " + rs;
        return " " +modelName + "." + rs;
    }
    /**
     * Format the filter condition.
     * @return the JPA form
     */
    public String format(){
        return format("", false);
    }
    /**
     * Format the filter condition.
     * @return the SQL form
     */
    public String formatSql(){
        return format("", true);
    }
    /**
     * Whether grouping information is present.
     * @return
     */
    public boolean isGroup(){
        return !(getGroup()==null || "".equals(getGroup()));
    }
    /**
     * Add values.
     * @param vals
     */
    public void addVal(Object ... vals){
        if(this.val==null)
            this.val = new ArrayList<>();
        for(Object val:vals){
            this.val.add(val);
        }
    }
    /**
     * Whether the condition is usable, i.e. the table contains the filter column
     * and the column, operator and values are all set.
     * @return
     */
    public boolean isValid() {
        return !StringUtils.isEmpty(getTableCol()) && !(getVal()==null || getVal().size()==0)
                 && !(getCol()==null || getCol().equals("")) && isLogicValid();
    }
    /**
     * Whether the operator is one of the supported forms.
     * @return
     */
    public boolean isLogicValid(){
        String logic = getLogic();
        if(logic.equals("=") || logic.equals("like") || logic.equals("sw") || logic.equals("ew") ||
                logic.equals("<") || logic.equals(">") || logic.equals(">=") || logic.equals("<=") ||
                    logic.equals("in") || logic.equals("not in") || logic.equals("between"))
            return true;
        return false;
    }
    /**
     * Get the named-parameter placeholder.
     * @return
     */
    private String getValMapping(){
        String logic = getLogic();
        String val = ":" + getCol();
        if(logic.equals("in") || logic.equals("not in"))
            return  "("+val+") ";
        if(logic.equals("between"))
            return val + "1 and " +val+"2 ";
        if(logic.equals("=") || logic.equals("like") || logic.equals("sw") || logic.equals("ew") ||
                logic.equals("<") || logic.equals(">") || logic.equals(">=") || logic.equals("<=")){
            return val;
        }
        return null;
    }
    /**
     * Format the value. "between" is not supported here:
     * for "between", call getVal() to fetch the values; the placeholders are col + "1" and col + "2".
     * @return
     */
    public Object formatVal(){
        // Assuming "sw" = starts-with and "ew" = ends-with; the patterns are built accordingly
        if(getLogic().equals("sw"))
            return getVal().get(0)+"%";
        if (getLogic().equals("ew"))
            return "%"+getVal().get(0);
        if (getLogic().equals("like"))
            return "%"+getVal().get(0)+"%";
        if(getLogic().equals("in") || getLogic().equals("not in"))
            return getVal();
        return getVal().get(0);
    }
    /************************************************************************************/
    /***************            getter  &  setter                            ************/
    /***************                                                         ************/
    /************************************************************************************/
    public String getCol() {
        return col;
    }
    public void setCol(String col) {
        this.col = col;
    }
    public String getLogic() {
        if(logic==null || "".equals(logic))
            return "=";
        return logic;
    }
    public void setLogic(String logic) {
        this.logic = logic;
    }
    public List<Object> getVal() {
        return val;
    }
    public void setVal(List<Object> val) {
        this.val = val;
    }
    public String getGroup() {
        return group;
    }
    public void setGroup(String group) {
        this.group = group;
    }
    public String getTableCol() {
        return tableCol;
    }
    public void setTableCol(String tableCol) {
        this.tableCol = tableCol;
    }
}
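
A small sketch of the formatting behavior (tableCol is normally injected by PageModel.setModelClass; here it is set by hand for illustration):

    FieldCondition fc = new FieldCondition("createDate", "between", "2012", "2015");
    fc.setTableCol("create_date");
    String jpa = fc.format();     // " createDate between :createDate1 and :createDate2 "
    String sql = fc.formatSql();  // " create_date between :createDate1 and :createDate2 "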

+ 28 - 0
common/commons-data-mysql/src/main/java/com/yihu/jw/query/ReturnIdPstCreator.java

@ -0,0 +1,28 @@
package com.yihu.jw.query;
import org.springframework.jdbc.core.PreparedStatementCreator;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Statement;
/**
 * @author lincl
 * @version 1.0
 * @created 2016/5/6
 */
public class ReturnIdPstCreator implements PreparedStatementCreator {
    String sql;
    public ReturnIdPstCreator(String sql){
        this.sql = sql;
    }
    @Override
    public PreparedStatement createPreparedStatement(Connection connection) throws SQLException {
        return connection.prepareStatement(sql, Statement.RETURN_GENERATED_KEYS);
    }
}
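
A usage sketch with JdbcTemplate (table and column names are illustrative). Note that this creator binds no parameters, so the statement must already be complete; do not build it from untrusted input:

    KeyHolder keyHolder = new GeneratedKeyHolder();
    jdbcTemplate.update(new ReturnIdPstCreator("INSERT INTO demo_table (name) VALUES ('test')"), keyHolder);
    Number generatedId = keyHolder.getKey();   // the auto-generated primary key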

+ 303 - 0
common/commons-data-mysql/src/main/java/com/yihu/jw/query/URLHqlQueryParser.java

@ -0,0 +1,303 @@
package com.yihu.jw.query;
import javafx.util.Pair;
import org.apache.commons.lang3.StringUtils;
import org.hibernate.Criteria;
import org.hibernate.Session;
import org.hibernate.criterion.*;
import org.hibernate.metadata.ClassMetadata;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * URL query-string parser.
 *
 * @author lincl
 * @author Sand
 * @version 1.0
 * @created 2016.02.05 10:17
 */
public class URLHqlQueryParser<T> {
    private String fields;
    private String filters;
    private String orders;
    Session session;
    Class<T> entityCls;
    public URLHqlQueryParser(String fields, String filters, String orders) {
        this.fields = fields;
        this.filters = filters;
        this.orders = orders;
    }
    public URLHqlQueryParser(String filters){
        this.filters = filters;
    }
    public URLHqlQueryParser setSession(Session session) {
        this.session = session;
        return this;
    }
    public URLHqlQueryParser setEntityClass(Class<T> cls) {
        this.entityCls = cls;
        return this;
    }
    /**
     * Build the search criteria.
     *
     * @return
     */
    public Criteria makeCriteriaQuery() {
        Criteria criteria = session.createCriteria(entityCls);
        ClassMetadata classMetadata = session.getSessionFactory().getClassMetadata(entityCls);
//        makeSelection(criteria, classMetadata);
        makeOrderBy(criteria, classMetadata);
        makeWhere(criteria, classMetadata);
        return criteria;
    }
    /**
     * Build the count criteria.
     *
     * @return
     */
    public Criteria makeCriteriaCountQuery() {
        Criteria criteria = session.createCriteria(entityCls);
        ClassMetadata classMetadata = session.getSessionFactory().getClassMetadata(entityCls);
        criteria.setProjection(Projections.rowCount());
        makeWhere(criteria, classMetadata);
        return criteria;
    }
    /**
     * Build the selection (returned fields). Currently a no-op.
     *
     * @param criteria
     * @param classMetadata
     */
    private void makeSelection(Criteria criteria, ClassMetadata classMetadata) {
    }
    /**
     * Build the order-by clause.
     * +code sorts ascending by the code field; -code sorts descending.
     *
     * @param criteria
     * @param classMetadata
     */
    private void makeOrderBy(Criteria criteria, ClassMetadata classMetadata) {
        if (StringUtils.isNotEmpty(orders)) {
            String[] orderArray = orders.split(",");
            for(String elem : orderArray){
//                try {
//                    classMetadata.getPropertyType(elem);
//                }catch (Exception e){
//                    throw new IllegalArgumentException("the property not found!");
//                }
                criteria = elem.startsWith("+") ?
                        criteria.addOrder(Order.asc(elem.substring(1)))
                        : criteria.addOrder(Order.desc(elem.substring(1)));
            }
        }
    }
    /**
     * Filter syntax:
     * like: expressed with "?", e.g. name?'%医'
     * not in: expressed with "<>" and comma-separated values, e.g. status<>2,3,4,5
     * in: expressed with "=" and comma-separated values, e.g. status=2,3,4,5
     * =: plain equality, e.g. status=2
     * > / >=: greater-than and greater-than-or-equal, e.g. createDate>=2012
     * < / <=: less-than and less-than-or-equal, e.g. createDate<=2015
     * grouping: append a space and a group tag, e.g. createDate>2012 g1; conditions sharing a group tag are OR-ed together
     * multiple conditions: separated with ";"
     * <p>
     * Build the where clause.
     *
     * @param criteria
     * @param classMetadata
     */
    private void makeWhere(Criteria criteria, ClassMetadata classMetadata) {
        if (StringUtils.isEmpty(filters)) return;
        Map<String, List<Criterion>> criterionMap = new HashMap<>();
        String[] filterArray = filters.split(";");
        List<Criterion> groupCriterion = new ArrayList<>();
        for (int i = 0; i < filterArray.length; ++i) {
            String[] tokens = filterArray[i].split(" ");
            // A value may itself contain spaces: fold the middle tokens back into the
            // filter expression and treat the last token as the group tag.
            if (tokens.length > 2){
                for(int j=1; j<tokens.length; j++){
                    if(j==tokens.length-1)
                        tokens[1] = tokens[j];
                    else
                        tokens[0] += " " + tokens[j] ;
                }
            }
//            if (tokens.length > 2) throw new IllegalArgumentException("无效过滤参数");
            String group = null;
            if (tokens.length >= 2) group = tokens[1];
            Criterion criterion = splitFilter(tokens[0], classMetadata);
            if (group == null)
                group = Integer.toString(i);
            criterionMap.put(group,
                    makeGroupCriterion(criterionMap.get(group), criterion));
        }
        addWhere(criteria, criterionMap);
    }
    private void addWhere(Criteria criteria, Map<String, List<Criterion>> criterionMap) {
        List<Criterion> ls;
        for (String group : criterionMap.keySet()){
            ls = criterionMap.get(group);
            if(ls.size()>1)
                criteria.add(
                        Restrictions.or(ls.toArray(new Criterion[ls.size()]))
                );
            else
                criteria.add(
                        Restrictions.and(ls.toArray(new Criterion[ls.size()]))
                );
        }
    }
    protected List<Criterion> makeGroupCriterion(List<Criterion> ls, Criterion criterion){
        (ls = ls == null ? new ArrayList<>() : ls)
                .add(criterion);
        return ls;
    }
    protected Criterion splitFilter(String filter, ClassMetadata classMetadata) {
        Criterion criterion = null;
        if (filter.contains("?")) {
            Pair<Property, Object> pair = getPair(filter, "[?]", classMetadata);
            criterion = pair.getKey().like("%"+pair.getValue()+"%");
        } else if (filter.contains("<>")) {
            Pair<Property, Object> pair = getPair(filter, "<>", classMetadata);
            if (pair.getValue().getClass().isArray()) {
                criterion = pair.getKey().in((Object[])pair.getValue());
            } else {
                criterion = pair.getKey().eq(pair.getValue());
            }
            criterion = Restrictions.not(criterion);
        }  else if (filter.contains(">=")) {
            Pair<Property, Object> pair = getPair(filter, ">=", classMetadata);
            criterion = pair.getKey().ge(pair.getValue());
        } else if (filter.contains(">")) {
            Pair<Property, Object> pair = getPair(filter, ">", classMetadata);
            // TODO: convert to the field's actual type
            criterion = pair.getKey().gt(pair.getValue());
        } else if (filter.contains("<=")) {
            Pair<Property, Object> pair = getPair(filter, "<=", classMetadata);
            criterion = pair.getKey().le(pair.getValue());
        } else if (filter.contains("<")) {
            Pair<Property, Object> pair = getPair(filter, "<", classMetadata);
            criterion = pair.getKey().lt(pair.getValue());
        } else if (filter.contains("=")) {
            Pair<Property, Object> pair = getPair(filter, "=", classMetadata);
            if (pair.getValue().getClass().isArray()) {
                criterion = pair.getKey().in((Object[])pair.getValue());
            } else {
                criterion = pair.getKey().eq(pair.getValue());
            }
        }
        return criterion;
    }
    protected Pair<Property, Object> getPair(String filter, String splitter, ClassMetadata classMetadata) throws IllegalArgumentException {
        String[] tokens = filter.split(splitter);
        String valStr = tokens[1];
        Object val = tokens[1];
        try {
            if((splitter.equals("=") || splitter.equals("<>")) && valStr.contains(",")){
                val = formatVal(tokens[0], valStr, true);
            }
            else if(!splitter.equals("[?]")){
                val = formatVal(tokens[0], valStr, false);
            }
        } catch (NoSuchFieldException e) {
            e.printStackTrace();
        }
        return new Pair<>(Property.forName(tokens[0]), val);
    }
    private Object formatVal(String fieldName, String valStr, boolean isArr) throws NoSuchFieldException {
        Object val = "";
        if(isLong(fieldName)){
            if(isArr){
                val = strToLongArr(valStr);
            }else
                val = Long.parseLong(valStr);
        }else if(isInteger(fieldName)){
            if(isArr){
                val = strToIntArr(valStr);
            }else
                val = Integer.parseInt(valStr);
        }else {
            if(isArr)
                val = valStr.split(",");
            else
                val = valStr;
        }
        return val;
    }
    private Long[] strToLongArr(String valStr){
        String[] strArr = valStr.split(",");
        Long[] longArr = new Long[strArr.length];
        for(int i=0; i<strArr.length; i++){
            longArr[i] = Long.parseLong(strArr[i]);
        }
        return longArr;
    }
    private Integer[] strToIntArr(String valStr){
        String[] strArr = valStr.split(",");
        Integer[] intArr = new Integer[strArr.length];
        for(int i=0; i<strArr.length; i++){
            intArr[i] = Integer.parseInt(strArr[i]);
        }
        return intArr;
    }
    private boolean isInteger(String fieldName) throws NoSuchFieldException {
        Field field = getField(fieldName);
        return field.getType().equals(Integer.class) || field.getType().equals(Integer.TYPE);
    }
    private boolean isLong(String fieldName) throws NoSuchFieldException {
        Field field = getField(fieldName);
        return field.getType().equals(Long.class) || field.getType().equals(Long.TYPE);
    }
    private Field getField(String fieldName) throws NoSuchFieldException {
        Field f;
        try {
            f = entityCls.getDeclaredField(fieldName);
        } catch (NoSuchFieldException e) {
            f = entityCls.getSuperclass().getDeclaredField(fieldName);
        }
        return f;
    }
}

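A minimal usage sketch for URLHqlQueryParser, assuming an open Hibernate Session and a mapped entity; "Patient" and the filter values are hypothetical:

// Conditions: status in (2,3); createDate >= 2012 (with group tag g1).
Criteria criteria = new URLHqlQueryParser<Patient>("status=2,3;createDate>=2012 g1")
        .setSession(session)
        .setEntityClass(Patient.class)
        .makeCriteriaQuery();
List<Patient> result = criteria.list();
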
+ 267 - 0
common/commons-data-mysql/src/main/java/com/yihu/jw/query/URLQueryParser.java

@ -0,0 +1,267 @@
package com.yihu.jw.query;
import com.yihu.jw.ehr.util.datetime.DateUtil;
import com.yihu.jw.ehr.util.url.URLQueryBuilder;
import javafx.util.Pair;
import org.apache.commons.lang3.BooleanUtils;
import org.apache.commons.lang3.StringUtils;
import javax.persistence.EntityManager;
import javax.persistence.criteria.*;
import java.text.ParseException;
import java.util.*;
/**
 * URL query-string parser. Parses back the query strings produced by {@link URLQueryBuilder}.
 *
 * @author Sand
 * @version 1.0
 * @created 2016.02.05 10:17
 */
public class URLQueryParser<T> {
    private String fields;
    private String filters;
    private String orders;
    EntityManager entityManager;
    CriteriaBuilder builder;
    Class<T> entityCls;
    public URLQueryParser(String fields, String filters, String orders) {
        this.fields = fields;
        this.filters = filters;
        this.orders = orders;
    }
    public URLQueryParser(String filters) {
        this.filters = filters;
    }
    public URLQueryParser setEntityManager(EntityManager entityManager) {
        this.entityManager = entityManager;
        builder = entityManager.getCriteriaBuilder();
        return this;
    }
    public URLQueryParser setEntityClass(Class<T> cls) {
        this.entityCls = cls;
        return this;
    }
    /**
     * Build the search query.
     *
     * @return
     */
    public CriteriaQuery makeCriteriaQuery() throws ParseException {
        CriteriaQuery query = builder.createQuery();
        Root<T> root = query.from(entityCls);
        makeSelection(builder, query, root);
        makeOrderBy(builder, query, root);
        makeWhere(builder, query, root);
        return query;
    }
    /**
     * Build the count query.
     *
     * @return
     */
    public CriteriaQuery makeCriteriaCountQuery() throws ParseException {
        CriteriaQuery<Long> query = builder.createQuery(Long.class);
        Root<T> root = query.from(entityCls);
        query.select(builder.count(root));
        makeWhere(builder, query, root);
        return query;
    }
    /**
     * Build the selection (returned fields).
     *
     * @param criteriaBuilder
     * @param query
     * @param root
     */
    private void makeSelection(CriteriaBuilder criteriaBuilder, CriteriaQuery query, Root<T> root) {
        // Field projection is deliberately disabled for now; the full entity is always selected.
        if (false/*StringUtils.isNotEmpty(fields)*/) {
            String[] fieldArray = fields.split(",");
            List<Selection<T>> selections = new ArrayList<>(fieldArray.length);
            Arrays.stream(fieldArray).forEach(elem -> selections.add(root.get(elem)));
            query.select(criteriaBuilder.tuple(selections.toArray(new Selection[selections.size()])));
        } else {
            query.select(root);
        }
    }
    /**
     * Build the order-by clause.
     * +code sorts ascending by the code field; -code sorts descending.
     *
     * @param criteriaBuilder
     * @param query
     * @param root
     */
    private void makeOrderBy(CriteriaBuilder criteriaBuilder, CriteriaQuery query, Root<T> root) {
        if (StringUtils.isNotEmpty(orders)) {
            String[] orderArray = orders.split(",");
            List<Order> orderList = new ArrayList<>(orderArray.length);
            Arrays.stream(orderArray).forEach(
                    elem -> orderList.add(
                            elem.startsWith("+") ?
                                    criteriaBuilder.asc(root.get(elem.substring(1))) : criteriaBuilder.desc(root.get(elem.substring(1)))));
            query.orderBy(orderList);
        }
    }
    /**
     * Filter syntax:
     * like: expressed with "?", e.g. name?'%医'
     * in: expressed with "=" and comma-separated values, e.g. status=2,3,4,5
     * not in: expressed with "<>" and comma-separated values, e.g. status<>2,3,4,5
     * =: plain equality, e.g. status=2
     * > / >=: greater-than and greater-than-or-equal, e.g. createDate>=2012
     * < / <=: less-than and less-than-or-equal, e.g. createDate<=2015
     * grouping: append a space and a group tag, e.g. createDate>2012 g1; conditions sharing a group tag are OR-ed together
     * multiple conditions: separated with ";"
     * <p/>
     * Build the where clause.
     *
     * @param criteriaBuilder
     * @param query
     * @param root
     */
    private void makeWhere(CriteriaBuilder criteriaBuilder, CriteriaQuery query, Root<T> root) throws ParseException {
        if (StringUtils.isEmpty(filters)) return;
        Map<String, Predicate> predicateMap = new HashMap<>();
        String[] filterArray = filters.split(";");
        for (int i = 0; i < filterArray.length; ++i) {
            String filter = filterArray[i];
            // check whether the value looks like a datetime: yyyy-MM-dd hh:mm:ss
            String[] tokens;
//            Pattern p = Pattern.compile("[0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2}");
//            String[] filters = filter.split("[?]|<>|>=|>|<=|<|=");
//            Matcher m = p.matcher(filters[1]);
//            if (m.matches()) {
//                tokens = new String[]{filter};
//            }else {
//                tokens = filter.split(" ");
//            }
            tokens = filter.split(" ");
            // A value may itself contain spaces: fold the middle tokens back into the
            // filter expression and treat the last token as the group tag.
            if (tokens.length > 2){
                for(int j=1; j<tokens.length; j++){
                    if(j==tokens.length-1)
                        tokens[1] = tokens[j];
                    else
                        tokens[0] += " " +tokens[j] ;
                }
            }
            String group = null;
            if (tokens.length >= 2) group = tokens[1];
            Predicate predicate = splitFilter(tokens[0], criteriaBuilder, root);
            if (group != null) {
                if (predicateMap.get(group) == null)
                    predicateMap.put(group, predicate);
                else
                    predicateMap.put(group, criteriaBuilder.or(predicateMap.get(group), predicate));
            } else
                predicateMap.put(Integer.toString(i), predicate);
        }
        query.where(predicateMap.values().toArray(new Predicate[predicateMap.size()]));
    }
    protected Predicate splitFilter(String filter, CriteriaBuilder cb, Root<T> root) throws ParseException {
        Predicate predicate = null;
        if (filter.contains("?")) {
            Pair<Path, String> pair = getPair(filter, "[?]", root);
            predicate = cb.like(pair.getKey(), "%" + pair.getValue() + "%");
        } else if (filter.contains("<>")) {
            Pair<Path, String> pair = getPair(filter, "<>", root);
            if (pair.getValue().contains(",")) {
                predicate = cb.not(pair.getKey().in(pair.getValue().split(",")));
            } else {
                predicate = cb.notEqual(pair.getKey(), pair.getValue());
            }
        } else if (filter.contains(">=")) {
            Pair<Path, String> pair = getPair(filter, ">=", root);
            String value = pair.getValue();
            if(pair.getKey().getJavaType() == Date.class){
                Date date = DateUtil.strToDate(pair.getValue());
                predicate = cb.greaterThanOrEqualTo(pair.getKey(), date);
            }else {
                predicate = cb.greaterThanOrEqualTo(pair.getKey(),value);
            }
        } else if (filter.contains(">")) {
            Pair<Path, String> pair = getPair(filter, ">", root);
            String value = pair.getValue();
            if(pair.getKey().getJavaType() == Date.class){
                Date date = DateUtil.strToDate(pair.getValue());
                predicate = cb.greaterThan(pair.getKey(), date);
            }else {
                predicate = cb.greaterThan(pair.getKey(),value);
            }
        } else if (filter.contains("<=")) {
            Pair<Path, String> pair = getPair(filter, "<=", root);
            String value = pair.getValue();
            if(pair.getKey().getJavaType() == Date.class){
                Date date = DateUtil.strToDate(pair.getValue());
                predicate = cb.lessThanOrEqualTo(pair.getKey(), date);
            }else {
                predicate = cb.lessThanOrEqualTo(pair.getKey(),value);
            }
        } else if (filter.contains("<")) {
            Pair<Path, String> pair = getPair(filter, "<", root);
            String value = pair.getValue();
            if(pair.getKey().getJavaType() == Date.class){
                Date date = DateUtil.strToDate(pair.getValue());
                predicate = cb.lessThan(pair.getKey(), date);
            }else {
                predicate = cb.lessThan(pair.getKey(),value);
            }
        } else if (filter.contains("=")) {
            Pair<Path, String> pair = getPair(filter, "=", root);
            Set<Object> values = new HashSet<>();
            for (String value : pair.getValue().split(",")) {
                if (pair.getKey().getJavaType().isEnum()) {
                    values.add(Enum.valueOf(pair.getKey().getJavaType(), value));
                } else if (pair.getKey().getJavaType().equals(Boolean.class) ||
                        pair.getKey().getJavaType().equals(Boolean.TYPE)) {
                    values.add(BooleanUtils.toBoolean(value));
                } else if(pair.getKey().getJavaType() == Date.class){
                    // parse each comma-separated value individually
                    Date date = DateUtil.strToDate(value);
                    values.add(date);
                }else {
                    values.add(value);
                }
            }
            predicate = pair.getKey().in(values);
        }
        return predicate;
    }
    protected Pair<Path, String> getPair(String filter, String splitter, Root<T> root) {
        String[] tokens = filter.split(splitter);
        return new Pair<>(root.get(tokens[0]), tokens[1]);
    }
}

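A minimal usage sketch for URLQueryParser, assuming a JPA EntityManager and a mapped entity; "Patient" and the filter string are hypothetical, and exception handling is elided:

URLQueryParser<Patient> parser = new URLQueryParser<Patient>("status=2,3;createDate>=2012-01-01 00:00:00")
        .setEntityManager(entityManager)
        .setEntityClass(Patient.class);
List<Patient> result = entityManager
        .createQuery(parser.makeCriteriaQuery())
        .setMaxResults(10)
        .getResultList();
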
+ 63 - 0
common/commons-data-mysql/src/main/java/com/yihu/jw/query/UpdatePstCallback.java

@ -0,0 +1,63 @@
package com.yihu.jw.query;
import javafx.util.Pair;
import org.springframework.dao.DataAccessException;
import org.springframework.jdbc.core.PreparedStatementCallback;
import java.lang.reflect.Type;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.List;
/**
 * @author lincl
 * @version 1.0
 * @created 2016/5/6
 */
public class UpdatePstCallback implements PreparedStatementCallback<Integer> {
    List<Pair<Type, Object>> values;
    public UpdatePstCallback(List<Pair<Type, Object>> values){
        this.values = values;
    }
    @Override
    public Integer doInPreparedStatement(PreparedStatement preparedStatement) throws SQLException, DataAccessException {
        // bind the parameters
        setParams(preparedStatement);
        // run the update
        preparedStatement.executeUpdate();
        // fetch the generated id
        int key = getKey(preparedStatement);
        // close the statement
        preparedStatement.close();
        return key;
    }
    private int getKey(PreparedStatement preparedStatement) throws SQLException {
        int autoIncKeyFromApi = -1;
        ResultSet rs = preparedStatement.getGeneratedKeys();
        if (rs.next()) {
            autoIncKeyFromApi = rs.getInt(1);
        }
        rs.close();
        rs = null;
        return autoIncKeyFromApi;
    }
    public PreparedStatement setParams(PreparedStatement pst) throws SQLException {
        int i=1;
        for(Pair<Type, Object> pair : values){
            pst.setObject(i, pair.getValue());
            i++;
        }
        return pst;
    }
}

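A sketch tying ReturnIdPstCreator and UpdatePstCallback together, again assuming a Spring JdbcTemplate; table, column, and value are made up:

import javafx.util.Pair;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.List;

// Bind one string parameter, run the insert, and return the generated id.
List<Pair<Type, Object>> params = new ArrayList<>();
params.add(new Pair<>(String.class, "test"));
Integer id = jdbcTemplate.execute(
        new ReturnIdPstCreator("insert into demo_user (name) values (?)"),
        new UpdatePstCallback(params));
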
+ 28 - 0
common/commons-data-mysql/src/test/com/yihu/ehr/query/URLQueryBuilderTest.java

@ -0,0 +1,28 @@
package com.yihu.jw.query;
import org.junit.Test;
/**
 * @author Sand
 * @version 1.0
 * @created 2016.02.18 9:27
 */
public class URLQueryBuilderTest {
    @Test
    public void testToString() throws Exception {
        /*URLQueryBuilder builder = new URLQueryBuilder();
        builder.addField("name")
                .addField("age")
                .addField("address")
                .addFilter("name", "?", "王", null)
                .addFilter("age", ">=", "20", null)
                .addSorter("name", true)
                .addSorter("age", false)
                .setPageNumber(20)
                .setPageSize(10);
        String url = builder.toString();
        Assert.assertTrue("编码失败", StringUtils.isNotEmpty(url));*/
    }
}

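A hedged sketch of the round trip the commented-out test hints at, mirroring the fluent calls shown above (URLQueryBuilder itself is not part of this diff, so its exact signatures are assumed from the comment):

URLQueryBuilder builder = new URLQueryBuilder();
builder.addField("name")
        .addFilter("age", ">=", "20", null)
        .addSorter("name", true)
        .setPageNumber(1)
        .setPageSize(10);
String url = builder.toString(); // the string a URLQueryParser would parse back on the server side
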
+ 63 - 0
common/commons-data-query/pom.xml

@ -0,0 +1,63 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <groupId>com.yihu.jw</groupId>
        <artifactId>wlyy-parent-pom</artifactId>
        <version>2.4.0</version>
        <relativePath>../../wlyy-parent-pom/pom.xml</relativePath>
    </parent>
    <artifactId>commons-data-query</artifactId>
    <packaging>jar</packaging>
    <dependencies>
        <!-- true -->
        <dependency>
            <groupId>commons-dbutils</groupId>
            <artifactId>commons-dbutils</artifactId>
            <version>${version.commons-dbutils}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.commons</groupId>
            <artifactId>commons-dbcp2</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework</groupId>
            <artifactId>spring-context</artifactId>
        </dependency>
        <dependency>
            <groupId>com.yihu.jw</groupId>
            <artifactId>commons-util</artifactId>
        </dependency>
        <dependency>
            <groupId>com.yihu.jw</groupId>
            <artifactId>commons-data-hbase</artifactId>
        </dependency>
        <dependency>
            <groupId>com.yihu.jw</groupId>
            <artifactId>commons-data-solr</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.data</groupId>
            <artifactId>spring-data-solr</artifactId>
        </dependency>
        <dependency>
            <groupId>com.yihu.jw</groupId>
            <artifactId>mysql-starter</artifactId>
            <version>${version.wlyy-common}</version>
        </dependency>
    </dependencies>
    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
            </plugin>
        </plugins>
    </build>
</project>

+ 24 - 0
common/commons-data-query/src/main/java/com/yihu/jw/query/common/enums/SolrIndexEnum.java

@ -0,0 +1,24 @@
package com.yihu.jw.query.common.enums;
/**
 * Created by janseny on 2018/6/29.
 * Solr indexes whose fields are stored (store=true).
 */
public enum SolrIndexEnum {
    EHR_003905, // age at visit (years)            main table
    EHR_005013, // age at admission                main table
    EHR_000019, // gender                          main table
    EHR_000081, // outpatient department           main table
    EHR_000228, // inpatient department            main table
    EHR_000170, // actual inpatient days           main table
    EHR_000109, // disease                         detail table
    EHR_000131, // drug name (Chinese medicine)    detail table
    EHR_000175, // inpatient cost amount (yuan)    detail table
    EHR_000049, // cost (outpatient)               detail table
    EHR_000293, // diagnosis code - hospital       detail table
    EHR_000044, // outpatient cost category code   detail table
    EHR_000045, // cost (outpatient)               detail table
    EHR_000051, // payment code (outpatient cost)  detail table
    EHR_000180; // payment code (inpatient cost)   detail table
}

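A small sketch of how this enum tends to be consumed (mirroring the lookup built in SolrQuery further below): collect the stored field codes once, then test candidates against them.

import java.util.HashSet;
import java.util.Set;

// Build a membership set of stored solr field codes.
Set<String> storedFields = new HashSet<>();
for (SolrIndexEnum index : SolrIndexEnum.values()) {
    storedFields.add(index.name());
}
boolean stored = storedFields.contains("EHR_000019"); // true: gender is stored
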
+ 83 - 0
common/commons-data-query/src/main/java/com/yihu/jw/query/common/model/SolrGroupEntity.java

@ -0,0 +1,83 @@
package com.yihu.jw.query.common.model;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
/**
 * Solr grouping/aggregation definition.
 * Created by hzp on 2015/11/17.
 */
public class SolrGroupEntity {
    // grouping statistic types
    public static enum GroupType {
        FIELD_VALUE,   // group by field value
        DATE_RANGE,    // group by date range
        NUMERIC_RANGE  // group by numeric range
    }
    private String groupField; // field to group on
    private GroupType type; // grouping statistic type
    private Object gap; // bucket interval, used for range grouping
    private Map<String, String> groupCondition = new LinkedHashMap<>(); // custom group conditions (label -> filter)
    public SolrGroupEntity(String groupField) {
        this.groupField = groupField;
        this.type = GroupType.FIELD_VALUE;
        this.gap = null;
        // groupCondition keeps its LinkedHashMap initializer so insertion order is preserved
    }
    public SolrGroupEntity(String groupField, Map<String, String> groupCondition) {
        this.groupField = groupField;
        this.type = GroupType.FIELD_VALUE;
        this.gap = null;
        this.groupCondition = groupCondition;
    }
    public SolrGroupEntity(String groupField, GroupType type) {
        this.groupField = groupField;
        this.type = type; // honor the requested grouping type
        this.gap = null;
    }
    public SolrGroupEntity(String groupField, GroupType type, Object gap) {
        this.groupField = groupField;
        this.type = type;
        this.gap = gap;
    }
    public String getGroupField() {
        return groupField;
    }
    public void setGroupField(String groupField) {
        this.groupField = groupField;
    }
    public GroupType getType() {
        return type;
    }
    public void setType(GroupType type) {
        this.type = type;
    }
    public Object getGap() {
        return gap;
    }
    public void setGap(Object gap) {
        this.gap = gap;
    }
    public Map<String, String> getGroupCondition() {
        return groupCondition;
    }
    public void putGroupCondition(String key, String condition) {
        this.groupCondition.put(key, condition);
    }
}

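A minimal construction sketch; the field codes and bucket labels are hypothetical:

// Custom value grouping: bucket label -> solr filter condition.
SolrGroupEntity ageGroup = new SolrGroupEntity("EHR_003905");
ageGroup.putGroupCondition("child", "EHR_003905:[0 TO 17]");
ageGroup.putGroupCondition("adult", "EHR_003905:[18 TO *]");

// Range grouping: yearly buckets over a date field.
SolrGroupEntity byYear = new SolrGroupEntity("event_date", SolrGroupEntity.GroupType.DATE_RANGE, "+1YEAR");
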
+ 76 - 0
common/commons-data-query/src/main/java/com/yihu/jw/query/common/model/SolrJoinEntity.java

@ -0,0 +1,76 @@
package com.yihu.jw.query.common.model;
import com.yihu.jw.mysql.enums.Logical;
import com.yihu.jw.mysql.model.QueryCondition;
import java.util.List;
/**
 * Solr {!join} clause builder.
 * Created by hzp on 2015/11/17.
 */
public class SolrJoinEntity {
    private String fromIndex;
    private String fromCol;
    private String toCol;
    private List<QueryCondition> conditions; // filter conditions
    public SolrJoinEntity() {}
    /**
     * Constructor
     */
    public SolrJoinEntity(String fromIndex, String fromCol, String toCol, List<QueryCondition> conditions) {
        this.fromIndex = fromIndex;
        this.fromCol = fromCol;
        this.toCol = toCol;
        this.conditions = conditions;
    }
    /**
     * Constructor
     */
    public SolrJoinEntity(String fromCol, String toCol, List<QueryCondition> conditions) {
        this.fromCol = fromCol;
        this.toCol = toCol;
        this.conditions = conditions;
    }
    /**
     * Render the join clause as a solr query string
     */
    public String toString() {
        if(!fromCol.equals("") && !toCol.equals("")) {
            String re = "{!join ";
            if(fromIndex!=null && !fromIndex.equals("")) {
                re += " fromIndex="+fromIndex;
            }
            re +=" from="+fromCol+" to="+toCol+"}";
            if(conditions!=null && conditions.size() > 0) {
                String conditionString ="";
                for(QueryCondition condition :conditions) {
                    if(!conditionString.equals("")) {
                        switch (condition.getLogical()) {
                            case Logical.AND:
                                conditionString+=" AND ";
                                break;
                            case Logical.OR:
                                conditionString+=" OR ";
                                break;
                            case Logical.NOT:
                                conditionString+=" NOT ";
                                break;
                        }
                    }
                    conditionString+=condition.toString() +" ";
                }
                re += conditionString; // append the assembled filter conditions
            } else {
                re +="*:* ";
            }
            return re;
        } else{
            return "";
        }
    }
}

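A minimal sketch of the string this builder renders; the index and field names are hypothetical:

SolrJoinEntity join = new SolrJoinEntity("HealthProfileSub", "profile_id", "rowkey", null);
String q = join.toString();
// -> "{!join  fromIndex=HealthProfileSub from=profile_id to=rowkey}*:* "
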
+ 401 - 0
common/commons-data-query/src/main/java/com/yihu/jw/query/services/HbaseQuery.java

@ -0,0 +1,401 @@
package com.yihu.jw.query.services;
import com.fasterxml.jackson.databind.JavaType;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.yihu.jw.mysql.enums.Logical;
import com.yihu.jw.mysql.enums.Operation;
import com.yihu.jw.mysql.model.QueryCondition;
import com.yihu.jw.mysql.model.QueryEntity;
import com.yihu.jw.query.common.model.SolrJoinEntity;
import com.yihu.jw.solr.SolrUtil;
import com.yihu.jw.hbase.HBaseDao;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Sort;
import org.springframework.stereotype.Service;
import java.util.*;
/**
 * HBase + Solr query service
 * add by hzp at 2016-04-26
 */
@Service
public class HbaseQuery {
	@Autowired
	private SolrUtil solr;
	@Autowired
	private HBaseDao hbaseDao;
	/**
	 * Query the solr index and return the matching rowkeys (paged)
	 */
	public Page<String> queryIndexBySolr(String table, String q, String sort, String fq, String basicFl, String dFl, int page, int rows) throws Exception{
		long count = 0;
		if(rows < 0) rows = 50;
		if(page < 1) page = 1; // page is 1-based; 0 would make start negative
		long start= (page-1) * rows;
		Map<String, String> sortMap = getSortMap(sort);
		// solr query
		SolrDocumentList solrList = solr.query(table, q, fq, sortMap, start, rows);
		// collect the matching rowkeys
		List<String> list = new ArrayList<String>();
		if(solrList!=null && solrList.getNumFound()>0) {
			count = solrList.getNumFound();
			for (SolrDocument doc : solrList){
				String rowkey = String.valueOf(doc.getFieldValue("rowkey"));
				list.add(rowkey);
			}
		}
		return new PageImpl<String>(list,PageRequest.of(page-1, rows), count);
	}
	/**
	 * Lookup by rowkey
	 * @param rowkey
	 * @return
	 */
	public Map<String,Object> queryByRowKey(String table, String rowkey) throws Exception {
		Result rs = hbaseDao.getResult(table, rowkey);
		return resultToMap(rs);
	}
	/**
	 * Query by QueryEntity
	 * @return
	 */
	public Page<Map<String,Object>> query(QueryEntity query) throws Exception {
		return queryJoin(query, null);
	}
	/**
	 * Query by QueryEntity plus a JSON-encoded joins query
	 * @return
	 */
	public Page<Map<String,Object>> queryJoinJson(QueryEntity query, String joinsQuery) throws Exception {
		List<SolrJoinEntity> joins = new ArrayList<>();
		if(joinsQuery != null && joinsQuery.length() > 0) {
			ObjectMapper objectMapper = new ObjectMapper();
			if (joinsQuery.startsWith("[") && joinsQuery.endsWith("]")) {
				JavaType javaType = objectMapper.getTypeFactory().constructParametricType(List.class, SolrJoinEntity.class);
				List<SolrJoinEntity> joinList = objectMapper.readValue(joinsQuery, javaType);
				if (joinList!=null && joinList.size()>0) {
					for(SolrJoinEntity join : joinList){
						joins.add(join);
					}
				}
			} else {
				SolrJoinEntity join = objectMapper.readValue(joinsQuery, SolrJoinEntity.class);
				joins.add(join);
			}
		}
		return queryJoin(query, joins);
	}
	/**
	 * Query by QueryEntity with join entities
	 * @return
	 */
	public Page<Map<String,Object>> queryJoin(QueryEntity query, List<SolrJoinEntity> joins) throws Exception {
		String table = query.getTableName();
		String q = "";
		String fq = "";
		String fl = query.getFields();
		String sort = query.getSort();
		int page = query.getPage();
		int rows = query.getRows();
		List<QueryCondition> conditions = query.getConditions();
		if(joins!=null&&joins.size()>0) {
			fq = conditionToString(conditions);
			for(SolrJoinEntity join :joins){
				q += join.toString();
			}
		}
		else{
			q = conditionToString(conditions);
		}
		return queryBySolr(table, q, sort, fq, fl, "", page, rows);
	}
	/**
	 * JSON condition query
	 * @param json e.g. {'q':'*:*','fq':'','sort':'','page':1,'rows':10}
	 * @return
	 */
	public Page<Map<String,Object>> queryByJson(String table, String json) throws Exception {
		ObjectMapper objectMapper = new ObjectMapper();
		Map<String, String> query = objectMapper.readValue(json, Map.class);
		if(query!=null) {
			String q = query.containsKey("q")?query.get("q").toString():"";
			String fq = query.containsKey("fq")?query.get("fq").toString():"";
			String sort = query.containsKey("sort")?query.get("sort").toString():"";
			int page = query.containsKey("page")?Integer.parseInt(query.get("page").toString()):1;
			int rows = query.containsKey("rows")?Integer.parseInt(query.get("rows").toString()):50;
			return queryBySolr(table, q, sort, fq, page, rows);
		}
		return null;
	}
	/**
	 * Query (q, sort)
	 */
	public Page<Map<String,Object>> queryBySolr(String table, String q, String sort, int page, int rows) throws Exception {
		return queryBySolr(table,q, sort, "", page, rows);
	}
	/**
	 * Query (q, sort, fq)
	 */
	public Page<Map<String,Object>> queryBySolr(String table, String q, String sort, String fq, int page, int rows) throws Exception {
		return queryBySolr(table, q, sort, fq, "", "", page, rows);
	}
	/**
	 * Query (q, sort, fq, field lists)
	 */
	public Page<Map<String,Object>> queryBySolr(String table, String q, String sort, String fq, String basicFl, String dFl, int page, int rows) throws Exception{
		long count = 0;
		List<Map<String,Object>> data = new ArrayList<>();
		if (rows < 0) {
			rows = 50;
		}
		if (page < 1) {
			page = 1;
		}
		long start= (page-1) * rows;
		Map<String, String> sortMap = getSortMap(sort);
		// solr query
		SolrDocumentList solrList = solr.query(table, q, fq, sortMap, start, rows);
		// collect the matching rowkeys
		List<String> list = new ArrayList<String>();
		if (solrList != null && solrList.getNumFound() > 0) {
			count = solrList.getNumFound();
			for (SolrDocument doc : solrList){
				String rowkey = String.valueOf(doc.getFieldValue("rowkey"));
				list.add(rowkey);
			}
		}
		Result[] resultList = hbaseDao.getResultList(table, list, basicFl, dFl); // hbase result set
		if (resultList != null && resultList.length > 0){
			for (Result result : resultList) {
				Map<String, Object> obj = resultToMap(result);
				if (obj != null) {
					data.add(obj);
				}
			}
		}
		// Sorting is applied in solr already; the raw sort expression is not a
		// Spring Data property list, so it is not passed to PageRequest here.
		return new PageImpl<>(data, PageRequest.of(page - 1, rows), count);
	}
	/**
	 * Convert a sort expression to a map
	 * @param sort (format 1: {"field1":"asc","field2":"desc"}; format 2: field1 asc,field2 desc)
	 * @return
	 */
	private Map<String,String> getSortMap(String sort){
		if(sort!=null && sort.length()>0) {
			Map<String,String> sortMap = new HashMap<String, String>();
			try{
				// JSON form: {"field1":"asc","field2":"desc"}
				ObjectMapper objectMapper = new ObjectMapper();
				Map<String, String> obj = objectMapper.readValue(sort, Map.class);
				if(obj!=null){
					Set<String> set = obj.keySet();
					for (Iterator<String> it = set.iterator();it.hasNext();) {
						String field = it.next();
						String value = obj.get(field);
						sortMap.put(field, value);
					}
				}
			} catch (Exception e) {
				// plain comma-separated "field dir" form
				String[] items = sort.split(",");
				if(items.length>0) {
					for(String str : items) {
						String[] sortItem = str.split(" ");
						sortMap.put(sortItem[0], sortItem.length > 1 ? sortItem[1] : "asc");
					}
				}
			}
			return sortMap;
		}
		else
			return null;
	}
	/**
	 * Convert an HBase Result to a Map
	 * @return
	 */
	public Map<String,Object> resultToMap(Result result){
		String rowkey = Bytes.toString(result.getRow());
		if (rowkey != null && rowkey.length() >  0) {
			Map<String,Object> obj = new HashMap<>();
			obj.put("rowkey", rowkey);
			for(Cell cell : result.rawCells()) {
				String fieldName = Bytes.toString(CellUtil.cloneQualifier(cell));
				String fieldValue = Bytes.toString(CellUtil.cloneValue(cell));
				obj.put(fieldName, fieldValue);
			}
			return obj;
		}
		else
			return null;
	}
	/**
	 * Convert a condition list to a query string
	 * @param conditions
	 * @return
	 */
	private String conditionToString(List<QueryCondition> conditions) {
		String re ="";
		if(conditions!=null && conditions.size()>0) {
			for(QueryCondition condition:conditions){
				if(!re.equals("")) {
					switch (condition.getLogical()) {
						case Logical.AND:
							re += " AND ";
							break;
						case Logical.OR:
							re += " OR ";
							break;
						case Logical.NOT:
							re += " NOT ";
							break;
					}
				}
				re += qcToString(condition);
			}
		}
		else {
			re = "*:*";
		}
		return re;
	}
	/**
	 * Convert a single condition to a query string
	 * @return
	 */
	private String qcToString(QueryCondition qc){
		String s = "";
		String field = qc.getField();
		Object keyword = qc.getKeyword();
		Object[] keywords = qc.getKeywords();
		switch(qc.getOperation()){
			case Operation.LIKE:
				s = field + ":*" + keyword + "*";
				break;
			case Operation.LEFTLIKE:
				s = field + ":*" + keyword + "";
				break;
			case Operation.RIGHTLIKE:
				s = field + ":" + keyword + "*";
				break;
			case Operation.RANGE: {
				if(keywords.length==2) {
					s = field + ":[" +  keywords[0] + " TO " + keywords[1] + "]";
				}
				else if(keywords.length==1) {
					s = field + ":[" +  keywords[0] + " TO *]";
				}
				else if(keyword!=null&&!keyword.equals("")) {
					s = field + ":[" +  keyword + " TO *]";
				}
				break;
			}
			case Operation.IN: {
				String in = "";
				if(keywords!=null && keywords.length>0) {
					for (Object key : keywords) {
						if(in!=null&&in.length()>0) {
							in += " OR " + field + ":" + key;
						}
						else {
							in = field + ":" + key;
						}
					}
				}
				s = "(" + in + ")";
				break;
			}
			default:
				s = field + ":\"" + keyword + "\"";
		}
		return s;
	}
	/************************* full-text search ***********************************************/
	/**
	 * Full-text search
	 * fields: fields to search and return, comma-separated
	 * query: search terms, space-separated
	 */
	public Page<Map<String,Object>> getLucene(String table, String fields, String query, String sort, int page, int rows) throws Exception{
		String[] queryList = query.split(" ");
		String[] fieldList = fields.split(",");
		String q = "";
		for(String fieldName :fieldList) {
			for(String item :queryList) {
				if (q.length() == 0) {
					q = fieldName + ":" + item;
				} else {
					q += " OR " + fieldName + ":" + item;
				}
			}
		}
		return queryBySolr(table,q, sort,"", fields, "", page, rows);
	}
	/**
	 * Rowkey-prefix scan query
	 */
	public Page<Map<String,Object>> queryByScan(String table, String rowKey, String sort, String basicFl, String dFl, int page, int rows) throws Exception{
		long count = 0;
		List<Map<String,Object>> data = new ArrayList<>();
		if (rows < 0) {
			rows = 50;
		}
		if (page < 1) {
			page = 1;
		}
		long start= (page-1) * rows;
		Map<String, String> sortMap = getSortMap(sort);
		// rowkey prefix scan
		String[] legacyRowKeys = hbaseDao.findRowKeys(table, rowKey, rowKey.substring(0, rowKey.length() - 1) + "1", "^" + rowKey);
		count = legacyRowKeys.length; // total for the page metadata
		List<String> list = Arrays.asList(legacyRowKeys);
		Result[] resultList = hbaseDao.getResultList(table, list, basicFl, dFl); // hbase result set
		if (resultList != null && resultList.length > 0){
			for (Result result : resultList) {
				Map<String, Object> obj = resultToMap(result);
				if (obj != null) {
					data.add(obj);
				}
			}
		}
		return new PageImpl<>(data, PageRequest.of(page - 1, rows), count);
	}
}

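A minimal usage sketch of HbaseQuery, assuming it is injected by Spring; the table name and filter are hypothetical:

@Autowired
private HbaseQuery hbaseQuery;

public void demo() throws Exception {
    // Filter and sort in solr, then hydrate the rows from HBase: page 1, 10 rows.
    Page<Map<String, Object>> result =
            hbaseQuery.queryBySolr("HealthProfile", "EHR_000019:1", "event_date desc", 1, 10);
    long total = result.getTotalElements();
}
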
+ 1164 - 0
common/commons-data-query/src/main/java/com/yihu/jw/query/services/SolrQuery.java

@ -0,0 +1,1164 @@
package com.yihu.jw.query.services;
import com.yihu.jw.mysql.enums.Logical;
import com.yihu.jw.mysql.enums.Operation;
import com.yihu.jw.mysql.model.QueryCondition;
import com.yihu.jw.query.common.enums.SolrIndexEnum;
import com.yihu.jw.query.common.model.SolrGroupEntity;
import com.yihu.jw.solr.SolrUtil;
import com.yihu.jw.hbase.HBaseDao;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.solr.client.solrj.response.*;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.PageRequest;
import org.springframework.stereotype.Service;
import java.text.DecimalFormat;
import java.util.*;
/**
 * Solr statistics query service
 * add by hzp at 2016-04-26
 */
@Service
public class SolrQuery {
    // used when extracting base indicators
    private String startTime; // start date of the search window
    private String endTime; // end date of the search window
    @Autowired
    private SolrUtil solrUtil;
    @Autowired
    private HBaseDao hBaseDao;
    public void initParams(String startTime, String endTime) {
        // On the initial indicator run the window is unset; default to aggregating the past 50 years.
        this.startTime = startTime == null ? "NOW/YEAR-50YEAR" : startTime;
        this.endTime = endTime == null ? "NOW" : endTime;
    }
    //region solr syntax conversion
    /**
     * Convert a condition to solr syntax
     */
    public String conditionToString(QueryCondition condition) {
        String s = "";
        String operation = condition.getOperation();
        String field = condition.getField();
        Object keyword = condition.getKeyword();
        Object[] keywords = condition.getKeywords();
        switch (operation) {
            case Operation.LIKE:
                s = field + ":*" + keyword + "*";
                break;
            case Operation.LEFTLIKE:
                s = field + ":*" + keyword + "";
                break;
            case Operation.RIGHTLIKE:
                s = field + ":" + keyword + "*";
                break;
            case Operation.RANGE: {
                if (keywords.length >= 2) {
                    s = field + ":[" + keywords[0] + " TO " + keywords[1] + "]";
                }
                break;
            }
            case Operation.NOTRANGE: {
                if (keywords.length >= 2) {
                    s = "NOT " + field + ":[" + keywords[0] + " TO " + keywords[1] + "]";
                }
                break;
            }
            case Operation.NE: {
                s = "NOT(" + field + ":" + keyword + ")";
                break;
            }
            case Operation.IN: {
                String in = "";
                if (keywords != null && keywords.length > 0) {
                    for (Object key : keywords) {
                        if (in != null && in.length() > 0) {
                            in += " OR " + field + ":" + key;
                        } else {
                            in = field + ":" + key;
                        }
                    }
                } else if (keyword != null) {
                    in = field + ":" + keyword;
                }
                s = "( " + in + " )";
                break;
            }
            case Operation.NIN: {
                String in = "";
                if (keywords != null && keywords.length > 0) {
                    for (Object key : keywords) {
                        if (in != null && in.length() > 0) {
                            in += " OR " + field + ":" + key;
                        } else {
                            in = field + ":" + key;
                        }
                    }
                } else if (keyword != null) {
                    in = field + ":" + keyword;
                }
                s = "NOT (" + in + ")";
                break;
            }
            case Operation.GT: {
                s = field + ":{" + keyword + " TO *}";
                break;
            }
            case Operation.GTE: {
                s = field + ":[" + keyword + " TO * ]";
                break;
            }
            case Operation.LT: {
                s = field + ":" + "{* TO " + keyword + "}";
                break;
            }
            case Operation.LTE: {
                s = field + ":" + "[* TO " + keyword + "]";
                break;
            }
            case Operation.EQ: {
                s = field + ":" + keyword;
                break;
            }
            default:
                s = "unknown operation";
        }
        return s;
    }
    /**
     * Convert a condition list to solr syntax
     */
    public String conditionToString(List<QueryCondition> conditions) {
        String re = "";
        String NOT = "";
        if (conditions != null && conditions.size() > 0) {
            for (QueryCondition condition : conditions) {
                if (!re.equals("") || !NOT.equals("")) {
                    switch (condition.getLogical()) {
                        case Logical.AND:
                            re += " AND ";
                            break;
                        case Logical.OR:
                            re += " OR  "; // padded to 5 chars so the substring(length - 5) trick below works
                            break;
                        case Logical.NOT:
                            re += " NOT ";
                            break;
                    }
                    if (conditionToString(condition).indexOf("NOT") == 0) {
                        if (NOT.equals("")) {
                            NOT = " NOT(" + conditionToString(condition).substring(3);
                            re = re.substring(0, re.length() - 5);
                        } else {
                            NOT += re.substring(re.length() - 5) + conditionToString(condition).substring(3, conditionToString(condition).length());
                            re = re.substring(0, re.length() - 5);
                        }
                    } else {
                        re += conditionToString(condition);
                        re = "(" + re + ")";
                    }
                } else {
                    re += conditionToString(condition);
                    if (conditionToString(condition).indexOf("NOT") == 0) {
                        NOT = "NOT(" + re.substring(3);
                        re = "";
                    }
                }
            }
        } else {
            re = "*:* ";
        }
        if (NOT.equals(""))
            return re;
        else
            return re + NOT + ")";
    }
    //endregion solr syntax conversion
    //region fetch field result sets, with optional deduplication
    /**
     * Get the total hit count
     *
     * @param table
     * @param q
     * @param fq
     * @return
     */
    public long count(String table, String q , String fq) throws Exception {
        return solrUtil.count(table, q,fq);
    }
    /**
     * Fetch the result set for the given return fields
     *
     * @param q      query string
     * @param fq     filter query
     * @param sort   sort
     * @param start  start row
     * @param rows   row count
     * @param fields fields to return
     * @return
     */
    public List<Map<String, Object>> queryReturnFieldList(String tableName,
                                                          String q,
                                                          String fq,
                                                          Map<String, String> sort,
                                                          long start,
                                                          long rows,
                                                          String[] fields) throws Exception {
        List<Map<String, Object>> data = new ArrayList<>();
        Map<String, Integer> falseStoreIndexMap = new HashMap<>();
        Map<String, String> SolrIndexEnumMap = new HashMap<>();
        for (SolrIndexEnum solrIndex : SolrIndexEnum.values()) {
            SolrIndexEnumMap.put(solrIndex.toString(), solrIndex.toString());
        }
        SolrDocumentList solrDocList = solrUtil.query(tableName, q, fq, sort, start, rows, fields);
        if (solrDocList != null && solrDocList.getNumFound() > 0) {
            for (SolrDocument doc : solrDocList) {
                Map<String, Object> map = new HashMap<>();
                // Copy the requested fields from the solr doc and mark fields whose solr store flag is false.
                putFieldValueFromSolr(data, map, fields, doc, falseStoreIndexMap);
            }
        }
        // Fields with no stored value in the solr result are back-filled from HBase.
        fillFieldValueFromHbase(data, falseStoreIndexMap, solrDocList == null ? 0 : solrDocList.size(), tableName);
        return data;
    }
    /**
     * Deduplicated fetch of the given return fields
     *
     * @param q                query string
     * @param fq               filter query
     * @param sort             sort
     * @param start            start row
     * @param rows             row count
     * @param fields           fields to return
     * @param groupField       field to deduplicate/group on
     * @param groupSort        sort within each group
     * @param groupNullIsolate for the null-value group, keep each record separately (true) or only the first (false)
     * @return
     */
    public List<Map<String, Object>> distinctQueryReturnFieldList(String tableName,
                                                                  String q,
                                                                  String fq,
                                                                  Map<String, String> sort,
                                                                  int start,
                                                                  int rows,
                                                                  String[] fields,
                                                                  String groupField,
                                                                  String groupSort,
                                                                  boolean groupNullIsolate) throws Exception {
        List<Map<String, Object>> data = new ArrayList<>();
        Map<String, Integer> falseStoreIndexMap = new HashMap<>();
        Map<String, String> SolrIndexEnumMap = new HashMap<>();
        for (SolrIndexEnum solrIndex : SolrIndexEnum.values()) {
            SolrIndexEnumMap.put(solrIndex.toString(), solrIndex.toString());
        }
        List<Group> groupList = solrUtil.queryDistinctOneField(tableName, q, fq, sort, start, rows, fields, groupField, groupSort);
        int solrDocListSize = 0;
        for (Group group : groupList) {
            SolrDocument firstDoc = group.getResult().get(0);
            String fieldValue = firstDoc.getFieldValue(groupField) == null ? "null" : firstDoc.getFieldValue(groupField).toString();
            int i = 1;
            // For the null-value group, decide whether each null record is kept separately or merged into one.
            SolrDocumentList nullDocList = new SolrDocumentList();
            if (StringUtils.isEmpty(group.getGroupValue()) && groupNullIsolate) {
                String null_fq = fq + " AND -" + groupField + ":*";
                long count = solrUtil.count(tableName, q, null_fq);
                nullDocList = solrUtil.query(tableName, q, null_fq, null, 0, count, fields);
                i = nullDocList.size();
            }
            while (i != 0) {
                Map<String, Object> map = new HashMap<>();
                SolrDocument doc;
                if (nullDocList.getNumFound() == 0) {
                    doc = firstDoc;
                } else {
                    doc = nullDocList.get(i-1);
                }
                map.put("distinctField", groupField);
                map.put("distinctFieldValue", fieldValue);
                // Copy the requested fields from the solr doc and mark fields whose solr store flag is false.
                putFieldValueFromSolr(data, map, fields, doc, falseStoreIndexMap);
                solrDocListSize++;
                i--;
            }
        }
        // Fields with no stored value in the solr result are back-filled from HBase.
        fillFieldValueFromHbase(data, falseStoreIndexMap, solrDocListSize, tableName);
        return data;
    }
    // Copy the requested fields from the solr doc into itemMap, and mark fields whose solr store flag is false.
    private void putFieldValueFromSolr(List<Map<String, Object>> data, Map<String, Object> itemMap, String[] fields, SolrDocument doc, Map<String, Integer> falseStoreIndexMap) {
        if (fields != null && fields.length > 0) {
            for (String key : fields) {
                if (key.equals("event_date")) {
                    //如果是时间结果,则被加了八个小时
                    itemMap.put(key, DateUtils.addHours((Date) doc.getFieldValue(key), -8));
                } else {
                    if (doc.getFieldValue(key) != null) {
                        itemMap.put(key, doc.getFieldValue(key));
                    } else {
                        if (falseStoreIndexMap.containsKey(key)) {//&& !SolrIndexEnumMap.containsKey(key) 后续改为配置
                            int n = falseStoreIndexMap.get(key);
                            n++;
                            falseStoreIndexMap.put(key, n);
                        } else {
                            falseStoreIndexMap.put(key, 1);
                        }
                    }
                }
            }
        }
        data.add(itemMap);
    }
    // Back-fill fields from HBase when the solr result holds no stored value for them.
    private void fillFieldValueFromHbase(List<Map<String, Object>> data, Map<String, Integer> falseStoreIndexMap, int solrDocListSize, String tableName) {
        List<String> falseStoreList = new ArrayList<>();
        boolean isStored = true;
        for (String key : falseStoreIndexMap.keySet()) {
            if (key.contains("EHR_") && falseStoreIndexMap.get(key) == solrDocListSize) {
                isStored = false;
                falseStoreList.add(key);
            }
        }
        // fields not stored in solr must be re-read from hbase to get the original values
        if (!isStored) {
            // if multi-threaded, keep the batch size under 20000 rows
            List<String> rowkeyList = new ArrayList<>();
            if (data.size() > 0) {
                for (Map<String, Object> keyMap : data) {
                    if (keyMap.get("rowkey") != null) {
                        rowkeyList.add(keyMap.get("rowkey").toString());
                    }
                }
                for (String storeKey : falseStoreList) {
                    Result[] hbaseResult = hBaseDao.getResultList(tableName, rowkeyList, null, storeKey);
                    if (hbaseResult != null) {
                        for (Map<String, Object> keyMap : data) {
                            for (Result result : hbaseResult) {
                                String rowkey = Bytes.toString(result.getRow());
                                if (keyMap.get("rowkey").toString().equals(rowkey)) {
                                    String fieldValue = "";
                                    for (Cell cell : result.rawCells()) {
                                        fieldValue = Bytes.toString(CellUtil.cloneValue(cell));
                                    }
                                    keyMap.put(storeKey, fieldValue);
                                }
                            }
                        }
                    }
                }
            }
        }
    }
    /**
     * Single-field group count
     *
     * @return
     */
    public Page<Map<String, Object>> getGroupCount(String table, String groupField) throws Exception {
        return getGroupCount(table, groupField, "", "", 1, 1000);
    }
    /**
     * Single-field group count (paged)
     */
    public Page<Map<String, Object>> getGroupCount(String table,
                                                   String groupField,
                                                   String q,
                                                   String fq,
                                                   int page,
                                                   int rows) throws Exception {
        List<Map<String, Object>> data = new ArrayList<>();
        if (rows < 0) rows = 10;
        if (rows > 100) rows = 100;
        if (page < 1) page = 1;
        int start = (page - 1) * rows;
        /***** solr query ********/
        Map<String, Long> list = solrUtil.groupCount(table, q, fq, groupField, start, rows);
        if (list != null && list.size() > 0) {
            for (Map.Entry<String, Long> item : list.entrySet()) {
                Map<String, Object> obj = new HashMap<>();
                obj.put(groupField, item.getKey());
                obj.put("$count", item.getValue());
                data.add(obj);
            }
        }
        return new PageImpl<Map<String, Object>>(data, PageRequest.of(page - 1, rows), data.size());
    }
    /**
     * Recursively convert pivot statistics to row maps
     */
    private List<Map<String, Object>> pivotToMapList(List<PivotField> pivotList,
                                                     List<Map<String, Object>> data,
                                                     Map<String, Object> pre) {
        if (data == null) {
            data = new ArrayList<>();
        }
        if (pivotList != null) {
            for (PivotField pivot : pivotList) {
                String field = pivot.getField();
                String value = pivot.getValue().toString();
                Map<String, Object> newRow = new HashMap<>();
                if (pre != null) {
                    newRow.putAll(pre); //copy the parent row (shallow copy)
                }
                newRow.put(field, value);
                //recurse into child pivots
                if (pivot.getPivot() != null && pivot.getPivot().size() > 0) {
                    data = pivotToMapList(pivot.getPivot(), data, newRow);
                } else {
                    int count = pivot.getCount();
                    newRow.put("$count", count);
                    data.add(newRow);
                }
            }
        }
        return data;
    }
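    /*
     * For intuition, a two-level pivot tree such as sex -> age_group flattens into one row per
     * leaf; field names and counts below are illustrative:
     *
     *   sex=M                          {sex=M, age_group=0-18,  $count=10}
     *     age_group=0-18  (10)   =>    {sex=M, age_group=19-60, $count=25}
     *     age_group=19-60 (25)         {sex=F, age_group=0-18,  $count=12}
     *   sex=F
     *     age_group=0-18  (12)
     */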
    /**
     * Multi-level group count (without custom groups).
     */
    public Page<Map<String, Object>> getGroupMult(String table,
                                                  String groupFields,
                                                  String q,
                                                  String fq,
                                                  int page,
                                                  int rows) throws Exception {
        List<PivotField> listPivot = solrUtil.groupCountMult(table, q, fq, groupFields, page, rows);
        List<Map<String, Object>> data = pivotToMapList(listPivot, null, null);
        return new PageImpl<Map<String, Object>>(data, PageRequest.of(page - 1, rows), data.size());
    }
    /**
     * Recursive count over purely custom groups.
     */
    private List<Map<String, Object>> recGroupCount(String table,
                                                    List<SolrGroupEntity> grouplist,
                                                    int num,
                                                    List<Map<String, Object>> data,
                                                    Map<String, Object> pre,
                                                    String q,
                                                    String fq) throws Exception {
        if (data == null) {
            data = new ArrayList<>();
        }
        String groupField = grouplist.get(num).getGroupField();
        Map<String, String> list = grouplist.get(num).getGroupCondition();
        for (String key : list.keySet()) {
            String condition = list.get(key);
            String query = fq;
            if (query != null && query.length() > 0) {
                query += " AND " + condition;
            } else {
                query = condition;
            }
            Map<String, Object> newRow = new HashMap<>();
            if (pre != null) {
                newRow.putAll(pre); //copy the parent row (shallow copy)
            }
            newRow.put(groupField, key);
            //last level: run the actual count query
            if (num == grouplist.size() - 1) {
                long count = solrUtil.count(table, query);
                newRow.put("$count", count);
                data.add(newRow);
            } else {
                data = recGroupCount(table, grouplist, num + 1, data, newRow, q, query);
            }
        }
        return data;
    }
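    /*
     * A custom group is just a named dimension whose buckets map to Solr filter clauses; a
     * hand-built sketch using only SolrGroupEntity methods that appear in this file (the
     * "age_range" buckets are illustrative):
     *
     *   SolrGroupEntity ageGroup = new SolrGroupEntity("age_range");
     *   ageGroup.putGroupCondition("0-18", "age:[0 TO 18]");
     *   ageGroup.putGroupCondition("19-60", "age:[19 TO 60]");
     *   List<SolrGroupEntity> customGroup = new ArrayList<>();
     *   customGroup.add(ageGroup);
     *   // feeds the recursion via getGroupMult(table, groupFields, customGroup, q, fq)
     */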
    /**
     * Multi-level group count (with custom groups).
     */
    public Page<Map<String, Object>> getGroupMult(String table,
                                                  String groupFields,
                                                  List<SolrGroupEntity> customGroup,
                                                  String q,
                                                  String fq) throws Exception {
        List<Map<String, Object>> data = null;
        if (groupFields != null && groupFields.length() > 0) {
            String[] groups = groupFields.split(",");
            List<SolrGroupEntity> grouplist = new ArrayList<>();
            if (customGroup != null && customGroup.size() > 0) {
                grouplist = new ArrayList<>(customGroup); //copy so the caller's list is not mutated
            }
            //expand each field group into per-value filter conditions
            List<FacetField> facets = solrUtil.groupCount(table, q, fq, groups);
            for (FacetField facet : facets) {
                String groupName = facet.getName();
                SolrGroupEntity group = new SolrGroupEntity(groupName);
                List<FacetField.Count> counts = facet.getValues();
                for (FacetField.Count count : counts) {
                    String value = count.getName();
                    group.putGroupCondition(value, groupName + ":" + value);
                }
                grouplist.add(group);
            }
            data = recGroupCount(table, grouplist, 0, null, null, q, fq); //explicit 7-arg overload so rows carry "$count"
        } else { // purely custom groups
            if (customGroup != null && customGroup.size() > 0) {
                data = recGroupCount(table, customGroup, 0, null, null, null, null);
            }
        }
        return new PageImpl<Map<String, Object>>(data == null ? new ArrayList<>() : data);
    }
    /**
     * Multi-level group Count statistics (with custom groups).
     * <p>
     * TODO
     * Time-dimension aggregation is currently written for day-level intervals only; supporting
     * year, month, etc. requires dedicated branches. See the notes in joinAggregationCondition()
     * and finalCount(). -- 张进军 2018.1.26
     *
     * @param core               core name
     * @param q                  query condition
     * @param fq                 filter condition
     * @param dimensionGroupList dimension groups
     * @param customGroups       extra custom groups
     */
    public List<Map<String, Object>> getCountMultList(String core,
                                                      String q,
                                                      String fq,
                                                      List<SolrGroupEntity> dimensionGroupList,
                                                      List<SolrGroupEntity> customGroups) throws Exception {
        // Rows of dimension fields plus the last dimension's statistics, keyed by each combination of the other dimensions
        List<Map<String, Object>> resultCounts = new ArrayList<>();
        if (dimensionGroupList.size() > 0) {
            List<SolrGroupEntity> groupList = new ArrayList<>();
            if (customGroups != null && customGroups.size() > 0) {
                groupList = new ArrayList<>(customGroups); //copy so the caller's list is not mutated
            }
            // Collect the aggregation conditions derived from the dimension groups
            groupList.addAll(joinAggregationCondition(core, q, fq, dimensionGroupList));
            resultCounts = recGroupCount(core, groupList, 0, null, q, fq);
        } else { // purely custom groups
            if (customGroups != null && customGroups.size() > 0) {
                resultCounts = recGroupCount(core, customGroups, 0, null, null, null);
            }
        }
        return resultCounts;
    }
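    /*
     * Usage sketch (hypothetical dimensions; "queryService" and all names are illustrative):
     *
     *   SolrGroupEntity dateDim = new SolrGroupEntity("event_date");
     *   dateDim.setType(SolrGroupEntity.GroupType.DATE_RANGE);
     *   dateDim.setGap("+1DAY"); // day-level is the only interval handled so far, see TODO above
     *   SolrGroupEntity orgDim = new SolrGroupEntity("org_code");
     *   List<Map<String, Object>> counts = queryService.getCountMultList(
     *           "HealthProfile", "*:*", "", Arrays.asList(dateDim, orgDim), null);
     *   // each entry: dimension fields + "$statisticsKey" (e.g. "2018-01-26-ORG01") + "$result"
     */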
    /**
     * Recursive Count statistics (mixed dimensions).
     */
    private List<Map<String, Object>> recGroupCount(String core,
                                                    List<SolrGroupEntity> groupList,
                                                    int num,
                                                    List<Map<String, Object>> preList,
                                                    String q,
                                                    String fq) throws Exception {
        // Dimension fields, combined dimension key, and the statistic result
        List<Map<String, Object>> resultList = new ArrayList<>();
        String conditionName = "$condition"; // accumulates the filter condition for the last dimension's aggregation
        String statisticsKeyName = "$statisticsKey"; // accumulates the unique key identifying each statistic value
        if (num == groupList.size() - 1) {
            SolrGroupEntity groupEntity = groupList.get(num);  // the last dimension
            String groupField = groupEntity.getGroupField();
            Map<String, String> groupConditionMap = groupEntity.getGroupCondition();
            if (preList != null && preList.size() > 0) {
                // iterate the first N-1 dimension combinations as filter conditions
                for (Map<String, Object> preObj : preList) {
                    String currFq = preObj.get(conditionName).toString();
                    if (StringUtils.isNotEmpty(fq) && !fq.equals("*:*")) {
                        currFq += " AND " + fq;
                    }
                    // run the count for the last dimension
                    Map<String, Long> countMap = finalCount(core, q, currFq, groupEntity);
                    if (countMap.size() > 0) {
                        for (String key : countMap.keySet()) {
                            Map<String, Object> obj = new LinkedHashMap<>();
                            obj.putAll(preObj); // copy parent row
                            obj.put(groupField, key);
                            String statisticsKey = preObj.get(statisticsKeyName).toString() + "-" + key;
                            obj.put(statisticsKeyName, statisticsKey);
                            obj.put("$result", countMap.get(key)); // statistic value
                            obj.remove(conditionName);
                            resultList.add(obj);
                        }
                    }
                }
            } else {  // only one dimension to group by
                for (Map.Entry<String, String> item : groupConditionMap.entrySet()) {
                    String currFq = item.getValue();
                    if (StringUtils.isNotEmpty(fq) && !fq.equals("*:*")) {
                        currFq += " AND " + fq;
                    }
                    Map<String, Long> countMap = finalCount(core, q, currFq, groupEntity);
                    if (countMap.size() > 0) {
                        for (String key : countMap.keySet()) {
                            Map<String, Object> obj = new LinkedHashMap<>();
                            obj.put(groupField, key);
                            obj.put(statisticsKeyName, key);
                            obj.put("$result", countMap.get(key)); // 统计值
                            resultList.add(obj);
                        }
                    }
                }
            }
            return resultList;
        } else {
            List<Map<String, Object>> list = new ArrayList<>(); //rows to pass down
            SolrGroupEntity group = groupList.get(num); //current group
            Map<String, String> groupMap = group.getGroupCondition(); //items of the current group
            String groupField = group.getGroupField();
            if (preList != null) {
                //iterate rows from the previous level
                for (Map<String, Object> preObj : preList) {
                    //iterate items of the current group
                    for (Map.Entry<String, String> item : groupMap.entrySet()) {
                        Map<String, Object> obj = new LinkedHashMap<>();
                        obj.putAll(preObj); // copy parent row
                        obj.put(groupField, item.getKey());
                        String condition = preObj.get(conditionName).toString() + " AND " + item.getValue();
                        obj.put(conditionName, condition);
                        String statisticsKey = preObj.get(statisticsKeyName).toString() + "-" + item.getKey();
                        obj.put(statisticsKeyName, statisticsKey);
                        list.add(obj);
                    }
                }
            } else { //first pass: no parent rows yet
                for (Map.Entry<String, String> item : groupMap.entrySet()) {
                    Map<String, Object> obj = new LinkedHashMap<>();
                    obj.put(groupField, item.getKey());
                    obj.put(conditionName, item.getValue());
                    obj.put(statisticsKeyName, item.getKey());
                    list.add(obj);
                }
            }
            return recGroupCount(core, groupList, num + 1, list, q, fq);
        }
    }
    /**
     * Count statistics for the last dimension.
     *
     * @param core        core name
     * @param q           query condition
     * @param fq          filter condition
     * @param groupEntity group definition
     */
    private Map<String, Long> finalCount(String core,
                                         String q,
                                         String fq,
                                         SolrGroupEntity groupEntity) throws Exception {
        Map<String, Long> countMap = new HashMap<>();
        SolrGroupEntity.GroupType groupType = groupEntity.getType();
        String groupField = groupEntity.getGroupField();
        Object gap = groupEntity.getGap();
        if (groupType.equals(SolrGroupEntity.GroupType.DATE_RANGE)) {
            // count by date range
            List<RangeFacet> rangeFacets = solrUtil.getFacetDateRange(core, groupField, startTime, endTime, gap.toString(), fq, q);
            for (RangeFacet rangeFacet : rangeFacets) {
                List<RangeFacet.Count> countList = rangeFacet.getCounts();
                for (RangeFacet.Count count : countList) {
                    if (count.getCount() > 0) {
                        // TODO Currently written for day-level intervals; year/month etc. need dedicated branches. -- 张进军 2018.1.26
                        String key = count.getValue().substring(0, 10);
                        countMap.put(key, (long) count.getCount());
                    }
                }
            }
        } else {
            // count by field value
            countMap = solrUtil.groupCount(core, q, fq, groupField, 0, -1);
        }
        return countMap;
    }
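    /*
     * The day key is cut straight from the ISO timestamp Solr returns for each range bucket
     * (value below is illustrative):
     *
     *   String value = "2018-01-26T00:00:00Z"; // RangeFacet.Count#getValue() for a day bucket
     *   String key = value.substring(0, 10);   // "2018-01-26" -- the countMap key used above
     */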
    //endregion Count statistics
    //region Multi-level numeric statistics
    /**
     * Multi-level numeric statistics.
     *
     * @param core        core name
     * @param groupFields group fields
     * @param statsFields stats fields, comma-separated when multiple
     */
    public Page<Map<String, Object>> getStats(String core,
                                              String groupFields,
                                              String statsFields) throws Exception {
        return getStats(core, groupFields, statsFields, "", "", null);
    }
    /**
     * Multi-level numeric statistics.
     *
     * @param core        core name
     * @param groupFields group fields
     * @param statsFields stats fields, comma-separated when multiple
     * @param q           query condition
     * @param fq          filter condition
     */
    public Page<Map<String, Object>> getStats(String core,
                                              String groupFields,
                                              String statsFields,
                                              String q,
                                              String fq) throws Exception {
        return getStats(core, groupFields, statsFields, q, fq, null);
    }
    /**
     * Multi-level numeric statistics.
     *
     * @param core        core name
     * @param groupFields group fields
     * @param statsFields stats fields, comma-separated when multiple
     * @param q           query condition
     * @param fq          filter condition
     * @param customGroup extra custom groups
     */
    public Page<Map<String, Object>> getStats(String core,
                                              String groupFields,
                                              String statsFields,
                                              String q,
                                              String fq,
                                              List<SolrGroupEntity> customGroup) throws Exception {
        String[] groups = groupFields.split(",");
        String[] stats = statsFields.split(",");
        List<Map<String, Object>> data = null;
        if (groups != null && groups.length > 0) {
            List<SolrGroupEntity> grouplist = new ArrayList<>();
            if (customGroup != null && customGroup.size() > 0) {
                grouplist = new ArrayList<>(customGroup); //copy so the caller's list is not mutated
            }
            //expand each field group into per-value filter conditions
            List<FacetField> facets = solrUtil.groupCount(core, q, fq, groups);
            for (FacetField facet : facets) {
                String groupName = facet.getName();
                SolrGroupEntity group = new SolrGroupEntity(groupName);
                List<FacetField.Count> counts = facet.getValues();
                for (FacetField.Count count : counts) {
                    String value = count.getName();
                    group.putGroupCondition(value, groupName + ":" + value);
                }
                grouplist.add(group);
            }
            data = recStats(core, stats, grouplist, q, fq, 0, null);
        } else {
            //purely custom conditions are not implemented for numeric stats yet
            System.out.print("All custom condition!");
        }
        return new PageImpl<Map<String, Object>>(data == null ? new ArrayList<>() : data);
    }
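    /*
     * Usage sketch (hypothetical caller; core, group and stats field names are illustrative):
     *
     *   Page<Map<String, Object>> stats =
     *           queryService.getStats("HealthProfile", "org_code", "cost,days", "*:*", "");
     *   for (Map<String, Object> row : stats.getContent()) {
     *       // each row carries $count_/$sum_/$avg_/$max_/$min_ entries per stats field
     *       System.out.println(row.get("org_code") + " sum(cost)=" + row.get("$sum_cost"));
     *   }
     */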
    /**
     * Recursive numeric statistics.
     */
    private List<Map<String, Object>> recStats(String core,
                                               String[] statsFields,
                                               List<SolrGroupEntity> grouplist,
                                               String q,
                                               String fq,
                                               int num,
                                               List<Map<String, Object>> preList) throws Exception {
        String conditionName = "$condition";
        if (num == grouplist.size() - 1) {
            List<Map<String, Object>> list = new ArrayList<>(); //result rows
            SolrGroupEntity group = grouplist.get(num);
            String groupName = group.getGroupField();
            DecimalFormat df = new DecimalFormat("#.00");
            if (preList != null && preList.size() > 0) {
                for (Map<String, Object> preObj : preList) {
                    String query = preObj.get(conditionName).toString();
                    if (StringUtils.isNotEmpty(fq) && StringUtils.isNotEmpty(query)) {
                        query = fq + " AND " + query;
                    } else if (StringUtils.isNotEmpty(fq)) {
                        query = fq; //avoid prepending a literal "null" when fq is null
                    }
                    //last-level numeric statistics under the combined condition
                    Map<String, List<FieldStatsInfo>> statsMap = new HashMap<>();
                    //collect stats for every requested field
                    for (String field : statsFields) {
                        List<FieldStatsInfo> statsList = solrUtil.getStats(core, q, query, field, groupName);
                        statsMap.put(field, statsList);
                    }
                    if (statsMap != null && statsMap.size() > 0) {
                        List<FieldStatsInfo> statsFirst = statsMap.get(statsFields[0]);
                        if (statsFirst != null) {
                            for (int i = 0; i < statsFirst.size(); i++) {
                                String groupItem = statsFirst.get(i).getName();
                                Map<String, Object> obj = new HashMap<>();
                                obj.putAll(preObj); //copy parent row
                                obj.put(groupName, groupItem == null ? "" : groupItem);
                                obj.remove(conditionName);
                                for (String field : statsFields) {
                                    List<FieldStatsInfo> statsList = statsMap.get(field);
                                    FieldStatsInfo item = statsList.get(i);
                                    obj.put("$count_" + field, item.getCount());
                                    obj.put("$sum_" + field, df.format(item.getSum()));
                                    obj.put("$avg_" + field, df.format(item.getMean()));
                                    obj.put("$max_" + field, df.format(item.getMax()));
                                    obj.put("$min_" + field, df.format(item.getMin()));
                                }
                                list.add(obj);
                            }
                        }
                    }
                }
            } else {
                //last-level numeric statistics (no parent conditions)
                Map<String, List<FieldStatsInfo>> statsMap = new HashMap<>();
                //collect stats for every requested field
                for (String field : statsFields) {
                    List<FieldStatsInfo> statsList = solrUtil.getStats(core, q, fq, field, groupName);
                    statsMap.put(field, statsList);
                }
                if (statsMap != null && statsMap.size() > 0) {
                    List<FieldStatsInfo> statsFirst = statsMap.get(statsFields[0]);
                    for (int i = 0; i < statsFirst.size(); i++) {
                        String groupItem = statsFirst.get(i).getName();
                        Map<String, Object> obj = new HashMap<>();
                        obj.put(groupName, groupItem == null ? "" : groupItem);
                        for (String field : statsFields) {
                            List<FieldStatsInfo> statsList = statsMap.get(field);
                            FieldStatsInfo item = statsList.get(i);
                            obj.put("$count_" + field, item.getCount());
                            obj.put("$sum_" + field, df.format(item.getSum()));
                            obj.put("$avg_" + field, df.format(item.getMean()));
                            obj.put("$max_" + field, df.format(item.getMax()));
                            obj.put("$min_" + field, df.format(item.getMin()));
                        }
                        list.add(obj);
                    }
                }
            }
            return list;
        } else {
            List<Map<String, Object>> list = new ArrayList<>(); //rows to pass down
            SolrGroupEntity group = grouplist.get(num); //current group
            Map<String, String> groupMap = group.getGroupCondition(); //items of the current group
            String groupName = group.getGroupField();
            if (preList != null) {
                //iterate rows from the previous level
                for (Map<String, Object> preObj : preList) {
                    //iterate items of the current group
                    for (Map.Entry<String, String> item : groupMap.entrySet()) {
                        Map<String, Object> obj = new HashMap<>();
                        obj.putAll(preObj); //copy parent row
                        obj.put(groupName, item.getKey());
                        String condition = obj.get(conditionName).toString() + " AND " + item.getValue();
                        obj.put(conditionName, condition);
                        list.add(obj);
                    }
                }
            } else { //first pass: no parent rows yet
                for (Map.Entry<String, String> item : groupMap.entrySet()) {
                    Map<String, Object> obj = new HashMap<>();
                    obj.put(groupName, item.getKey());
                    obj.put(conditionName, item.getValue());
                    list.add(obj);
                }
            }
            return recStats(core, statsFields, grouplist, q, fq, num + 1, list);
        }
    }
    //endregion Multi-level numeric statistics
    //region Sum statistics
    /**
     * Multi-dimension sum statistics (with custom groups).
     * <p>
     * TODO
     * Time-dimension aggregation is currently written for day-level intervals only; supporting
     * year, month, etc. requires dedicated branches. See the notes in joinAggregationCondition().
     * -- 张进军 2018.1.26
     *
     * @param core               core name
     * @param q                  query condition
     * @param fq                 filter condition
     * @param statsField         field to sum
     * @param dimensionGroupList dimension groups
     * @param customGroups       extra custom groups
     */
    public List<Map<String, Object>> getSumMultList(String core,
                                                    String q,
                                                    String fq,
                                                    String statsField,
                                                    List<SolrGroupEntity> dimensionGroupList,
                                                    List<SolrGroupEntity> customGroups) throws Exception {
        // Rows of dimension fields plus the last dimension's statistics, keyed by each combination of the other dimensions
        List<Map<String, Object>> resultList = new ArrayList<>();
        if (dimensionGroupList != null && dimensionGroupList.size() > 0) {
            List<SolrGroupEntity> groupList = new ArrayList<>();
            if (customGroups != null && customGroups.size() > 0) {
                groupList = new ArrayList<>(customGroups); //copy so the caller's list is not mutated
            }
            // Collect the aggregation conditions derived from the dimension groups
            groupList.addAll(joinAggregationCondition(core, q, fq, dimensionGroupList));
            resultList = recGroupSum(core, statsField, groupList, q, fq, 0, null);
        } else { // purely custom groups
            if (customGroups != null && customGroups.size() > 0) {
                resultList = recGroupSum(core, statsField, customGroups, q, fq, 0, null);
            }
        }
        return resultList;
    }
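    /*
     * Usage sketch, reusing the hypothetical dimensions from the getCountMultList sketch above:
     *
     *   List<Map<String, Object>> sums = queryService.getSumMultList(
     *           "HealthProfile", "*:*", "", "cost", Arrays.asList(dateDim, orgDim), null);
     *   // each entry: dimension fields + "$statisticsKey" + "$result" (sum formatted "#.00")
     */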
    /**
     * Recursive multi-dimension sum statistics.
     */
    private List<Map<String, Object>> recGroupSum(String core,
                                                  String statsField,
                                                  List<SolrGroupEntity> groupList,
                                                  String q,
                                                  String fq,
                                                  int num,
                                                  List<Map<String, Object>> preList) throws Exception {
        // Dimension fields, combined dimension key, and the statistic result
        List<Map<String, Object>> resultList = new ArrayList<>();
        String conditionName = "$condition"; // accumulates the filter condition for the last dimension's aggregation
        String statisticsKeyName = "$statisticsKey"; // accumulates the unique key identifying each statistic value
        if (num == groupList.size() - 1) {
            SolrGroupEntity group = groupList.get(num); // the last dimension
            String groupField = group.getGroupField();
            Map<String, String> groupConditionMap = group.getGroupCondition();
            DecimalFormat df = new DecimalFormat("#.00");
            if (preList != null && preList.size() > 0) {
                for (Map<String, Object> preObj : preList) {
                    // the first N-1 dimension combination serves as the filter condition
                    String query = preObj.get(conditionName).toString();
                    if (StringUtils.isNotEmpty(fq) && !fq.equals("*:*")) {
                        query += " AND " + fq;
                    }
                    // collect sums for each bucket of the last dimension
                    for (Map.Entry<String, String> item : groupConditionMap.entrySet()) {
                        String currFq = query + " AND " + item.getValue();
                        FieldStatsInfo statsInfo = solrUtil.getStats(core, q, currFq, statsField);
                        if (statsInfo.getSum() != null && !statsInfo.getSum().equals(0d)) {
                            Map<String, Object> obj = new HashMap<>();
                            obj.putAll(preObj); // copy parent row
                            obj.put(groupField, item.getKey());
                            String statisticsKey = preObj.get(statisticsKeyName).toString() + "-" + item.getKey();
                            obj.put(statisticsKeyName, statisticsKey);
                            obj.put("$result", df.format(statsInfo.getSum()));  // statistic value
                            obj.remove(conditionName);
                            resultList.add(obj);
                        }
                    }
                }
            } else { // only one dimension to group by
                for (Map.Entry<String, String> item : groupConditionMap.entrySet()) {
                    String currFq = item.getValue();
                    if (StringUtils.isNotEmpty(fq) && !fq.equals("*:*")) {
                        currFq += " AND " + fq;
                    }
                    FieldStatsInfo statsInfo = solrUtil.getStats(core, q, currFq, statsField);
                    if (statsInfo.getSum() != null && !statsInfo.getSum().equals(0d)) {
                        Map<String, Object> obj = new HashMap<>();
                        obj.put(groupField, item.getKey());
                        obj.put(statisticsKeyName, item.getKey());
                        obj.put("$result", df.format(statsInfo.getSum()));  // 统计值
                        resultList.add(obj);
                    }
                }
            }
            return resultList;
        } else {
            List<Map<String, Object>> list = new ArrayList<>(); // rows to pass down
            SolrGroupEntity group = groupList.get(num); // current group
            Map<String, String> groupConditionMap = group.getGroupCondition();
            String groupField = group.getGroupField();
            if (preList != null) {
                //iterate rows from the previous level
                for (Map<String, Object> preObj : preList) {
                    //iterate items of the current group
                    for (Map.Entry<String, String> item : groupConditionMap.entrySet()) {
                        Map<String, Object> obj = new HashMap<>();
                        obj.putAll(preObj); // copy parent row
                        obj.put(groupField, item.getKey());
                        String condition = preObj.get(conditionName).toString() + " AND " + item.getValue();
                        obj.put(conditionName, condition);
                        String statisticsKey = preObj.get(statisticsKeyName).toString() + "-" + item.getKey();
                        obj.put(statisticsKeyName, statisticsKey);
                        list.add(obj);
                    }
                }
            } else { //first pass: no parent rows yet
                for (Map.Entry<String, String> item : groupConditionMap.entrySet()) {
                    Map<String, Object> obj = new HashMap<>();
                    obj.put(groupField, item.getKey());
                    obj.put(conditionName, item.getValue());
                    obj.put(statisticsKeyName, item.getKey());
                    list.add(obj);
                }
            }
            return recGroupSum(core, statsField, groupList, q, fq, num + 1, list);
        }
    }
    //endregion Sum statistics
    //region Shared private helpers
    /**
     * Collect the aggregation conditions for multi-group statistics.
     * Note: the dimension codes in the returned groupList must appear in the same order as in dimensionGroupList.
     *
     * @param core               core name
     * @param q                  query condition
     * @param fq                 filter condition
     * @param dimensionGroupList dimension groups
     */
    private List<SolrGroupEntity> joinAggregationCondition(String core,
                                                           String q,
                                                           String fq,
                                                           List<SolrGroupEntity> dimensionGroupList) throws Exception {
        List<SolrGroupEntity> groupList = new ArrayList<>();
        for (SolrGroupEntity dimensionGroup : dimensionGroupList) {
            Object gap = dimensionGroup.getGap();
            if (dimensionGroup.getType().equals(SolrGroupEntity.GroupType.DATE_RANGE)) {
                // date-range dimension: one bucket per day
                List<RangeFacet> rangeFacets = solrUtil.getFacetDateRange(core, dimensionGroup.getGroupField(), startTime, endTime, gap.toString(), fq, q);
                for (RangeFacet rangeFacet : rangeFacets) {
                    String groupName = rangeFacet.getName();
                    List<RangeFacet.Count> countList = rangeFacet.getCounts();
                    SolrGroupEntity groupEntity = new SolrGroupEntity(groupName);
                    groupEntity.setType(SolrGroupEntity.GroupType.DATE_RANGE);
                    groupEntity.setGap(gap);
                    for (RangeFacet.Count count : countList) {
                        if (count.getCount() > 0) {
                            // TODO Currently written for day-level intervals; year/month etc. need dedicated branches. -- 张进军 2018.1.26
                            String day = count.getValue().substring(0, 10);
                            groupEntity.putGroupCondition(day, String.format("%s:[%sT00:00:00Z TO %sT23:59:59Z]", groupName, day, day));
                        }
                    }
                    groupList.add(groupEntity);
                }
            } else {
                // field-value dimension: one bucket per distinct value
                String[] groupField = {dimensionGroup.getGroupField()};
                List<FacetField> facets = solrUtil.groupCount(core, q, fq, groupField);
                for (FacetField facet : facets) {
                    String groupName = facet.getName();
                    List<FacetField.Count> counts = facet.getValues();
                    SolrGroupEntity groupEntity = new SolrGroupEntity(groupName);
                    for (FacetField.Count count : counts) {
                        if (count.getCount() > 0) {
                            String value = count.getName();
                            groupEntity.putGroupCondition(value, groupName + ":" + value);
                        }
                    }
                    groupList.add(groupEntity);
                }
            }
        }
        return groupList;
    }
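    /*
     * The emitted conditions are plain Solr filter clauses; for a day bucket and a field bucket
     * respectively they look like (values illustrative):
     *
     *   event_date:[2018-01-26T00:00:00Z TO 2018-01-26T23:59:59Z]
     *   org_code:ORG01
     */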
    //endregion Shared private helpers
}

+ 23 - 0
common/commons-data-redis/src/main/java/com/yihu/jw/ehr/redis/annotation/Mapping.java

@ -0,0 +1,23 @@
package com.yihu.jw.ehr.redis.annotation;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * @author Sand
 * @version 1.0
 * @created 2016.03.28 15:29
 */
@Target({ElementType.METHOD, ElementType.FIELD})
@Retention(RetentionPolicy.RUNTIME)
public @interface Mapping {
    /**
     * Column name.
     * @return the mapped column name
     */
    String value() default "";
    String key() default "";
}

+ 17 - 0
common/commons-data-redis/src/main/java/com/yihu/jw/ehr/redis/annotation/Table.java

@ -0,0 +1,17 @@
package com.yihu.jw.ehr.redis.annotation;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * @author Sand
 * @version 1.0
 * @created 2016.03.28 15:29
 */
@Target(ElementType.TYPE)
@Retention(RetentionPolicy.RUNTIME)
public @interface Table {
    String value() default "";
}
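/*
 * A hypothetical cache model showing how the two annotations are meant to be combined
 * (nothing in this commit wires them up yet; class and member names are illustrative):
 *
 *   @Table("organizations")
 *   public class OrgCacheModel {
 *       @Mapping(value = "name", key = "code")
 *       private String name;
 *   }
 */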

+ 190 - 0
common/commons-data-redis/src/main/java/com/yihu/jw/ehr/redis/client/RedisClient.java

@ -0,0 +1,190 @@
package com.yihu.jw.ehr.redis.client;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.dao.DataAccessException;
import org.springframework.data.redis.connection.RedisConnection;
import org.springframework.data.redis.connection.StringRedisConnection;
import org.springframework.data.redis.core.RedisCallback;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.data.redis.serializer.RedisSerializer;
import org.springframework.data.redis.serializer.SerializationException;
import org.springframework.stereotype.Service;
import org.springframework.util.SerializationUtils;
import java.io.Serializable;
import java.util.*;
/**
 * Redis 数据访问接口。
 *
 * @author Sand
 * @version 1.0
 * @created 2015.08.04 11:12
 */
@Service
public class RedisClient {
    @Autowired
    private RedisTemplate<String, Serializable> redisTemplate;
    /**
     * Store a value.
     *
     * @param key
     * @param value
     */
    public void set(final String key, final Serializable value) {
        redisTemplate.execute((RedisCallback<Object>) connection -> {
            byte[] key_ = key.getBytes();
            byte[] value_ = SerializationUtils.serialize(value);
            connection.set(key_, value_);
            return true;
        });
    }
    /**
     * Store a value with an expiry.
     *
     * @param key
     * @param value
     * @param seconds time-to-live in seconds
     */
    public void set(final String key, final Serializable value, long seconds) {
        redisTemplate.execute((RedisCallback<Object>) connection -> {
            byte[] key_ = key.getBytes();
            byte[] value_ = SerializationUtils.serialize(value);
            connection.setEx(key_, seconds, value_);
            return true;
        });
    }
    /**
     * Batch-set key-value pairs.
     *
     * @param data
     */
    public void multiSet(Map<Serializable, Serializable> data) {
        redisTemplate.executePipelined(new RedisCallback<Object>() {
            @Override
            public Object doInRedis(RedisConnection connection) throws DataAccessException {
                for (Serializable key : data.keySet()) {
                    Serializable value = data.get(key);
                    //plain SET, not rPushX: these are key-value pairs, not list entries
                    connection.set(SerializationUtils.serialize(key), SerializationUtils.serialize(value));
                }
                return null;
            }
        });
    }
    /**
     * Batch-set key-value pairs (via SETNX: a key is only written when absent).
     *
     * @param data
     */
    public void multiSetData(Map<String, Serializable> data) {
        redisTemplate.executePipelined(new RedisCallback<Object>() {
            @Override
            public Object doInRedis(RedisConnection connection) throws DataAccessException {
                for (String key : data.keySet()) {
                    byte[] key_ = key.getBytes();
                    byte[] value_ = SerializationUtils.serialize(data.get(key));
                    connection.setNX(key_, value_);
                }
                return null;
            }
        });
    }
    /**
     * Get a value
     *
     * @param key
     * @param <T>
     * @return
     */
    public <T> T get(final String key) {
        return (T) redisTemplate.execute((RedisCallback<Serializable>) connection -> {
            byte[] keyBytes = key.getBytes();
            byte[] bytes = connection.get(keyBytes);
            if (bytes == null) {
                return null;
            }
            return (Serializable) SerializationUtils.deserialize(bytes);
        });
    }
    /**
     * Batch-get the values for the given keys.
     *
     * @param keys
     * @return
     */
    public List<Object> multiGet(Collection<String> keys) {
        return redisTemplate.executePipelined((RedisCallback<Object>) connection -> {
            keys.forEach(key -> {
                byte[] keyBytes = key.getBytes();
                connection.get(keyBytes);
            });
            return null;
        }, new RedisSerializer<Serializable>() {
            @Override
            public byte[] serialize(Serializable serializable) throws SerializationException {
                return SerializationUtils.serialize(serializable);
            }
            @Override
            public Serializable deserialize(byte[] bytes) throws SerializationException {
                return (Serializable) SerializationUtils.deserialize(bytes);
            }
        });
    }
    /**
     * Delete entries; the key supports glob-style pattern matching
     *
     * @param key
     */
    public void delete(String key) {
        redisTemplate.delete(redisTemplate.keys(key));
    }
    /**
     * Delete multiple entries; for very large key sets prefer pattern delete
     *
     * @param keys
     */
    public void delete(Collection<String> keys) {
        redisTemplate.delete(keys);
    }
    /**
     * List the keys matching a pattern
     *
     * @param pattern
     * @return
     */
    public Set<String> keys(String pattern) {
        return redisTemplate.execute((RedisCallback<Set<String>>) connection -> {
            Set<byte[]> keys = connection.keys(pattern.getBytes());
            Set<String> returnKeys = new HashSet<>();
            for (byte[] key : keys) {
                returnKeys.add(new String(key));
            }
            return returnKeys;
        });
    }
    /**
     * Whether the given key exists
     *
     * @param key
     * @return
     */
    public boolean hasKey(String key) {
        return redisTemplate.execute((RedisCallback<Boolean>) connection -> connection.exists(key.getBytes())); //raw bytes, matching the keys written by set()
    }
}
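/*
 * Usage sketch with an injected client (key names are illustrative):
 *
 *   @Autowired
 *   private RedisClient redisClient;
 *
 *   void demo() {
 *       redisClient.set("demo:1:name", "Alice", 60); // expires after 60 seconds
 *       String name = redisClient.get("demo:1:name");
 *       boolean exists = redisClient.hasKey("demo:1:name");
 *       redisClient.delete("demo:*"); // pattern delete via keys()
 *   }
 */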

+ 39 - 0
common/commons-data-redis/src/main/java/com/yihu/jw/ehr/redis/config/RedisContext.java

@ -0,0 +1,39 @@
package com.yihu.jw.ehr.redis.config;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.redis.connection.RedisConnectionFactory;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.data.redis.serializer.GenericToStringSerializer;
import org.springframework.data.redis.serializer.StringRedisSerializer;
import java.io.Serializable;
/**
 * @author Sand
 * @version 1.0
 * @created 2015.11.25 17:33
 *
 * Modify by Progr1mmer on 2018/02/13
 */
@Configuration
public class RedisContext {
    @Bean
    RedisTemplate<String, Serializable> redisTemplate(RedisConnectionFactory jedisConnectionFactory) {
        RedisTemplate<String, Serializable> redisTemplate = new RedisTemplate<>();
        redisTemplate.setConnectionFactory(jedisConnectionFactory);
        redisTemplate.setKeySerializer(new StringRedisSerializer());
        redisTemplate.setHashKeySerializer(new StringRedisSerializer());
        redisTemplate.setValueSerializer(new GenericToStringSerializer<>(Serializable.class));
        redisTemplate.setHashValueSerializer(new GenericToStringSerializer<>(Long.class));
        return redisTemplate;
    }
    /*@Bean
    public static ConfigureRedisAction configureRedisAction() {
        return ConfigureRedisAction.NO_OP;
    }*/
}

+ 37 - 0
common/commons-data-redis/src/main/java/com/yihu/jw/ehr/redis/schema/AddressDictSchema.java

@ -0,0 +1,37 @@
package com.yihu.jw.ehr.redis.schema;
import org.springframework.stereotype.Component;
/**
 *  Administrative division (region) dictionary cache
 * @author HZY
 * @created 2018/8/21 16:58
 */
@Component
public class AddressDictSchema extends KeySchema {
    public AddressDictSchema() {
        super.table = "area";
        super.column = "name";
    }
    /**
     * Clear all cached entries for this table
     */
    public void delete(){
        redisClient.delete(makeKey(this.table, "*", column));
    }
    public String getAreaName(String code) {
        return redisClient.get(makeKey(table, code, column));
    }
    public void setAreaName(String code, String value) {
        redisClient.set(makeKey(table, code, column), value);
    }
}

+ 20 - 0
common/commons-data-redis/src/main/java/com/yihu/jw/ehr/redis/schema/HealthArchiveSchema.java

@ -0,0 +1,20 @@
package com.yihu.jw.ehr.redis.schema;
import org.springframework.stereotype.Component;
import java.io.Serializable;
/**
 * Created by wxw on 2018/3/14.
 */
@Component
public class HealthArchiveSchema extends KeySchema {
    public HealthArchiveSchema() {
        super.table = "HealthArchive";
        super.column = "HaName";
    }
    public void set(String key, Serializable val, long seconds) {
        super.redisClient.set(makeKey(table, key, column), val, seconds);
    }
}

+ 15 - 0
common/commons-data-redis/src/main/java/com/yihu/jw/ehr/redis/schema/HealthProblemDictKeySchema.java

@ -0,0 +1,15 @@
package com.yihu.jw.ehr.redis.schema;
import org.springframework.stereotype.Component;
/**
 * Created by hzp on 2017.04.25
 */
@Component
public class HealthProblemDictKeySchema extends KeySchema {
    public HealthProblemDictKeySchema(){
        super.table = "HealthProblemDict";
        super.column = "HpName";
    }
}

+ 66 - 0
common/commons-data-redis/src/main/java/com/yihu/jw/ehr/redis/schema/Icd10KeySchema.java

@ -0,0 +1,66 @@
package com.yihu.jw.ehr.redis.schema;
import org.springframework.stereotype.Component;
/**
 * Created by hzp on 2017.06.05
 */
@Component
public class Icd10KeySchema extends KeySchema {
    public Icd10KeySchema(){
        super.table = "icd10";
        super.column = "name";
    }
    /**
     * Get the mapped health problem code
     * @param key
     * @return
     */
    public String getHpCode(String key) {
        return redisClient.get(makeKey(table, key,"hpCode"));
    }
    /**
     * Set the mapped health problem code
     * @param key
     * @param value
     */
    public void setHpCode(String key, String value) {
        redisClient.set(makeKey(table, key,"hpCode"), value);
    }
    /**
     * Get the chronic-disease flag together with its type, as "flag-type" (0 when there is no type, e.g. 1-0)
     * @param key
     * @return
     */
    public String getChronicInfo(String key) {
        return redisClient.get(makeKey(table, key,"chronic"));
    }
    /**
     * Set the chronic-disease flag
     * @param key
     * @param value
     */
    public void setChronicInfo(String key, String value) {
        redisClient.set(makeKey(table, key,"chronic"), value);
    }
    /**
     * Delete all mapped health problem codes
     */
    public void deleteHpCode() {
        redisClient.delete(makeKey(table,"*","hpCode"));
    }
    /**
     * Delete all chronic-disease flags
     */
    public void deleteChronic() {
        redisClient.delete(makeKey(table,"*","chronic"));
    }
}

+ 17 - 0
common/commons-data-redis/src/main/java/com/yihu/jw/ehr/redis/schema/IndicatorsDictKeySchema.java

@ -0,0 +1,17 @@
package com.yihu.jw.ehr.redis.schema;
import org.springframework.stereotype.Component;
/**
 * Created by hzp on 2017.04.25
 */
@Component
public class IndicatorsDictKeySchema extends KeySchema {
    public IndicatorsDictKeySchema(){
        super.table="IndicatorsDict";
        super.column="code";
    }
}

+ 92 - 0
common/commons-data-redis/src/main/java/com/yihu/jw/ehr/redis/schema/KeySchema.java

@ -0,0 +1,92 @@
package com.yihu.jw.ehr.redis.schema;
import com.yihu.jw.ehr.redis.client.RedisClient;
import com.yihu.jw.util.string.StringBuilderEx;
import org.springframework.beans.factory.annotation.Autowired;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
/**
 * Created by hzp on 2017.04.25
 */
public class KeySchema {
    protected final String keySchema = "%1:%2:%3";
    protected String table = "table";
    protected String column = "column";
    @Autowired
    protected RedisClient redisClient;
    /**
     * Build the composite key: table:key:column
     * @param table table name
     * @param key primary key
     * @param column column name
     * @return the composite key
     */
    public String makeKey(String table, String key, String column) {
        return new StringBuilderEx(keySchema)
                .arg(table)
                .arg(key)
                .arg(column)
                .toString();
    }
    /**
     * Get a single cached value
     * @param key
     * @param <T>
     * @return
     */
    public <T> T get(String key) {
        return redisClient.get(makeKey(table, key, column));
    }
    /**
     * Set a single cached value
     * @param key
     * @param val
     */
    public void set(String key, Serializable val){
        redisClient.set(makeKey(table, key, column), val);
    }
    /**
     * Delete a single cached value
     * @param key
     */
    public void delete(String key) {
        redisClient.delete(makeKey(table, key, column));
    }
    /**
     * Delete all cached values of the default column
     */
    public void deleteAll(){
        redisClient.delete(makeKey(table,"*", column));
    }
    /**
     * Get all cached entries of this table
     */
    public Map<String,Object> getAll(){
        Map<String, Object> re = new HashMap<>();
        Set<String> keys = redisClient.keys(makeKey(table,"*", column));
        for (String key : keys) {
            String val = redisClient.get(key);
            re.put(key,val);
        }
        return re;
    }
    /**
     * Whether the key exists
     */
    public boolean hasKey(String key){
        return redisClient.hasKey(makeKey(table, key, column));
    }
}
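/*
 * Keys follow the table:key:column pattern, so for a subclass with table="area" and
 * column="name" (values illustrative):
 *
 *   makeKey("area", "350200", "name")  ->  "area:350200:name"
 *   schema.set("350200", "Xiamen");        // writes area:350200:name
 *   String name = schema.get("350200");    // reads it back
 */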

+ 83 - 0
common/commons-data-redis/src/main/java/com/yihu/jw/ehr/redis/schema/OrgKeySchema.java

@ -0,0 +1,83 @@
package com.yihu.jw.ehr.redis.schema;
import org.springframework.stereotype.Component;
/**
 * Created by hzp on 2017.04.25
 */
@Component
public class OrgKeySchema extends KeySchema {
    public OrgKeySchema(){
        super.table = "organizations";
        super.column = "name";
    }
    /**
     * Get the organization's area
     */
    public String getOrgArea(String key) {
        return redisClient.get(makeKey(table, key, "area"));
    }
    /**
     * Set the organization's area
     */
    public void setOrgArea(String key, String value) {
        redisClient.set(makeKey(table, key, "area"), value);
    }
    /**
     * Delete all organization areas
     */
    public void deleteOrgArea() {
        redisClient.delete(makeKey(table, "*", "area"));
    }
    /**
     * Get the organization's SAAS area permission scope
     * @return
     */
    public String getOrgSaasArea(String key) {
        return redisClient.get(makeKey(table, key, "saasArea"));
    }
    /**
     * Set the organization's SAAS area permission scope
     * @return
     */
    public void setOrgSaasArea(String key, String value) {
        redisClient.set(makeKey(table, key, "saasArea"), value);
    }
    /**
     * Delete all SAAS area permission scopes
     */
    public void deleteOrgSaasArea() {
        redisClient.delete(makeKey(table, "*", "saasArea"));
    }
    /**
     * Get the organization's SAAS org permission scope
     * @return
     */
    public String getOrgSaasOrg(String key) {
        return redisClient.get(makeKey(table, key, "saasOrg"));
    }
    /**
     * Set the organization's SAAS org permission scope
     * @return
     */
    public void setOrgSaasOrg(String key, String value) {
        redisClient.set(makeKey(table, key, "saasOrg"), value);
    }
    /**
     * Delete all SAAS org permission scopes
     */
    public void deleteOrgSaasOrg() {
        redisClient.delete(makeKey(table, "*", "saasOrg"));
    }
}

+ 27 - 0
common/commons-data-redis/src/main/java/com/yihu/jw/ehr/redis/schema/RsAdapterMetaKeySchema.java

@ -0,0 +1,27 @@
package com.yihu.jw.ehr.redis.schema;
import org.springframework.stereotype.Service;
/**
 * Created by hzp on 2017.04.25
 */
@Service
public class RsAdapterMetaKeySchema extends KeySchema {
    public RsAdapterMetaKeySchema(){
        super.table="rs_adapter_metadata";
        super.column="resource_metadata";
    }
    public String getMetaData(String cdaVersion, String dataSet, String ehrMetaData){
        return get(String.format("%s.%s.%s", cdaVersion, dataSet, ehrMetaData));
    }
    public void setMetaData(String cdaVersion, String dataSet, String ehrMetaData, String val){
        set(String.format("%s.%s.%s", cdaVersion, dataSet, ehrMetaData), val);
    }
    public void deleteVersion (String cdaVersion) {
        delete(String.format("%s.%s.%s", cdaVersion, "*", "*"));
    }
}

+ 16 - 0
common/commons-data-redis/src/main/java/com/yihu/jw/ehr/redis/schema/RsMetadataKeySchema.java

@ -0,0 +1,16 @@
package com.yihu.jw.ehr.redis.schema;
import org.springframework.stereotype.Component;
/**
 * Created by progr1mmer on 2018/6/15.
 */
@Component
public class RsMetadataKeySchema extends KeySchema {
    public RsMetadataKeySchema(){
        super.table="rs_metadata";
        super.column="dict_code";
    }
}

+ 15 - 0
common/commons-data-redis/src/main/java/com/yihu/jw/ehr/redis/schema/StdCdaVersionKeySchema.java

@ -0,0 +1,15 @@
package com.yihu.jw.ehr.redis.schema;
import org.springframework.stereotype.Component;
/**
 * Created by hzp on 2017.04.25
 */
@Component
public class StdCdaVersionKeySchema extends KeySchema {
    public StdCdaVersionKeySchema(){
        super.table="std_cda_versions";
        super.column="name";
    }
}

+ 68 - 0
common/commons-data-redis/src/main/java/com/yihu/jw/ehr/redis/schema/StdDataSetKeySchema.java

@ -0,0 +1,68 @@
package com.yihu.jw.ehr.redis.schema;
import org.springframework.stereotype.Component;
/**
 * Redis key generator for standardized data. Key format:
 *
 *  table:primary-key:column
 *
 * e.g.:
 *  std_cda_versions:000000000000:name
 *
 * Created by hzp on 2017.04.25
 */
@Component
public class StdDataSetKeySchema extends KeySchema {
    private String DataSetTable = "std_data_set_";
    private String DataSetCodeColumn = "code";
    private String DataSetNameColumn = "name";
    private String DataSetIsMultiRecordColumn = "multi_record";
    private String MetadataColumn = "metada_code";
    public String dataSetCode(String version, String id){
        return redisClient.get(makeKey(DataSetTable + version, id, DataSetCodeColumn));
    }
    public void setDataSetCode(String version, String id,String value){
        redisClient.set(makeKey(DataSetTable + version, id, DataSetCodeColumn),value);
    }
    public String dataSetName(String version, String id){
        return redisClient.get(makeKey(DataSetTable + version, id, DataSetNameColumn));
    }
    public void setDataSetName(String version, String id,String value){
        redisClient.set(makeKey(DataSetTable + version, id, DataSetNameColumn),value);
    }
    public String dataSetNameByCode(String version, String code){
        return redisClient.get(makeKey(DataSetTable + version, code, DataSetNameColumn));
    }
    public void setDataSetNameByCode(String version, String code,String value){
        redisClient.set(makeKey(DataSetTable + version, code, DataSetNameColumn),value);
    }
    public Boolean dataSetMultiRecord(String version, String code){
        return redisClient.get(makeKey(DataSetTable + version, code, DataSetIsMultiRecordColumn));
    }
    public void setDataSetMultiRecord(String version, String code,boolean value){
        redisClient.set(makeKey(DataSetTable + version, code, DataSetIsMultiRecordColumn),value);
    }
    public void setMetadataCode(String version, String code,String value){
        redisClient.set(makeKey(DataSetTable + version, code, MetadataColumn),value);
    }
    public String metadataCodes(String version, String datasetCode) {
        return redisClient.get(makeKey(DataSetTable + version, datasetCode, MetadataColumn));
    }
}

+ 143 - 0
common/commons-data-redis/src/main/java/com/yihu/jw/ehr/redis/schema/StdMetaDataKeySchema.java

@ -0,0 +1,143 @@
package com.yihu.jw.ehr.redis.schema;
import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Component;
/**
 * Created by hzp on 2017.04.25
 */
@Component
public class StdMetaDataKeySchema extends KeySchema {
    private String MetaDataTable = "std_meta_data_";
    private String MetaDataDictIdColumn = "dict_id";
    private String MetaDataTypeColumn = "type";
    private String MetaDataNameColumn = "name";
    private String MetaDataFormatColumn = "format";
    private String MetaDataNullableColumn = "nullable";
    private String DictEntryTable = "std_dictionary_entry_";
    private String DictEntryValueColumn = "value";
    private String DictEntryCodeColumn = "code";
    public String metaDataDict(String version, String dataSetCode, String innerCode) {
        assert version != null && version.length() != 0 &&
                dataSetCode != null && dataSetCode.length() != 0 &&
                innerCode != null && innerCode.length() != 0;
        return redisClient.get(makeKey(MetaDataTable + version, dataSetCode + "." + innerCode, MetaDataDictIdColumn));
    }
    public void setMetaDataDict(String version, String dataSetCode, String innerCode, String value) {
        redisClient.set(makeKey(MetaDataTable + version, dataSetCode + "." + innerCode, MetaDataDictIdColumn), value);
    }
    public void deleteMetaDataDict(String version, String dataSetCode, String innerCode) {
        redisClient.delete(makeKey(MetaDataTable + version, dataSetCode + "." + innerCode, MetaDataDictIdColumn));
    }
    public String metaDataType(String version, String dataSetCode, String innerCode) {
        assert version != null && version.length() != 0 &&
                dataSetCode != null && dataSetCode.length() != 0 &&
                innerCode != null && innerCode.length() != 0;
        return redisClient.get(makeKey(MetaDataTable + version, dataSetCode + "." + innerCode, MetaDataTypeColumn));
    }
    public void setMetaDataType(String version, String dataSetCode, String innerCode, String value) {
        redisClient.set(makeKey(MetaDataTable + version, dataSetCode + "." + innerCode, MetaDataTypeColumn), value);
    }
    public void deleteMetaDataType(String version, String dataSetCode, String innerCode) {
        redisClient.delete(makeKey(MetaDataTable + version, dataSetCode + "." + innerCode, MetaDataTypeColumn));
    }
    public String metaDataFormat(String version, String dataSetCode, String code) {
        assert version != null && version.length() != 0 &&
                dataSetCode != null && dataSetCode.length() != 0 &&
                code != null && code.length() != 0;
        return redisClient.get(makeKey(MetaDataTable + version, dataSetCode + "." + code, MetaDataFormatColumn));
    }
    public void setMetaDataFormat(String version, String dataSetCode, String code, String value) {
        redisClient.set(makeKey(MetaDataTable + version, dataSetCode + "." + code, MetaDataFormatColumn), value);
    }
    public void deleteMetaDataFormat(String version, String dataSetCode, String code) {
        redisClient.delete(makeKey(MetaDataTable + version, dataSetCode + "." + code, MetaDataFormatColumn));
    }
    public Boolean metaDataNullable(String version, String dataSetCode, String code) {
        assert version != null && version.length() != 0 &&
                dataSetCode != null && dataSetCode.length() != 0 &&
                code != null && code.length() != 0;
        String nullable = redisClient.get(makeKey(MetaDataTable + version, dataSetCode + "." + code, MetaDataNullableColumn));
        return "1".equals(nullable);
    }
    public void setMetaDataNullable(String version, String dataSetCode, String code, String value) {
        redisClient.set(makeKey(MetaDataTable + version, dataSetCode + "." + code, MetaDataNullableColumn), value);
    }
    public void deleteMetaDataNullable(String version, String dataSetCode, String code) {
        redisClient.delete(makeKey(MetaDataTable + version, dataSetCode + "." + code, MetaDataNullableColumn));
    }
    public String dictEntryValue(String version, String dictId, String entryCode) {
        assert StringUtils.isNotBlank(version) && StringUtils.isNotBlank(entryCode);
        return redisClient.get(makeKey(DictEntryTable + version, dictId + "." + entryCode, DictEntryValueColumn));
    }
    public void setDictEntryValue(String version, String dictId, String entryCode, String value) {
        redisClient.set(makeKey(DictEntryTable + version, dictId + "." + entryCode, DictEntryValueColumn), value);
    }
    public void deleteDictEntryValue(String version, String dictId, String entryCode) {
        redisClient.delete(makeKey(DictEntryTable + version, dictId + "." + entryCode, DictEntryValueColumn));
    }
    public String dictEntryCode(String version, String dictId, String entryValue) {
        assert StringUtils.isNotBlank(version) && StringUtils.isNotBlank(entryValue);
        return redisClient.get(makeKey(DictEntryTable + version, dictId + "." + entryValue, DictEntryCodeColumn));
    }
    public void setDictEntryCode(String version, String dictId, String entryValue, String value) {
        redisClient.set(makeKey(DictEntryTable + version, dictId + "." + entryValue, DictEntryCodeColumn), value);
    }
    public void deleteDictEntryCode(String version, String dictId, String entryValue) {
        redisClient.delete(makeKey(DictEntryTable + version, dictId + "." + entryValue, DictEntryCodeColumn));
    }
    public Boolean isDictCodeExist(String version, String dictId, String entryCode) {
        assert StringUtils.isNotBlank(version) && StringUtils.isNotBlank(entryCode);
        return redisClient.hasKey(makeKey(DictEntryTable + version, dictId + "." + entryCode, DictEntryCodeColumn));
    }
    public Boolean isDictValueExist(String version, String dictId, String entryValue) {
        assert StringUtils.isNotBlank(version) && StringUtils.isNotBlank(entryValue);
        return redisClient.hasKey(makeKey(DictEntryTable + version, dictId + "." + entryValue, DictEntryValueColumn));
    }
    public void setMetaDataName(String version, String dataSetCode, String innerCode, String name) {
        redisClient.set(makeKey(MetaDataTable + version, dataSetCode + "." + innerCode, MetaDataNameColumn), name);
    }
    public String metaName(String version, String dataSetCode, String innerCode) {
        assert version != null && version.length() != 0 &&
                dataSetCode != null && dataSetCode.length() != 0 &&
                innerCode != null && innerCode.length() != 0;
        return redisClient.get(makeKey(MetaDataTable + version, dataSetCode + "." + innerCode, MetaDataNameColumn));
    }
}
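
A minimal usage sketch of the key-schema API above (editor's illustration, not part of the commit; the version and dataset codes are hypothetical):

@Service
public class MetaDataCacheExample {
    @Autowired
    private StdMetaDataKeySchema stdMetaDataKeySchema;

    public String cachedName() {
        // writes under the composite key std_meta_data_1.0:HDSA00_01.HDSA00_01_001:name
        stdMetaDataKeySchema.setMetaDataName("1.0", "HDSA00_01", "HDSA00_01_001", "Patient Name");
        // reads it back through the same key
        return stdMetaDataKeySchema.metaName("1.0", "HDSA00_01", "HDSA00_01_001");
    }
}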

+ 0 - 23
common/commons-data-redis/src/main/java/com/yihu/jw/redis/annotation/Mapping.java

@ -1,23 +0,0 @@
package com.yihu.jw.redis.annotation;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * @author Sand
 * @version 1.0
 * @created 2016.03.28 15:29
 */
@Target({ElementType.METHOD, ElementType.FIELD})
@Retention(RetentionPolicy.RUNTIME)
public @interface Mapping {
    /**
     * Column name.
     * @return the column this member maps to
     */
    String value() default "";
    String key() default "";
}

+ 0 - 17
common/commons-data-redis/src/main/java/com/yihu/jw/redis/annotation/Table.java

@ -1,17 +0,0 @@
package com.yihu.jw.redis.annotation;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * @author Sand
 * @version 1.0
 * @created 2016.03.28 15:29
 */
@Target(ElementType.TYPE)
@Retention(RetentionPolicy.RUNTIME)
public @interface Table {
    String value() default "";
}

+ 0 - 163
common/commons-data-redis/src/main/java/com/yihu/jw/redis/client/RedisClient.java

@ -1,163 +0,0 @@
package com.yihu.jw.redis.client;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.dao.DataAccessException;
import org.springframework.data.redis.connection.RedisConnection;
import org.springframework.data.redis.connection.StringRedisConnection;
import org.springframework.data.redis.core.RedisCallback;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.stereotype.Service;
import org.springframework.util.SerializationUtils;
import java.io.Serializable;
import java.util.*;
/**
 * Redis data access interface.
 *
 * @author Sand
 * @version 1.0
 * @created 2015.08.04 11:12
 */
@Service
public class RedisClient {
    @Autowired
    private RedisTemplate<String, Serializable> redisTemplate;
    /**
     * Store a value.
     *
     * @param key
     * @param value
     */
    public void set(final String key, final Serializable value) {
        redisTemplate.execute((RedisCallback<Object>) connection -> {
            byte[] key_ = key.getBytes();
            byte[] value_ = SerializationUtils.serialize(value);
            connection.set(key_, value_);
            return true;
        });
    }
    /**
     * Store a value with an expiration time.
     *
     * @param key
     * @param value
     * @param seconds time-to-live in seconds
     */
    public void set(final String key, final Serializable value, long seconds) {
        redisTemplate.execute((RedisCallback<Object>) connection -> {
            byte[] key_ = key.getBytes();
            byte[] value_ = SerializationUtils.serialize(value);
            connection.setEx(key_,seconds, value_);
            return true;
        });
    }
    /**
     * Batch-set key-value pairs.
     *
     * @param data
     */
    public void multiSet(Map<Serializable, Serializable> data) {
        redisTemplate.executePipelined(new RedisCallback<Object>() {
            @Override
            public Object doInRedis(RedisConnection connection) throws DataAccessException {
                for (Serializable key : data.keySet()) {
                    Serializable value = data.get(key);
                    // set() stores each pair; the previous rPushX call appended to a list
                    // and silently did nothing unless the key already existed
                    connection.set(SerializationUtils.serialize(key), SerializationUtils.serialize(value));
                }
                return null;
            }
        });
    }
    public void multiSetData(final Map<String, Serializable> data) {
        this.redisTemplate.executePipelined(new RedisCallback<Object>() {
            public Object doInRedis(RedisConnection connection) throws DataAccessException {
                for (String key : data.keySet()) {
                    byte[] key_ = key.getBytes();
                    byte[] value_ = SerializationUtils.serialize(data.get(key));
                    // setNX only writes when the key does not already exist
                    connection.setNX(key_, value_);
                }
                return null;
            }
        });
    }
    /**
     * Get a value.
     *
     * @param key
     * @param <T>
     * @return
     */
    public <T> T get(final String key) {
        return (T)redisTemplate.execute((RedisCallback<Serializable>) connection -> {
            byte[] keyBytes = key.getBytes();
            byte[] bytes = connection.get(keyBytes);
            if(bytes == null) return null;
            return (Serializable) SerializationUtils.deserialize(bytes);
        });
    }
    /**
     * Batch-get the values associated with the given keys.
     *
     * @param keys
     * @return
     */
    public List<Serializable> multiGet(Collection<String> keys){
        return redisTemplate.opsForValue().multiGet(keys);
    }
    /**
     * Delete records; the key supports pattern (fuzzy) matching.
     *
     * @param key
     */
    public void delete(String key) {
        redisTemplate.delete(redisTemplate.keys(key));
    }
    /**
     * Delete multiple records; for very large key sets, prefer pattern-based deletion.
     * @param keys
     */
    public void delete(Collection<String> keys) {
        redisTemplate.delete(keys);
    }
    /**
     * List the keys matching the given pattern.
     * @param pattern
     * @return
     */
    public Set<String> keys(String pattern) {
        return redisTemplate.execute((RedisCallback<Set<String>>) connection -> {
            Set<byte[]> keys = connection.keys(pattern.getBytes());
            Set<String> returnKeys = new HashSet<>();
            for (byte[] key : keys) {
                returnKeys.add(new String(key));
            }
            return returnKeys;
        });
    }
    /**
     * Whether the given key exists.
     * @param key
     * @return
     */
    public boolean hasKey(String key) {
        // use the same raw-byte key encoding as set()/get(); serializing the key here
        // produced different bytes, so existence checks never matched stored keys
        return redisTemplate.execute((RedisCallback<Boolean>) connection -> connection.exists(key.getBytes()));
    }
}
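
For comparison with the relocated version, a short usage sketch of this client (editor's illustration; the keys are hypothetical):

@Service
public class RedisClientExample {
    @Autowired
    private RedisClient redisClient;

    public void demo() {
        // plain set, then set with a 60-second time-to-live
        redisClient.set("demo:key", "value");
        redisClient.set("demo:ttl", "value", 60L);
        String value = redisClient.get("demo:key");
        // delete() resolves the pattern via keys(), so this removes every demo:* entry
        redisClient.delete("demo:*");
    }
}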

+ 0 - 39
common/commons-data-redis/src/main/java/com/yihu/jw/redis/config/RedisContext.java

@ -1,39 +0,0 @@
package com.yihu.jw.redis.config;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.redis.connection.RedisConnectionFactory;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.data.redis.serializer.GenericToStringSerializer;
import org.springframework.data.redis.serializer.StringRedisSerializer;
import java.io.Serializable;
/**
 * @author Sand
 * @version 1.0
 * @created 2015.11.25 17:33
 *
 * Modify by Progr1mmer on 2018/02/13
 */
@Configuration
public class RedisContext {
    @Bean
    RedisTemplate<String, Serializable> redisTemplate(RedisConnectionFactory jedisConnectionFactory) {
        RedisTemplate<String, Serializable> redisTemplate = new RedisTemplate<>();
        redisTemplate.setConnectionFactory(jedisConnectionFactory);
        redisTemplate.setKeySerializer(new StringRedisSerializer());
        redisTemplate.setHashKeySerializer(new StringRedisSerializer());
        redisTemplate.setValueSerializer(new GenericToStringSerializer<>(Serializable.class));
        redisTemplate.setHashValueSerializer(new GenericToStringSerializer<>(Long.class));
        return redisTemplate;
    }
    /*@Bean
    public static ConfigureRedisAction configureRedisAction() {
        return ConfigureRedisAction.NO_OP;
    }*/
}

+ 0 - 37
common/commons-data-redis/src/main/java/com/yihu/jw/redis/schema/AddressDictSchema.java

@ -1,37 +0,0 @@
package com.yihu.jw.redis.schema;
import org.springframework.stereotype.Component;
/**
 * Administrative division (area) dictionary cache
 * @author HZY
 * @created 2018/8/21 16:58
 */
@Component
public class AddressDictSchema extends KeySchema {
    public AddressDictSchema() {
        super.table = "area";
        super.column = "name";
    }
    /**
     * Clear all cached entries for this table
     */
    public void delete() {
        redisClient.delete(makeKey(this.table, "*", column));
    }
    public String getAreaName(String code) {
        return redisClient.get(makeKey(table, code, column));
    }
    public void setAreaName(String code, String value) {
        redisClient.set(makeKey(table, code, column), value);
    }
}

+ 0 - 20
common/commons-data-redis/src/main/java/com/yihu/jw/redis/schema/HealthArchiveSchema.java

@ -1,20 +0,0 @@
package com.yihu.jw.redis.schema;
import org.springframework.stereotype.Component;
import java.io.Serializable;
/**
 * Created by wxw on 2018/3/14.
 */
@Component
public class HealthArchiveSchema extends KeySchema {
    public HealthArchiveSchema() {
        super.table = "HealthArchive";
        super.column = "HaName";
    }
    public void set(String key, Serializable val, long seconds) {
        super.redisClient.set(makeKey(table,key,column), val, seconds);
    }
}

+ 0 - 15
common/commons-data-redis/src/main/java/com/yihu/jw/redis/schema/HealthProblemDictKeySchema.java

@ -1,15 +0,0 @@
package com.yihu.jw.redis.schema;
import org.springframework.stereotype.Component;
/**
 * Created by hzp on 2017.04.25
 */
@Component
public class HealthProblemDictKeySchema extends KeySchema {
    public HealthProblemDictKeySchema(){
        super.table = "HealthProblemDict";
        super.column = "HpName";
    }
}

+ 0 - 66
common/commons-data-redis/src/main/java/com/yihu/jw/redis/schema/Icd10KeySchema.java

@ -1,66 +0,0 @@
package com.yihu.jw.redis.schema;
import org.springframework.stereotype.Component;
/**
 * Created by hzp on 2017.06.05
 */
@Component
public class Icd10KeySchema extends KeySchema {
    public Icd10KeySchema(){
        super.table = "icd10";
        super.column = "name";
    }
    /**
     * Get the mapped health problem code from Redis
     * @param key
     * @return
     */
    public String getHpCode(String key) {
        return redisClient.get(makeKey(table, key,"hpCode"));
    }
    /**
     * Set the mapped health problem code in Redis
     * @param key
     * @param value
     */
    public void setHpCode(String key, String value) {
        redisClient.set(makeKey(table, key,"hpCode"), value);
    }
    /**
     * Get the chronic-disease flag; the value also carries the type as "flag-type"
     * (e.g. 1-2; the type part is 0 when absent, e.g. 1-0)
     * @param key
     * @return
     */
    public String getChronicInfo(String key) {
        return redisClient.get(makeKey(table, key,"chronic"));
    }
    /**
     * Set the chronic-disease flag
     * @param key
     * @param value
     */
    public void setChronicInfo(String key, String value) {
        redisClient.set(makeKey(table, key,"chronic"), value);
    }
    /**
     * Delete the mapped health problem entries
     */
    public void deleteHpCode() {
        redisClient.delete(makeKey(table,"*","hpCode"));
    }
    /**
     * Delete the chronic-disease entries
     */
    public void deleteChronic() {
        redisClient.delete(makeKey(table,"*","chronic"));
    }
}
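
To make the chronic flag encoding concrete, a hedged example (editor's illustration; the ICD-10 code is arbitrary):

@Autowired
private Icd10KeySchema icd10KeySchema;

public void chronicFlagExample() {
    // icd10:E11:chronic -> "1-2" means chronic disease (1) of type 2;
    // "1-0" would mean chronic with no specific type recorded
    icd10KeySchema.setChronicInfo("E11", "1-2");
    String chronic = icd10KeySchema.getChronicInfo("E11");
}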

+ 0 - 17
common/commons-data-redis/src/main/java/com/yihu/jw/redis/schema/IndicatorsDictKeySchema.java

@ -1,17 +0,0 @@
package com.yihu.jw.redis.schema;
import org.springframework.stereotype.Component;
/**
 * Created by hzp on 2017.04.25
 */
@Component
public class IndicatorsDictKeySchema extends KeySchema {
    public IndicatorsDictKeySchema(){
        super.table="IndicatorsDict";
        super.column="code";
    }
}

+ 0 - 92
common/commons-data-redis/src/main/java/com/yihu/jw/redis/schema/KeySchema.java

@ -1,92 +0,0 @@
package com.yihu.jw.redis.schema;
import com.yihu.jw.redis.client.RedisClient;
import com.yihu.jw.util.string.StringBuilderEx;
import org.springframework.beans.factory.annotation.Autowired;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
/**
 * Created by hzp on 2017.04.25
 */
public class KeySchema {
    protected final String keySchema = "%1:%2:%3";
    protected String table = "table";
    protected String column = "column";
    @Autowired
    protected RedisClient redisClient;
    /**
     * Build the composite key: table:key:column
     * @param table table name
     * @param key primary key
     * @param column column name
     * @return
     */
    public String makeKey(String table, String key, String column) {
        return new StringBuilderEx(keySchema)
                .arg(table)
                .arg(key)
                .arg(column)
                .toString();
    }
    /**
     * Get a single cache entry
     * @param key
     * @param <T>
     * @return
     */
    public <T> T get(String key) {
        return redisClient.get(makeKey(table, key, column));
    }
    /**
     * Save a single cache entry
     * @param key
     * @param val
     */
    public void set(String key, Serializable val){
        redisClient.set(makeKey(table, key, column), val);
    }
    /**
     * Delete a single cache entry
     * @param key
     */
    public void delete(String key) {
        redisClient.delete(makeKey(table, key, column));
    }
    /**
     * Delete all entries for the default column
     */
    public void deleteAll(){
        redisClient.delete(makeKey(table,"*", column));
    }
    /**
     * Get all cached data for this table
     */
    public Map<String,Object> getAll(){
        Map<String, Object> re = new HashMap<>();
        Set<String> keys = redisClient.keys(makeKey(table,"*", column));
        for (String key : keys) {
            String val = redisClient.get(key);
            re.put(key,val);
        }
        return re;
    }
    /**
     * Check whether a key exists
     */
    public boolean hasKey(String key){
        return redisClient.hasKey(makeKey(table, key, column));
    }
}
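
As the subclasses in this commit illustrate, adding a cache table only requires setting the table and default column in a constructor; a hypothetical example:

@Component
public class DrugDictKeySchema extends KeySchema {
    public DrugDictKeySchema() {
        // keys take the form drug_dict:<key>:name
        super.table = "drug_dict";
        super.column = "name";
    }
}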

+ 0 - 83
common/commons-data-redis/src/main/java/com/yihu/jw/redis/schema/OrgKeySchema.java

@ -1,83 +0,0 @@
package com.yihu.jw.redis.schema;
import org.springframework.stereotype.Component;
/**
 * Created by hzp on 2017.04.25
 */
@Component
public class OrgKeySchema extends KeySchema {
    public OrgKeySchema(){
        super.table = "organizations";
        super.column = "name";
    }
    /**
     * Get the organization's area from Redis
     */
    public String getOrgArea(String key) {
        return redisClient.get(makeKey(table, key, "area"));
    }
    /**
     * Set the organization's area in Redis
     */
    public void setOrgArea(String key, String value) {
        redisClient.set(makeKey(table, key, "area"), value);
    }
    /**
     * Delete all organization area entries
     */
    public void deleteOrgArea() {
        redisClient.delete(makeKey(table, "*", "area"));
    }
    /**
     * Get the organization's SaaS area permission scope from Redis
     * @return
     */
    public String getOrgSaasArea(String key) {
        return redisClient.get(makeKey(table, key, "saasArea"));
    }
    /**
     * Set the organization's SaaS area permission scope in Redis
     */
    public void setOrgSaasArea(String key, String value) {
        redisClient.set(makeKey(table, key, "saasArea"), value);
    }
    /**
     * Delete all SaaS area permission scope entries
     */
    public void deleteOrgSaasArea() {
        redisClient.delete(makeKey(table, "*", "saasArea"));
    }
    /**
     * Get the organization's SaaS organization permission scope from Redis
     * @return
     */
    public String getOrgSaasOrg(String key) {
        return redisClient.get(makeKey(table, key, "saasOrg"));
    }
    /**
     * Set the organization's SaaS organization permission scope in Redis
     */
    public void setOrgSaasOrg(String key, String value) {
        redisClient.set(makeKey(table, key, "saasOrg"), value);
    }
    /**
     * Delete all SaaS organization permission scope entries
     */
    public void deleteOrgSaasOrg() {
        redisClient.delete(makeKey(table, "*", "saasOrg"));
    }
}

+ 0 - 27
common/commons-data-redis/src/main/java/com/yihu/jw/redis/schema/RsAdapterMetaKeySchema.java

@ -1,27 +0,0 @@
package com.yihu.jw.redis.schema;
import org.springframework.stereotype.Service;
/**
 * Created by hzp on 2017.04.25
 */
@Service
public class RsAdapterMetaKeySchema extends KeySchema {
    public RsAdapterMetaKeySchema(){
        super.table="rs_adapter_metadata";
        super.column="resource_metadata";
    }
    public String getMetaData(String cdaVersion, String dataSet, String ehrMetaData){
        return get(String.format("%s.%s.%s", cdaVersion, dataSet, ehrMetaData));
    }
    public void setMetaData(String cdaVersion, String dataSet, String ehrMetaData, String val){
        set(String.format("%s.%s.%s", cdaVersion, dataSet, ehrMetaData), val);
    }
    public void deleteVersion(String cdaVersion) {
        delete(String.format("%s.%s.%s", cdaVersion, "*", "*"));
    }
}

+ 0 - 16
common/commons-data-redis/src/main/java/com/yihu/jw/redis/schema/RsMetadataKeySchema.java

@ -1,16 +0,0 @@
package com.yihu.jw.redis.schema;
import org.springframework.stereotype.Component;
/**
 * Created by progr1mmer on 2018/6/15.
 */
@Component
public class RsMetadataKeySchema extends KeySchema {
    public RsMetadataKeySchema(){
        super.table="rs_metadata";
        super.column="dict_code";
    }
}

+ 0 - 15
common/commons-data-redis/src/main/java/com/yihu/jw/redis/schema/StdCdaVersionKeySchema.java

@ -1,15 +0,0 @@
package com.yihu.jw.redis.schema;
import org.springframework.stereotype.Component;
/**
 * Created by hzp on 2017.04.25
 */
@Component
public class StdCdaVersionKeySchema extends KeySchema {
    public StdCdaVersionKeySchema(){
        super.table="std_cda_versions";
        super.column="name";
    }
}

+ 0 - 63
common/commons-data-redis/src/main/java/com/yihu/jw/redis/schema/StdDataSetKeySchema.java

@ -1,63 +0,0 @@
package com.yihu.jw.redis.schema;
import org.springframework.stereotype.Component;
/**
 * Redis key generator for standardized data. Key format:
 *
 *  table:primaryKey:column
 *
 * e.g.:
 *  std_cda_versions:000000000000:name
 *
 * Created by hzp on 2017.04.25
 */
@Component
public class StdDataSetKeySchema extends KeySchema {
    private String DataSetTable = "std_data_set_";
    private String DataSetCodeColumn = "code";
    private String DataSetNameColumn = "name";
    private String DataSetIsMultiRecordColumn = "multi_record";
    private String MetadataColumn = "metada_code";
    public String dataSetCode(String version, String id) {
        return redisClient.get(makeKey(DataSetTable + version, id, DataSetCodeColumn));
    }
    public void setDataSetCode(String version, String id, String value) {
        redisClient.set(makeKey(DataSetTable + version, id, DataSetCodeColumn), value);
    }
    public String dataSetName(String version, String id) {
        return redisClient.get(makeKey(DataSetTable + version, id, DataSetNameColumn));
    }
    public void setDataSetName(String version, String id, String value) {
        redisClient.set(makeKey(DataSetTable + version, id, DataSetNameColumn), value);
    }
    public String dataSetNameByCode(String version, String code) {
        return redisClient.get(makeKey(DataSetTable + version, code, DataSetNameColumn));
    }
    public void setDataSetNameByCode(String version, String code, String value) {
        redisClient.set(makeKey(DataSetTable + version, code, DataSetNameColumn), value);
    }
    public Boolean dataSetMultiRecord(String version, String code) {
        return redisClient.get(makeKey(DataSetTable + version, code, DataSetIsMultiRecordColumn));
    }
    public void setDataSetMultiRecord(String version, String code, boolean value) {
        redisClient.set(makeKey(DataSetTable + version, code, DataSetIsMultiRecordColumn), value);
    }
    public void setMetadataCode(String version, String code, String value) {
        redisClient.set(makeKey(DataSetTable + version, code, MetadataColumn), value);
    }
    public String metadataCodes(String version, String datasetCode) {
        return redisClient.get(makeKey(DataSetTable + version, datasetCode, MetadataColumn));
    }
}

+ 0 - 140
common/commons-data-redis/src/main/java/com/yihu/jw/redis/schema/StdMetaDataKeySchema.java

@ -1,140 +0,0 @@
package com.yihu.jw.redis.schema;
import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Component;
/**
 * Created by hzp on 2017.04.25
 */
@Component
public class StdMetaDataKeySchema extends KeySchema {
    private String MetaDataTable = "std_meta_data_";
    private String MetaDataDictIdColumn = "dict_id";
    private String MetaDataNameColumn = "name";
    private String MetaDataTypeColumn = "type";
    private String MetaDataFormatColumn = "format";
    private String MetaDataNullableColumn = "nullable";
    private String DictEntryTable = "std_dictionary_entry_";
    private String DictEntryValueColumn = "value";
    private String DictEntryCodeColumn = "code";
    public String metaDataDict(String version, String dataSetCode, String innerCode) {
        assert version != null && version.length() != 0 &&
                dataSetCode != null && dataSetCode.length() != 0 &&
                innerCode != null && innerCode.length() != 0;
        return redisClient.get(makeKey(MetaDataTable + version, dataSetCode + "." + innerCode, MetaDataDictIdColumn));
    }
    public void setMetaDataDict(String version, String dataSetCode, String innerCode, String value) {
        redisClient.set(makeKey(MetaDataTable + version, dataSetCode + "." + innerCode, MetaDataDictIdColumn), value);
    }
    public void deleteMetaDataDict(String version, String dataSetCode, String innerCode) {
        redisClient.delete(makeKey(MetaDataTable + version, dataSetCode + "." + innerCode, MetaDataDictIdColumn));
    }
    public String metaDataType(String version, String dataSetCode, String innerCode) {
        assert version != null && version.length() != 0 &&
                dataSetCode != null && dataSetCode.length() != 0 &&
                innerCode != null && innerCode.length() != 0;
        return redisClient.get(makeKey(MetaDataTable + version, dataSetCode + "." + innerCode, MetaDataTypeColumn));
    }
    public void setMetaDataType(String version, String dataSetCode, String innerCode, String value) {
        redisClient.set(makeKey(MetaDataTable + version, dataSetCode + "." + innerCode, MetaDataTypeColumn), value);
    }
    public void deleteMetaDataType(String version, String dataSetCode, String innerCode) {
        redisClient.delete(makeKey(MetaDataTable + version, dataSetCode + "." + innerCode, MetaDataTypeColumn));
    }
    public String metaDataFormat(String version, String dataSetCode, String code) {
        assert version != null && version.length() != 0 &&
                dataSetCode != null && dataSetCode.length() != 0 &&
                code != null && code.length() != 0;
        return redisClient.get(makeKey(MetaDataTable + version, dataSetCode + "." + code, MetaDataFormatColumn));
    }
    public void setMetaDataFormat(String version, String dataSetCode, String code, String value) {
        redisClient.set(makeKey(MetaDataTable + version, dataSetCode + "." + code, MetaDataFormatColumn), value);
    }
    public void deleteMetaDataFormat(String version, String dataSetCode, String code) {
        redisClient.delete(makeKey(MetaDataTable + version, dataSetCode + "." + code, MetaDataFormatColumn));
    }
    public Boolean metaDataNullable(String version, String dataSetCode, String code) {
        assert version != null && version.length() != 0 &&
                dataSetCode != null && dataSetCode.length() != 0 &&
                code != null && code.length() != 0;
        String nullable = redisClient.get(makeKey(MetaDataTable + version, dataSetCode + "." + code, MetaDataNullableColumn));
        return nullable.equals("1");
    }
    public void setMetaDataNullable(String version, String dataSetCode, String code, String value) {
        redisClient.set(makeKey(MetaDataTable + version, dataSetCode + "." + code, MetaDataNullableColumn), value);
    }
    public void deleteMetaDataNullable(String version, String dataSetCode, String code) {
        redisClient.delete(makeKey(MetaDataTable + version, dataSetCode + "." + code, MetaDataNullableColumn));
    }
    public String dictEntryValue(String version, String dictId, String entryCode) {
        assert StringUtils.isNotBlank(version) && StringUtils.isNotBlank(entryCode);
        return redisClient.get(makeKey(DictEntryTable + version, dictId + "." + entryCode, DictEntryValueColumn));
    }
    public void setDictEntryValue(String version, String dictId, String entryCode, String value) {
        redisClient.set(makeKey(DictEntryTable + version, dictId + "." + entryCode, DictEntryValueColumn), value);
    }
    public void deleteDictEntryValue(String version, String dictId, String entryCode) {
        redisClient.delete(makeKey(DictEntryTable + version, dictId + "." + entryCode, DictEntryValueColumn));
    }
    public String dictEntryCode(String version, String dictId, String entryValue) {
        assert StringUtils.isNotBlank(version) && StringUtils.isNotBlank(entryValue);
        return redisClient.get(makeKey(DictEntryTable + version, dictId + "." + entryValue, DictEntryCodeColumn));
    }
    public void setDictEntryCode(String version, String dictId, String entryValue, String value) {
        redisClient.set(makeKey(DictEntryTable + version, dictId + "." + entryValue, DictEntryCodeColumn), value);
    }
    public void deleteDictEntryCode(String version, String dictId, String entryValue) {
        redisClient.delete(makeKey(DictEntryTable + version, dictId + "." + entryValue, DictEntryCodeColumn));
    }
    public Boolean isDictCodeExist(String version, String dictId, String entryCode) {
        assert StringUtils.isNotBlank(version) && StringUtils.isNotBlank(entryCode);
        return redisClient.hasKey(makeKey(DictEntryTable + version, dictId + "." + entryCode, DictEntryCodeColumn));
    }
    public Boolean isDictValueExist(String version, String dictId, String entryValue) {
        assert StringUtils.isNotBlank(version) && StringUtils.isNotBlank(entryValue);
        return redisClient.hasKey(makeKey(DictEntryTable + version, dictId + "." + entryValue, DictEntryValueColumn));
    }
    public void setMetaDataName(String version, String dataSetCode, String innerCode, String name) {
        this.redisClient.set(this.makeKey(this.MetaDataTable + version, dataSetCode + "." + innerCode, this.MetaDataNameColumn), name);
    }
    public String metaName(String version, String dataSetCode, String innerCode) {
        assert version != null && version.length() != 0 && dataSetCode != null && dataSetCode.length() != 0 && innerCode != null && innerCode.length() != 0;
        return (String)this.redisClient.get(this.makeKey(this.MetaDataTable + version, dataSetCode + "." + innerCode, this.MetaDataNameColumn));
    }
}

+ 47 - 0
common/commons-data-solr/pom.xml

@ -0,0 +1,47 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <groupId>com.yihu.jw</groupId>
        <artifactId>wlyy-parent-pom</artifactId>
        <version>2.4.0</version>
        <relativePath>../../wlyy-parent-pom/pom.xml</relativePath>
    </parent>
    <artifactId>commons-data-solr</artifactId>
    <packaging>jar</packaging>
    <properties>
    </properties>
    <dependencies>
        <dependency>
            <groupId>org.springframework</groupId>
            <artifactId>spring-context</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.data</groupId>
            <artifactId>spring-data-solr</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot</artifactId>
        </dependency>
        <dependency>
            <groupId>com.yihu.jw</groupId>
            <artifactId>commons-util</artifactId>
        </dependency>
    </dependencies>
    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
            </plugin>
        </plugins>
    </build>
</project>

+ 115 - 0
common/commons-data-solr/src/main/java/com/yihu/jw/solr/SolrAdmin.java

@ -0,0 +1,115 @@
package com.yihu.jw.solr;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.response.*;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.SolrInputDocument;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.config.ConfigurableBeanFactory;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Service;
import java.util.*;
/**
 * Low-level Solr operations
 *
 * @author hzp
 * @version 1.0
 * @created 2017.05.06
 */
@Service
@Scope(ConfigurableBeanFactory.SCOPE_PROTOTYPE)
public class SolrAdmin {
    private static final Logger logger = LoggerFactory.getLogger(SolrAdmin.class);
    @Autowired
    private SolrPool pool;
    /**
     * Create (index) a single document
     */
    public Boolean create(String core, Map<String, Object> map) throws Exception {
        SolrClient client = pool.getConnection(core);
        SolrInputDocument doc = new SolrInputDocument();
        // note: date values must be converted to the format Solr expects
        for (String key : map.keySet()) {
            doc.addField(key, map.get(key));
        }
        UpdateResponse re = client.add(doc);
        client.commit();
        // Solr reports success with status 0; the original check was inverted
        if (re.getStatus() == 0) {
            logger.info("create index cost " + re.getQTime());
            return true;
        } else {
            logger.warn("create index failed!");
            return false;
        }
    }
    /**
     * Update a single field of a single document (matched by unique key)
     */
    public Boolean update(String core, String uniqueKey, String uniqueKeyValue, String key, Object value) throws Exception {
        Map<String, Object> map = new HashMap<>();
        map.put(key, value);
        return update(core, uniqueKey + ":" + uniqueKeyValue, map);
    }
    /**
     * Update multiple fields on the documents matched by keyQuery
     */
    public Boolean update(String core, String keyQuery, Map<String, Object> map) throws Exception {
        SolrClient client = pool.getConnection(core);
        QueryResponse qr = client.query(new SolrQuery(keyQuery));
        SolrDocumentList docs = qr.getResults();
        if (docs != null && docs.size() > 0) {
            List<SolrInputDocument> solrList = new ArrayList<>();
            for (int i = 0; i < docs.size(); i++) {
                SolrDocument doc = docs.get(i);
                SolrInputDocument newItem = new SolrInputDocument();
                newItem.addField("rowkey", doc.get("rowkey"));
                for (String key : map.keySet()) {
                    newItem.addField(key, map.get(key));
                }
                solrList.add(newItem);
            }
            UpdateResponse re = client.add(solrList);
            client.commit();
            // status 0 means the update succeeded
            if (re.getStatus() == 0) {
                logger.info("update index cost " + re.getQTime());
                return true;
            } else {
                logger.warn("update index failed!");
                return false;
            }
        } else {
            logger.warn("Null result!");
        }
        return true;
    }
    /**
     * Delete the documents matched by keyQuery
     */
    public Boolean delete(String core, String keyQuery) throws Exception {
        SolrClient client = pool.getConnection(core);
        UpdateResponse de = client.deleteByQuery(keyQuery);
        client.commit();
        // status 0 means the deletion succeeded
        if (de.getStatus() == 0) {
            logger.info("delete index cost " + de.getQTime());
            return true;
        } else {
            logger.warn("delete index failed!");
            return false;
        }
    }
}
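
A short usage sketch for SolrAdmin (editor's illustration; the core, unique key and field names are hypothetical):

@Autowired
private SolrAdmin solrAdmin;

public void indexExample() throws Exception {
    Map<String, Object> doc = new HashMap<>();
    doc.put("rowkey", "patient-0001");
    // Solr expects ISO-8601 dates, hence the conversion note in create()
    doc.put("event_date", "2018-07-26T00:00:00Z");
    solrAdmin.create("HealthProfile", doc);
    solrAdmin.update("HealthProfile", "rowkey", "patient-0001", "org_code", "ORG01");
    solrAdmin.delete("HealthProfile", "rowkey:patient-0001");
}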

+ 53 - 0
common/commons-data-solr/src/main/java/com/yihu/jw/solr/SolrPool.java

@ -0,0 +1,53 @@
package com.yihu.jw.solr;
import com.yihu.jw.solr.config.SolrConfig;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.config.ConfigurableBeanFactory;
import org.springframework.context.annotation.Scope;
import org.springframework.data.solr.server.support.HttpSolrClientFactory;
import org.springframework.stereotype.Service;
import org.xml.sax.SAXException;
import javax.xml.parsers.ParserConfigurationException;
import java.io.IOException;
import java.util.Optional;
/**
 * Solr connection pool
 * @author hzp
 * @version 1.0
 * @created 2016.04.26
 */
@Service
@Scope(ConfigurableBeanFactory.SCOPE_SINGLETON)
public class SolrPool {
    @Autowired
    private SolrConfig solrConfig;
    private static volatile HttpSolrClientFactory factory;
    protected HttpSolrClientFactory getFactory() throws ParserConfigurationException, IOException, SAXException {
        if (factory != null) {
            return factory;
        }
        synchronized (HttpSolrClientFactory.class) {
            if (null == factory) {
                Optional<String> zkChroot = Optional.of("/");
                CloudSolrClient client = new CloudSolrClient.Builder(solrConfig.getZkHosts(),zkChroot).build();
                factory = new HttpSolrClientFactory(client);
            }
        }
        return factory;
    }
    public SolrClient getConnection(String core) throws Exception{
        if (factory != null) {
            return factory.getSolrClient();
        }
        return getFactory().getSolrClient();
    }
}

+ 668 - 0
common/commons-data-solr/src/main/java/com/yihu/jw/solr/SolrUtil.java

@ -0,0 +1,668 @@
package com.yihu.jw.solr;
import org.apache.commons.lang3.StringUtils;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.response.*;
import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.params.FacetParams;
import org.apache.solr.common.params.GroupParams;
import org.apache.solr.common.util.NamedList;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.config.ConfigurableBeanFactory;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Service;
import java.util.*;
/**
 * Low-level Solr query helper
 *
 * @author hzp
 * @version 1.0
 * @created 2016.04.26
 */
@Service
@Scope(ConfigurableBeanFactory.SCOPE_PROTOTYPE)
public class SolrUtil {
    private static final Logger logger = LoggerFactory.getLogger(SolrUtil.class);
    private final static String ASC = "asc";
    @Autowired
    private SolrPool pool;
    /**
     * Simple query
     */
    public SolrDocumentList query(String core, String q, Map<String, String> sort, long start, long rows) throws Exception {
        return query(core, q, null, sort, start, rows, null);
    }
    /**
     * Simple query returning only the specified fields
     */
    public SolrDocumentList queryReturnFieldList(String core, String q, String fq, Map<String, String> sort, long start, long rows, String... fields) throws Exception {
        return query(core, q, fq, sort, start, rows, fields);
    }
    /**
     * Solr query
     * @param core
     * @param q      query string
     * @param fq     filter query
     * @param sort   sort fields
     * @param start  start row
     * @param rows   number of rows
     * @param fields fields to return
     * @return
     */
    public SolrDocumentList query(String core, String q, String fq, Map<String, String> sort, long start, long rows, String... fields) throws Exception {
        SolrClient conn = pool.getConnection(core);
        SolrQuery query = new SolrQuery();
        // query condition
        if (StringUtils.isNotEmpty(q)) {
            query.setQuery(q);
        } else {
            query.setQuery("*:*");
        }
        // filter condition
        if (StringUtils.isNotEmpty(fq)) {
            query.setFilterQueries(fq);
        }
        query.setFields(fields);
        // start row
        query.setStart(Integer.parseInt(String.valueOf(start)));
        // row count
        query.setRows(Integer.parseInt(String.valueOf(rows)));
        // sorting
        if (sort != null) {
            for (Object co : sort.keySet()) {
                if (ASC.equals(sort.get(co).toLowerCase())) {
                    query.addSort(co.toString(), SolrQuery.ORDER.asc);
                } else {
                    query.addSort(co.toString(), SolrQuery.ORDER.desc);
                }
            }
        }
        QueryResponse rsp = conn.query(query);
        SolrDocumentList docs = rsp.getResults();
        return docs;
    }
    /**
     * Solr query with highlighting
     * @param core
     * @param q query string
     * @param fq filter query
     * @param sort sort fields
     * @param start start row
     * @param rows number of rows
     * @param hl field to highlight
     * @param fields fields to return
     * @return
     * @throws Exception
     */
    public QueryResponse highlight(String core, String q, String fq, Map<String, String> sort, long start, long rows, String hl, String... fields) throws Exception {
        SolrClient conn = pool.getConnection(core);
        SolrQuery query = new SolrQuery();
        // query condition
        if (StringUtils.isNotEmpty(q)) {
            query.setQuery(q);
        } else {
            query.setQuery("*:*");
        }
        // filter condition
        if (StringUtils.isNotEmpty(fq)) {
            query.setFilterQueries(fq);
        }
        query.setFields(fields);
        // start row
        query.setStart(Integer.parseInt(String.valueOf(start)));
        // row count
        query.setRows(Integer.parseInt(String.valueOf(rows)));
        // sorting
        if (sort != null) {
            for (Object co : sort.keySet()) {
                if (ASC.equals(sort.get(co).toLowerCase())) {
                    query.addSort(co.toString(), SolrQuery.ORDER.asc);
                } else {
                    query.addSort(co.toString(), SolrQuery.ORDER.desc);
                }
            }
        }
        // highlighting
        if (StringUtils.isNotEmpty(hl)) {
            query.setHighlight(true);
            query.addHighlightField(hl);
            query.setHighlightSimplePre("<em>");
            query.setHighlightSimplePost("</em>");
            query.setHighlightFragsize(3000);
        }
        QueryResponse rsp = conn.query(query);
        return rsp;
    }
    /**
     * Solr distinct query on a single field
     *
     * @param q          optional, query string
     * @param fq         optional, filter query
     * @param sort       optional, sort fields
     * @param start      required, start row
     * @param rows       required, number of rows
     * @param fields     required, fields to return
     * @param groupField required, field to group (deduplicate) on; deduplicates on a single field
     * @param groupSort  optional, intra-group sort, e.g. "event_date asc"
     * @return
     */
    public List<Group> queryDistinctOneField(String core, String q, String fq, Map<String, String> sort, int start, int rows,
                                                  String[] fields, String groupField, String groupSort) throws Exception {
        SolrClient conn = pool.getConnection(core);
        SolrQuery query = new SolrQuery();
        if (StringUtils.isNotEmpty(q)) {
            query.setQuery(q);
        } else {
            query.setQuery("*:*");
        }
        if (StringUtils.isNotEmpty(fq)) {
            query.setFilterQueries(fq);
        }
        if (sort != null) {
            for (Object co : sort.keySet()) {
                if (ASC.equals(sort.get(co).toLowerCase())) {
                    query.addSort(co.toString(), SolrQuery.ORDER.asc);
                } else {
                    query.addSort(co.toString(), SolrQuery.ORDER.desc);
                }
            }
        }
        query.setFields(fields);
        query.setStart(start);
        // the rows parameter is not applied here; a large cap is used so all groups are returned
        query.setRows(10000000);
        query.setParam(GroupParams.GROUP, true);
        query.setParam(GroupParams.GROUP_FORMAT, "grouped");
        query.setParam(GroupParams.GROUP_FIELD, groupField);
        if (StringUtils.isNotEmpty(groupSort)) {
            query.setParam(GroupParams.GROUP_SORT, groupSort);
        }
        List<Group> groups = new ArrayList<>();
        QueryResponse response = conn.query(query);
        GroupResponse groupResponse = response.getGroupResponse();
        if (groupResponse != null) {
            List<GroupCommand> groupList = groupResponse.getValues();
            for (GroupCommand groupCommand : groupList) {
                groups = groupCommand.getValues();
            }
        }
        return groups;
    }
    /**
     * Solr distinct query on a single field, returning the first document of each group
     *
     * @param q          optional, query string
     * @param fq         optional, filter query
     * @param sort       optional, sort fields
     * @param start      required, start row
     * @param rows       required, number of rows
     * @param fields     required, fields to return
     * @param groupField required, field to group (deduplicate) on; deduplicates on a single field
     * @param groupSort  optional, intra-group sort, e.g. "event_date asc"
     * @return
     */
    public SolrDocumentList queryDistinctOneFieldForDocList(String core, String q, String fq, Map<String, String> sort, int start, int rows,
                                             String[] fields, String groupField, String groupSort) throws Exception {
        SolrDocumentList solrDocumentList = new SolrDocumentList();
        SolrClient conn = pool.getConnection(core);
        SolrQuery query = new SolrQuery();
        // the original body never applied its parameters, so the grouped query was
        // always empty; build it the same way as queryDistinctOneField
        query.setQuery(StringUtils.isNotEmpty(q) ? q : "*:*");
        if (StringUtils.isNotEmpty(fq)) {
            query.setFilterQueries(fq);
        }
        if (sort != null) {
            for (String co : sort.keySet()) {
                query.addSort(co, ASC.equals(sort.get(co).toLowerCase()) ? SolrQuery.ORDER.asc : SolrQuery.ORDER.desc);
            }
        }
        query.setFields(fields);
        query.setStart(start);
        query.setRows(rows);
        query.setParam(GroupParams.GROUP, true);
        query.setParam(GroupParams.GROUP_FORMAT, "grouped");
        query.setParam(GroupParams.GROUP_FIELD, groupField);
        if (StringUtils.isNotEmpty(groupSort)) {
            query.setParam(GroupParams.GROUP_SORT, groupSort);
        }
        QueryResponse response = conn.query(query);
        GroupResponse groupResponse = response.getGroupResponse();
        if (groupResponse != null) {
            List<GroupCommand> groupList = groupResponse.getValues();
            for (GroupCommand groupCommand : groupList) {
                List<Group> groups = groupCommand.getValues();
                for (Group group : groups) {
                    if (group.getResult().size() > 0) {
                        solrDocumentList.add(group.getResult().get(0));
                    }
                }
            }
        }
        return solrDocumentList;
    }
    /**
     * Solr query with multiple filter conditions
     *
     * @param q     query string
     * @param fq    filter queries (multiple)
     * @param sort  sort fields
     * @param start start row
     * @param rows  number of rows
     * @return
     */
    public SolrDocumentList queryByfqs(String core, String q, String[] fq, Map<String, String> sort, long start, long rows) throws Exception {
        SolrClient conn = pool.getConnection(core);
        SolrQuery query = new SolrQuery();
        if (null != q && !q.equals("")) {
            query.setQuery(q);
        } else {
            query.setQuery("*:*");
        }
        if (null != fq && fq.length > 0) {
            query.setFilterQueries(fq);
        }
        // start row
        query.setStart(Integer.parseInt(String.valueOf(start)));
        // row count
        query.setRows(Integer.parseInt(String.valueOf(rows)));
        // sorting
        if (sort != null) {
            for (Object co : sort.keySet()) {
                // reference equality (==) on strings was meaningless here; equals() suffices
                if (ASC.equals(sort.get(co).toLowerCase())) {
                    query.addSort(co.toString(), SolrQuery.ORDER.asc);
                } else {
                    query.addSort(co.toString(), SolrQuery.ORDER.desc);
                }
            }
        }
        QueryResponse rsp = conn.query(query);
        return rsp.getResults();
    }
    /**
     * Count query
     */
    public long count(String core, String q) throws Exception {
        return count(core, q, null);
    }
    /**
     * Count query
     */
    public long count(String core, String q, String fq) throws Exception {
        SolrClient conn = pool.getConnection(core);
        SolrQuery query = new SolrQuery();
        // query condition
        if (null != q && !q.equals("")) {
            query.setQuery(q);
        } else {
            query.setQuery("*:*");
        }
        if (null != fq && !fq.equals("")) {
            query.setFilterQueries(fq);
        }
        query.setStart(0);
        query.setRows(0);
        QueryResponse rsp = conn.query(query);
        long count = rsp.getResults().getNumFound();
        return count;
    }
    /**
     * Single-field facet count (start is 0-based)
     *
     * @param core       core name
     * @param q          query condition
     * @param fq         filter condition
     * @param groupField facet (group) field name
     * @param start      start offset
     * @param limit      max number of buckets; negative for no limit
     */
    public Map<String, Long> groupCount(String core, String q, String fq, String groupField, int start, int limit) throws Exception {
        SolrClient conn = pool.getConnection(core);
        SolrQuery query = new SolrQuery();
        if (null != q && !q.equals("")) {
            query.setQuery(q);
        } else {
            query.setQuery("*:*");
        }
        if (null != fq && !fq.equals("")) {
            query.setFilterQueries(fq);
        }
        // enable faceting (facet=on)
        query.setFacet(true);
        query.setRows(0);
        query.addFacetField(groupField);
        // limit the number of facet values returned per call
        query.setFacetLimit(limit);
        query.set(FacetParams.FACET_OFFSET, start);
        // do not count null (missing) values
        query.setFacetMissing(false);
        // minimum count for a bucket to be returned; 0 also returns empty buckets
        query.setFacetMinCount(0);
        QueryResponse rsp = conn.query(query);
        List<FacetField.Count> countList = rsp.getFacetField(groupField).getValues();
        Map<String, Long> rmap = new HashMap<>();
        for (FacetField.Count count : countList) {
            if (count.getCount() > 0)
                rmap.put(count.getName(), (long) count.getCount());
        }
        return rmap;
    }
    /**
     * Single-field facet count (start is 0-based), keeping only buckets with count >= groupCount
     * (note: despite the "Lte" in the method name, the filter is "greater than or equal")
     *
     * @param core       core name
     * @param q          query condition
     * @param fq         filter condition
     * @param groupField facet (group) field name
     * @param start      start offset
     * @param limit      max number of buckets; negative for no limit
     * @param groupCount minimum aggregate count
     */
    public Map<String, Long> groupCountLte(String core, String q, String fq, String groupField, int start, int limit, int groupCount) throws Exception {
        SolrClient conn = pool.getConnection(core);
        SolrQuery query = new SolrQuery();
        if (null != q && !q.equals("")) {
            query.setQuery(q);
        } else {
            query.setQuery("*:*");
        }
        if (null != fq && !fq.equals("")) {
            query.setFilterQueries(fq);
        }
        // enable faceting (facet=on)
        query.setFacet(true);
        query.setRows(0);
        query.addFacetField(groupField);
        // limit the number of facet values returned per call
        query.setFacetLimit(limit);
        query.set(FacetParams.FACET_OFFSET, start);
        // do not count null (missing) values
        query.setFacetMissing(false);
        // minimum count for a bucket to be returned; 0 also returns empty buckets
        query.setFacetMinCount(0);
        QueryResponse rsp = conn.query(query);
        List<FacetField.Count> countList = rsp.getFacetField(groupField).getValues();
        Map<String, Long> rmap = new HashMap<>();
        for (FacetField.Count count : countList) {
            if (count.getCount() >= groupCount) {
                rmap.put(count.getName(), (long) count.getCount());
            }
        }
        return rmap;
    }
    /**
     * Facet counts for multiple fields (each computed independently)
     *
     * @param core        core name
     * @param q           query condition
     * @param fq          filter condition
     * @param groupFields facet (group) field names
     */
    public List<FacetField> groupCount(String core, String q, String fq, String[] groupFields) throws Exception {
        SolrClient conn = pool.getConnection(core);
        SolrQuery query = new SolrQuery();
        if (null != q && !q.equals("")) {
            query.setQuery(q);
        } else {
            query.setQuery("*:*");
        }
        if (null != fq && !fq.equals("")) {
            query.setFilterQueries(fq);
        }
        // enable faceting (facet=on)
        query.setFacet(true);
        query.setRows(0);
        query.addFacetField(groupFields);
        // no limit on the number of facet values returned (-1)
        query.setFacetLimit(-1);
        query.set(FacetParams.FACET_OFFSET, 0);
        // do not count null (missing) values
        query.setFacetMissing(false);
        // minimum count for a bucket to be returned; 0 also returns empty buckets
        query.setFacetMinCount(0);
        QueryResponse rsp = conn.query(query);
        return rsp.getFacetFields();
    }
    /**
     * Pivot facet counts across multiple fields (correlated)
     *
     * @param core        core name
     * @param q           query condition
     * @param fq          filter condition
     * @param groupFields facet (group) field names
     * @param start       start offset
     * @param limit       max number of buckets; negative for no limit
     */
    public List<PivotField> groupCountMult(String core, String q, String fq, String groupFields, int start, int limit) throws Exception {
        SolrClient conn = pool.getConnection(core);
        SolrQuery query = new SolrQuery();
        if (null != q && !q.equals("")) {
            query.setQuery(q);
        } else {
            query.setQuery("*:*");
        }
        if (null != fq && !fq.equals("")) {
            query.setFilterQueries(fq);
        }
        // enable faceting (facet=on)
        query.setFacet(true);
        query.setRows(0);
        query.addFacetPivotField(groupFields);
        query.set(FacetParams.FACET_OFFSET, start);
        // limit the number of facet values returned per call
        query.setFacetLimit(limit);
        // do not count null (missing) values
        query.setFacetMissing(false);
        // minimum count for a bucket to be returned; 0 also returns empty buckets
        query.setFacetMinCount(0);
        QueryResponse rsp = conn.query(query);
        NamedList<List<PivotField>> namedList = rsp.getFacetPivot();
        if (namedList != null && namedList.size() > 0) {
            return namedList.getVal(0);
        } else {
            return null;
        }
    }
    /**
     * Numeric statistics on a field
     *
     * @param core       core name
     * @param q          query condition
     * @param statsField field to compute statistics on
     * @return
     */
    public FieldStatsInfo getStats(String core, String q, String fq, String statsField) throws Exception {
        SolrClient conn = pool.getConnection(core);
        SolrQuery query = new SolrQuery();
        if (null != q && !q.equals("")) {
            query.setQuery(q);
        } else {
            query.setQuery("*:*");
        }
        if (null != fq && !fq.equals("")) {
            query.setFilterQueries(fq);
        }
        query.addGetFieldStatistics(statsField);
        query.setRows(0);
        QueryResponse rsp = conn.query(query);
        Map<String, FieldStatsInfo> stats = rsp.getFieldStatsInfo();
        if (stats != null && stats.size() > 0) {
            return stats.get(statsField);
        }
        return null;
    }
    /**
     * Numeric statistics on a field, grouped by another field
     *
     * @param core       core name
     * @param q          query condition
     * @param statsField field to compute statistics on
     * @param groupField field to group by
     * @return
     */
    public List<FieldStatsInfo> getStats(String core, String q, String fq, String statsField, String groupField) throws Exception {
        SolrClient conn = pool.getConnection(core);
        SolrQuery query = new SolrQuery();
        if (null != q && !q.equals("")) {
            query.setQuery(q);
        } else {
            query.setQuery("*:*");
        }
        if (null != fq && !fq.equals("")) {
            query.setFilterQueries(fq);
        }
        query.addGetFieldStatistics(statsField);
        query.addStatsFieldFacets(statsField, groupField);
        query.setRows(0);
        QueryResponse rsp = conn.query(query);
        Map<String, FieldStatsInfo> stats = rsp.getFieldStatsInfo();
        if (stats != null && stats.size() > 0) {
            Map<String, List<FieldStatsInfo>> map = stats.get(statsField).getFacets();
            if (map != null) {
                return map.get(groupField);
            }
        }
        return null;
    }
    /**
     * Facet query count
     *
     * @param core       core name
     * @param facetQuery facet query condition
     */
    public Map<String, Integer> getFacetQuery(String core, String facetQuery) throws Exception {
        SolrClient conn = pool.getConnection(core);
        SolrQuery query = new SolrQuery();
        query.setQuery("*:*");
        query.setFacet(true);
        query.addFacetQuery(facetQuery);
        QueryResponse resp = conn.query(query);
        return resp.getFacetQuery();
    }
    /**
     * Single-field facet count
     *
     * @param core
     * @param facetField
     * @param fq
     * @param minCount
     * @param start
     * @param limit
     * @param missing
     */
    public FacetField getFacetField(String core, String facetField, String fq, int minCount, int start, int limit, boolean missing) throws Exception {
        SolrClient conn = pool.getConnection(core);
        SolrQuery query = new SolrQuery();
        query.setQuery("*:*");
        if (!StringUtils.isBlank(fq)) {
            query.setFilterQueries(fq);
        }
        query.setStart(start)
                .setRows(0)
                .setFacet(true)
                .addFacetField(facetField)
                .setFacetMinCount(minCount)
                .setFacetLimit(limit)
                .setFacetMissing(missing);
        QueryResponse resp = conn.query(query);
        return resp.getFacetField(facetField);
    }
    /**
     * Date-range facet count
     */
    public List<RangeFacet> getFacetDateRange(String core, String dateField, Date startTime, Date endTime, String gap, String fq) throws Exception {
        SolrClient conn = pool.getConnection(core);
        SolrQuery query = new SolrQuery();
        query.setQuery("*:*");
        if (!StringUtils.isBlank(fq)) {
            query.setFilterQueries(fq);
        }
        query.setRows(0)
                .setFacet(true)
                .addDateRangeFacet(dateField, startTime, endTime, gap);
        QueryResponse resp = conn.query(query);
        return resp.getFacetRanges();
    }
    /**
     * Date-range facet count
     */
    public List<RangeFacet> getFacetDateRange(String core, String field, String start, String end, String gap, String fq, String q) throws Exception {
        SolrClient conn = pool.getConnection(core);
        SolrQuery query = new SolrQuery();
        if (StringUtils.isEmpty(q)) {
            query.setQuery("*:*");
        } else {
            query.setQuery(q);
        }
        if (!StringUtils.isEmpty(fq)) {
            query.setFilterQueries(fq);
        }
        query.setRows(0)
                .setFacet(true)
                .setFacetMissing(false)
                .add("facet.range", new String[]{field})
                .add(String.format(Locale.ROOT, "f.%s.%s", new Object[]{field, "facet.range.start"}), new String[]{start})
                .add(String.format(Locale.ROOT, "f.%s.%s", new Object[]{field, "facet.range.end"}), new String[]{end})
                .add(String.format(Locale.ROOT, "f.%s.%s", new Object[]{field, "facet.range.gap"}), new String[]{gap});
        QueryResponse resp = conn.query(query);
        return resp.getFacetRanges();
    }
    /**
     * Numeric-range facet count
     */
    public List<RangeFacet> getFacetNumRange(String core, String field, int start, int end, int gap, String fq) throws Exception {
        SolrClient conn = pool.getConnection(core);
        SolrQuery query = new SolrQuery();
        query.setQuery("*:*");
        if (!StringUtils.isBlank(fq)) {
            query.setFilterQueries(fq);
        }
        query.setRows(0)
                .setFacet(true)
                .addNumericRangeFacet(field, start, end, gap);
        QueryResponse resp = conn.query(query);
        return resp.getFacetRanges();
    }
}
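
And a sketch of typical SolrUtil calls (editor's illustration; core and field names are hypothetical):

@Autowired
private SolrUtil solrUtil;

public void queryExample() throws Exception {
    Map<String, String> sort = new HashMap<>();
    sort.put("event_date", "asc");
    // page through the first 20 matching documents
    SolrDocumentList docs = solrUtil.query("HealthProfile", "org_code:ORG01", sort, 0, 20);
    // facet count per diagnosis code
    Map<String, Long> counts = solrUtil.groupCount("HealthProfile", "*:*", null, "diagnosis", 0, 100);
}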

+ 40 - 0
common/commons-data-solr/src/main/java/com/yihu/jw/solr/config/SolrConfig.java

@ -0,0 +1,40 @@
package com.yihu.jw.solr.config;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Configuration;
import javax.annotation.PostConstruct;
import java.util.ArrayList;
import java.util.List;
/**
 * Created by progr1mmer on 2018/7/26.
 */
@Configuration
@ConfigurationProperties(prefix = "spring.data.solr")
public class SolrConfig {
    private String zkHost;
    public List<String> getZkHosts(){
        List<String> zkHosts = new ArrayList<>();
        zkHosts.add(zkHost);
        return zkHosts;
    }
    public String getZkHost() {
        return zkHost;
    }
    public void setZkHost(String zkHost) {
        this.zkHost = zkHost;
    }
    @PostConstruct
    private void configInfo() {
        StringBuilder info = new StringBuilder("{");
        info.append("\n  spring.data.solr.zk-host = " + zkHost);
        info.append("\n}");
        System.out.println("Solr.configInfo : " + info.toString());
    }
}
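A hedged sketch of how this configuration might feed a SolrCloud client. The pool that consumes it is not shown in this diff; the builder call below assumes the SolrJ 7.x API (earlier SolrJ versions use new CloudSolrClient.Builder().withZkHost(...) instead), and the collection name is illustrative.

import java.util.Optional;
import org.apache.solr.client.solrj.impl.CloudSolrClient;

// Build a CloudSolrClient from the spring.data.solr.zk-host property.
CloudSolrClient client = new CloudSolrClient.Builder(
        solrConfig.getZkHosts(), Optional.empty()).build();
client.setDefaultCollection("HealthProfile"); // assumed collection name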

+ 106 - 27
common/commons-profile-core/src/main/java/com/yihu/jw/profile/family/ResourceCells.java

@ -1,5 +1,6 @@
package com.yihu.jw.profile.family;
import com.yihu.jw.profile.ProfileType;
import java.util.ArrayList;
@ -7,38 +8,113 @@ import java.util.Arrays;
import java.util.List;
/**
 * Created by progr1mmer on 2018/6/9.
 * @author progr1mmer
 * @date Created on 2018/6/9.
 */
public class ResourceCells {
    public static final String ROWKEY = "rowkey";
    //Basic
    public static final String PROFILE_ID = "profile_id"; //Profile master index
    public static final String PROFILE_TYPE = "profile_type"; //Profile type
    public static final String EVENT_NO = "event_no"; //Event number
    public static final String EVENT_DATE = "event_date"; //Event date
    public static final String EVENT_TYPE = "event_type"; //Event type
    public static final String CARD_ID = "card_id"; //Visit card number
    public static final String CARD_TYPE = "card_type"; //Visit card type
    public static final String PATIENT_ID = "patient_id"; //Patient ID
    public static final String PATIENT_NAME = "patient_name"; //Patient name
    public static final String DEMOGRAPHIC_ID = "demographic_id"; //National ID number
    public static final String ORG_CODE = "org_code"; //Organization code
    public static final String ORG_NAME = "org_name"; //Organization name
    public static final String ORG_AREA = "org_area"; //Organization area
    public static final String CDA_VERSION = "cda_version"; //CDA version
    public static final String CREATE_DATE = "create_date"; //Creation date
    public static final String DIAGNOSIS = "diagnosis"; //ICD-10 diagnosis code
    public static final String DIAGNOSIS_NAME = "diagnosis_name"; //ICD-10 diagnosis name
    public static final String HEALTH_PROBLEM = "health_problem"; //Health problem diagnosis code
    public static final String HEALTH_PROBLEM_NAME = "health_problem_name"; //Health problem diagnosis name
    public static final String DEPT_CODE = "dept_code"; //Department code
    public static final String SUB_ROWKEYS = "sub_rowkeys"; //Sub-table row keys
    public static final String PATIENT_AGE = "patient_age"; //Patient age at visit
    public static final String PATIENT_SEX = "patient_sex"; //Patient sex
    /**
     * Profile master index
     */
    public static final String PROFILE_ID = "profile_id";
    /**
     * Profile type
     */
    public static final String PROFILE_TYPE = "profile_type";
    /**
     * Event number
     */
    public static final String EVENT_NO = "event_no";
    /**
     * Event date
     */
    public static final String EVENT_DATE = "event_date";
    /**
     * Event type
     */
    public static final String EVENT_TYPE = "event_type";
    /**
     * Visit card number
     */
    public static final String CARD_ID = "card_id";
    /**
     * Visit card type
     */
    public static final String CARD_TYPE = "card_type";
    /**
     * Patient ID
     */
    public static final String PATIENT_ID = "patient_id";
    /**
     * Patient name
     */
    public static final String PATIENT_NAME = "patient_name";
    /**
     * National ID number
     */
    public static final String DEMOGRAPHIC_ID = "demographic_id";
    /**
     * Organization code
     */
    public static final String ORG_CODE = "org_code";
    /**
     * Organization name
     */
    public static final String ORG_NAME = "org_name";
    /**
     * Organization area
     */
    public static final String ORG_AREA = "org_area";
    /**
     * CDA version
     */
    public static final String CDA_VERSION = "cda_version";
    /**
     * Creation date
     */
    public static final String CREATE_DATE = "create_date";
    /**
     * ICD-10 diagnosis code
     */
    public static final String DIAGNOSIS = "diagnosis";
    /**
     * ICD-10 diagnosis name
     */
    public static final String DIAGNOSIS_NAME = "diagnosis_name";
    /**
     * Health problem diagnosis code
     */
    public static final String HEALTH_PROBLEM = "health_problem";
    /**
     * Health problem diagnosis name
     */
    public static final String HEALTH_PROBLEM_NAME = "health_problem_name";
    /**
     * Department code
     */
    public static final String DEPT_CODE = "dept_code";
    /**
     * Sub-table row keys
     */
    public static final String SUB_ROWKEYS = "sub_rowkeys";
    /**
     * Patient age at visit
     */
    public static final String PATIENT_AGE = "patient_age";
    /**
     * Patient sex
     */
    public static final String PATIENT_SEX = "patient_sex";
    /**
     * Full-text search field
     */
    public static final String SEARCH_FIELD = "search_field";
    //RawFiles
    public static final String CDA_DOCUMENT_ID = "cda_document_id";
    public static final String CDA_DOCUMENT_NAME = "cda_document_name";
    public static final String FILE_LIST = "file_list";
@ -68,7 +144,8 @@ public class ResourceCells {
                        DIAGNOSIS,
                        DIAGNOSIS_NAME,
                        HEALTH_PROBLEM,
                        HEALTH_PROBLEM_NAME
                        HEALTH_PROBLEM_NAME,
                        SEARCH_FIELD
                ));
            case File:
                return new ArrayList<>(Arrays.asList(
@ -124,7 +201,8 @@ public class ResourceCells {
                        PATIENT_AGE,
                        PATIENT_SEX,
                        PATIENT_ID,
                        DEPT_CODE
                        DEPT_CODE,
                        CREATE_DATE
                ));
            case File:
                return new ArrayList<>(Arrays.asList(
@ -133,7 +211,8 @@ public class ResourceCells {
                        ORG_AREA,
                        EVENT_TYPE,
                        EVENT_NO,
                        EVENT_DATE
                        EVENT_DATE,
                        CREATE_DATE
                ));
            case Link:
                return new ArrayList<>(Arrays.asList(

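The ResourceCells hunk above is truncated by the diff viewer mid-method. Below is a hedged sketch of how these cell constants are typically consumed when querying; the SolrQuery construction is illustrative, not taken from this commit.

import org.apache.solr.client.solrj.SolrQuery;

SolrQuery query = new SolrQuery("*:*");
// Request only the cells needed for a visit-level listing; SEARCH_FIELD is the
// full-text column introduced by this commit.
query.setFields(ResourceCells.PROFILE_ID, ResourceCells.EVENT_DATE,
        ResourceCells.DIAGNOSIS_NAME, ResourceCells.SEARCH_FIELD);
query.addFilterQuery(ResourceCells.EVENT_TYPE + ":0"); // assumed filter value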
+ 47 - 0
common/commons-rest-model/pom.xml

@ -0,0 +1,47 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <groupId>com.yihu.jw</groupId>
        <artifactId>wlyy-parent-pom</artifactId>
        <version>2.4.0</version>
        <relativePath>../../wlyy-lib-parent-pom/pom.xml</relativePath>
    </parent>
    <artifactId>commons-rest-model</artifactId>
    <packaging>jar</packaging>
    <dependencies>
        <dependency>
            <groupId>com.yihu.jw</groupId>
            <artifactId>commons-util</artifactId>
            <version>2.4.0</version>
        </dependency>
        <dependency>
            <groupId>com.yihu.jw</groupId>
            <artifactId>commons-ehr-constants</artifactId>
            <version>2.4.0</version>
        </dependency>
        <dependency>
            <groupId>com.yihu.jw</groupId>
            <artifactId>common-rest-model</artifactId>
        </dependency>
        <dependency>
            <groupId>com.yihu.jw</groupId>
            <artifactId>commons-profile-core</artifactId>
            <version>2.4.0</version>
        </dependency>
    </dependencies>
    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
            </plugin>
        </plugins>
    </build>
</project>

+ 100 - 0
common/commons-rest-model/src/main/java/com/yihu/jw/ehr/model/adaption/MAdapterDataSet.java

@ -0,0 +1,100 @@
package com.yihu.jw.ehr.model.adaption;
/**
 * @author lincl
 * @version 1.0
 * @created 2016.2.3
 */
public class MAdapterDataSet {
	private Long id;
	private Long adapterPlanId;
	private Long dataSetId;
	private Long metaDataId;
	private String dataType;
	private Long orgDataSetSeq;
	private Long orgMetaDataSeq;
	private String description;
	private Long stdDict;
	public Long getId() {
		return id;
	}
	public void setId(Long id) {
		this.id = id;
	}
	public Long getStdDict() {
		return stdDict;
	}
	public void setStdDict(Long stdDict) {
		this.stdDict = stdDict;
	}
	public String getDescription() {
		return description;
	}
	public void setDescription(String description) {
		this.description = description;
	}
	public Long getOrgMetaDataSeq() {
		return orgMetaDataSeq;
	}
	public void setOrgMetaDataSeq(Long orgMetaDataSeq) {
		this.orgMetaDataSeq = orgMetaDataSeq;
	}
	public Long getOrgDataSetSeq() {
		return orgDataSetSeq;
	}
	public void setOrgDataSetSeq(Long orgDataSetSeq) {
		this.orgDataSetSeq = orgDataSetSeq;
	}
	public String getDataType() {
		return dataType;
	}
	public void setDataType(String dataType) {
		this.dataType = dataType;
	}
	public Long getMetaDataId() {
		return metaDataId;
	}
	public void setMetaDataId(Long metaDataId) {
		this.metaDataId = metaDataId;
	}
	public Long getDataSetId() {
		return dataSetId;
	}
	public void setDataSetId(Long dataSetId) {
		this.dataSetId = dataSetId;
	}
	public Long getAdapterPlanId() {
		return adapterPlanId;
	}
	public void setAdapterPlanId(Long adapterPlanId) {
		this.adapterPlanId = adapterPlanId;
	}
}

+ 177 - 0
common/commons-rest-model/src/main/java/com/yihu/jw/ehr/model/adaption/MAdapterDataVo.java

@ -0,0 +1,177 @@
package com.yihu.jw.ehr.model.adaption;
/**
 * @author lincl
 * @version 1.0
 * @created 2016.2.3
 */
public class MAdapterDataVo {
	private Long id;
	private Long adapterPlanId;
	private Long dataSetId;
	private String dataSetCode;
	private String dataSetName;
	private Long metaDataId;
	private String metaDataCode;
	private String metaDataName;
	private String dataType;
	private String dataTypeName;
	private Long orgDataSetSeq;
	private String orgDataSetCode;
	private String orgDataSetName;
	private Long orgMetaDataSeq;
	private String orgMetaDataCode;
	private String orgMetaDataName;
	private String description;
	public MAdapterDataVo() {
	}
	public Long getId() {
		return id;
	}
	public void setId(Long id) {
		this.id = id;
	}
	public Long getAdapterPlanId() {
		return adapterPlanId;
	}
	public void setAdapterPlanId(Long adapterPlanId) {
		this.adapterPlanId = adapterPlanId;
	}
	public Long getDataSetId() {
		return dataSetId;
	}
	public void setDataSetId(Long dataSetId) {
		this.dataSetId = dataSetId;
	}
	public Long getMetaDataId() {
		return metaDataId;
	}
	public void setMetaDataId(Long metaDataId) {
		this.metaDataId = metaDataId;
	}
	public String getDescription() {
		return description;
	}
	public void setDescription(String description) {
		this.description = description;
	}
	public String getDataSetCode() {
		return dataSetCode;
	}
	public void setDataSetCode(String dataSetCode) {
		this.dataSetCode = dataSetCode;
	}
	public String getDataSetName() {
		return dataSetName;
	}
	public void setDataSetName(String dataSetName) {
		this.dataSetName = dataSetName;
	}
	public String getMetaDataCode() {
		return metaDataCode;
	}
	public void setMetaDataCode(String metaDataCode) {
		this.metaDataCode = metaDataCode;
	}
	public String getMetaDataName() {
		return metaDataName;
	}
	public void setMetaDataName(String metaDataName) {
		this.metaDataName = metaDataName;
	}
	public String getDataType() {
		return dataType;
	}
	public void setDataType(String dataType) {
		this.dataType = dataType;
	}
	public String getDataTypeName() {
		return dataTypeName;
	}
	public void setDataTypeName(String dataTypeName) {
		this.dataTypeName = dataTypeName;
	}
	public Long getOrgDataSetSeq() {
		return orgDataSetSeq;
	}
	public void setOrgDataSetSeq(Long orgDataSetSeq) {
		this.orgDataSetSeq = orgDataSetSeq;
	}
	public String getOrgDataSetCode() {
		return orgDataSetCode;
	}
	public void setOrgDataSetCode(String orgDataSetCode) {
		this.orgDataSetCode = orgDataSetCode;
	}
	public String getOrgDataSetName() {
		return orgDataSetName;
	}
	public void setOrgDataSetName(String orgDataSetName) {
		this.orgDataSetName = orgDataSetName;
	}
	public Long getOrgMetaDataSeq() {
		return orgMetaDataSeq;
	}
	public void setOrgMetaDataSeq(Long orgMetaDataSeq) {
		this.orgMetaDataSeq = orgMetaDataSeq;
	}
	public String getOrgMetaDataCode() {
		return orgMetaDataCode;
	}
	public void setOrgMetaDataCode(String orgMetaDataCode) {
		this.orgMetaDataCode = orgMetaDataCode;
	}
	public String getOrgMetaDataName() {
		return orgMetaDataName;
	}
	public void setOrgMetaDataName(String orgMetaDataName) {
		this.orgMetaDataName = orgMetaDataName;
	}
}

+ 92 - 0
common/commons-rest-model/src/main/java/com/yihu/jw/ehr/model/adaption/MAdapterDict.java

@ -0,0 +1,92 @@
package com.yihu.jw.ehr.model.adaption;
/**
 *
 * @author lincl
 * @version 1.0
 * @created 2016.2.3
 */
public class MAdapterDict {
	private Long id;
	private Long adapterPlanId;
	/**
	 * Parent standard dictionary
	 */
	private Long dictId;
	/**
	 * Standard dictionary entry
	 */
	private Long dictEntryId;
	/**
	 * Parent organization dictionary
	 */
	private Long orgDictSeq;
	/**
	 * Organization dictionary entry
	 */
	private Long orgDictEntrySeq;
	/**
	 * Description
	 */
	private String description;
	public Long getId() {
		return id;
	}
	public void setId(Long id) {
		this.id = id;
	}
	public Long getAdapterPlanId() {
		return adapterPlanId;
	}
	public void setAdapterPlanId(Long adapterPlanId) {
		this.adapterPlanId = adapterPlanId;
	}
	public Long getDictId() {
		return dictId;
	}
	public void setDictId(Long dictId) {
		this.dictId = dictId;
	}
	public Long getDictEntryId() {
		return dictEntryId;
	}
	public void setDictEntryId(Long dictEntryId) {
		this.dictEntryId = dictEntryId;
	}
	public Long getOrgDictSeq() {
		return orgDictSeq;
	}
	public void setOrgDictSeq(Long orgDictSeq) {
		this.orgDictSeq = orgDictSeq;
	}
	public Long getOrgDictEntrySeq() {
		return orgDictEntrySeq;
	}
	public void setOrgDictEntrySeq(Long orgDictEntrySeq) {
		this.orgDictEntrySeq = orgDictEntrySeq;
	}
	public String getDescription() {
		return description;
	}
	public void setDescription(String description) {
		this.description = description;
	}
}

+ 160 - 0
common/commons-rest-model/src/main/java/com/yihu/jw/ehr/model/adaption/MAdapterDictVo.java

@ -0,0 +1,160 @@
package com.yihu.jw.ehr.model.adaption;
/**
 *
 * @author lincl
 * @version 1.0
 * @created 2016.2.3
 */
public class MAdapterDictVo {
	private Long id;
	private Long adapterPlanId;
	private Long dictId;
	private String dictCode;
	private String dictName;
	private Long dictEntryId;
	private String dictEntryCode;
	private String dictEntryName;
	private Long orgDictSeq;
	private String orgDictCode;
	private String orgDictName;
	private Long orgDictEntrySeq;
	private String orgDictEntryCode;
	private String orgDictEntryName;
	private String description;
	public MAdapterDictVo() {
	}
	public Long getId() {
		return id;
	}
	public void setId(Long id) {
		this.id = id;
	}
	public Long getAdapterPlanId() {
		return adapterPlanId;
	}
	public void setAdapterPlanId(Long adapterPlanId) {
		this.adapterPlanId = adapterPlanId;
	}
	public Long getDictId() {
		return dictId;
	}
	public void setDictId(Long dictId) {
		this.dictId = dictId;
	}
	public Long getDictEntryId() {
		return dictEntryId;
	}
	public void setDictEntryId(Long dictEntryId) {
		this.dictEntryId = dictEntryId;
	}
	public Long getOrgDictEntrySeq() {
		return orgDictEntrySeq;
	}
	public void setOrgDictEntrySeq(Long orgDictEntrySeq) {
		this.orgDictEntrySeq = orgDictEntrySeq;
	}
	public Long getOrgDictSeq() {
		return orgDictSeq;
	}
	public void setOrgDictSeq(Long orgDictSeq) {
		this.orgDictSeq = orgDictSeq;
	}
	public String getDescription() {
		return description;
	}
	public void setDescription(String description) {
		this.description = description;
	}
	public String getDictCode() {
		return dictCode;
	}
	public void setDictCode(String dictCode) {
		this.dictCode = dictCode;
	}
	public String getDictName() {
		return dictName;
	}
	public void setDictName(String dictName) {
		this.dictName = dictName;
	}
	public String getDictEntryCode() {
		return dictEntryCode;
	}
	public void setDictEntryCode(String dictEntryCode) {
		this.dictEntryCode = dictEntryCode;
	}
	public String getDictEntryName() {
		return dictEntryName;
	}
	public void setDictEntryName(String dictEntryName) {
		this.dictEntryName = dictEntryName;
	}
	public String getOrgDictCode() {
		return orgDictCode;
	}
	public void setOrgDictCode(String orgDictCode) {
		this.orgDictCode = orgDictCode;
	}
	public String getOrgDictName() {
		return orgDictName;
	}
	public void setOrgDictName(String orgDictName) {
		this.orgDictName = orgDictName;
	}
	public String getOrgDictEntryCode() {
		return orgDictEntryCode;
	}
	public void setOrgDictEntryCode(String orgDictEntryCode) {
		this.orgDictEntryCode = orgDictEntryCode;
	}
	public String getOrgDictEntryName() {
		return orgDictEntryName;
	}
	public void setOrgDictEntryName(String orgDictEntryName) {
		this.orgDictEntryName = orgDictEntryName;
	}
}

+ 76 - 0
common/commons-rest-model/src/main/java/com/yihu/jw/ehr/model/adaption/MAdapterOrg.java

@ -0,0 +1,76 @@
package com.yihu.jw.ehr.model.adaption;
/**
 * Organization mapping for adapter management
 *
 * @author lincl
 * @version 1.0
 * @created 2016.2.4
 */
public class MAdapterOrg {
    private String code;
    private String type;
    private String name;
    private String description;
    private String parent;
    private String org;
    private String area;
    public String getCode() {
        return code;
    }
    public void setCode(String code) {
        this.code = code;
    }
    public String getType() {
        return type;
    }
    public void setType(String type) {
        this.type = type;
    }
    public String getName() {
        return name;
    }
    public void setName(String name) {
        this.name = name;
    }
    public String getDescription() {
        return description;
    }
    public void setDescription(String description) {
        this.description = description;
    }
    public String getParent() {
        return parent;
    }
    public void setParent(String parent) {
        this.parent = parent;
    }
    public String getOrg() {
        return org;
    }
    public void setOrg(String org) {
        this.org = org;
    }
    public String getArea() {
        return area;
    }
    public void setArea(String area) {
        this.area = area;
    }
}

+ 90 - 0
common/commons-rest-model/src/main/java/com/yihu/jw/ehr/model/adaption/MAdapterPlan.java

@ -0,0 +1,90 @@
package com.yihu.jw.ehr.model.adaption;
/**
 * @author lincl
 * @version 1.0
 * @created 2016.2.3
 */
public class MAdapterPlan {
    private Long id;
    private Long parentId;
    private String code = "";
    private String name = "";
    private String type = "";
    private String version = "";
    private String org = "";
    private String description = "";
    private int status;
    public Long getId() {
        return id;
    }
    public void setId(Long id) {
        this.id = id;
    }
    public Long getParentId() {
        return parentId;
    }
    public void setParentId(Long parentId) {
        this.parentId = parentId;
    }
    public String getCode() {
        return code;
    }
    public void setCode(String code) {
        this.code = code;
    }
    public String getName() {
        return name;
    }
    public void setName(String name) {
        this.name = name;
    }
    public String getType() {
        return type;
    }
    public void setType(String type) {
        this.type = type;
    }
    public String getVersion() {
        return version;
    }
    public void setVersion(String version) {
        this.version = version;
    }
    public String getOrg() {
        return org;
    }
    public void setOrg(String org) {
        this.org = org;
    }
    public String getDescription() {
        return description;
    }
    public void setDescription(String description) {
        this.description = description;
    }
    public int getStatus() {
        return status;
    }
    public void setStatus(int status) {
        this.status = status;
    }
}

+ 39 - 0
common/commons-rest-model/src/main/java/com/yihu/jw/ehr/model/adaption/MAdapterRelationship.java

@ -0,0 +1,39 @@
package com.yihu.jw.ehr.model.adaption;
import com.fasterxml.jackson.annotation.JsonAutoDetect;
/**
 * @author lincl
 * @version 1.0
 * @created 2016.2.3
 */
@JsonAutoDetect(fieldVisibility = JsonAutoDetect.Visibility.ANY)
public class MAdapterRelationship {
    long id;
    String code;
    String name;
    public long getId() {
        return id;
    }
    public void setId(long id) {
        this.id = id;
    }
    public String getCode() {
        return code;
    }
    public void setCode(String code) {
        this.code = code;
    }
    public String getName() {
        return name;
    }
    public void setName(String name) {
        this.name = name;
    }
}
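@JsonAutoDetect(fieldVisibility = ANY) lets Jackson read the package-private fields directly, so serialization does not depend on the getters. A hedged round-trip sketch with invented values:

import com.fasterxml.jackson.databind.ObjectMapper;

MAdapterRelationship rel = new MAdapterRelationship();
rel.setId(1L);
rel.setCode("REL_01");  // illustrative code
rel.setName("demo");    // illustrative name
String json = new ObjectMapper().writeValueAsString(rel);
// json -> {"id":1,"code":"REL_01","name":"demo"}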

+ 108 - 0
common/commons-rest-model/src/main/java/com/yihu/jw/ehr/model/adaption/MOrgDataSet.java

@ -0,0 +1,108 @@
package com.yihu.jw.ehr.model.adaption;
import com.fasterxml.jackson.annotation.JsonFormat;
import java.util.Date;
/**
 * Organization data set
 *
 * @author lincl
 * @version 1.0
 * @created 2016.1.29
 */
public class MOrgDataSet {
    long id;
    String code;
    String name;
    Date createDate;
    Date updateDate;
    String createUser;
    String updateUser;
    String description;
    String organization;
    int sequence;
    public long getId() {
        return id;
    }
    public void setId(long id) {
        this.id = id;
    }
    public String getCode() {
        return code;
    }
    public void setCode(String code) {
        this.code = code;
    }
    public String getName() {
        return name;
    }
    public void setName(String name) {
        this.name = name;
    }
    public String getCreateUser() {
        return createUser;
    }
    public void setCreateUser(String createUser) {
        this.createUser = createUser;
    }
    public String getUpdateUser() {
        return updateUser;
    }
    public void setUpdateUser(String updateUser) {
        this.updateUser = updateUser;
    }
    public String getDescription() {
        return description;
    }
    public void setDescription(String description) {
        this.description = description;
    }
    public String getOrganization() {
        return organization;
    }
    public void setOrganization(String organization) {
        this.organization = organization;
    }
    public int getSequence() {
        return sequence;
    }
    public void setSequence(int sequence) {
        this.sequence = sequence;
    }
    @JsonFormat(pattern="yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
    public Date getCreateDate() {
        return createDate;
    }
    public void setCreateDate(Date createDate) {
        this.createDate = createDate;
    }
    @JsonFormat(pattern="yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
    public Date getUpdateDate() {
        return updateDate;
    }
    public void setUpdateDate(Date updateDate) {
        this.updateDate = updateDate;
    }
}
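The @JsonFormat annotations sit on the date getters, so they shape JSON serialization only. A hedged sketch with invented values:

import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.Date;

MOrgDataSet dataSet = new MOrgDataSet();
dataSet.setCode("HDSA00.01"); // illustrative data-set code
dataSet.setCreateDate(new Date());
String json = new ObjectMapper().writeValueAsString(dataSet);
// createDate is rendered as "yyyy-MM-dd HH:mm:ss" in GMT+8, e.g.
// "2018-07-26 14:30:00", rather than as an epoch timestamp.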

+ 108 - 0
common/commons-rest-model/src/main/java/com/yihu/jw/ehr/model/adaption/MOrgDict.java

@ -0,0 +1,108 @@
package com.yihu.jw.ehr.model.adaption;
import com.fasterxml.jackson.annotation.JsonFormat;
import java.util.Date;
/**
 * Organization dictionary
 *
 * @author lincl
 * @version 1.0
 * @created 2015-10-23 10:19:06
 */
public class MOrgDict {
    long id;
    String code;
    String name;
    Date createDate;
    Date updateDate;
    String createUser;
    String updateUser;
    String description;
    String organization;
    int sequence;
    public long getId() {
        return id;
    }
    public void setId(long id) {
        this.id = id;
    }
    public String getCode() {
        return code;
    }
    public void setCode(String code) {
        this.code = code;
    }
    public String getName() {
        return name;
    }
    public void setName(String name) {
        this.name = name;
    }
    @JsonFormat(pattern="yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
    public Date getCreateDate() {
        return createDate;
    }
    public void setCreateDate(Date createDate) {
        this.createDate = createDate;
    }
    @JsonFormat(pattern="yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
    public Date getUpdateDate() {
        return updateDate;
    }
    public void setUpdateDate(Date updateDate) {
        this.updateDate = updateDate;
    }
    public String getCreateUser() {
        return createUser;
    }
    public void setCreateUser(String createUser) {
        this.createUser = createUser;
    }
    public String getUpdateUser() {
        return updateUser;
    }
    public void setUpdateUser(String updateUser) {
        this.updateUser = updateUser;
    }
    public String getDescription() {
        return description;
    }
    public void setDescription(String description) {
        this.description = description;
    }
    public String getOrganization() {
        return organization;
    }
    public void setOrganization(String organization) {
        this.organization = organization;
    }
    public int getSequence() {
        return sequence;
    }
    public void setSequence(int sequence) {
        this.sequence = sequence;
    }
}

+ 0 - 0
common/commons-rest-model/src/main/java/com/yihu/jw/ehr/model/adaption/MOrgDictItem.java


Some files were not shown because too many files changed in this diff