chenweida 7 anni fa
parent
commit
be91878836
100 ha cambiato i file con 8589 aggiunte e 0 eliminazioni
  1. 36 0
      .idea/inspectionProfiles/Project_Default.xml
  2. 7 0
      .idea/inspectionProfiles/profiles_settings.xml
  3. 21 0
      .idea/modules.xml
  4. 21 0
      common-activemq-starter/pom.xml
  5. 0 0
      common-activemq-starter/src/main/resources/template.yml
  6. 22 0
      common-async-starter/pom.xml
  7. 58 0
      common-async-starter/src/main/java/com/yihu/base/async/AsyncExecutorUtils.java
  8. 8 0
      common-async-starter/src/main/resources/template.yml
  9. 32 0
      common-cache-starter/pom.xml
  10. 82 0
      common-cache-starter/src/main/java/com/yihu/base/cache/cache/CustomMapCache.java
  11. 158 0
      common-cache-starter/src/main/java/com/yihu/base/cache/cache/CustomRedisCache.java
  12. 21 0
      common-cache-starter/src/main/java/com/yihu/base/cache/config/CacheKeyGenerator.java
  13. 315 0
      common-cache-starter/src/main/java/com/yihu/base/cache/lock/CacheLock.java
  14. 30 0
      common-cache-starter/src/main/java/com/yihu/base/cache/manager/CacheManagerFactory.java
  15. 22 0
      common-cache-starter/src/main/java/com/yihu/base/cache/manager/CustomCacheManager.java
  16. 149 0
      common-cache-starter/src/main/java/com/yihu/base/cache/manager/CustomMapCacheManager.java
  17. 182 0
      common-cache-starter/src/main/java/com/yihu/base/cache/manager/CustomRedisCacheManager.java
  18. 75 0
      common-cache-starter/src/main/java/com/yihu/base/cache/support/CacheInvocation.java
  19. 33 0
      common-cache-starter/src/main/java/com/yihu/base/cache/support/CacheSupport.java
  20. 89 0
      common-cache-starter/src/main/java/com/yihu/base/cache/support/CacheSupportImpl.java
  21. 87 0
      common-cache-starter/src/main/java/com/yihu/base/cache/support/CachingAnnoationAspect.java
  22. 64 0
      common-cache-starter/src/main/java/com/yihu/base/cache/support/MapCacheSupportImpl.java
  23. 60 0
      common-cache-starter/src/main/java/com/yihu/base/cache/support/RedisCacheSupportImpl.java
  24. 31 0
      common-cache-starter/src/main/java/com/yihu/base/cache/util/RedisTemplateUtils.java
  25. 145 0
      common-cache-starter/src/main/java/com/yihu/base/cache/util/ReflectionUtils.java
  26. 68 0
      common-cache-starter/src/main/java/com/yihu/base/cache/util/SpringContextUtils.java
  27. 20 0
      common-cache-starter/src/main/resources/template.yml
  28. 38 0
      common-data-es-starter/pom.xml
  29. 93 0
      common-data-es-starter/readme.MD
  30. 100 0
      common-data-es-starter/src/main/java/com/yihu/base/es/config/ElasticFactory.java
  31. 226 0
      common-data-es-starter/src/main/java/com/yihu/base/es/config/ElastricSearchHelper.java
  32. 21 0
      common-data-es-starter/src/main/java/com/yihu/base/es/config/model/SaveModel.java
  33. 28 0
      common-data-es-starter/src/main/resources/template.yml
  34. 43 0
      common-data-fastdfs-starter/pom.xml
  35. 117 0
      common-data-fastdfs-starter/src/main/java/com/yihu/base/config/FastDFSConfig.java
  36. 65 0
      common-data-fastdfs-starter/src/main/java/com/yihu/base/fastdfs/FastDFSClientPool.java
  37. 363 0
      common-data-fastdfs-starter/src/main/java/com/yihu/base/fastdfs/FastDFSHelper.java
  38. 13 0
      common-data-fastdfs-starter/src/main/resources/template.yml
  39. 66 0
      common-data-hbase-starter/pom.xml
  40. 37 0
      common-data-hbase-starter/src/main/java/com/yihu/base/hbase/AbstractHBaseClient.java
  41. 155 0
      common-data-hbase-starter/src/main/java/com/yihu/base/hbase/HBaseAdmin.java
  42. 381 0
      common-data-hbase-starter/src/main/java/com/yihu/base/hbase/HBaseHelper.java
  43. 166 0
      common-data-hbase-starter/src/main/java/com/yihu/base/hbase/TableBundle.java
  44. 96 0
      common-data-hbase-starter/src/main/java/com/yihu/base/hbase/config/HbaseConfig.java
  45. 163 0
      common-data-hbase-starter/src/main/resources/hbase/core-site.xml
  46. 243 0
      common-data-hbase-starter/src/main/resources/hbase/hbase-site.xml
  47. 348 0
      common-data-hbase-starter/src/main/resources/hbase/hdfs-site.xml
  48. 7 0
      common-data-hbase-starter/src/main/resources/template.yml
  49. 59 0
      common-data-mysql-starter/pom.xml
  50. 289 0
      common-data-mysql-starter/src/main/java/com/yihu/base/mysql/query/BaseJpaService.java
  51. 198 0
      common-data-mysql-starter/src/main/java/com/yihu/base/mysql/query/FieldCondition.java
  52. 28 0
      common-data-mysql-starter/src/main/java/com/yihu/base/mysql/query/ReturnIdPstCreator.java
  53. 304 0
      common-data-mysql-starter/src/main/java/com/yihu/base/mysql/query/URLHqlQueryParser.java
  54. 264 0
      common-data-mysql-starter/src/main/java/com/yihu/base/mysql/query/URLQueryParser.java
  55. 63 0
      common-data-mysql-starter/src/main/java/com/yihu/base/mysql/query/UpdatePstCallback.java
  56. 22 0
      common-data-redis-starter/pom.xml
  57. 15 0
      common-data-redis-starter/src/main/resources/template.yml
  58. 37 0
      common-data-solr-starter/pom.xml
  59. 133 0
      common-data-solr-starter/src/main/java/com/yihu/base/SolrAdmin.java
  60. 31 0
      common-data-solr-starter/src/main/java/com/yihu/base/SolrContext.java
  61. 416 0
      common-data-solr-starter/src/main/java/com/yihu/base/SolrHelper.java
  62. 52 0
      common-data-solr-starter/src/main/java/com/yihu/base/SolrPool.java
  63. 4 0
      common-data-solr-starter/src/main/resources/template.yml
  64. 31 0
      common-logback-starter/pom.xml
  65. 34 0
      common-logback-starter/src/main/resources/consoleAppender_logback_demo.xml
  66. 28 0
      common-logback-starter/src/main/resources/dailyRollingFileAppender_logback_demo.xml
  67. 35 0
      common-quartz-starter/pom.xml
  68. 97 0
      common-quartz-starter/readme.MD
  69. 25 0
      common-quartz-starter/src/main/java/com/yihu/base/config/quartz/DefaultJobFactory.java
  70. 63 0
      common-quartz-starter/src/main/java/com/yihu/base/config/quartz/DefaultSchedulerConfig.java
  71. 110 0
      common-quartz-starter/src/main/java/com/yihu/base/config/quartz/QuartzHelper.java
  72. 3 0
      common-quartz-starter/src/main/resources/demo.yml
  73. 41 0
      common-quartz-starter/src/main/resources/quartz.properties
  74. 184 0
      common-quartz-starter/src/main/sql/QRTZ_BLOB_TRIGGERS.sql
  75. 98 0
      common-security-starter/pom.xml
  76. 154 0
      common-security-starter/readme.MD
  77. 55 0
      common-security-starter/src/main/java/com.yihu.base.security/SercurityConfig.java
  78. 108 0
      common-security-starter/src/main/java/com.yihu.base.security/config/AuthorizationServerConfig.java
  79. 89 0
      common-security-starter/src/main/java/com.yihu.base.security/config/ResourceServerConfig.java
  80. 140 0
      common-security-starter/src/main/java/com.yihu.base.security/hander/BaseAuthenticationSuccessHandler.java
  81. 44 0
      common-security-starter/src/main/java/com.yihu.base.security/hander/BseAuthenctiationFailureHandler.java
  82. 49 0
      common-security-starter/src/main/java/com.yihu.base.security/properties/AccessTokenPorperties.java
  83. 21 0
      common-security-starter/src/main/java/com.yihu.base.security/properties/SecurityProperties.java
  84. 34 0
      common-security-starter/src/main/java/com.yihu.base.security/properties/SmsValidateProperties.java
  85. 9 0
      common-security-starter/src/main/java/com.yihu.base.security/rbas/ClientServiceProvider.java
  86. 15 0
      common-security-starter/src/main/java/com.yihu.base.security/rbas/IRbasService.java
  87. 11 0
      common-security-starter/src/main/java/com.yihu.base.security/rbas/provider/AuthorizeConfigProvider.java
  88. 24 0
      common-security-starter/src/main/java/com.yihu.base.security/rbas/provider/AuthorizeConfigProviderManager.java
  89. 29 0
      common-security-starter/src/main/java/com.yihu.base.security/rbas/provider/PerssionAllAuthorizeConfigProvider.java
  90. 25 0
      common-security-starter/src/main/java/com.yihu.base.security/rbas/provider/RbasAuthorizeConfigProvider.java
  91. 113 0
      common-security-starter/src/main/java/com.yihu.base.security/sms/SmsCodeAuthenticationFilter.java
  92. 64 0
      common-security-starter/src/main/java/com.yihu.base.security/sms/SmsCodeAuthenticationProvider.java
  93. 53 0
      common-security-starter/src/main/java/com.yihu.base.security/sms/SmsCodeAuthenticationSecurityConfig.java
  94. 81 0
      common-security-starter/src/main/java/com.yihu.base.security/sms/SmsCodeAuthenticationToken.java
  95. 56 0
      common-security-starter/src/main/java/com.yihu.base.security/sms/controller/SmsController.java
  96. 23 0
      common-security-starter/src/main/java/com.yihu.base.security/sms/exception/ValidateCodeException.java
  97. 65 0
      common-security-starter/src/main/java/com.yihu.base.security/sms/filter/SmsvalidateCodeFilter.java
  98. 41 0
      common-security-starter/src/main/java/com.yihu.base.security/sms/generator/SmsValidateCodeGenerator.java
  99. 17 0
      common-security-starter/src/main/java/com.yihu.base.security/sms/generator/ValidateCodeGenerator.java
  100. 0 0
      common-security-starter/src/main/java/com.yihu.base.security/sms/mobile/DefaultMobileCheck.java

+ 36 - 0
.idea/inspectionProfiles/Project_Default.xml

@ -0,0 +1,36 @@
<component name="InspectionProjectProfileManager">
  <profile version="1.0">
    <option name="myName" value="Project Default" />
    <inspection_tool class="JavaDoc" enabled="true" level="WARNING" enabled_by_default="true">
      <option name="TOP_LEVEL_CLASS_OPTIONS">
        <value>
          <option name="ACCESS_JAVADOC_REQUIRED_FOR" value="none" />
          <option name="REQUIRED_TAGS" value="" />
        </value>
      </option>
      <option name="INNER_CLASS_OPTIONS">
        <value>
          <option name="ACCESS_JAVADOC_REQUIRED_FOR" value="none" />
          <option name="REQUIRED_TAGS" value="" />
        </value>
      </option>
      <option name="METHOD_OPTIONS">
        <value>
          <option name="ACCESS_JAVADOC_REQUIRED_FOR" value="none" />
          <option name="REQUIRED_TAGS" value="@return@param@throws or @exception" />
        </value>
      </option>
      <option name="FIELD_OPTIONS">
        <value>
          <option name="ACCESS_JAVADOC_REQUIRED_FOR" value="none" />
          <option name="REQUIRED_TAGS" value="" />
        </value>
      </option>
      <option name="IGNORE_DEPRECATED" value="false" />
      <option name="IGNORE_JAVADOC_PERIOD" value="true" />
      <option name="IGNORE_DUPLICATED_THROWS" value="false" />
      <option name="IGNORE_POINT_TO_ITSELF" value="false" />
      <option name="myAdditionalJavadocTags" value="date" />
    </inspection_tool>
  </profile>
</component>

+ 7 - 0
.idea/inspectionProfiles/profiles_settings.xml

@ -0,0 +1,7 @@
<component name="InspectionProjectProfileManager">
  <settings>
    <option name="PROJECT_PROFILE" value="Project Default" />
    <option name="USE_PROJECT_PROFILE" value="true" />
    <version value="1.0" />
  </settings>
</component>

+ 21 - 0
.idea/modules.xml

@ -0,0 +1,21 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="ProjectModuleManager">
    <modules>
      <module fileurl="file://$PROJECT_DIR$/common-activemq-starter/common-mq.iml" filepath="$PROJECT_DIR$/common-activemq-starter/common-mq.iml" />
      <module fileurl="file://$PROJECT_DIR$/common-async-starter/common-async-starter.iml" filepath="$PROJECT_DIR$/common-async-starter/common-async-starter.iml" />
      <module fileurl="file://$PROJECT_DIR$/common-cache-starter/common-cache-starter.iml" filepath="$PROJECT_DIR$/common-cache-starter/common-cache-starter.iml" />
      <module fileurl="file://$PROJECT_DIR$/common-data-es-starter/common-data-es-starter.iml" filepath="$PROJECT_DIR$/common-data-es-starter/common-data-es-starter.iml" />
      <module fileurl="file://$PROJECT_DIR$/common-data-fastdfs-starter/common-data-fastdfs-starter.iml" filepath="$PROJECT_DIR$/common-data-fastdfs-starter/common-data-fastdfs-starter.iml" />
      <module fileurl="file://$PROJECT_DIR$/common-data-hbase-starter/common-data-hbase-starter.iml" filepath="$PROJECT_DIR$/common-data-hbase-starter/common-data-hbase-starter.iml" />
      <module fileurl="file://$PROJECT_DIR$/common-data-mysql-starter/common-data-mysql-starter.iml" filepath="$PROJECT_DIR$/common-data-mysql-starter/common-data-mysql-starter.iml" />
      <module fileurl="file://$PROJECT_DIR$/common-data-redis-starter/common-data-redis-starter.iml" filepath="$PROJECT_DIR$/common-data-redis-starter/common-data-redis-starter.iml" />
      <module fileurl="file://$PROJECT_DIR$/common-data-solr-starter/common-data-solr-starter.iml" filepath="$PROJECT_DIR$/common-data-solr-starter/common-data-solr-starter.iml" />
      <module fileurl="file://$PROJECT_DIR$/common-logback-starter/common-logback-starter.iml" filepath="$PROJECT_DIR$/common-logback-starter/common-logback-starter.iml" />
      <module fileurl="file://$PROJECT_DIR$/common-quartz-starter/common-quartz.iml" filepath="$PROJECT_DIR$/common-quartz-starter/common-quartz.iml" />
      <module fileurl="file://$PROJECT_DIR$/common-security-starter/common-security.iml" filepath="$PROJECT_DIR$/common-security-starter/common-security.iml" />
      <module fileurl="file://$PROJECT_DIR$/common-swagger-starter/common-swagger.iml" filepath="$PROJECT_DIR$/common-swagger-starter/common-swagger.iml" />
      <module fileurl="file://$PROJECT_DIR$/jkzl-start.iml" filepath="$PROJECT_DIR$/jkzl-start.iml" />
    </modules>
  </component>
</project>

+ 21 - 0
common-activemq-starter/pom.xml

@ -0,0 +1,21 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <groupId>com.yihu.jw</groupId>
        <artifactId>jkzl-start</artifactId>
        <version>1.0.0</version>
        <relativePath>../pom.xml</relativePath>
    </parent>
    <artifactId>common-activemq-starter</artifactId>
    <version>1.0.0</version>
    <dependencies>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-activemq</artifactId>
        </dependency>
    </dependencies>
</project>

+ 0 - 0
common-activemq-starter/src/main/resources/template.yml


+ 22 - 0
common-async-starter/pom.xml

@ -0,0 +1,22 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent>
        <groupId>com.yihu.jw</groupId>
        <artifactId>jkzl-start</artifactId>
        <version>1.0.0</version>
        <relativePath>../pom.xml</relativePath>
    </parent>
    <modelVersion>4.0.0</modelVersion>
    <artifactId>common-async-starter</artifactId>
    <dependencies>
        <dependency>
            <groupId>org.springframework</groupId>
            <artifactId>spring-context</artifactId>
        </dependency>
    </dependencies>
</project>

+ 58 - 0
common-async-starter/src/main/java/com/yihu/base/async/AsyncExecutorUtils.java

@ -0,0 +1,58 @@
package com.yihu.base.async;
import org.springframework.aop.interceptor.AsyncUncaughtExceptionHandler;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.annotation.AsyncConfigurer;
import org.springframework.scheduling.annotation.EnableAsync;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
import java.util.concurrent.Executor;
import java.util.concurrent.ThreadPoolExecutor;
/**
 * Async executor configuration: builds the thread pool that backs
 * Spring {@code @Async} method invocations.
 *
 * <p>Pool parameters are read from the {@code thread.pool.*} properties
 * (see template.yml); defaults match the values previously hard-coded here.
 */
@Configuration
@EnableAsync
public class AsyncExecutorUtils implements AsyncConfigurer {
    // FIX: property placeholders added — the original @Value("core-size")
    // injected the literal text "core-size", which cannot bind to an int.
    @Value("${thread.pool.core-size:5}")
    private int coreSize;
    @Value("${thread.pool.max-size:20}")
    private int maxSize;
    // Property name keeps the "quenue" spelling used in template.yml.
    @Value("${thread.pool.quenue-capacity:25}")
    private int queueCapacity;
    @Value("${thread.pool.keep-alive-seconds:120}")
    private int aliveSeconds;

    /**
     * Build the executor used for @Async calls.
     *
     * @return an initialized ThreadPoolTaskExecutor
     */
    @Override
    public Executor getAsyncExecutor() {
        // FIX: the original dereferenced a field initialized to null (NPE);
        // the executor must be instantiated before it is configured.
        ThreadPoolTaskExecutor taskExecutor = new ThreadPoolTaskExecutor();
        taskExecutor.setCorePoolSize(coreSize);
        taskExecutor.setMaxPoolSize(maxSize);
        taskExecutor.setQueueCapacity(queueCapacity);
        taskExecutor.setKeepAliveSeconds(aliveSeconds);
        // Rejection policy: AbortPolicy fails fast with RejectedExecutionException
        // when the pool and queue are saturated.
        taskExecutor.setRejectedExecutionHandler(new ThreadPoolExecutor.AbortPolicy());
        taskExecutor.initialize();
        return taskExecutor;
    }

    /**
     * @return null — Spring falls back to its default uncaught-exception handler
     */
    @Override
    public AsyncUncaughtExceptionHandler getAsyncUncaughtExceptionHandler() {
        return null;
    }
}

+ 8 - 0
common-async-starter/src/main/resources/template.yml

@ -0,0 +1,8 @@
thread:
  pool: #线程池配置
    core-size: 5 # 线程池里最小线程数
    max-size: 50 # 线程池里最大线程数量
    quenue-capacity: 1000 # 线程队列容量
    keep-alive-seconds: 120 # 线程最大的存活时间(秒)

+ 32 - 0
common-cache-starter/pom.xml

@ -0,0 +1,32 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <groupId>com.yihu.jw</groupId>
        <artifactId>jkzl-start</artifactId>
        <version>1.0.0</version>
        <relativePath>../pom.xml</relativePath>
    </parent>
    <artifactId>common-cache-starter</artifactId>
    <version>1.0.0</version>
    <dependencies>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-data-redis</artifactId>
        </dependency>
        <dependency>
            <groupId>org.aspectj</groupId>
            <artifactId>aspectjrt</artifactId>
            <version>1.8.10</version>
        </dependency>
        <dependency>
            <groupId>com.yihu.base</groupId>
            <artifactId>common-async-starter</artifactId>
        </dependency>
    </dependencies>
    
</project>

+ 82 - 0
common-cache-starter/src/main/java/com/yihu/base/cache/cache/CustomMapCache.java

@ -0,0 +1,82 @@
package com.yihu.base.cache.cache;
import com.yihu.base.cache.config.CacheKeyGenerator;
import com.yihu.base.cache.support.CacheSupport;
import com.yihu.base.cache.util.ReflectionUtils;
import com.yihu.base.cache.util.SpringContextUtils;
import org.springframework.cache.concurrent.ConcurrentMapCache;
import java.util.concurrent.ConcurrentMap;
/**
 * In-memory (ConcurrentMap-based) cache that checks entry age on reads and
 * triggers a reload through the recorded proxy invocation when the entry is
 * expired or due for refresh.
 */
public class CustomMapCache extends ConcurrentMapCache{
    // Entry lifetime — presumably milliseconds, compared against wall-clock diffs; TODO confirm.
    private Long expireTime;
    // Age threshold used to decide when to reload — presumably milliseconds; TODO confirm.
    private Long refreshTime;
    // Creation time of this cache instance (System.currentTimeMillis()).
    private Long startTime;
    public Long getStartTime() {
        return startTime;
    }
    public CustomMapCache(String name) {
        super(name);
    }
    public Long getExpireTime() {
        return expireTime;
    }
    public Long getRefreshTime() {
        return refreshTime;
    }
    public CustomMapCache(String name,Long expireTime, Long refreshTime){
        super(name);
        this.expireTime = expireTime;
        this.refreshTime = refreshTime;
        this.startTime = System.currentTimeMillis();
    }
    // Looked up lazily from the Spring context rather than injected,
    // because this cache is constructed outside the container.
    private CacheSupport getCacheSupport() {
        return SpringContextUtils.getBean(CacheSupport.class);
    }
    /**
     * Look the key up in the parent cache and, on a hit, run the refresh check.
     *
     * @param key cache key
     * @return the wrapped cached value, or null on a miss
     */
    @Override
    public ValueWrapper get(Object key) {
        String cacheKey = CacheKeyGenerator.getCacheKey();
        // FIX: the original called this.get(key), which re-enters this override
        // and recurses until StackOverflowError; delegate to the parent instead.
        ValueWrapper valueWrapper = super.get(key);
        if (null != valueWrapper) {
            // Refresh the cached entry if it is stale.
            refreshCache(key,cacheKey);
        }
        return valueWrapper;
    }
    /**
     * Reload the entry when it is expired or inside the refresh window.
     *
     * NOTE(review): ReflectionUtils is handed the ConcurrentMapCache *class*
     * rather than an instance, and the store value is cast to CustomMapCache —
     * both look suspicious; confirm against ReflectionUtils' contract.
     */
    private void refreshCache(Object key, String cacheKeyStr) {
        ConcurrentMap<Object, Object> cacheMap = (ConcurrentMap<Object, Object>) ReflectionUtils.getFieldValue(ConcurrentMapCache.class, "store");
        CustomMapCache customMapCache = (CustomMapCache) cacheMap.get(key);
        Long diffTime = System.currentTimeMillis() - customMapCache.getStartTime();
        if (diffTime >= customMapCache.getExpireTime() || diffTime <= customMapCache.getRefreshTime()) {
            synchronized (cacheMap){
                // Double-check inside the lock so only one thread reloads.
                if (diffTime <= customMapCache.getRefreshTime()) {
                    // Reload the cached value via the stored proxy-method invocation.
                    CustomMapCache.this.getCacheSupport().refreshCacheByKey(customMapCache.getName(), key.toString());
                }
            }
        }
    }
}

+ 158 - 0
common-cache-starter/src/main/java/com/yihu/base/cache/cache/CustomRedisCache.java

@ -0,0 +1,158 @@
package com.yihu.base.cache.cache;
import com.yihu.base.async.AsyncExecutorUtils;
import com.yihu.base.cache.config.CacheKeyGenerator;
import com.yihu.base.cache.support.CacheSupport;
import com.yihu.base.cache.lock.CacheLock;
import com.yihu.base.cache.util.SpringContextUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.dao.DataAccessException;
import org.springframework.data.redis.cache.RedisCache;
import org.springframework.data.redis.cache.RedisCacheElement;
import org.springframework.data.redis.cache.RedisCacheKey;
import org.springframework.data.redis.connection.RedisConnection;
import org.springframework.data.redis.core.RedisCallback;
import org.springframework.data.redis.core.RedisOperations;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.scheduling.annotation.Async;
import org.springframework.util.Assert;
/**
 * Redis cache that proactively refreshes entries whose remaining TTL has
 * dropped below a configured threshold, so hot keys are reloaded before
 * they expire.
 */
public class CustomRedisCache extends RedisCache{
    public Logger logger = LoggerFactory.getLogger(CustomRedisCache.class);
    // Total expiration time; stored via setter, not read in this class.
    private Long expireTime;
    // Remaining-TTL threshold that triggers a background refresh
    // (same unit as redisOperations.getExpire — seconds by default).
    private Long refreshTime;
    private RedisOperations redisOperations;
    @Autowired
    private AsyncExecutorUtils asyncExecutorUtils;
    // Looked up lazily because this cache is constructed outside the container.
    private CacheSupport getCacheSupport() {
        return SpringContextUtils.getBean(CacheSupport.class);
    }
    private byte[] prefix;
    public CustomRedisCache(String name, byte[] prefix, RedisOperations<?, ?> redisOperations, long expiration) {
        super(name, prefix, redisOperations, expiration);
    }
    public CustomRedisCache(String name, byte[] prefix, RedisOperations<? extends Object, ? extends Object> redisOperations, long expiration, long refreshTime) {
        super(name, prefix, redisOperations, expiration);
        this.redisOperations = redisOperations;
        this.refreshTime = refreshTime;
        this.prefix = prefix;
    }
    public CustomRedisCache(String name, byte[] prefix, RedisOperations<? extends Object, ? extends Object> redisOperations, long expiration, long refreshTime, boolean allowNullValues) {
        super(name, prefix, redisOperations, expiration, allowNullValues);
        this.redisOperations = redisOperations;
        this.refreshTime = refreshTime;
        this.prefix = prefix;
    }
    /**
     * Get a value and, on a hit, check whether the remaining TTL is below the
     * refresh threshold; if so, reload it in the background.
     *
     * @param key cache key
     * @return the wrapped cached value, or null on a miss
     */
    @Override
    public ValueWrapper get(Object key) {
        String cacheKey = CacheKeyGenerator.getCacheKey();
        // FIX: the original called this.get(cacheKey) with a String, which
        // re-enters this override and recurses until StackOverflowError;
        // delegate the lookup to the parent instead.
        ValueWrapper valueWrapper = super.get(key);
        if (null != valueWrapper) {
            // Refresh the cached data if it is close to expiry.
            refreshCache(key, cacheKey);
        }
        return valueWrapper;
    }
    /**
     * Override of the parent's get.
     * The parent first calls EXISTS and returns null when absent, then reads
     * the value — which races with expiry/deletion between the two calls.
     * Here the value is read first and EXISTS is checked afterwards.
     *
     * @param cacheKey resolved redis cache key
     * @return the cache element, or null when the key no longer exists
     */
    @Override
    public RedisCacheElement get(final RedisCacheKey cacheKey) {
        Assert.notNull(cacheKey, "CacheKey must not be null!");
        // Read the cached value first.
        RedisCacheElement redisCacheElement = new RedisCacheElement(cacheKey, fromStoreValue(lookup(cacheKey)));
        // Then check whether the key (still) exists.
        Boolean exists = (Boolean) redisOperations.execute(new RedisCallback<Boolean>() {
            @Override
            public Boolean doInRedis(RedisConnection connection) throws DataAccessException {
                return connection.exists(cacheKey.getKeyBytes());
            }
        });
        if (!exists.booleanValue()) {
            return null;
        }
        return redisCacheElement;
    }
    /**
     * Reload the cached value when its remaining TTL is below the refresh
     * threshold, guarded by a distributed lock so only one caller reloads.
     *
     * NOTE(review): @Async on a method invoked from this same object is a
     * self-invocation and bypasses the Spring proxy, so it likely runs
     * synchronously; also the "asyncExecutorUtils" qualifier names an
     * AsyncConfigurer bean, not an Executor — confirm the wiring.
     *
     * @param key         cache key
     * @param cacheKeyStr key string used for TTL lookup and the lock name
     */
    @Async("asyncExecutorUtils")
    public void refreshCache(Object key, String cacheKeyStr) {
        Long ttl = this.redisOperations.getExpire(cacheKeyStr);
        if (null != ttl && ttl <= getRefreshTime()) {
            // Open as few threads as possible; take a distributed lock so only
            // one request refreshes the entry.
            // NOTE(review): CacheLock's constructor appends "_lock" again,
            // producing "..._lock_lock" — harmless but confirm intent.
            CacheLock cacheLock = new CacheLock((RedisTemplate) redisOperations, cacheKeyStr + "_lock");
            try {
                if (cacheLock.lock()) {
                    // Re-check the TTL after acquiring the lock (double-check).
                    Long ttl2 = CustomRedisCache.this.redisOperations.getExpire(cacheKeyStr);
                    if (null != ttl2 && ttl2 <= getRefreshTime()) {
                        // Reload via the recorded proxy-method invocation.
                        CustomRedisCache.this.getCacheSupport().refreshCacheByKey(CustomRedisCache.super.getName(), key.toString());
                    }
                }
            } catch (Exception e) {
                logger.error("刷新缓存失败");
            } finally {
                cacheLock.unlock();
            }
        }
    }
    public Long getExpireTime() {
        return expireTime;
    }
    public Long getRefreshTime() {
        return refreshTime;
    }
    public void setExpireTime(Long expireTime) {
        this.expireTime = expireTime;
    }
    public void setRefreshTime(Long refreshTime) {
        this.refreshTime = refreshTime;
    }
}

+ 21 - 0
common-cache-starter/src/main/java/com/yihu/base/cache/config/CacheKeyGenerator.java

@ -0,0 +1,21 @@
package com.yihu.base.cache.config;
/**
 * Builds the cache key shared by the cache components.
 * Key format: {@code <moduleName>-<saasId>-<bussinessModuleName>}.
 *
 * NOTE(review): the key is held in mutable static state, so concurrent callers
 * that set different keys overwrite each other — confirm single-key usage.
 */
public class CacheKeyGenerator {
    // Most recently composed key; null until setCacheKey is first called.
    private static String cacheKey;

    /**
     * Compose and store the cache key.
     *
     * @param moduleName          technical module name
     * @param saasId              tenant / SaaS identifier
     * @param bussinessModuleName business module name
     */
    public static void setCacheKey(String moduleName,String saasId,String bussinessModuleName) {
        // String.join replaces the original StringBuilder, which concatenated
        // with '+' inside append() and defeated the builder anyway.
        cacheKey = String.join("-", moduleName, saasId, bussinessModuleName);
    }

    /**
     * @return the most recently composed cache key (null if never set)
     */
    public static String getCacheKey(){
        return cacheKey;
    }
}

+ 315 - 0
common-cache-starter/src/main/java/com/yihu/base/cache/lock/CacheLock.java

@ -0,0 +1,315 @@
package com.yihu.base.cache.lock;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.dao.DataAccessException;
import org.springframework.data.redis.connection.RedisConnection;
import org.springframework.data.redis.core.RedisCallback;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.util.Assert;
import org.springframework.util.StringUtils;
import redis.clients.jedis.Jedis;
import redis.clients.jedis.JedisCluster;
import redis.clients.jedis.JedisCommands;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import java.util.UUID;
/**
 * Distributed lock implemented on top of Redis.
 *
 * <p>Acquisition uses {@code SET key value NX EX seconds}, so writing the value
 * and setting the expiry is a single atomic operation. Release runs a Lua
 * script that deletes the key only when it still holds this instance's random
 * token, so a client whose lock has already expired cannot delete a lock now
 * held by someone else.
 *
 * <p>NOTE(review): instances carry per-acquisition state (lockValue, locked)
 * and are not thread-safe — use one instance per acquisition attempt.
 */
public class CacheLock {
    private static Logger logger = LoggerFactory.getLogger(CacheLock.class);
    // Redis access template supplied by the caller.
    private RedisTemplate redisTemplate;
    /**
     * SET option: set the key only when it does not exist (equivalent to SETNX).
     */
    public static final String NX = "NX";
    /**
     * SET option: expiry unit is seconds (equivalent to EXPIRE key seconds).
     */
    public static final String EX = "EX";
    /**
     * Reply returned by Redis when SET succeeds.
     */
    public static final String OK = "OK";
    /**
     * Default timeout while trying to acquire the lock (milliseconds).
     */
    private static final long TIME_OUT = 100;
    /**
     * Default validity period of the lock itself (seconds).
     */
    public static final int EXPIRE = 60;
    /**
     * Lua unlock script: delete the key only when its current value equals the
     * caller-supplied token (atomic compare-and-delete).
     */
    public static final String UNLOCK_LUA;
    static {
        StringBuilder sb = new StringBuilder();
        sb.append("if redis.call(\"get\",KEYS[1]) == ARGV[1] ");
        sb.append("then ");
        sb.append("    return redis.call(\"del\",KEYS[1]) ");
        sb.append("else ");
        sb.append("    return 0 ");
        sb.append("end ");
        UNLOCK_LUA = sb.toString();
    }
    /**
     * Redis key the lock flag is stored under.
     */
    private String lockKey;
    /**
     * Key name written to the log; empty string disables lock logging.
     */
    private String lockKeyLog = "";
    /**
     * Random token stored as the lock's value; proves ownership on unlock.
     */
    private String lockValue;
    /**
     * Validity period of the lock (seconds).
     */
    private int expireTime = EXPIRE;
    /**
     * Timeout while requesting the lock (milliseconds).
     */
    private long timeOut = TIME_OUT;
    /**
     * Whether this instance currently believes it holds the lock.
     */
    private volatile boolean locked = false;
    final Random random = new Random();
    /**
     * Use the default lock expiry and the default acquisition timeout.
     *
     * @param redisTemplate Redis access template
     * @param lockKey       key of the lock (the Redis key)
     */
    public CacheLock(RedisTemplate redisTemplate, String lockKey) {
        this.redisTemplate = redisTemplate;
        // NOTE(review): "_lock" is appended unconditionally, so a caller that
        // already added the suffix ends up with "..._lock_lock" — confirm.
        this.lockKey = lockKey + "_lock";
    }
    /**
     * Use the default acquisition timeout with an explicit lock expiry.
     *
     * @param redisTemplate Redis access template
     * @param lockKey       key of the lock (the Redis key)
     * @param expireTime    validity period of the lock (seconds)
     */
    public CacheLock(RedisTemplate redisTemplate, String lockKey, int expireTime) {
        this(redisTemplate, lockKey);
        this.expireTime = expireTime;
    }
    /**
     * Use the default lock expiry with an explicit acquisition timeout.
     *
     * @param redisTemplate Redis access template
     * @param lockKey       key of the lock (the Redis key)
     * @param timeOut       acquisition timeout (milliseconds)
     */
    public CacheLock(RedisTemplate redisTemplate, String lockKey, long timeOut) {
        this(redisTemplate, lockKey);
        this.timeOut = timeOut;
    }
    /**
     * Specify both the lock expiry and the acquisition timeout.
     *
     * @param redisTemplate Redis access template
     * @param lockKey       key of the lock (the Redis key)
     * @param expireTime    validity period of the lock (seconds)
     * @param timeOut       acquisition timeout (milliseconds)
     */
    public CacheLock(RedisTemplate redisTemplate, String lockKey, int expireTime, long timeOut) {
        this(redisTemplate, lockKey, expireTime);
        this.timeOut = timeOut;
    }
    /**
     * Try to acquire the lock, retrying until the configured timeout elapses.
     *
     * @return whether the lock was acquired
     */
    public boolean tryLock() {
        // Fresh random token for this acquisition.
        lockValue = UUID.randomUUID().toString();
        // Acquisition timeout converted to nanoseconds.
        long timeout = timeOut * 1000000;
        // Start time in nanoseconds.
        long nowTime = System.nanoTime();
        while ((System.nanoTime() - nowTime) < timeout) {
            if (OK.equalsIgnoreCase(this.set(lockKey, lockValue, expireTime))) {
                locked = true;
                // Acquired — stop retrying.
                return locked;
            }
            // Back off briefly between attempts.
            seleep(10, 50000);
        }
        return locked;
    }
    /**
     * Try to acquire the lock once and return immediately.
     *
     * @return whether the lock was acquired
     */
    public boolean lock() {
        lockValue = UUID.randomUUID().toString();
        // Set only if absent, with an expiry (seconds — the EX option is used).
        String result = set(lockKey, lockValue, expireTime);
        locked = OK.equalsIgnoreCase(result);
        return locked;
    }
    /**
     * Acquire the lock, blocking (spinning with back-off) until it succeeds.
     *
     * @return whether the lock was acquired (always true on return)
     */
    public boolean lockBlock() {
        lockValue = UUID.randomUUID().toString();
        while (true) {
            // Set only if absent, with an expiry (seconds — the EX option is used).
            String result = set(lockKey, lockValue, expireTime);
            if (OK.equalsIgnoreCase(result)) {
                locked = true;
                return locked;
            }
            // Back off briefly between attempts.
            seleep(10, 50000);
        }
    }
    /**
     * Release the lock.
     * The lock's value is a non-guessable random token rather than a fixed
     * string, and release uses a Lua script that deletes the key only when the
     * stored token matches — instead of a plain DEL. Together these prevent a
     * client holding an expired lock from deleting a lock it no longer owns.
     */
    public Boolean unlock() {
        // Only attempt release when this instance acquired the lock.
        if (locked) {
            return (Boolean) redisTemplate.execute(new RedisCallback<Boolean>() {
                @Override
                public Boolean doInRedis(RedisConnection connection) throws DataAccessException {
                    Object nativeConnection = connection.getNativeConnection();
                    Long result = 0L;
                    List<String> keys = new ArrayList<>();
                    keys.add(lockKey);
                    List<String> values = new ArrayList<>();
                    values.add(lockValue);
                    // Cluster mode
                    if (nativeConnection instanceof JedisCluster) {
                        result = (Long) ((JedisCluster) nativeConnection).eval(UNLOCK_LUA, keys, values);
                    }
                    // Standalone mode
                    if (nativeConnection instanceof Jedis) {
                        result = (Long) ((Jedis) nativeConnection).eval(UNLOCK_LUA, keys, values);
                    }
                    if (result == 0 && !StringUtils.isEmpty(lockKeyLog)) {
                        logger.info("Redis分布式锁,解锁{}失败!解锁时间:{}", lockKeyLog, System.currentTimeMillis());
                    }
                    // NOTE(review): 'locked' remains true when the delete failed
                    // (result == 0) — confirm this is the intended semantics.
                    locked = result == 0;
                    return result == 1;
                }
            });
        }
        return true;
    }
    /**
     * Atomically set the lock key if absent, with an expiry (SET key value NX EX).
     *
     * @param key     lock key
     * @param value   token stored as the lock's value
     * @param seconds expiry time (seconds)
     * @return the Redis reply ("OK" on success; null when the key already exists)
     */
    private String set(final String key, final String value, final long seconds) {
        Assert.isTrue(!StringUtils.isEmpty(key), "key不能为空");
        return (String) redisTemplate.execute(new RedisCallback<String>() {
            @Override
            public String doInRedis(RedisConnection connection) throws DataAccessException {
                Object nativeConnection = connection.getNativeConnection();
                String result = null;
                if (nativeConnection instanceof JedisCommands) {
                    result = ((JedisCommands) nativeConnection).set(key, value, NX, EX, seconds);
                }
                if (!StringUtils.isEmpty(lockKeyLog) && !StringUtils.isEmpty(result)) {
                    logger.info("获取锁{}的时间:{}", lockKeyLog, System.currentTimeMillis());
                }
                return result;
            }
        });
    }
    /**
     * Sleep between acquisition attempts (millis plus a random sub-millisecond jitter).
     *
     * @param millis milliseconds to sleep
     * @param nanos  exclusive upper bound for the random extra nanoseconds
     */
    private void seleep(long millis, int nanos) {
        try {
            Thread.sleep(millis, random.nextInt(nanos));
        } catch (InterruptedException e) {
            logger.info("获取分布式锁休眠被中断:", e);
        }
    }
    public String getLockKeyLog() {
        return lockKeyLog;
    }
    public void setLockKeyLog(String lockKeyLog) {
        this.lockKeyLog = lockKeyLog;
    }
    public int getExpireTime() {
        return expireTime;
    }
    public void setExpireTime(int expireTime) {
        this.expireTime = expireTime;
    }
    public long getTimeOut() {
        return timeOut;
    }
    public void setTimeOut(long timeOut) {
        this.timeOut = timeOut;
    }
}

+ 30 - 0
common-cache-starter/src/main/java/com/yihu/base/cache/manager/CacheManagerFactory.java

@ -0,0 +1,30 @@
package com.yihu.base.cache.manager;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.cache.CacheManager;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.stereotype.Component;
import org.springframework.util.StringUtils;
@Component
public class CacheManagerFactory {
    /**
     * Cache backend selector from the "cache.type" property
     * (template.yml: 1 = in-memory map cache, 2 = Redis cache).
     * Fixed: the original @Value("cache.type") lacked the ${...} placeholder,
     * so Spring injected the literal string "cache.type".
     */
    @Value("${cache.type}")
    private String cacheType;
    /**
     * Creates the CacheManager matching the configured cache type.
     *
     * @return the matching manager, or {@code null} when cache.type is empty
     *         or names an unknown backend
     */
    public CacheManager getCacheManager(){
        if (StringUtils.isEmpty(cacheType)) {
            return null;
        }
        CacheManager cacheManager = null;
        switch (Integer.parseInt(cacheType)) {
            case 1:
                cacheManager = new CustomMapCacheManager();
                break;
            case 2:
                // NOTE(review): this RedisTemplate has no connection factory or
                // serializers configured — presumably the Spring-managed
                // template should be injected instead; verify before use.
                cacheManager = new CustomRedisCacheManager(new RedisTemplate());
                break;
        }
        return cacheManager;
    }
}

+ 22 - 0
common-cache-starter/src/main/java/com/yihu/base/cache/manager/CustomCacheManager.java

@ -0,0 +1,22 @@
package com.yihu.base.cache.manager;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.cache.CacheManager;
/**
 * Cache-manager contract shared by the map- and Redis-backed managers: adds
 * expire/refresh-time resolution on top of Spring's {@link CacheManager}.
 */
public interface CustomCacheManager extends CacheManager{
     // Separator used inside @Cacheable values: name#expireTime#refreshTime
    String SEPARATOR = "#";
    // Marker identifying a ${...} placeholder/SpEL expression in an annotation value
    String MARK = "$";
    /** Default expire time from configuration (unit not shown here — TODO confirm). */
    Long getExpireTime();
    /** Default auto-refresh time from configuration (same caveat on unit). */
    Long getRefreshTime();
    /** Resolves the expire time from the split annotation value (name#expire#refresh). */
    Long getExpireTime(String cacheName,String[] cacheParams);
    /** Resolves the auto-refresh time from the split annotation value. */
    Long getRefreshTime(String[] cacheParams);
}

+ 149 - 0
common-cache-starter/src/main/java/com/yihu/base/cache/manager/CustomMapCacheManager.java

@ -0,0 +1,149 @@
package com.yihu.base.cache.manager;
import com.yihu.base.cache.cache.CustomMapCache;
import com.yihu.base.cache.util.ReflectionUtils;
import com.yihu.base.cache.util.SpringContextUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.beans.factory.support.DefaultListableBeanFactory;
import org.springframework.cache.Cache;
import org.springframework.cache.concurrent.ConcurrentMapCacheManager;
import org.springframework.stereotype.Component;
import org.springframework.util.StringUtils;
import java.util.concurrent.ConcurrentHashMap;
@Component
public class CustomMapCacheManager extends ConcurrentMapCacheManager implements CustomCacheManager{
    /**
     * Default expire time from configuration. Fixed: the original
     * @Value("expire-time") lacked the ${...} placeholder, which fails to bind
     * (the literal "expire-time" cannot convert to Long).
     */
    @Value("${cache.expire-time}")
    private Long expireTime;
    /** Default auto-refresh time from configuration (same placeholder fix). */
    @Value("${cache.refresh-time}")
    private Long refreshTime;
    private ConcurrentMapCacheManager concurrentMapCacheManager;
    @Autowired
    private DefaultListableBeanFactory beanFactory;
    // Name of the superclass field holding the name->Cache map (read reflectively)
    private static final String SUPER_CACHEMAP = "cacheMap";
    /**
     * Lazily fetches the Spring-managed ConcurrentMapCacheManager whose
     * internal cacheMap this class manipulates reflectively.
     */
    public ConcurrentMapCacheManager getInstance(){
        if(null == concurrentMapCacheManager){
            concurrentMapCacheManager = SpringContextUtils.getBean(ConcurrentMapCacheManager.class);
        }
        return concurrentMapCacheManager;
    }
    @Override
    public Long getExpireTime() {
        // Fixed: was hard-coded to return null, which made the fallback in
        // getExpireTime(String, String[]) yield null and then NPE when unboxed
        // in getCache(String).
        return expireTime;
    }
    @Override
    public Long getRefreshTime() {
        // Fixed: was hard-coded to return null (see above).
        return refreshTime;
    }
    /**
     * Resolves the expire time from the annotation value parts
     * (name#expireTime#refreshTime); supports ${...} placeholders and falls
     * back to the configured default when the annotation carries none.
     */
    @Override
    public Long getExpireTime(String cacheName, String[] cacheParams) {
        Long expirationSecondTime = null;
        if (cacheParams.length > 1) {
            String expirationStr = cacheParams[1];
            if (!StringUtils.isEmpty(expirationStr)) {
                // Allow the annotation to reference configuration, e.g. ${cache.expire-time}
                if (expirationStr.contains(MARK)) {
                    expirationStr = beanFactory.resolveEmbeddedValue(expirationStr);
                }
                expirationSecondTime = Long.parseLong(expirationStr);
            }
        }
        if(null == expirationSecondTime){
            expirationSecondTime = this.getExpireTime();
        }
        return expirationSecondTime;
    }
    /**
     * Resolves the auto-refresh time from the annotation value parts; defaults
     * to 0 (no auto refresh) when the annotation carries none.
     */
    @Override
    public Long getRefreshTime(String[] cacheParams) {
        Long refreshTime = 0L;
        if (cacheParams.length > 2) {
            String refreshTimeStr = cacheParams[2];
            if (!StringUtils.isEmpty(refreshTimeStr)) {
                if (refreshTimeStr.contains(MARK)) {
                    refreshTimeStr = beanFactory.resolveEmbeddedValue(refreshTimeStr);
                }
                refreshTime = Long.parseLong(refreshTimeStr);
            }
        }
        // NOTE(review): refreshTime starts at 0L and can never be null here, so
        // this fallback is unreachable; kept to mirror the original flow —
        // confirm whether the configured default was meant to apply.
        if(null == refreshTime){
            refreshTime = this.getRefreshTime();
        }
        return refreshTime;
    }
    /**
     * Resolves a Cache from an annotation value of the form
     * name#expireTime#refreshTime, creating a CustomMapCache on demand inside
     * the delegate manager's internal cacheMap.
     */
    @Override
    public Cache getCache(String name){
        String[] cacheParams = name.split(CustomCacheManager.SEPARATOR);
        String cacheName = cacheParams[0];
        if(StringUtils.isEmpty(cacheName)){
            return null;
        }
        // Annotation-level times override the configured defaults
        long expireTime = getExpireTime(name,cacheParams);
        long refreshTime = getRefreshTime(cacheParams);
        Object obj =  ReflectionUtils.getFieldValue(getInstance(),SUPER_CACHEMAP);
        if(obj instanceof ConcurrentHashMap){
            ConcurrentHashMap<String,Cache> cacheMap = (ConcurrentHashMap<String,Cache>)obj;
            // NOTE(review): the full annotation value (including #parts) is used
            // as the map key, not the bare cacheName — confirm this is intended.
            return getCache(name,expireTime,refreshTime,cacheMap);
        }else{
            return super.getCache(name);
        }
    }
    /**
     * Double-checked lookup/creation of the named cache in the given map.
     *
     * @param cacheName   key under which the cache is stored
     * @param expireTime  expire time carried by the created cache
     * @param refreshTime refresh time carried by the created cache
     * @param cacheMap    delegate manager's internal name->Cache map
     */
    public Cache getCache(String cacheName, long expireTime, long refreshTime, ConcurrentHashMap<String,Cache> cacheMap){
        Cache cache = cacheMap.get(cacheName);
        if(null != cache){
            return cache;
        }
        synchronized (cacheMap){
            cache = cacheMap.get(cacheName);
            if(null == cache){
                cache = createConcurrentMapCache(cacheName,expireTime,refreshTime);
                cacheMap.put(cacheName,cache);
            }
        }
        return cache;
    }
    /** Factory hook for the map-backed cache implementation. */
    public CustomMapCache createConcurrentMapCache(String cacheName, long expirationTime, long refreshTime) {
        return new CustomMapCache(cacheName,expirationTime,refreshTime);
    }
}

+ 182 - 0
common-cache-starter/src/main/java/com/yihu/base/cache/manager/CustomRedisCacheManager.java

@ -0,0 +1,182 @@
package com.yihu.base.cache.manager;
import com.yihu.base.cache.util.ReflectionUtils;
import com.yihu.base.cache.util.SpringContextUtils;
import com.yihu.base.cache.cache.CustomRedisCache;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.beans.factory.support.DefaultListableBeanFactory;
import org.springframework.cache.Cache;
import org.springframework.data.redis.cache.RedisCacheManager;
import org.springframework.data.redis.core.RedisOperations;
import org.springframework.stereotype.Component;
import org.springframework.util.StringUtils;
import java.util.concurrent.ConcurrentHashMap;
@Component
public class CustomRedisCacheManager extends RedisCacheManager implements CustomCacheManager{
    /**
     * Default expire time from configuration. Fixed: the original
     * @Value("expire-time") lacked the ${...} placeholder, which fails to bind
     * (the literal "expire-time" cannot convert to Long).
     */
    @Value("${cache.expire-time}")
    private Long expireTime;
    /** Default auto-refresh time from configuration (same placeholder fix). */
    @Value("${cache.refresh-time}")
    private Long refreshTime;
    @Autowired
    private DefaultListableBeanFactory beanFactory;
    /** Lazily-resolved Spring-managed RedisCacheManager delegate. */
    private RedisCacheManager redisCacheManager = null;
    // Superclass field holding the name->Cache map (read reflectively)
    private static final String SUPER_CACHEMAP = "cacheMap";
    // Superclass "dynamic" flag: whether unknown caches may be created on demand
    private static final String SUPER_DYNAMIC = "dynamic";
    // Superclass cacheNullValues flag
    private static final String SUPER_CACHENULLVALUES = "cacheNullValues";
    // Superclass updateCacheNames method (keeps the cache-name set in sync)
    private static final String SUPER_METHOD_UPDATECACHENAMES = "updateCacheNames";
    @Override
    public Long getExpireTime() {
        return expireTime;
    }
    @Override
    public Long getRefreshTime() {
        return refreshTime;
    }
    public CustomRedisCacheManager(RedisOperations redisOperations) {
        super(redisOperations);
    }
    /** Lazily fetches the Spring-managed RedisCacheManager delegate. */
    public RedisCacheManager getInstance(){
        if(null == redisCacheManager){
            redisCacheManager = SpringContextUtils.getBean(RedisCacheManager.class);
        }
        return redisCacheManager;
    }
    /**
     * Overrides cache lookup. The name is the annotation value in the form
     * name#expireTime#refreshTime: first part is the cache name, second the
     * expire time, third the auto-refresh time.
     */
    @Override
    public Cache getCache(String name){
        String[] cacheParams = name.split(CustomCacheManager.SEPARATOR);
        String cacheName = cacheParams[0];
        if(StringUtils.isEmpty(cacheName)){
            return null;
        }
        // Annotation-level times override the defaults
        Long expireTime = getExpireTime(name,cacheParams);
        Long refreshTime = getRefreshTime(cacheParams);
        Object obj =  ReflectionUtils.getFieldValue(getInstance(),SUPER_CACHEMAP);
        if(obj instanceof ConcurrentHashMap){
            ConcurrentHashMap<String,Cache> cacheMap = (ConcurrentHashMap<String,Cache>)obj;
            return getCache(name,expireTime,refreshTime,cacheMap);
        }else{
            return super.getCache(name);
        }
    }
    /**
     * Double-checked lookup/creation of the named cache; newly created caches
     * are decorated and registered in the superclass cache-name set.
     */
    public Cache getCache(String cacheName, long expireTime, long refreshTime, ConcurrentHashMap<String,Cache> cacheMap){
        Cache cache = cacheMap.get(cacheName);
        if(null != cache){
            return cache;
        }
        synchronized (cacheMap){
            cache = cacheMap.get(cacheName);
            if(null == cache){
                // Create a cache carrying the expire and refresh times
                cache = getMissingCache(cacheName,expireTime,refreshTime);
                if(null != cache){
                    cache = decorateCache(cache);
                    cacheMap.put(cacheName,cache);
                    // Reflectively invoke the superclass updateCacheNames to
                    // keep the cache-name collection in sync
                    Class<?>[] parameterTypes = {String.class};
                    Object[] paramters = {cacheName};
                    ReflectionUtils.invokeMethod(getInstance(),SUPER_METHOD_UPDATECACHENAMES,parameterTypes,paramters);
                }
            }
        }
        return cache;
    }
    /** Builds a CustomRedisCache when dynamic creation is enabled, else null. */
    public CustomRedisCache getMissingCache(String cacheName, long expirationSecondTime, long refreshTime) {
        Boolean dynamic = (Boolean) ReflectionUtils.getFieldValue(getInstance(),SUPER_DYNAMIC);
        Boolean cacheNullValues = (Boolean) ReflectionUtils.getFieldValue(getInstance(), SUPER_CACHENULLVALUES);
        return dynamic ? new CustomRedisCache(cacheName, (this.isUsePrefix() ? this.getCachePrefix().prefix(cacheName) : null),
                this.getRedisOperations(), expirationSecondTime, refreshTime, cacheNullValues) : null;
    }
    /**
     * Resolves the expire time from the annotation parts; starts from the
     * superclass default for this cache name and supports ${...} placeholders.
     */
    @Override
    public Long getExpireTime(String cacheName, String[] cacheParams) {
        Long expirationSecondTime = this.computeExpiration(cacheName);
        if (cacheParams.length > 1) {
            String expirationStr = cacheParams[1];
            if (!StringUtils.isEmpty(expirationStr)) {
                // Allow the annotation to reference configuration, e.g. ${cache.expire-time}
                if (expirationStr.contains(MARK)) {
                    expirationStr = beanFactory.resolveEmbeddedValue(expirationStr);
                }
                expirationSecondTime = Long.parseLong(expirationStr);
            }
        }
        if(null == expirationSecondTime){
            expirationSecondTime = this.getExpireTime();
        }
        return expirationSecondTime;
    }
    /**
     * Resolves the auto-refresh time from the annotation parts; defaults to 0
     * (no auto refresh) when the annotation carries none.
     */
    @Override
    public Long getRefreshTime(String[] cacheParams) {
        Long refreshTime = 0L;
        if (cacheParams.length > 2) {
            String preloadStr = cacheParams[2];
            if (!StringUtils.isEmpty(preloadStr)) {
                if (preloadStr.contains(MARK)) {
                    preloadStr = beanFactory.resolveEmbeddedValue(preloadStr);
                }
                refreshTime = Long.parseLong(preloadStr);
            }
        }
        // NOTE(review): refreshTime starts at 0L and can never be null here, so
        // this fallback is unreachable; kept to mirror the original flow —
        // confirm whether the configured default was meant to apply.
        if(null == refreshTime){
            refreshTime = this.getRefreshTime();
        }
        return refreshTime;
    }
}

+ 75 - 0
common-cache-starter/src/main/java/com/yihu/base/cache/support/CacheInvocation.java

@ -0,0 +1,75 @@
package com.yihu.base.cache.support;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
public final class CacheInvocation {
    private Object key;
    private Object targetBean;
    private Method targetMethod;
    private Object[] arguments;
    private List<String> parameterTypes = new ArrayList<>();
    public CacheInvocation(Object key,Object targetBean,Method targetMethod,Object[] arguments,Class[] parameterTypes){
        this.key = key;
        this.targetBean = targetBean;
        this.targetMethod = targetMethod;
        if(null != arguments && arguments.length > 0){
            this.arguments = Arrays.copyOf(arguments,arguments.length);
        }
        if(null != parameterTypes && parameterTypes.length > 0){
            for( Class cls:parameterTypes){
                this.parameterTypes.add(cls.getName());
            }
        }
    }
    /**
     * 重写equals方法
     * @param obj
     * @return
     */
    @Override
    public boolean equals(Object obj){
        if(obj == this){
            return true;
        }
        if(null == obj || obj.getClass() != getClass()){
            return false;
        }
        CacheInvocation cacheInvocation = (CacheInvocation)obj;
        return key.equals(cacheInvocation.key);
    }
    /**
     * 重写hashCode方法
     * @return
     */
    @Override
    public int hashCode(){
        return key.hashCode();
    }
    public Object getKey() {
        return key;
    }
    public Object getTargetBean() {
        return targetBean;
    }
    public Method getTargetMethod() {
        return targetMethod;
    }
    public Object[] getArguments() {
        return arguments;
    }
}

+ 33 - 0
common-cache-starter/src/main/java/com/yihu/base/cache/support/CacheSupport.java

@ -0,0 +1,33 @@
package com.yihu.base.cache.support;
import java.lang.reflect.Method;
import java.util.Set;
/**
 * Contract for registering cacheable method invocations and refreshing cached
 * entries by replaying the recorded invocation.
 */
public interface CacheSupport {
    // Separator used inside @Cacheable values: name#expireTime#refreshTime
    // (name kept as-is — "SPERATOR" typo is part of the public API)
    String SPERATOR = "#";
    /**
     * Records a cacheable method invocation so it can be replayed later.
     *
     * @param invokedBean          proxied bean instance
     * @param invokedMethod        proxied method
     * @param invocationParamTypes parameter types of the proxied method
     * @param invocationArgs       arguments of the proxied method
     * @param annoationCacheNames  cache names (the @Cacheable annotation's value)
     * @param cacheKey             cache key (the @Cacheable annotation's key)
     */
    void registerInvocation(Object invokedBean, Method invokedMethod, Class[] invocationParamTypes, Object[] invocationArgs, Set<String> annoationCacheNames, String cacheKey);
    /**
     * Refreshes the entry stored under the given key in the named cache.
     *
     * @param cacheName cache (container) name
     * @param cacheKey  key of the entry to refresh
     */
    void refreshCacheByKey(String cacheName, String cacheKey);
}

+ 89 - 0
common-cache-starter/src/main/java/com/yihu/base/cache/support/CacheSupportImpl.java

@ -0,0 +1,89 @@
package com.yihu.base.cache.support;
import com.yihu.base.cache.manager.CacheManagerFactory;
import com.yihu.base.cache.manager.CustomCacheManager;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cache.Cache;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
import org.springframework.util.MethodInvoker;
import java.lang.reflect.Method;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
@Component
public class CacheSupportImpl implements CacheSupport {
    @Autowired
    private CacheManagerFactory cacheManagerFactory;
    /**
     * Resolves the Cache instances for the given annotation values
     * (name#expireTime#refreshTime).
     *
     * @param annoationCacheName @Cacheable annotation values
     * @return the resolved caches (possibly empty, never null)
     * @throws IllegalArgumentException when a named cache cannot be found
     */
    public Collection<? extends Cache> getCache(Set<String> annoationCacheName){
        Collection<String> cacheNames = getAnnoationValues(annoationCacheName);
        if(CollectionUtils.isEmpty(cacheNames)){
            // Typed empty list instead of the raw Collections.EMPTY_LIST; the
            // original null-checked, but getAnnoationValues never returns null.
            return Collections.emptyList();
        }
        Collection<Cache> cacheResults = new ArrayList<>();
        for(String cacheName:cacheNames){
            Cache cache = cacheManagerFactory.getCacheManager().getCache(cacheName);
            if(null == cache){
                throw new IllegalArgumentException("cannot find cache named "+cacheName);
            }
            cacheResults.add(cache);
        }
        return cacheResults;
    }
    /**
     * Extracts the cache names (the part before "#") from the annotation values.
     */
    public Collection<String> getAnnoationValues(Set<String> annoationNames){
        Collection<String> cacheNames = new HashSet<>();
        for(String cacheName:annoationNames){
            String[] cacheParams =  cacheName.split(CacheSupport.SPERATOR);
            // First part before "#" is the real cache name
            cacheNames.add(cacheParams[0]);
        }
        return  cacheNames;
    }
    /**
     * Replays the recorded method invocation and returns its result.
     * Best-effort: failures are printed and yield null, matching the original —
     * NOTE(review): consider proper logging or propagation instead.
     */
    public Object invoke(CacheInvocation cacheInvocation){
        MethodInvoker methodInvoker =  new MethodInvoker();
        methodInvoker.setTargetObject(cacheInvocation.getTargetBean());
        methodInvoker.setTargetMethod(cacheInvocation.getTargetMethod().getName());
        methodInvoker.setArguments(cacheInvocation.getArguments());
        try {
            methodInvoker.prepare();
            return methodInvoker.invoke();
        } catch (Exception e) {
            e.printStackTrace();
        }
        return null;
    }
    /** No-op base implementation: the map/redis subclasses override this. */
    @Override
    public void registerInvocation(Object invokedBean, Method invokedMethod, Class[] invocationParamTypes, Object[] invocationArgs, Set<String> annoationCacheNames, String cacheKey) {
    }
    /** No-op base implementation: the map/redis subclasses override this. */
    @Override
    public void refreshCacheByKey(String cacheName, String cacheKey) {
    }
}

+ 87 - 0
common-cache-starter/src/main/java/com/yihu/base/cache/support/CachingAnnoationAspect.java

@ -0,0 +1,87 @@
package com.yihu.base.cache.support;
import com.yihu.base.cache.support.CacheSupport;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Pointcut;
import org.aspectj.lang.reflect.MethodSignature;
import org.springframework.aop.framework.AopProxyUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.cache.annotation.Cacheable;
import org.springframework.core.BridgeMethodResolver;
import org.springframework.stereotype.Component;
import org.springframework.util.ClassUtils;
import java.lang.annotation.Annotation;
import java.lang.reflect.AnnotatedElement;
import java.lang.reflect.Array;
import java.lang.reflect.Method;
import java.util.*;
@Aspect
@Component
public class CachingAnnoationAspect {
    @Autowired
    private CacheSupport cacheSupport;
    /**
     * Collects the given annotation both directly on the element and as a
     * meta-annotation on the element's other annotations.
     *
     * @return the annotations found, or null when there are none
     */
    private <T extends Annotation> List<T> getMethodAnnotations(AnnotatedElement annotatedElement,Class<T> annotationType){
        List<T> anns = new ArrayList<T>();
        T ann = annotatedElement.getAnnotation(annotationType);
        if (null != ann) {
            anns.add(ann);
        }
        for(Annotation annotation:annotatedElement.getAnnotations()){
            ann = annotation.annotationType().getAnnotation(annotationType);
            if(null != ann){
                anns.add(ann);
            }
        }
        return anns.isEmpty() ? null : anns;
    }
    /**
     * Resolves the most specific (target-class, bridge-resolved) method for
     * the join point.
     */
    private Method getSpecificMethod(ProceedingJoinPoint proceedingJoinPoint){
        MethodSignature methodSignature = (MethodSignature)proceedingJoinPoint.getSignature();
        Method method = methodSignature.getMethod();
        Class<?> targetClass = AopProxyUtils.ultimateTargetClass(proceedingJoinPoint.getTarget());
        if(null == targetClass && null != proceedingJoinPoint.getTarget()){
            targetClass = proceedingJoinPoint.getTarget().getClass();
        }
        Method specificMethod = ClassUtils.getMostSpecificMethod(method,targetClass);
        specificMethod = BridgeMethodResolver.findBridgedMethod(specificMethod);
        return specificMethod;
    }
    @Pointcut("@annotation(org.springframework.cache.annotation.Cacheable)")
    public void pointcut(){}
    /**
     * Registers the @Cacheable invocation (bean, method, args, cache names and
     * key) so it can be replayed for refresh, then proceeds with the call.
     *
     * Fixed two defects of the original: (1) it returned null WITHOUT invoking
     * the target method when no annotation was resolved; (2) it swallowed any
     * Throwable from proceed() and returned null, hiding failures.
     *
     * @throws Throwable whatever the target method throws
     */
    @Around("pointcut()")
    public Object registerInvocation(ProceedingJoinPoint proceedingJoinPoint) throws Throwable {
        Method method = getSpecificMethod(proceedingJoinPoint);
        List<Cacheable> annotations =  getMethodAnnotations(method,Cacheable.class);
        if(null != annotations){
            Set<String> cacheSet = new HashSet<>();
            String cacheKey = "";
            for (Cacheable cacheable:annotations) {
                cacheSet.addAll(Arrays.asList(cacheable.value()));
                cacheKey = cacheable.key();
            }
            if(proceedingJoinPoint.getSignature() instanceof MethodSignature){
                Class[] parameterTypes = ((MethodSignature) proceedingJoinPoint.getSignature()).getParameterTypes();
                cacheSupport.registerInvocation(proceedingJoinPoint.getTarget(),method,parameterTypes,proceedingJoinPoint.getArgs(),cacheSet,cacheKey);
            }
        }
        // Always proceed so the business method executes even when
        // registration is skipped.
        return proceedingJoinPoint.proceed();
    }
}

+ 64 - 0
common-cache-starter/src/main/java/com/yihu/base/cache/support/MapCacheSupportImpl.java

@ -0,0 +1,64 @@
package com.yihu.base.cache.support;
import com.yihu.base.cache.cache.CustomMapCache;
import com.yihu.base.cache.config.CacheKeyGenerator;
import com.yihu.base.cache.manager.CustomCacheManager;
import com.yihu.base.cache.util.ReflectionUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cache.Cache;
import org.springframework.cache.concurrent.ConcurrentMapCache;
import org.springframework.cache.concurrent.ConcurrentMapCacheManager;
import org.springframework.stereotype.Component;
import java.lang.reflect.Method;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
@Component
public class MapCacheSupportImpl extends CacheSupportImpl {
    /** cacheKey -> recorded invocation awaiting refresh */
    private ConcurrentMap<String,Object> invocationMap = new ConcurrentHashMap<>();
    @Autowired
    private CustomCacheManager customCacheManager;
    /**
     * Records the invocation for every map-backed cache whose entry is either
     * past its expire time or within its refresh window.
     */
    @Override
    public void registerInvocation(Object invokedBean, Method invokedMethod, Class[] invocationParamTypes, Object[] invocationArgs, Set<String> annoationCacheNames, String cacheKey) {
        Collection<? extends Cache> caches = getCache(annoationCacheNames);
        CacheInvocation cacheInvocation = new CacheInvocation(CacheKeyGenerator.getCacheKey(),invokedBean,invokedMethod,invocationArgs,invocationParamTypes);
        for(Cache cache:caches){
            if(cache instanceof CustomMapCache){
                CustomMapCache customMapCache = (CustomMapCache)cache;
                long elapsed = System.currentTimeMillis() - customMapCache.getStartTime();
                if(elapsed >= customMapCache.getExpireTime() || elapsed <= customMapCache.getRefreshTime()){
                    invocationMap.put(cacheKey,cacheInvocation);
                }
            }
        }
    }
    /** Replays the invocation recorded under cacheKey, if any, into cacheName. */
    @Override
    public void refreshCacheByKey(String cacheName, String cacheKey) {
        CacheInvocation cacheInvocation = (CacheInvocation)invocationMap.get(cacheKey);
        if(null != cacheInvocation){
            refreshCache(cacheInvocation,cacheName);
        }
    }
    /**
     * Replays the recorded invocation and stores the fresh value in the cache.
     */
    private void refreshCache(CacheInvocation cacheInvocation,String cacheName){
        Object computed = invoke(cacheInvocation);
        Cache cache = customCacheManager.getCache(cacheName);
        // Store the fresh value
        cache.put(cacheInvocation.getKey(),computed);
        CustomMapCache customMapCache = (CustomMapCache)cache;
        // NOTE(review): invocations are registered under the annotation's
        // cacheKey but removed here under the cache NAME — different key
        // spaces, so this remove is likely a no-op; confirm the intended key.
        invocationMap.remove(customMapCache.getName());
        // Fixed: the field must be read from the cache INSTANCE; the original
        // passed ConcurrentMapCache.class, which looks up "store" on
        // java.lang.Class and always fails (NullPointerException downstream).
        ConcurrentMap<Object, Object> cacheMap = (ConcurrentMap<Object, Object>) ReflectionUtils.getFieldValue(cache, "store");
        cacheMap.remove(customMapCache.getName());
    }
}

+ 60 - 0
common-cache-starter/src/main/java/com/yihu/base/cache/support/RedisCacheSupportImpl.java

@ -0,0 +1,60 @@
package com.yihu.base.cache.support;
import com.yihu.base.cache.cache.CustomRedisCache;
import com.yihu.base.cache.config.CacheKeyGenerator;
import com.yihu.base.cache.manager.CustomCacheManager;
import com.yihu.base.cache.util.RedisTemplateUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cache.Cache;
import org.springframework.data.redis.connection.RedisConnectionFactory;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.stereotype.Component;
import java.lang.reflect.Method;
import java.util.*;
import java.util.concurrent.TimeUnit;
@Component
public class RedisCacheSupportImpl extends CacheSupportImpl {
    @Autowired
    private CustomCacheManager customCacheManager;
    @Autowired
    private RedisConnectionFactory redisConnectionFactory;
    /**
     * Stores the invocation in Redis under the generated cache key for every
     * Redis-backed cache named by the annotation; the record expires together
     * with the cached value (same expire time).
     */
    @Override
    public void registerInvocation(Object invokedBean, Method invokedMethod, Class[] invocationParamTypes, Object[] invocationArgs, Set<String> annoationCacheNames, String cacheKey) {
        CacheInvocation invocation = new CacheInvocation(CacheKeyGenerator.getCacheKey(), invokedBean, invokedMethod, invocationArgs, invocationParamTypes);
        for (Cache candidate : getCache(annoationCacheNames)) {
            if (candidate instanceof CustomRedisCache) {
                CustomRedisCache redisCache = (CustomRedisCache) candidate;
                RedisTemplateUtils.getRedisTemplate(redisConnectionFactory)
                        .opsForValue()
                        .set(CacheKeyGenerator.getCacheKey(), invocation, redisCache.getExpireTime(), TimeUnit.SECONDS);
            }
        }
    }
    /**
     * Looks up the invocation recorded under the given key in Redis and, when
     * present, replays it to refresh the named cache.
     */
    @Override
    public void refreshCacheByKey(String cacheName, String cacheKey) {
        CacheInvocation stored = (CacheInvocation) RedisTemplateUtils
                .getRedisTemplate(redisConnectionFactory)
                .opsForValue()
                .get(cacheKey);
        if (stored != null) {
            refreshCache(cacheName, stored);
        }
    }
    /**
     * Replays the invocation, writes the fresh value into the cache and renews
     * the expiry on the cached entry.
     */
    public void refreshCache(String cacheName, CacheInvocation cacheInvocation) {
        Object freshValue = invoke(cacheInvocation);
        Cache cache = customCacheManager.getCache(cacheName);
        cache.put(cacheInvocation.getKey(), freshValue);
        CustomRedisCache redisCache = (CustomRedisCache) cache;
        RedisTemplateUtils.getRedisTemplate(redisConnectionFactory)
                .expire(cacheInvocation.getKey(), redisCache.getExpireTime(), TimeUnit.SECONDS);
    }
}

+ 31 - 0
common-cache-starter/src/main/java/com/yihu/base/cache/util/RedisTemplateUtils.java

@ -0,0 +1,31 @@
package com.yihu.base.cache.util;
import org.springframework.data.redis.connection.RedisConnectionFactory;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.data.redis.serializer.JdkSerializationRedisSerializer;
import org.springframework.data.redis.serializer.StringRedisSerializer;
public class RedisTemplateUtils {
    /** Lazily-built shared template; guarded by the synchronized accessor. */
    private static RedisTemplate redisTemplate;
    /**
     * Returns the shared RedisTemplate, building and configuring it on first
     * use. The method is synchronized so initialization happens exactly once.
     */
    public static synchronized RedisTemplate getRedisTemplate(RedisConnectionFactory redisConnectionFactory) {
        if (redisTemplate != null) {
            return redisTemplate;
        }
        RedisTemplate template = new RedisTemplate();
        template.setConnectionFactory(redisConnectionFactory);
        // Values (plain and hash) use JDK serialization so arbitrary
        // Serializable objects can be stored.
        JdkSerializationRedisSerializer valueSerializer = new JdkSerializationRedisSerializer();
        template.setValueSerializer(valueSerializer);
        template.setHashValueSerializer(valueSerializer);
        // Keys (plain and hash) are serialized as plain strings.
        StringRedisSerializer keySerializer = new StringRedisSerializer();
        template.setKeySerializer(keySerializer);
        template.setHashKeySerializer(keySerializer);
        template.afterPropertiesSet();
        redisTemplate = template;
        return redisTemplate;
    }
}

+ 145 - 0
common-cache-starter/src/main/java/com/yihu/base/cache/util/ReflectionUtils.java

@ -0,0 +1,145 @@
package com.yihu.base.cache.util;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
public class ReflectionUtils {
    /**
     * Walks the class hierarchy (excluding Object) looking for a declared
     * method, so private/protected methods of superclasses are found too.
     *
     * @param object         instance whose hierarchy is searched
     * @param methodName     method name to look up
     * @param parameterTypes method parameter types
     * @return the Method, or null when no class in the hierarchy declares it
     */
    public static Method getDeclaredMethod(Object object, String methodName, Class<?>... parameterTypes) {
        for (Class<?> clazz = object.getClass(); clazz != Object.class; clazz = clazz.getSuperclass()) {
            try {
                return clazz.getDeclaredMethod(methodName, parameterTypes);
            } catch (NoSuchMethodException e) {
                // Not declared on this class — keep climbing to the superclass.
                // The exception must not propagate, or the walk would stop.
            }
        }
        // NOTE: methods declared directly on Object are intentionally not
        // searched (the loop stops before Object), matching the original.
        return null;
    }
    /**
     * Invokes a method regardless of its access modifier
     * (private/protected/default).
     *
     * @return the invocation result, or null when the method does not exist
     *         or the invocation fails
     */
    public static Object invokeMethod(Object object, String methodName, Class<?>[] parameterTypes,
                                      Object[] parameters) {
        Method method = getDeclaredMethod(object, methodName, parameterTypes);
        // Fixed: the original called setAccessible BEFORE this null check and
        // threw NullPointerException whenever the method was missing.
        if (null == method) {
            return null;
        }
        // Suppress Java access checks (needed for private methods)
        method.setAccessible(true);
        try {
            return method.invoke(object, parameters);
        } catch (IllegalArgumentException e) {
            e.printStackTrace();
        } catch (IllegalAccessException e) {
            e.printStackTrace();
        } catch (InvocationTargetException e) {
            e.printStackTrace();
        }
        return null;
    }
    /**
     * Walks the class hierarchy (excluding Object) looking for a declared
     * field, so private/protected fields of superclasses are found too.
     *
     * @return the Field, or null when no class in the hierarchy declares it
     */
    public static Field getDeclaredField(Object object, String fieldName) {
        for (Class<?> clazz = object.getClass(); clazz != Object.class; clazz = clazz.getSuperclass()) {
            try {
                return clazz.getDeclaredField(fieldName);
            } catch (NoSuchFieldException e) {
                // Not declared on this class — keep climbing to the superclass.
            }
        }
        return null;
    }
    /**
     * Sets a field value directly, bypassing access checks and setters.
     * Missing fields are a silent no-op (the original threw
     * NullPointerException on the setAccessible call).
     */
    public static void setFieldValue(Object object, String fieldName, Object value) {
        Field field = getDeclaredField(object, fieldName);
        if (null == field) {
            return;
        }
        field.setAccessible(true);
        try {
            field.set(object, value);
        } catch (IllegalArgumentException e) {
            e.printStackTrace();
        } catch (IllegalAccessException e) {
            e.printStackTrace();
        }
    }
    /**
     * Reads a field value directly, bypassing access checks and getters.
     *
     * @return the value, or null when the field does not exist or cannot be
     *         read (the original threw NullPointerException for missing fields)
     */
    public static Object getFieldValue(Object object, String fieldName) {
        Field field = getDeclaredField(object, fieldName);
        if (null == field) {
            return null;
        }
        field.setAccessible(true);
        try {
            return field.get(object);
        } catch (Exception e) {
            e.printStackTrace();
        }
        return null;
    }
}

+ 68 - 0
common-cache-starter/src/main/java/com/yihu/base/cache/util/SpringContextUtils.java

@ -0,0 +1,68 @@
package com.yihu.base.cache.util;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.stereotype.Component;
/**
 * Spring context holder: captures the ApplicationContext at startup and exposes
 * static bean-lookup helpers for code that is not itself Spring-managed.
 *
 * @author LiTaohong
 */
@Component
public class SpringContextUtils implements ApplicationContextAware {
    private static ApplicationContext springContext = null;

    /**
     * @return the current Spring application context
     *         (null until the context has been injected at startup)
     */
    public static ApplicationContext getApplicationContext() {
        return springContext;
    }

    @Override
    public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
        springContext = applicationContext;
    }

    /**
     * Looks up a bean by name.
     *
     * @param serviceName bean name
     * @param <T>         expected bean type
     * @return the bean instance
     */
    @SuppressWarnings("unchecked")
    public static <T> T getBean(String serviceName) {
        return (T) springContext.getBean(serviceName);
    }

    public static <T> T getBean(Class<T> beanCls) {
        return springContext.getBean(beanCls);
    }

    /**
     * Looks up a (typically prototype-scoped) bean by name, passing explicit
     * constructor / factory-method arguments.
     *
     * NOTE: getBean never returns null (it throws NoSuchBeanDefinitionException
     * instead), so the null-check-and-return-null the original performed here
     * was dead code and has been removed.
     *
     * @param serviceName bean name
     * @param args        constructor or factory-method arguments
     * @param <T>         expected bean type
     * @return the bean instance
     */
    @SuppressWarnings("unchecked")
    public static <T> T getBean(String serviceName, Object... args) {
        return (T) springContext.getBean(serviceName, args);
    }

    @SuppressWarnings("unchecked")
    public static <T> T getBean(Class<T> beanCls, Object... args) {
        return (T) springContext.getBean(beanCls, args);
    }
}

+ 20 - 0
common-cache-starter/src/main/resources/template.yml

@ -0,0 +1,20 @@
cache: # 1-内存 2-redis
  type: 1
  expire-time: 2 #单位为小时
  refresh-time: 1.5
spring:
  redis:
    host: 172.19.103.88 # ip
    port: 6379 # Redis端口
    database: 0 # 默认使用DB0
    timeout: 0 # 连接超时时间(毫秒)
      #sentinel:
      #  master: # Name of Redis server.
      #  nodes: # Comma-separated list of host:port pairs.
    pool: ##连接池配置
      max-active: 8 # 连接池最大连接数(使用负值表示没有限制)
      max-idle: 8 # 连接池中的最大空闲连接
      max-wait: -1 # 连接池最大阻塞等待时间(使用负值表示没有限制)
      min-idle: 1 # 连接池中的最小空闲连接

+ 38 - 0
common-data-es-starter/pom.xml

@ -0,0 +1,38 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <groupId>com.yihu.jw</groupId>
        <artifactId>jkzl-start</artifactId>
        <version>1.0.0</version>
        <relativePath>../pom.xml</relativePath>
    </parent>
    <artifactId>common-data-es-starter</artifactId>
    <version>1.0.0</version>
    <dependencies>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-data-elasticsearch</artifactId>
        </dependency>
        <dependency>
            <groupId>org.nlpcn</groupId>
            <artifactId>elasticsearch-sql</artifactId>
        </dependency>
        <dependency>
            <groupId>org.elasticsearch</groupId>
            <artifactId>elasticsearch</artifactId>
        </dependency>
        <dependency>
            <groupId>com.alibaba</groupId>
            <artifactId>fastjson</artifactId>
        </dependency>
        <dependency>
            <groupId>io.searchbox</groupId>
            <artifactId>jest</artifactId>
        </dependency>
    </dependencies>
</project>

+ 93 - 0
common-data-es-starter/readme.MD

@ -0,0 +1,93 @@
ElasticSearch
    a.服务说明:
        1.该服务是对ElasticSearch搜索引擎的集成,提供了一些相对常用的操作接口
    b.配置说明:
	    1.详见application.yml文件,其中如果有多个cluster-nodes请以逗号(,)分隔
    c.调用说明:
	    1.初始化索引(相当于建立数据库表)
		    1).索引文档之前请务必请求mapping接口进行字段映射,不然可能发生检索结果不准确的情况
		    2).参数
			    index
				    - 参数格式:"<index>"
				    - 参数说明:索引名称
			    type 
				    - 参数格式:"<type>"
				    - 参数说明:索引类型
			    source
			    - 参数格式:["<field>":{"type":"<string|byte|short|integer|long|float|double|boolean|date>","store":"<yes|no>"},"<...>":{<...>}]
				    - 参数说明:
                        因{"...":{...}}格式的字符串FeignClient请求的时候会出错,故用[]替换{}到后台的时候再进行替换处理
                        field值为字段名称,字段可选属性还有index、analyzer、search_analyzer、format
                        当字段不分词时形如:["name":{"type":"string","index":"not_analyzed"}]
                        当字段要分词时形如:["name":{"type":"string","analyzer":"ik","search_analyzer":"ik"}]
                        当字段为date类型时可指定时间格式:["createDate":{"type":"date","format":"yyyy-MM-dd HH:mm:ss"}]
                        当字段特别长的时候可选择store为yes否则默认为no
        2.索引文档(相当于插入数据)
            1).请求index接口
            2).参数
                index
                    - 同上
                type
                    - 同上
                source
                    - 参数格式:{"<field>":"<value>","<field>":<value>,"field":"<yyyy-MM-dd HH:mm:ss>"}
                    - 参数说明:field指字段名称,value指字段值,其中时间格式为yyyy-MM-dd HH:mm:ss
        3.删除索引(相当于删除数据)
            1).请求delete接口
            2).参数
                index
                    - 同上
                type
                    - 同上
                id
                    - 参数格式:"id1,id2,..."
                    - 参数说明:多个数值以逗号(,)分隔
        4.更新索引(相当于更新数据)
            1).请求update接口
            2).参数
                index
                    - 同上
                type
                    - 同上
                id
                    - 参数格式:"<id>"
                    - 参数说明:原文档ID
                source
                    - 参数格式:同index接口
                    - 参数说明:数据中请勿包含原文档ID(_id)
        5.获取单条数据
            1).请求findById接口
            2).参数
                index
                    - 同上
                type
                    - 同上
                id
                    - 同上
        6.简单数据检索(findByField)
            1).请求findByField接口
            2).参数
                index
                    - 同上
                type
                    - 同上
                field
                    - 参数格式:"<field>"
                    - 参数说明:字段名称
                value
                    - 参数格式:初始化索引时,该字段对应的类型
                    - 参数说明:字段值
        7.组合数据检索(page)
            1).请求page接口,支持模糊查询,完全匹配,范围查询
            2).参数
                index
                    - 同上
                type
                    - 同上
                filter
                    - 参数格式:[{"andOr":"and|or","condition":">|=|<|>=|<=|?","field":"<field>","value":"<value>"},<{...}>]
                    - 参数说明:andOr跟数据库中的AND和OR相似;condition指条件匹配程度,?相当于数据库中的like;field指检索的字段;value为检索的值
                page
                    - 参数说明:页码
                size 
                    - 参数说明:分页大小

+ 100 - 0
common-data-es-starter/src/main/java/com/yihu/base/es/config/ElasticFactory.java

@ -0,0 +1,100 @@
package com.yihu.base.es.config;
import io.searchbox.client.JestClient;
import io.searchbox.client.JestClientFactory;
import io.searchbox.client.config.ClientConfig;
import io.searchbox.client.config.HttpClientConfig;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
import org.springframework.util.StringUtils;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.Set;
import java.util.concurrent.TimeUnit;
/**
 * Factory for the two Elasticsearch client flavours used by this starter:
 * the Jest HTTP client and the native TransportClient.
 *
 * Created by chenweida on 2017/6/5.
 */
@Component
public class ElasticFactory {
    // Shared, lazily-initialised Jest factory; guarded by the synchronized init().
    private static JestClientFactory factory = null;

    @Value("${spring.data.elasticsearch.cluster-name}")
    private String clusterNames;

    @Value("${spring.data.elasticsearch.cluster-nodes}")
    private String clusterNodes; // e.g. 120.25.194.233:9300,120.25.194.233:9300

    @Value("${spring.elasticsearch.jest.uris}")
    private String jestHost; // e.g. http://192.168.226.133:9200

    //-----------------------------------jestClient----------------------------------------

    /**
     * Returns a Jest client, lazily initialising the shared factory on first use.
     *
     * @return a JestClient connected to the URIs from spring.elasticsearch.jest.uris
     */
    public JestClient getJestClient() {
        if (factory == null) {
            // Lazy initialisation; init() re-checks under the lock.
            init();
        }
        return factory.getObject();
    }

    /**
     * Initialises the shared Jest client factory. Idempotent: the original
     * unconditionally re-created the factory on every call, so concurrent
     * first-time callers could each build a factory and observe a half-built
     * one; now the factory is built once and published fully configured.
     */
    public synchronized void init() {
        if (factory != null) {
            // Another thread initialised the factory while we waited for the lock.
            return;
        }
        JestClientFactory newFactory = new JestClientFactory();
        Set<String> serverList = new LinkedHashSet<>();
        String[] uris = jestHost.split(",");
        serverList.addAll(CollectionUtils.arrayToList(uris));
        newFactory.setHttpClientConfig(new HttpClientConfig
                .Builder(serverList)
                .multiThreaded(true)
                .maxTotalConnection(50)                        // max pooled connections
                .maxConnectionIdleTime(120, TimeUnit.SECONDS)  // idle-connection timeout
                .connTimeout(60 * 1000)                        // connect timeout: 60s
                // .discoveryEnabled(true)
                .readTimeout(60 * 1000)                        // read timeout: 60s
                .build());
        // Publish only after full configuration.
        factory = newFactory;
    }

    //-----------------------------------TransportClient----------------------------------------
    private TransportClient transportClient;

    public TransportClient getTransportClient() {
        try {
            initTranClient();
            return transportClient;
        } catch (Exception e) {
            e.printStackTrace();
        }
        return null;
    }

    /**
     * Lazily creates the native TransportClient from the configured
     * "host:port,host:port" node list.
     */
    private synchronized void initTranClient() throws UnknownHostException {
        if (transportClient == null) {
            Settings settings = Settings.settingsBuilder()
                    // .put("client.transport.sniff", true) // sniffing disabled
                    .put("cluster.name", StringUtils.isEmpty(clusterNames) ? "jkzl" : clusterNames) // default cluster name: jkzl
                    .build();
            transportClient = TransportClient.builder().settings(settings).build();
            String[] ips = clusterNodes.split(",");
            for (String ip : ips) {
                String[] ipAndPort = ip.split(":");
                transportClient.addTransportAddress(new InetSocketTransportAddress(
                        InetAddress.getByName(ipAndPort[0]), Integer.valueOf(ipAndPort[1])));
            }
        }
    }
}

+ 226 - 0
common-data-es-starter/src/main/java/com/yihu/base/es/config/ElastricSearchHelper.java

@ -0,0 +1,226 @@
package com.yihu.base.es.config;
import com.alibaba.fastjson.JSONObject;
import com.google.gson.JsonObject;
import com.yihu.base.es.config.model.SaveModel;
import io.searchbox.client.JestClient;
import io.searchbox.client.JestResult;
import io.searchbox.core.*;
import org.elasticsearch.action.update.UpdateRequest;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Scope;
import org.springframework.data.elasticsearch.core.ElasticsearchTemplate;
import org.springframework.data.elasticsearch.core.query.UpdateQueryBuilder;
import org.springframework.stereotype.Component;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
;
/**
 * Created by chenweida on 2017/6/2.
 */
@Component
@Scope("prototype")
public class ElastricSearchHelper {
    private Logger logger = LoggerFactory.getLogger(ElastricSearchHelper.class);
    @Autowired
    private ElasticFactory elasticFactory;
    public Boolean save(String index, String type, List<SaveModel> sms) {
        JestClient jestClient = null;
        try {
            //得到链接elasticFactory.getJestClient();
            jestClient = elasticFactory.getJestClient();
            int success = 0;
            int error = 0;
            Bulk.Builder bulk = new Bulk.Builder().defaultIndex(index).defaultType(type);
            for (SaveModel obj : sms) {
                try {
                    Index indexObj = new Index.Builder(obj).build();
                    success++;
                    bulk.addAction(indexObj);
                } catch (Exception e) {
                    logger.error(e.getMessage());
                    error++;
                }
            }
            BulkResult br = jestClient.execute(bulk.build());
            logger.info("save flag:" + br.isSucceeded());
            logger.info("save success:" + success);
            logger.info("save error:" + error);
            return br.isSucceeded();
        } catch (Exception e) {
            logger.error(" save error :" + e.getMessage());
        } finally {
            if (jestClient != null) {
                jestClient.shutdownClient();
            }
        }
        return null;
    }
    public Boolean save(String index, String type, String source) {
        JestClient jestClient = null;
        BulkResult br = null;
        try {
            //得到链接elasticFactory.getJestClient();
            jestClient = elasticFactory.getJestClient();
            int success = 0;
            int error = 0;
            Bulk.Builder bulk = new Bulk.Builder().defaultIndex(index).defaultType(type);
            try {
                Index indexObj = new Index.Builder(source).build();
                success++;
                bulk.addAction(indexObj);
            } catch (Exception e) {
                logger.error(e.getMessage());
                error++;
            }
            br = jestClient.execute(bulk.build());
            logger.info("save flag:" + br.isSucceeded());
            logger.info("save success:" + success);
            logger.info("save error:" + error);
            return br.isSucceeded();
        } catch (Exception e) {
            logger.error(" save error :" + e.getMessage());
        } finally {
            if (jestClient != null) {
                jestClient.shutdownClient();
            }
        }
        return br.isSucceeded();
    }
    public Boolean update(String index, String type, List<SaveModel> sms) {
        JestClient jestClient = null;
        BulkResult br = null;
        try {
            //得到链接
            jestClient = elasticFactory.getJestClient();
            int success = 0;
            int error = 0;
            boolean isSuccessed = true;
            Bulk.Builder bulk = new Bulk.Builder().defaultIndex(index).defaultType(type);
            for (SaveModel obj : sms) {
                try {
                    JSONObject jo = new JSONObject();
                    jo.put("doc", obj);
                    Update indexObj = new Update.Builder(jo.toString()).index(index).type(type).id(obj.getId()).build();
                    bulk.addAction(indexObj);
                    success++;
                } catch (Exception e) {
                    error++;
                    isSuccessed = false;
                }
            }
            br = jestClient.execute(bulk.build());
            logger.info("update flag:" + br.isSucceeded());
            logger.info("update success:" + success);
            logger.info("update error:" + error);
            return isSuccessed;
        } catch (Exception e) {
            logger.error(" update error :" + e.getMessage());
        } finally {
            if (jestClient != null) {
                jestClient.shutdownClient();
            }
        }
        return br.isSucceeded();
    }
    /**
     * 删除
     */
    public void deleteData(String index, String type, List<SaveModel> saveModels) {
        JestClient jestClient = null;
        try {
            jestClient = elasticFactory.getJestClient();
            //根据id批量删除
            Bulk.Builder bulk = new Bulk.Builder().defaultIndex(index).defaultType(type);
            for (SaveModel obj : saveModels) {
                Delete indexObj = new Delete.Builder(obj.getId()).build();
                bulk.addAction(indexObj);
            }
            BulkResult br = jestClient.execute(bulk.build());
            logger.info("delete data count:" + saveModels.size());
            logger.info("delete flag:" + br.isSucceeded());
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            if (jestClient != null) {
                jestClient.shutdownClient();
            }
        }
    }
    /**
     * 查询
     */
    public SearchResult search(String index, String type, String queryStr) {
        JestClient jestClient = null;
        SearchResult result = null;
        try {
            jestClient = elasticFactory.getJestClient();
            Search search = new Search.Builder(queryStr)
                    // multiple index or types can be added.
                    .addIndex(index)
                    .addType(type)
                    .build();
            result = jestClient.execute(search);
            logger.info("search data count:" + result.getTotal());
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            if (jestClient != null) {
                jestClient.shutdownClient();
            }
        }
        return result;
    }
    /**
     * 修改
     */
    public boolean update(String index, String type,String _id, JSONObject source) {
        JestClient jestClient = null;
        JestResult jestResult = null;
        try {
            jestClient = elasticFactory.getJestClient();
            JSONObject docSource = new JSONObject();
            docSource.put("doc",source);
            Update update = new Update.Builder(docSource).index(index).type(type).id(_id).build();
            jestResult = jestClient.execute(update);
            logger.info("update info:" + jestResult.isSucceeded());
        } catch (Exception e) {
            logger.error("update fail:" + _id,e.getMessage());
            return false;
        } finally {
            if (jestClient != null) {
                jestClient.shutdownClient();
            }
        }
        return true;
    }
    public static void main(String args[]){
        String json = "";
        JSONObject resultJsonObject = (JSONObject)JSONObject.parse(json);
        JSONObject jsonObject = (JSONObject)resultJsonObject.get("hits");
        System.out.println(jsonObject.get("total"));
    }
}

+ 21 - 0
common-data-es-starter/src/main/java/com/yihu/base/es/config/model/SaveModel.java

@ -0,0 +1,21 @@
package com.yihu.base.es.config.model;
import io.searchbox.annotations.JestId;
/**
 * Common parent class for models that are persisted to ElasticSearch.
 * Subclasses inherit the document id; {@code @JestId} marks it as the
 * ES document id (_id) for the Jest client.
 *
 * Created by chenweida on 2017/11/3.
 */
public class SaveModel {
    // ES document id (_id); populated by Jest on indexing/retrieval.
    @JestId
    private String id;
    public String getId() {
        return id;
    }
    public void setId(String id) {
        this.id = id;
    }
}

+ 28 - 0
common-data-es-starter/src/main/resources/template.yml

@ -0,0 +1,28 @@
es:
  host:  59.61.92.90
  port: 9067 #默认是9200
  tPort: 9068 #transport端口 默认是9300
  clusterName: jkzl
spring:
  data:
    elasticsearch: #ElasticsearchProperties
      cluster-name: jkzl #默认即为elasticsearch  集群名
      cluster-nodes: 120.25.194.233:9300,120.25.194.233:9300 #配置es节点信息,逗号分隔,如果没有指定,则启动ClientNode
      local: false ##是否本地连接
      properties: # Additional properties used to configure the client.
        enable: true
  # JEST (Elasticsearch HTTP client) (JestProperties)
  elasticsearch:
    jest:
      uris: http://172.17.110.217:9200,http://172.17.110.128:9200
      connection-timeout: # Connection timeout in milliseconds.
      multi-threaded: true # Enable connection requests from multiple execution threads.
      username: # Login user.
      password: # Login password.
      proxy.port:  # Proxy port the HTTP client should use.
      proxy.host:  # Proxy host the HTTP client should use.

+ 43 - 0
common-data-fastdfs-starter/pom.xml

@ -0,0 +1,43 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <groupId>com.yihu.jw</groupId>
        <artifactId>jkzl-start</artifactId>
        <version>1.0.0</version>
        <relativePath>../pom.xml</relativePath>
    </parent>
    <artifactId>common-data-fastdfs-starter</artifactId>
    <version>1.0.0</version>
    <dependencies>
        <dependency>
            <groupId>com.yihu.base</groupId>
            <artifactId>common-log</artifactId>
        </dependency>
        <dependency>
            <groupId>com.fasterxml.jackson.core</groupId>
            <artifactId>jackson-databind</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework</groupId>
            <artifactId>spring-beans</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework</groupId>
            <artifactId>spring-context</artifactId>
        </dependency>
        <dependency>
            <groupId>org.csource</groupId>
            <artifactId>fastdfs_lib</artifactId>
        </dependency>
        <dependency>
            <groupId>org.csource</groupId>
            <artifactId>fastdfs-client-java</artifactId>
            <version>1.0</version>
        </dependency>
    </dependencies>
</project>

+ 117 - 0
common-data-fastdfs-starter/src/main/java/com/yihu/base/config/FastDFSConfig.java

@ -0,0 +1,117 @@
package com.yihu.base.config;
import com.yihu.base.fastdfs.FastDFSClientPool;
import com.yihu.base.fastdfs.FastDFSHelper;
import org.csource.common.MyException;
import org.csource.fastdfs.ClientGlobal;
import org.csource.fastdfs.TrackerGroup;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import javax.annotation.PostConstruct;
import java.net.InetSocketAddress;
/**
 * Spring configuration that bootstraps the FastDFS client globals
 * (ClientGlobal) from application properties instead of a fdfs_client.conf
 * file, and exposes the client pool / helper beans.
 *
 * @author Sand
 * @version 1.0
 * @created 2015.11.27 16:08
 */
@Configuration
public class FastDFSConfig {
    private Logger logger = LoggerFactory.getLogger(FastDFSConfig.class);

    @Value("${fast-dfs.pool.init-size}")
    private int initPoolSize;
    @Value("${fast-dfs.pool.max-size}")
    private int maxPoolSize;
    @Value("${fast-dfs.pool.wait-time}")
    private int waitTime;
    @Value("${fast-dfs.connect-timeout}")
    private int connectTimeout;
    @Value("${fast-dfs.network-timeout}")
    private int networkTimeout;
    @Value("${fast-dfs.charset}")
    private String charset;
    @Value("${fast-dfs.tracker-server}")
    private String trackerServers;
    @Value("${fast-dfs.http.tracker-http-port}")
    private int httpPort;
    @Value("${fast-dfs.http.anti-steal-token}")
    private boolean antiStealToken;
    @Value("${fast-dfs.http.secret-key}")
    private String secretKey;

    @PostConstruct
    void init() {
        try {
            // Adapted from ClientGlobal.init(), driven by Spring properties.
            ClientGlobal.g_connect_timeout = connectTimeout;
            if (ClientGlobal.g_connect_timeout < 0) {
                ClientGlobal.g_connect_timeout = 5;
            }
            ClientGlobal.g_connect_timeout *= 1000; // seconds -> milliseconds
            ClientGlobal.g_network_timeout = networkTimeout;
            if (ClientGlobal.g_network_timeout < 0) {
                ClientGlobal.g_network_timeout = 30;
            }
            ClientGlobal.g_network_timeout *= 1000; // seconds -> milliseconds
            ClientGlobal.g_charset = charset;
            if (ClientGlobal.g_charset == null || ClientGlobal.g_charset.length() == 0) {
                ClientGlobal.g_charset = "ISO8859-1";
            }
            // String.split never returns null, so the original null-check was
            // dead code; the meaningful failure is a missing/empty property.
            if (trackerServers == null || trackerServers.trim().isEmpty()) {
                throw new MyException("item \"tracker_server\" not found");
            }
            String[] szTrackerServers = trackerServers.split(";");
            InetSocketAddress[] trackerAddresses = new InetSocketAddress[szTrackerServers.length];
            for (int i = 0; i < szTrackerServers.length; ++i) {
                String[] parts = szTrackerServers[i].split("\\:", 2);
                if (parts.length != 2) {
                    throw new MyException("the value of item \"tracker_server\" is invalid, the correct format is host:port");
                }
                trackerAddresses[i] = new InetSocketAddress(parts[0].trim(), Integer.parseInt(parts[1].trim()));
            }
            ClientGlobal.g_tracker_group = new TrackerGroup(trackerAddresses);
            ClientGlobal.g_tracker_http_port = httpPort;
            ClientGlobal.g_anti_steal_token = antiStealToken;
            if (ClientGlobal.g_anti_steal_token) {
                ClientGlobal.g_secret_key = secretKey;
            }
        } catch (MyException e) {
            // Same best-effort behaviour as before: log and continue startup.
            logger.error("FastDFS初始化失败: " + e.getMessage());
        }
    }

    @Bean
    public FastDFSClientPool fastDFSClientPool() {
        FastDFSClientPool clientPool = new FastDFSClientPool();
        clientPool.setMaxPoolSize(maxPoolSize);
        return clientPool;
    }

    @Bean
    public FastDFSHelper fastDFSUtil() {
        return new FastDFSHelper();
    }
}

+ 65 - 0
common-data-fastdfs-starter/src/main/java/com/yihu/base/fastdfs/FastDFSClientPool.java

@ -0,0 +1,65 @@
package com.yihu.base.fastdfs;
import org.csource.fastdfs.StorageClient;
import org.csource.fastdfs.TrackerClient;
import org.csource.fastdfs.TrackerServer;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
/**
 * A small synchronized pool of FastDFS {@link StorageClient} instances.
 *
 * Map value semantics: {@code true} = client is idle and may be handed out,
 * {@code false} = client is currently checked out.
 *
 * Created by szx on 2015/9/19.
 */
public class FastDFSClientPool {
    private int maxPoolSize;

    // Guarded by 'this' (both getStorageClient and releaseStorageClient
    // must be synchronized; the original release was not, racing the getter).
    private Map<StorageClient, Boolean> map = new HashMap<>();

    public void setMaxPoolSize(int poolSize) {
        this.maxPoolSize = poolSize;
    }

    /**
     * Opens a fresh tracker connection; the caller owns (and must close) it.
     */
    public TrackerServer getTrackerServer() throws IOException {
        TrackerClient tracker = new TrackerClient();
        return tracker.getConnection();
    }

    // Creates a brand-new storage client over a fresh tracker connection.
    private StorageClient getNewStorageClient() throws IOException {
        TrackerClient tracker = new TrackerClient();
        TrackerServer trackerServer = tracker.getConnection();
        return new StorageClient(trackerServer, null);
    }

    /**
     * Hands out an idle client, creating one if the pool is not yet full.
     *
     * @return a storage client, or null when the pool is exhausted
     *         (callers must handle null)
     */
    public synchronized StorageClient getStorageClient() throws IOException {
        // Reuse an idle client when one exists.
        for (Entry<StorageClient, Boolean> entry : map.entrySet()) {
            if (entry.getValue()) {
                StorageClient client = entry.getKey();
                map.put(client, false); // mark as in use
                return client;
            }
        }
        // No idle client: grow the pool if allowed.
        if (map.size() < maxPoolSize) {
            StorageClient client = getNewStorageClient();
            map.put(client, false); // new client starts out checked out
            return client;
        }
        // Pool exhausted.
        return null;
    }

    /**
     * Returns a client to the pool, marking it idle again. Clients not tracked
     * by this pool are ignored. (The original nulled the local parameter in
     * that case, which had no effect and has been removed.)
     */
    public synchronized void releaseStorageClient(StorageClient client) {
        if (client == null) {
            return;
        }
        if (map.containsKey(client)) {
            map.put(client, true); // mark as idle
        }
    }
}

+ 363 - 0
common-data-fastdfs-starter/src/main/java/com/yihu/base/fastdfs/FastDFSHelper.java

@ -0,0 +1,363 @@
package com.yihu.base.fastdfs;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.csource.common.NameValuePair;
import org.csource.fastdfs.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import java.io.InputStream;
import java.net.InetSocketAddress;
/**
 * FastDFS 客户端工具.
 *
 * 作为Bean方式来调用。
 *
 * @author szx
 * @author Sand
 */
public class FastDFSHelper {
    private Logger logger = LoggerFactory.getLogger(FastDFSHelper.class);

    // JSON field names used in every upload result node.
    public final static String GroupField = "groupName";
    public final static String RemoteFileField = "remoteFileName";
    public final static String FileIdField = "fid";
    public final static String FileUrlField = "fileUrl";

    @Autowired
    FastDFSClientPool clientPool;

    /**
     * Uploads a file from an input stream.
     *
     * <pre>
     * InputStream in = new FileInputStream("C://Desert.jpg");
     * ObjectNode msg = helper.upload(in, "jpg", "desert");
     * in.close();
     * </pre>
     *
     * @param in            input stream (fully consumed and closed by this method)
     * @param fileExtension file extension without the leading "."
     * @param description   human-readable file description (may be null)
     * @return JSON node with groupName / remoteFileName / fid / fileUrl fields.
     *         groupName + remoteFileName identify the file inside fastDFS;
     *         fid is the full fastDFS path (preferred for storage — server host
     *         may change); fileUrl is the complete web URL (avoid persisting it).
     *         Append "?attname=original.jpg" to fileUrl to control the download name.
     * @throws Exception on upload failure
     */
    public ObjectNode upload(InputStream in, String fileExtension, String description) throws Exception {
        return upload(in, fileExtension, descriptionMeta(description));
    }

    /**
     * Uploads a file from an input stream with explicit metadata pairs.
     * The stream is read to exhaustion and closed, then delegated to
     * {@link #uploadByByte(byte[], String, NameValuePair[])}.
     */
    public ObjectNode upload(InputStream in, String fileExtension, NameValuePair[] fileMetaData) throws Exception {
        // Fix: the original sized the buffer with in.available(), which is only a
        // hint — network streams can deliver more bytes (ArrayIndexOutOfBounds)
        // or fewer (trailing zero bytes uploaded). Read to exhaustion instead.
        byte[] fileBuffer = readFully(in);
        in.close();
        return uploadByByte(fileBuffer, fileExtension, fileMetaData);
    }

    /**
     * Uploads a file from a byte array.
     *
     * @param fileBuffer    file content
     * @param fileExtension file extension without the leading "."
     * @param description   human-readable file description (may be null)
     * @return JSON result node, see {@link #upload(InputStream, String, String)}
     * @throws Exception on upload failure
     */
    public ObjectNode uploadByByte(byte[] fileBuffer, String fileExtension, String description) throws Exception {
        return uploadByByte(fileBuffer, fileExtension, descriptionMeta(description));
    }

    /**
     * Uploads a file from a byte array with explicit metadata pairs.
     * Returns an empty node when the storage server reports no result.
     */
    public ObjectNode uploadByByte(byte[] fileBuffer, String fileExtension, NameValuePair[] fileMetaData) throws Exception {
        StorageClient client = clientPool.getStorageClient();
        ObjectNode message = new ObjectMapper().createObjectNode();
        try {
            String[] results = client.upload_file(fileBuffer, fileExtension, fileMetaData);
            if (results != null) {
                fillUploadResult(message, results, client);
            }
        } finally {
            clientPool.releaseStorageClient(client);
        }
        return message;
    }

    /**
     * Uploads a slave/derived file into an explicit group next to a master file.
     * Parameter names mirror the underlying fastDFS client API.
     * Returns an empty node when the storage server reports no result.
     */
    public ObjectNode upload(String group_name, String master_filename, String prefix_name, byte[] file_buff, String file_ext_name, NameValuePair[] meta_list) throws Exception {
        StorageClient client = clientPool.getStorageClient();
        ObjectNode message = new ObjectMapper().createObjectNode();
        try {
            String[] results = client.upload_file(group_name, master_filename, prefix_name, file_buff, file_ext_name, meta_list);
            if (results != null) {
                fillUploadResult(message, results, client);
            }
        } finally {
            clientPool.releaseStorageClient(client);
        }
        return message;
    }

    /**
     * Uploads a local file by absolute path.
     *
     * @param fileName    absolute path of the local file, e.g. C://Desert.jpg;
     *                    must carry an extension
     * @param description optional description, may be null
     * @return JSON result node (see {@link #upload(InputStream, String, String)}),
     *         or null when the storage server reports no result — note this
     *         differs from the byte/stream overloads, which return an empty node
     * @throws RuntimeException when the file name has no extension
     * @throws Exception on upload failure
     */
    public ObjectNode upload(String fileName, String description) throws Exception {
        // Validate the extension before checking a client out of the pool.
        String fileExtension;
        if (fileName.contains(".")) {
            fileExtension = fileName.substring(fileName.lastIndexOf(".") + 1);
        } else {
            throw new RuntimeException("上传失败, 文件缺失扩展名.");
        }
        StorageClient client = clientPool.getStorageClient();
        try {
            String[] results = client.upload_file(fileName, fileExtension, descriptionMeta(description));
            if (results != null) {
                ObjectNode message = new ObjectMapper().createObjectNode();
                fillUploadResult(message, results, client);
                return message;
            }
            return null;
        } finally {
            clientPool.releaseStorageClient(client);
        }
    }

    /**
     * Downloads a file and returns its content as a byte array.
     *
     * @param groupName      fastDFS group (volume) name
     * @param remoteFileName path of the file inside the group
     */
    public byte[] download(String groupName, String remoteFileName) throws Exception {
        StorageClient client = clientPool.getStorageClient();
        try {
            return client.download_file(groupName, remoteFileName);
        } finally {
            clientPool.releaseStorageClient(client);
        }
    }

    /**
     * Downloads a file into a local directory. The local file name is the remote
     * path with "/" replaced by "_", appended to {@code localPath}.
     *
     * @return the local file name the content was written to
     */
    public String download(String groupName, String remoteFileName, String localPath) throws Exception {
        StorageClient client = clientPool.getStorageClient();
        try {
            String localFileName = localPath + remoteFileName.replaceAll("/", "_");
            client.download_file(groupName, remoteFileName, 0, 0, localFileName);
            return localFileName;
        } finally {
            clientPool.releaseStorageClient(client);
        }
    }

    /**
     * Deletes a file from fastDFS.
     */
    public void delete(String groupName, String remoteFileName) throws Exception {
        StorageClient client = clientPool.getStorageClient();
        try {
            client.delete_file(groupName, remoteFileName);
        } finally {
            clientPool.releaseStorageClient(client);
        }
    }

    // Wraps an optional description into the single-entry metadata array the
    // fastDFS client expects; a null description becomes an empty string.
    private static NameValuePair[] descriptionMeta(String description) {
        return new NameValuePair[]{
                new NameValuePair("description", description == null ? "" : description)
        };
    }

    // Reads the stream to exhaustion into a byte array (does not close it;
    // callers decide the stream's lifecycle).
    private static byte[] readFully(InputStream in) throws Exception {
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        byte[] chunk = new byte[8192];
        int n;
        while ((n = in.read(chunk)) != -1) {
            buffer.write(chunk, 0, n);
        }
        return buffer.toByteArray();
    }

    // Shared result builder for all upload variants (previously copy-pasted 4x):
    // fills group/remote-file/fid fields and assembles the public URL, including
    // the anti-steal token when enabled in ClientGlobal.
    private void fillUploadResult(ObjectNode message, String[] results, StorageClient client) throws Exception {
        String groupName = results[0];
        String remoteFileName = results[1];
        message.put(GroupField, groupName);
        message.put(RemoteFileField, remoteFileName);
        String fileId = groupName + StorageClient1.SPLIT_GROUP_NAME_AND_FILENAME_SEPERATOR + remoteFileName;
        message.put(FileIdField, fileId);
        // NOTE(review): each call opens a fresh tracker connection that is never
        // closed (preserved from the original) — TODO confirm pooling/closing
        // policy with the FastDFSClientPool owner.
        TrackerServer trackerServer = clientPool.getTrackerServer();
        InetSocketAddress socketAddress = trackerServer.getInetSocketAddress();
        StringBuilder fileUrl = new StringBuilder("http://").append(socketAddress.getAddress().getHostAddress());
        if (ClientGlobal.g_tracker_http_port != 80) {
            fileUrl.append(":").append(ClientGlobal.g_tracker_http_port);
        }
        fileUrl.append("/").append(fileId);
        if (ClientGlobal.g_anti_steal_token) {
            int ts = (int) (System.currentTimeMillis() / 1000);
            String token = ProtoCommon.getToken(fileId, ts, ClientGlobal.g_secret_key);
            fileUrl.append("?token=").append(token).append("&ts=").append(ts);
        }
        message.put(FileUrlField, fileUrl.toString());
        logger.info(client.get_file_info(groupName, remoteFileName).toString());
    }
}

+ 13 - 0
common-data-fastdfs-starter/src/main/resources/template.yml

@ -0,0 +1,13 @@
fast-dfs:
  tracker-server: 11.1.2.9:22122 #服务器地址
  connect-timeout: 2 #连接超时时间
  network-timeout: 30
  charset: ISO8859-1 #编码
  http:
    tracker-http-port: 80
    anti-steal-token: no
    secret-key: FastDFS1234567890
  pool: #连接池大小
    init-size: 5
    max-size: 20
    wait-time: 500

+ 66 - 0
common-data-hbase-starter/pom.xml

@ -0,0 +1,66 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <groupId>com.yihu.jw</groupId>
        <artifactId>jkzl-start</artifactId>
        <version>1.0.0</version>
        <relativePath>../pom.xml</relativePath>
    </parent>
    <artifactId>common-data-hbase-starter</artifactId>
    <version>1.0.0</version>
    <dependencies>
        <dependency>
            <groupId>org.apache.commons</groupId>
            <artifactId>commons-lang3</artifactId>
        </dependency>
        <dependency>
            <groupId>org.apache.hbase</groupId>
            <artifactId>hbase-client</artifactId>
            <exclusions>
                <exclusion>
                    <groupId>org.slf4j</groupId>
                    <artifactId>slf4j-log4j12</artifactId>
                </exclusion>
            </exclusions>
        </dependency>
        <dependency>
            <groupId>org.apache.hbase</groupId>
            <artifactId>hbase-common</artifactId>
            <exclusions>
                <exclusion>
                    <groupId>org.slf4j</groupId>
                    <artifactId>slf4j-log4j12</artifactId>
                </exclusion>
            </exclusions>
        </dependency>
        <dependency>
            <groupId>org.apache.hbase</groupId>
            <artifactId>hbase-protocol</artifactId>
        </dependency>
        <dependency>
            <groupId>com.fasterxml.jackson.core</groupId>
            <artifactId>jackson-databind</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework</groupId>
            <artifactId>spring-beans</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework</groupId>
            <artifactId>spring-context</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.data</groupId>
            <artifactId>spring-data-hadoop-hbase</artifactId>
        </dependency>
    </dependencies>
</project>

+ 37 - 0
common-data-hbase-starter/src/main/java/com/yihu/base/hbase/AbstractHBaseClient.java

@ -0,0 +1,37 @@
package com.yihu.base.hbase;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.security.UserGroupInformation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.data.hadoop.hbase.HbaseTemplate;
import java.io.IOException;
/**
 * @author hzp
 * @created 2017.05.03
 */
public class AbstractHBaseClient {
    @Autowired
    protected HbaseTemplate hbaseTemplate;

    /**
     * Opens a connection using the autowired template's configuration.
     * A new connection is created on every call; the caller is responsible
     * for closing it.
     */
    protected Connection getConnection() throws Exception {
        return getConnection(this.hbaseTemplate);
    }

    /**
     * Opens a connection for the supplied template's configuration.
     * The caller owns the returned connection and must close it.
     */
    protected Connection getConnection(HbaseTemplate template) throws Exception {
        return ConnectionFactory.createConnection(template.getConfiguration());
    }
}

+ 155 - 0
common-data-hbase-starter/src/main/java/com/yihu/base/hbase/HBaseAdmin.java

@ -0,0 +1,155 @@
package com.yihu.base.hbase;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.yihu.base.hbase.AbstractHBaseClient;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.util.Bytes;
import org.springframework.data.hadoop.hbase.TableCallback;
import org.springframework.stereotype.Service;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
/**
 * @author hzp
 * @created 2017.05.03
 */
@Service
public class HBaseAdmin extends AbstractHBaseClient {
    /**
     * Returns whether the given table exists.
     * Fix: the original closed Admin/Connection only on the success path;
     * an exception from tableExists leaked both. Now released in finally,
     * consistent with truncate/dropTable.
     */
    public boolean isTableExists(String tableName) throws Exception {
        Connection connection = getConnection();
        Admin admin = connection.getAdmin();
        try {
            return admin.tableExists(TableName.valueOf(tableName));
        } finally {
            admin.close();
            connection.close();
        }
    }

    /**
     * Creates a table with the given column families; no-op when it already exists.
     * Resources are released in finally (the original leaked them on exception).
     */
    public void createTable(String tableName, String... columnFamilies) throws Exception {
        Connection connection = getConnection();
        Admin admin = connection.getAdmin();
        try {
            if (!admin.tableExists(TableName.valueOf(tableName))) {
                HTableDescriptor tableDescriptor = new HTableDescriptor(TableName.valueOf(tableName));
                for (String family : columnFamilies) {
                    tableDescriptor.addFamily(new HColumnDescriptor(family));
                }
                admin.createTable(tableDescriptor);
            }
        } finally {
            admin.close();
            connection.close();
        }
    }

    /**
     * Lists table names, optionally filtered by a regex.
     *
     * @param regex            table-name pattern; null/empty lists all tables
     * @param includeSysTables whether system tables are included (only used
     *                         when a regex is given)
     */
    public List<String> getTableList(String regex, boolean includeSysTables) throws Exception {
        Connection connection = getConnection();
        Admin admin = connection.getAdmin();
        try {
            TableName[] tableNames;
            if (regex == null || regex.length() == 0) {
                tableNames = admin.listTableNames();
            } else {
                tableNames = admin.listTableNames(regex, includeSysTables);
            }
            List<String> tables = new ArrayList<>();
            for (TableName tableName : tableNames) {
                tables.add(tableName.getNameAsString());
            }
            return tables;
        } finally {
            admin.close();
            connection.close();
        }
    }

    /**
     * Truncates each listed table by dropping and recreating it with the same
     * descriptor (HBase truncate via disable + delete + create). Tables that do
     * not exist are reported to stdout and skipped, as in the original.
     */
    public void truncate(List<String> tables) throws Exception {
        Connection connection = getConnection();
        Admin admin = connection.getAdmin();
        try {
            for (String tableName : tables) {
                TableName tn = TableName.valueOf(tableName);
                if (admin.tableExists(TableName.valueOf(tableName))) {
                    // Capture the schema before deletion so it can be recreated.
                    HTableDescriptor descriptor = admin.getTableDescriptor(tn);
                    admin.disableTable(tn);
                    admin.deleteTable(tn);
                    admin.createTable(descriptor);
                }
                else{
                    System.out.print("not exit table "+tableName+".\r\n");
                }
            }
        } finally {
            admin.close();
            connection.close();
        }
    }

    /**
     * Drops a table (disable + delete). Fails if the table does not exist.
     */
    public void dropTable(String tableName) throws Exception {
        Connection connection = getConnection();
        Admin admin = connection.getAdmin();
        try {
            admin.disableTable(TableName.valueOf(tableName));
            admin.deleteTable(TableName.valueOf(tableName));
        } finally {
            admin.close();
            connection.close();
        }
    }

    /**
     * Returns the table's column families as a JSON node:
     * { "0": "family0", "1": "family1", ... }.
     */
    public ObjectNode getTableMetaData(String tableName) {
        return hbaseTemplate.execute(tableName, new TableCallback<ObjectNode>() {
            public ObjectNode doInTable(HTableInterface table) throws Throwable {
                ObjectMapper objectMapper = new ObjectMapper();
                ObjectNode root = objectMapper.createObjectNode();
                HTableDescriptor tableDescriptor = table.getTableDescriptor();
                HColumnDescriptor[] columnDescriptors = tableDescriptor.getColumnFamilies();
                for (int i = 0; i < columnDescriptors.length; ++i) {
                    root.put(Integer.toString(i), Bytes.toString(columnDescriptors[i].getName()));
                }
                return root;
            }
        });
    }
}

+ 381 - 0
common-data-hbase-starter/src/main/java/com/yihu/base/hbase/HBaseHelper.java

@ -0,0 +1,381 @@
package com.yihu.base.hbase;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.filter.CompareFilter;
import org.apache.hadoop.hbase.filter.RegexStringComparator;
import org.apache.hadoop.hbase.filter.RowFilter;
import org.apache.hadoop.hbase.util.Bytes;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.hadoop.hbase.RowMapper;
import org.springframework.data.hadoop.hbase.TableCallback;
import org.springframework.stereotype.Service;
import java.io.IOException;
import java.util.*;
/**
 * 数据增删改查
 */
@Service
public class HBaseHelper extends AbstractHBaseClient {
    @Autowired
    ObjectMapper objectMapper;

    /**
     * Returns the row keys matching a regex.
     * NOTE(review): the scan is restricted to the "basic" column family, so rows
     * without that family are not matched — TODO confirm this is intended.
     */
    public String[] findRowKeys(String tableName, String rowkeyRegEx) throws Exception {
        Scan scan = new Scan();
        scan.addFamily(Bytes.toBytes("basic"));
        scan.setFilter(new RowFilter(CompareFilter.CompareOp.EQUAL, new RegexStringComparator(rowkeyRegEx)));
        List<String> list = new LinkedList<>();
        hbaseTemplate.find(tableName, scan, new RowMapper<Void>() {
            @Override
            public Void mapRow(Result result, int rowNum) throws Exception {
                list.add(Bytes.toString(result.getRow()));
                return null;
            }
        });
        return list.toArray(new String[list.size()]);
    }

    /**
     * Counts the rows of a table (full scan over the "basic" family).
     * Fix: the original accumulated every row key String in a list just to take
     * its size; a plain counter avoids the O(rows) memory footprint.
     */
    public Integer count(String tableName) throws Exception {
        Scan scan = new Scan();
        scan.addFamily(Bytes.toBytes("basic"));
        scan.setFilter(new RowFilter(CompareFilter.CompareOp.EQUAL, new RegexStringComparator("^")));
        final int[] rowCount = {0};
        hbaseTemplate.find(tableName, scan, new RowMapper<Void>() {
            @Override
            public Void mapRow(Result result, int rowNum) throws Exception {
                rowCount[0]++;
                return null;
            }
        });
        return rowCount[0];
    }

    /**
     * Fetches one row by row key and returns it as a JSON string of
     * {qualifier: value} pairs plus a "rowkey" entry; "" when the row is empty.
     */
    public String get(String tableName, String rowkey) {
        return hbaseTemplate.get(tableName, rowkey, new RowMapper<String>() {
            public String mapRow(Result result, int rowNum) throws Exception {
                if (!result.isEmpty()) {
                    List<Cell> ceList = result.listCells();
                    Map<String, Object> map = new HashMap<String, Object>();
                    map.put("rowkey", rowkey);
                    if (ceList != null && ceList.size() > 0) {
                        for (Cell cell : ceList) {
                            // Keys are bare qualifiers; the family is deliberately
                            // not prefixed (original behavior).
                            map.put(Bytes.toString(cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength()),
                                    Bytes.toString(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength()));
                        }
                    }
                    return objectMapper.writeValueAsString(map);
                } else {
                    return "";
                }
            }
        });
    }

    /**
     * Fetches a single cell value by table / row key / family / qualifier;
     * returns "" when the cell is absent.
     */
    public String get(String tableName, String rowkey, String familyName, String qualifier) {
        return hbaseTemplate.get(tableName, rowkey, familyName, qualifier, new RowMapper<String>() {
            public String mapRow(Result result, int rowNum) throws Exception {
                List<Cell> ceList = result.listCells();
                String res = "";
                if (ceList != null && ceList.size() > 0) {
                    for (Cell cell : ceList) {
                        res = Bytes.toString(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength());
                    }
                }
                return res;
            }
        });
    }

    /**
     * Returns the raw Result for a row key.
     */
    public Result getResult(String tableName, String rowKey) throws Exception {
        return hbaseTemplate.get(tableName, rowKey, new RowMapper<Result>() {
            public Result mapRow(Result result, int rowNum) throws Exception {
                return result;
            }
        });
    }

    /**
     * Returns the raw Results for a batch of row keys (one multi-get).
     */
    public Result[] getResultList(String tableName, List<String> rowKeys) throws Exception {
        return hbaseTemplate.execute(tableName, new TableCallback<Result[]>() {
            public Result[] doInTable(HTableInterface table) throws Throwable {
                List<Get> list = new ArrayList<Get>();
                for (String rowKey : rowKeys) {
                    list.add(new Get(Bytes.toBytes(rowKey)));
                }
                return table.get(list);
            }
        });
    }

    /**
     * Returns one row as a {qualifier: value} map (plus "rowkey"); null when the
     * row has no cells.
     */
    public Map<String, Object> getResultMap(String tableName, String rowKey) throws Exception {
        return hbaseTemplate.get(tableName, rowKey, new RowMapper<Map<String, Object>>() {
            public Map<String, Object> mapRow(Result result, int rowNum) throws Exception {
                Map<String, Object> map = null;
                if (result != null) {
                    List<Cell> ceList = result.listCells();
                    if (ceList != null && ceList.size() > 0) {
                        map = new HashMap<String, Object>();
                        map.put("rowkey", rowKey);
                        for (Cell cell : ceList) {
                            // Keys are bare qualifiers, no family prefix (original behavior).
                            map.put(Bytes.toString(cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength()),
                                    Bytes.toString(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength()));
                        }
                    }
                }
                return map;
            }
        });
    }

    /**
     * Writes (creates or overwrites) one cell value.
     */
    public void put(String tableName, String rowkey, String familyName, String qualifier, String value) throws Exception {
        hbaseTemplate.execute(tableName, new TableCallback<String>() {
            public String doInTable(HTableInterface table) throws Throwable {
                Put p = new Put(rowkey.getBytes());
                p.add(familyName.getBytes(), qualifier.getBytes(), value.getBytes());
                table.put(p);
                return null;
            }
        });
    }

    /**
     * Inserts one row from a nested map: family -&gt; (qualifier -&gt; value).
     * Null values are skipped.
     */
    public void add(String tableName, String rowkey, Map<String, Map<String, String>> family) throws Exception {
        hbaseTemplate.execute(tableName, new TableCallback<String>() {
            public String doInTable(HTableInterface table) throws Throwable {
                Put p = new Put(rowkey.getBytes());
                for (String familyName : family.keySet()) {
                    Map<String, String> map = family.get(familyName);
                    for (String qualifier : map.keySet()) {
                        String value = map.get(qualifier);
                        if (value == null) {
                            continue;
                        }
                        p.add(familyName.getBytes(), qualifier.getBytes(), value.getBytes());
                    }
                }
                table.put(p);
                return null;
            }
        });
    }

    /**
     * Bulk insert: rowkeyList.get(i) is written with familyList.get(i).
     * The two lists must be the same length; null values are skipped.
     */
    public void addBulk(String tableName, List<String> rowkeyList, List<Map<String, Map<String, String>>> familyList) throws Exception {
        hbaseTemplate.execute(tableName, new TableCallback<String>() {
            public String doInTable(HTableInterface table) throws Throwable {
                List<Put> list = new ArrayList<>();
                for (int i = 0; i < rowkeyList.size(); i++) {
                    Put p = new Put(rowkeyList.get(i).getBytes());
                    Map<String, Map<String, String>> family = familyList.get(i);
                    for (String familyName : family.keySet()) {
                        Map<String, String> map = family.get(familyName);
                        for (String qualifier : map.keySet()) {
                            String value = map.get(qualifier);
                            if (value == null) {
                                continue;
                            }
                            p.add(familyName.getBytes(), qualifier.getBytes(), value.getBytes());
                        }
                    }
                    list.add(p);
                }
                table.put(list);
                return null;
            }
        });
    }

    /**
     * Inserts one row given parallel column/value arrays under a single family.
     * Entries with a null value are not stored.
     */
    public void add(String tableName, String rowKey, String family, Object[] columns, Object[] values) throws Exception {
        hbaseTemplate.execute(tableName, new TableCallback<Object>() {
            public Object doInTable(HTableInterface htable) throws Throwable {
                Put put = new Put(Bytes.toBytes(rowKey));
                for (int j = 0; j < columns.length; j++) {
                    // Skip null values so empty fields are not persisted.
                    if (values[j] != null) {
                        put.addColumn(Bytes.toBytes(family),
                                Bytes.toBytes(String.valueOf(columns[j])),
                                Bytes.toBytes(String.valueOf(values[j])));
                    }
                }
                htable.put(put);
                return null;
            }
        });
    }

    /**
     * Deletes one row by row key.
     */
    public void delete(String tableName, String rowkey) {
        hbaseTemplate.execute(tableName, new TableCallback<String>() {
            public String doInTable(HTableInterface table) throws Throwable {
                table.delete(new Delete(rowkey.getBytes()));
                return null;
            }
        });
    }

    /**
     * Deletes a batch of rows; returns the per-operation results from
     * {@code Table.batch}.
     */
    public Object[] deleteBatch(String tableName, String[] rowKeys) throws Exception {
        return hbaseTemplate.execute(tableName, new TableCallback<Object[]>() {
            public Object[] doInTable(HTableInterface table) throws Throwable {
                List<Delete> deletes = new ArrayList<>(rowKeys.length);
                for (String rowKey : rowKeys) {
                    deletes.add(new Delete(Bytes.toBytes(rowKey)));
                }
                Object[] results = new Object[deletes.size()];
                table.batch(deletes, results);
                return results;
            }
        });
    }

    /**
     * Deletes an entire column family from a row.
     */
    public void deleteFamily(String tableName, String rowKey, String familyName) throws Exception {
        hbaseTemplate.delete(tableName, rowKey, familyName);
    }

    /**
     * Deletes a single column from a row.
     */
    public void deleteColumn(String tableName, String rowKey, String familyName, String columnName) throws Exception {
        hbaseTemplate.delete(tableName, rowKey, familyName, columnName);
    }

    /************************************* TableBundle (prototype) operations ****************************************/
    /**
     * Executes all Put operations described by the bundle as one batch.
     */
    public void save(String tableName, TableBundle tableBundle) throws Exception {
        hbaseTemplate.execute(tableName, new TableCallback<Object>() {
            public Object doInTable(HTableInterface htable) throws Throwable {
                List<Put> puts = tableBundle.putOperations();
                Object[] results = new Object[puts.size()];
                htable.batch(puts, results);
                return null;
            }
        });
    }

    /**
     * Executes all Get operations described by the bundle as one batch;
     * returns null when the first result is the empty marker ("keyvalues=NONE").
     */
    public Object[] get(String tableName, TableBundle tableBundle) {
        return hbaseTemplate.execute(tableName, new TableCallback<Object[]>() {
            public Object[] doInTable(HTableInterface table) throws Throwable {
                List<Get> gets = tableBundle.getOperations();
                Object[] results = new Object[gets.size()];
                table.batch(gets, results);
                if (results.length > 0 && results[0].toString().equals("keyvalues=NONE")) return null;
                return results;
            }
        });
    }

    /**
     * Executes all Delete operations described by the bundle as one batch.
     */
    public void delete(String tableName, TableBundle tableBundle) {
        hbaseTemplate.execute(tableName, new TableCallback<Object[]>() {
            public Object[] doInTable(HTableInterface table) throws Throwable {
                List<Delete> deletes = tableBundle.deleteOperations();
                Object[] results = new Object[deletes.size()];
                table.batch(deletes, results);
                return null;
            }
        });
    }
}

+ 166 - 0
common-data-hbase-starter/src/main/java/com/yihu/base/hbase/TableBundle.java

@ -0,0 +1,166 @@
package com.yihu.base.hbase;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;
import java.util.*;
import java.util.stream.Collectors;
/**
 * 将HBase中的行,列族,列捆绑成一束。并一次性生成所需要的Get, Put操作。
 * <p>
 * 仅支持单表操作。
 * <p>
 * 虽然支持多种HBase操作,但请注意,一次只能用于一种操作,如:Get,Put,Delete不能混用,
 * 否则将出现难以预料的后果。
 *
 * @author Sand
 * @created 2016.04.27 14:38
 */
public class TableBundle {
    // rowkey -> Row detail; a null value means "whole row" (rowkey-only operation).
    Map<String, Row> rows = new HashMap<>();

    /** Registers row keys with no family/column detail (whole-row operations, e.g. Delete). */
    public void addRows(String... rowkeys) {
        for (String rowkey : rowkeys) {
            rows.put(rowkey, null);
        }
    }

    /** Registers a whole column family for the given row. */
    public void addFamily(String rowkey, Object family) {
        getRow(rowkey).addFamily(family.toString());
    }

    /** Registers specific columns of a family for the given row (Get use case). */
    public void addColumns(String rowkey, Object family, String[] columns) {
        getRow(rowkey).addColumns(family.toString(), columns);
    }

    /** Registers column/value pairs of a family for the given row (Put use case). */
    public void addValues(String rowkey, Object family, Map<String, String> values) {
        getRow(rowkey).addValues(family.toString(), values);
    }

    public void clear() {
        rows.clear();
    }

    /** Builds one Get per registered row. */
    public List<Get> getOperations() {
        List<Get> gets = new ArrayList<>(rows.size());
        for (Map.Entry<String, Row> entry : rows.entrySet()) {
            Get get = new Get(Bytes.toBytes(entry.getKey()));
            Row row = entry.getValue();
            if (row != null) {
                for (String family : row.getFamilies()) {
                    Set<Object> columns = row.getCells(family);
                    if (CollectionUtils.isEmpty(columns)) {
                        // Family registered without columns: fetch the whole family.
                        // Bug fix: the original fell through and iterated the
                        // possibly-null column set, throwing NullPointerException.
                        get.addFamily(Bytes.toBytes(family));
                        continue;
                    }
                    for (Object column : columns) {
                        get.addColumn(Bytes.toBytes(family), Bytes.toBytes((String) column));
                    }
                }
            }
            gets.add(get);
        }
        return gets;
    }

    /** Builds one Put per registered row that actually carries values. */
    public List<Put> putOperations() {
        List<Put> puts = new ArrayList<>(rows.size());
        for (Map.Entry<String, Row> entry : rows.entrySet()) {
            Row row = entry.getValue();
            if (row == null) {
                // Bug fix: rows registered via addRows() have no values to write;
                // the original dereferenced the null Row and threw NullPointerException.
                continue;
            }
            Put put = new Put(Bytes.toBytes(entry.getKey()));
            for (String family : row.getFamilies()) {
                Set<Object> columns = row.getCells(family);
                if (columns == null) {
                    continue; // family registered via addFamily(): nothing to write
                }
                for (Object column : columns) {
                    Pair<String, String> pair = (Pair<String, String>) column;
                    if (StringUtils.isNotEmpty(pair.getRight())) {
                        put.addColumn(Bytes.toBytes(family),
                                Bytes.toBytes(pair.getLeft()),
                                Bytes.toBytes(pair.getRight()));
                    }
                }
            }
            puts.add(put);
        }
        return puts;
    }

    /** Builds one whole-row Delete per registered row. */
    public List<Delete> deleteOperations() {
        List<Delete> deletes = new ArrayList<>(rows.size());
        for (String rowkey : rows.keySet()) {
            deletes.add(new Delete(Bytes.toBytes(rowkey)));
        }
        return deletes;
    }

    private Row getRow(String rowkey) {
        // computeIfAbsent also replaces a null value left by addRows().
        return rows.computeIfAbsent(rowkey, key -> new Row());
    }

    /**
     * One HBase row: family -> cells. A null cell set means "whole family".
     * Cells hold either column names (String, for Get) or column/value pairs
     * (ImmutablePair, for Put); the two kinds must not be mixed in one bundle.
     */
    public static class Row {
        private Map<String, Set<Object>> cells = new HashMap<>();   // key: family, value: columns

        public void addFamily(String family) {
            cells.put(family, null);
        }

        public void addColumns(String family, String... columns) {
            Collections.addAll(getFamily(family), columns);
        }

        public void addValues(String family, Map<String, String> values) {
            Set<Object> cellSet = getFamily(family);
            values.forEach((key, val) -> cellSet.add(new ImmutablePair<>(key, val)));
        }

        public Set<String> getFamilies() {
            return cells.keySet();
        }

        public Set<Object> getCells(String family) {
            return cells.get(family);
        }

        private Set<Object> getFamily(String family) {
            // computeIfAbsent also replaces a null set left by addFamily().
            return cells.computeIfAbsent(family, key -> new TreeSet<>());
        }
    }
}

+ 96 - 0
common-data-hbase-starter/src/main/java/com/yihu/base/hbase/config/HbaseConfig.java

@ -0,0 +1,96 @@
package com.yihu.base.hbase.config;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.security.UserGroupInformation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.hadoop.hbase.HbaseTemplate;
import org.springframework.data.hadoop.hbase.TableCallback;
import java.util.*;
/**
 * @author Sand
 * @version 1.0
 * @created 2015.11.28 16:26
 */
@Configuration
@ConfigurationProperties(prefix = "hadoop")
public class HbaseConfig{
    private static Logger logger = LoggerFactory.getLogger(HbaseConfig.class);
    private Map<String, String> hbaseProperties = new HashMap<>();
    public Map<String, String> getHbaseProperties(){
        return this.hbaseProperties;
    }
    @Value("${hadoop.user.name}")
    private String user;
    @Bean
    public org.apache.hadoop.conf.Configuration configuration() {
        Set<String> keys = new HashSet<>(hbaseProperties.keySet());
        for (String key : keys){
            String value = hbaseProperties.remove(key);
            key = key.replaceAll("^\\d{1,2}\\.", "");
            hbaseProperties.put(key, value);
        }
        org.apache.hadoop.conf.Configuration configuration = HBaseConfiguration.create();
        hbaseProperties.keySet().stream().filter(key -> hbaseProperties.get(key) != null).forEach(key -> {
            configuration.set(key, hbaseProperties.get(key));
        });
        return configuration;
    }
    @Bean
    public HbaseTemplate hbaseTemplate(org.apache.hadoop.conf.Configuration configuration){
        HbaseTemplate hbaseTemplate = new HbaseTemplate();
        hbaseTemplate.setConfiguration(configuration);
        try
        {
            logger.info("set System property for hbase ---",user);
            System.setProperty("HADOOP_USER_NAME", user);
            String tableName = "HealthProfile";
            //覆盖默认的配置文件
            org.apache.hadoop.conf.Configuration.addDefaultResource("core-site.xml");
            org.apache.hadoop.conf.Configuration.addDefaultResource("hbase-site.xml");
            Connection connection = ConnectionFactory.createConnection(configuration);
            logger.info("Hbase createConnection finished---",connection.getConfiguration());
            Admin admin = connection.getAdmin();
            boolean ex = admin.tableExists(TableName.valueOf(tableName));
            //判断是否存在
            if(ex)
            {
                hbaseTemplate.execute(tableName, new TableCallback<Object>() {
                    @Override
                    public Object doInTable(HTableInterface table) throws Throwable {
                        Get get = new Get(Bytes.toBytes("connection-init"));
                        Result result = table.get(get);
                        return result;
                    }
                });
            }
            admin.close();
            connection.close();
        }
        catch (Exception ex)
        {
            logger.info("Hbase createConnection failure",ex.getMessage());
        }
        return hbaseTemplate;
    }
}

+ 163 - 0
common-data-hbase-starter/src/main/resources/hbase/core-site.xml

@ -0,0 +1,163 @@
  <configuration>
    
    <property>
      <name>fs.defaultFS</name>
      <value>hdfs://dev</value>
    </property>
    
    <property>
      <name>fs.trash.interval</name>
      <value>360</value>
    </property>
    
    <property>
      <name>ha.failover-controller.active-standby-elector.zk.op.retries</name>
      <value>120</value>
    </property>
    
    <property>
      <name>ha.zookeeper.quorum</name>
      <value>node1.hde.h3c.com:2181,node2.hde.h3c.com:2181,node3.hde.h3c.com:2181</value>
    </property>
    
    <property>
      <name>hadoop.http.authentication.simple.anonymous.allowed</name>
      <value>true</value>
    </property>
    
    <property>
      <name>hadoop.proxyuser.hbase.groups</name>
      <value>*</value>
    </property>
    
    <property>
      <name>hadoop.proxyuser.hbase.hosts</name>
      <value>*</value>
    </property>
    
    <property>
      <name>hadoop.proxyuser.hcat.groups</name>
      <value>users</value>
    </property>
    
    <property>
      <name>hadoop.proxyuser.hcat.hosts</name>
      <value>node2.hde.h3c.com</value>
    </property>
    
    <property>
      <name>hadoop.proxyuser.hdfs.groups</name>
      <value>*</value>
    </property>
    
    <property>
      <name>hadoop.proxyuser.hdfs.hosts</name>
      <value>*</value>
    </property>
    
    <property>
      <name>hadoop.proxyuser.hive.groups</name>
      <value>*</value>
    </property>
    
    <property>
      <name>hadoop.proxyuser.hive.hosts</name>
      <value>node2.hde.h3c.com</value>
    </property>
    
    <property>
      <name>hadoop.proxyuser.HTTP.groups</name>
      <value>*</value>
    </property>
    
    <property>
      <name>hadoop.proxyuser.HTTP.hosts</name>
      <value>*</value>
    </property>
    
    <property>
      <name>hadoop.proxyuser.hue.groups</name>
      <value>*</value>
    </property>
    
    <property>
      <name>hadoop.proxyuser.hue.hosts</name>
      <value>*</value>
    </property>
    
    <property>
      <name>hadoop.proxyuser.oozie.groups</name>
      <value>*</value>
    </property>
    
    <property>
      <name>hadoop.proxyuser.oozie.hosts</name>
      <value>*</value>
    </property>
    
    <property>
      <name>hadoop.security.auth_to_local</name>
      <value>DEFAULT</value>
    </property>
    
    <property>
      <name>hadoop.security.authentication</name>
      <value>simple</value>
    </property>
    
    <property>
      <name>hadoop.security.authorization</name>
      <value>false</value>
    </property>
    
    <property>
      <name>hadoop.security.key.provider.path</name>
      <value></value>
    </property>
    
    <property>
      <name>io.compression.codecs</name>
      <value>org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.SnappyCodec</value>
    </property>
    
    <property>
      <name>io.file.buffer.size</name>
      <value>131072</value>
    </property>
    
    <property>
      <name>io.serializations</name>
      <value>org.apache.hadoop.io.serializer.WritableSerialization</value>
    </property>
    
    <property>
      <name>ipc.client.connect.max.retries</name>
      <value>2</value><!--50-->
    </property>
    
    <property>
      <name>ipc.client.connection.maxidletime</name>
      <value>1000</value><!--30000-->
    </property>
    
    <property>
      <name>ipc.client.idlethreshold</name>
      <value>8000</value>
    </property>
    
    <property>
      <name>ipc.server.tcpnodelay</name>
      <value>true</value>
    </property>
    
    <property>
      <name>mapreduce.jobtracker.webinterface.trusted</name>
      <value>false</value>
    </property>
    
    <property>
      <name>net.topology.script.file.name</name>
      <value>/etc/hadoop/conf/topology_script.py</value>
    </property>
    
  </configuration>

+ 243 - 0
common-data-hbase-starter/src/main/resources/hbase/hbase-site.xml

@ -0,0 +1,243 @@
  <configuration>
    
    <property>
      <name>dfs.domain.socket.path</name>
      <value>/var/lib/hadoop-hdfs/dn_socket</value>
    </property>
    
    <property>
      <name>hbase.bulkload.staging.dir</name>
      <value>/apps/hbase/staging</value>
    </property>
    
    <property>
      <name>hbase.client.keyvalue.maxsize</name>
      <value>1048576</value>
    </property>
    
    <property>
      <name>hbase.client.retries.number</name>
      <value>1</value><!--35-->
    </property>
    
    <property>
      <name>hbase.client.scanner.caching</name>
      <value>100</value>
    </property>
    
    <property>
      <name>hbase.cluster.distributed</name>
      <value>true</value>
    </property>
    
    <property>
      <name>hbase.coprocessor.master.classes</name>
      <value>org.apache.hadoop.hbase.security.access.AccessController</value>
    </property>
    
    <property>
      <name>hbase.coprocessor.region.classes</name>
      <value>org.apache.hadoop.hbase.security.access.AccessController,org.apache.hadoop.hbase.security.access.SecureBulkLoadEndpoint</value>
    </property>
    
    <property>
      <name>hbase.coprocessor.regionserver.classes</name>
      <value>org.apache.hadoop.hbase.security.access.AccessController</value>
    </property>
    
    <property>
      <name>hbase.defaults.for.version.skip</name>
      <value>true</value>
    </property>
    
    <property>
      <name>hbase.hregion.majorcompaction</name>
      <value>604800000</value>
    </property>
    
    <property>
      <name>hbase.hregion.majorcompaction.jitter</name>
      <value>0.50</value>
    </property>
    
    <property>
      <name>hbase.hregion.max.filesize</name>
      <value>10737418240</value>
    </property>
    
    <property>
      <name>hbase.hregion.memstore.block.multiplier</name>
      <value>4</value>
    </property>
    
    <property>
      <name>hbase.hregion.memstore.flush.size</name>
      <value>134217728</value>
    </property>
    
    <property>
      <name>hbase.hregion.memstore.mslab.enabled</name>
      <value>true</value>
    </property>
    
    <property>
      <name>hbase.hstore.blockingStoreFiles</name>
      <value>10</value>
    </property>
    
    <property>
      <name>hbase.hstore.compaction.max</name>
      <value>10</value>
    </property>
    
    <property>
      <name>hbase.hstore.compactionThreshold</name>
      <value>3</value>
    </property>
    
    <property>
      <name>hbase.local.dir</name>
      <value>${hbase.tmp.dir}/local</value>
    </property>
    
    <property>
      <name>hbase.master.info.bindAddress</name>
      <value>0.0.0.0</value>
    </property>
    
    <property>
      <name>hbase.master.info.port</name>
      <value>16010</value>
    </property>
    
    <property>
      <name>hbase.master.port</name>
      <value>16000</value>
    </property>
    
    <property>
      <name>hbase.regionserver.global.memstore.size</name>
      <value>0.4</value>
    </property>
    
    <property>
      <name>hbase.regionserver.handler.count</name>
      <value>30</value>
    </property>
    
    <property>
      <name>hbase.regionserver.info.port</name>
      <value>16030</value>
    </property>
    
    <property>
      <name>hbase.regionserver.port</name>
      <value>16020</value>
    </property>
    
    <property>
      <name>hbase.regionserver.thrift.http</name>
      <value>true</value>
    </property>
    
    <property>
      <name>hbase.regionserver.wal.codec</name>
      <value>org.apache.hadoop.hbase.regionserver.wal.WALCellCodec</value>
    </property>
    
    <property>
      <name>hbase.replication</name>
      <value>true</value>
    </property>
    
    <property>
      <name>hbase.rootdir</name>
      <value>hdfs://dev/apps/hbase/data</value>
    </property>
    
    <property>
      <name>hbase.rpc.protection</name>
      <value>authentication</value>
    </property>
    
    <property>
      <name>hbase.rpc.timeout</name>
      <value>90000</value>
    </property>
    
    <property>
      <name>hbase.security.authentication</name>
      <value>simple</value>
    </property>
    
    <property>
      <name>hbase.security.authorization</name>
      <value>true</value>
    </property>
    
    <property>
      <name>hbase.superuser</name>
      <value>hbase</value>
    </property>
    
    <property>
      <name>hbase.thrift.support.proxyuser</name>
      <value>true</value>
    </property>
    
    <property>
      <name>hbase.tmp.dir</name>
      <value>/hadoop/hbase</value>
    </property>
    
    <property>
      <name>hbase.zookeeper.property.clientPort</name>
      <value>2181</value>
    </property>
    
    <property>
      <name>hbase.zookeeper.quorum</name>
      <value>node1.hde.h3c.com,node2.hde.h3c.com,node3.hde.h3c.com</value>
    </property>
    
    <property>
      <name>hbase.zookeeper.useMulti</name>
      <value>true</value>
    </property>
    
    <property>
      <name>hfile.block.cache.size</name>
      <value>0.40</value>
    </property>
    
    <property>
      <name>phoenix.query.timeoutMs</name>
      <value>60000</value>
    </property>
    
    <property>
      <name>replication.replicationsource.implementation</name>
      <value>com.ngdata.sep.impl.SepReplicationSource</value>
    </property>
    
    <property>
      <name>replication.source.nb.capacity</name>
      <value>1000</value>
    </property>
    
    <property>
      <name>replication.source.ratio</name>
      <value>1</value>
    </property>
    
    <property>
      <name>zookeeper.session.timeout</name>
      <value>90000</value>
    </property>
    
    <property>
      <name>zookeeper.znode.parent</name>
      <value>/hbase-unsecure</value>
    </property>
    
  </configuration>

+ 348 - 0
common-data-hbase-starter/src/main/resources/hbase/hdfs-site.xml

@ -0,0 +1,348 @@
  <configuration>
    
    <property>
      <name>dfs.block.access.token.enable</name>
      <value>true</value>
    </property>
    
    <property>
      <name>dfs.blockreport.initialDelay</name>
      <value>120</value>
    </property>
    
    <property>
      <name>dfs.blocksize</name>
      <value>134217728</value>
    </property>
    
    <property>
      <name>dfs.client.failover.proxy.provider.dev</name>
      <value>org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider</value>
    </property>
    
    <property>
      <name>dfs.client.read.shortcircuit</name>
      <value>true</value>
    </property>
    
    <property>
      <name>dfs.client.read.shortcircuit.streams.cache.size</name>
      <value>4096</value>
    </property>
    
    <property>
      <name>dfs.client.retry.policy.enabled</name>
      <value>false</value>
    </property>
    
    <property>
      <name>dfs.cluster.administrators</name>
      <value> hdfs</value>
    </property>
    
    <property>
      <name>dfs.content-summary.limit</name>
      <value>5000</value>
    </property>
    
    <property>
      <name>dfs.datanode.address</name>
      <value>0.0.0.0:50010</value>
    </property>
    
    <property>
      <name>dfs.datanode.balance.bandwidthPerSec</name>
      <value>6250000</value>
    </property>
    
    <property>
      <name>dfs.datanode.data.dir</name>
      <value>/opt/hadoop/hdfs/data</value>
    </property>
    
    <property>
      <name>dfs.datanode.data.dir.perm</name>
      <value>750</value>
    </property>
    
    <property>
      <name>dfs.datanode.du.reserved</name>
      <value>1073741824</value>
    </property>
    
    <property>
      <name>dfs.datanode.failed.volumes.tolerated</name>
      <value>0</value>
    </property>
    
    <property>
      <name>dfs.datanode.http.address</name>
      <value>0.0.0.0:50075</value>
    </property>
    
    <property>
      <name>dfs.datanode.https.address</name>
      <value>0.0.0.0:50475</value>
    </property>
    
    <property>
      <name>dfs.datanode.ipc.address</name>
      <value>0.0.0.0:8010</value>
    </property>
    
    <property>
      <name>dfs.datanode.max.transfer.threads</name>
      <value>4096</value>
    </property>
    
    <property>
      <name>dfs.domain.socket.path</name>
      <value>/var/lib/hadoop-hdfs/dn_socket</value>
    </property>
    
    <property>
      <name>dfs.encrypt.data.transfer.cipher.suites</name>
      <value>AES/CTR/NoPadding</value>
    </property>
    
    <property>
      <name>dfs.encryption.key.provider.uri</name>
      <value></value>
    </property>
    
    <property>
      <name>dfs.ha.automatic-failover.enabled</name>
      <value>true</value>
    </property>
    
    <property>
      <name>dfs.ha.fencing.methods</name>
      <value>shell(/bin/true)</value>
    </property>
    
    <property>
      <name>dfs.ha.namenodes.dev</name>
      <value>nn1,nn2</value>
    </property>
    
    <property>
      <name>dfs.heartbeat.interval</name>
      <value>3</value>
    </property>
    
    <property>
      <name>dfs.hosts.exclude</name>
      <value>/etc/hadoop/conf/dfs.exclude</value>
    </property>
    
    <property>
      <name>dfs.http.policy</name>
      <value>HTTP_ONLY</value>
    </property>
    
    <property>
      <name>dfs.https.port</name>
      <value>50470</value>
    </property>
    
    <property>
      <name>dfs.journalnode.edits.dir</name>
      <value>/hadoop/hdfs/journal</value>
    </property>
    
    <property>
      <name>dfs.journalnode.http-address</name>
      <value>0.0.0.0:8480</value>
    </property>
    
    <property>
      <name>dfs.journalnode.https-address</name>
      <value>0.0.0.0:8481</value>
    </property>
    
    <property>
      <name>dfs.namenode.accesstime.precision</name>
      <value>0</value>
    </property>
    
    <property>
      <name>dfs.namenode.audit.log.async</name>
      <value>true</value>
    </property>
    
    <property>
      <name>dfs.namenode.avoid.read.stale.datanode</name>
      <value>true</value>
    </property>
    
    <property>
      <name>dfs.namenode.avoid.write.stale.datanode</name>
      <value>true</value>
    </property>
    
    <property>
      <name>dfs.namenode.checkpoint.dir</name>
      <value>/opt/hadoop/hdfs/namesecondary</value>
    </property>
    
    <property>
      <name>dfs.namenode.checkpoint.edits.dir</name>
      <value>${dfs.namenode.checkpoint.dir}</value>
    </property>
    
    <property>
      <name>dfs.namenode.checkpoint.period</name>
      <value>21600</value>
    </property>
    
    <property>
      <name>dfs.namenode.checkpoint.txns</name>
      <value>1000000</value>
    </property>
    
    <property>
      <name>dfs.namenode.fslock.fair</name>
      <value>false</value>
    </property>
    
    <property>
      <name>dfs.namenode.handler.count</name>
      <value>100</value>
    </property>
    
    <property>
      <name>dfs.namenode.http-address</name>
      <value>node1.hde.h3c.com:50070</value>
    </property>
    
    <property>
      <name>dfs.namenode.http-address.dev.nn1</name>
      <value>node1.hde.h3c.com:50070</value>
    </property>
    
    <property>
      <name>dfs.namenode.http-address.dev.nn2</name>
      <value>node2.hde.h3c.com:50070</value>
    </property>
    
    <property>
      <name>dfs.namenode.https-address</name>
      <value>node1.hde.h3c.com:50470</value>
    </property>
    
    <property>
      <name>dfs.namenode.https-address.dev.nn1</name>
      <value>node1.hde.h3c.com:50470</value>
    </property>
    
    <property>
      <name>dfs.namenode.https-address.dev.nn2</name>
      <value>node2.hde.h3c.com:50470</value>
    </property>
    
    <property>
      <name>dfs.namenode.name.dir</name>
      <value>/opt/hadoop/hdfs/namenode</value>
    </property>
    
    <property>
      <name>dfs.namenode.name.dir.restore</name>
      <value>true</value>
    </property>
    
    <property>
      <name>dfs.namenode.rpc-address</name>
      <value>node1.hde.h3c.com:8020</value>
    </property>
    
    <property>
      <name>dfs.namenode.rpc-address.dev.nn1</name>
      <value>node1.hde.h3c.com:8020</value>
    </property>
    
    <property>
      <name>dfs.namenode.rpc-address.dev.nn2</name>
      <value>node2.hde.h3c.com:8020</value>
    </property>
    
    <property>
      <name>dfs.namenode.safemode.threshold-pct</name>
      <value>0.99</value>
    </property>
    
    <property>
      <name>dfs.namenode.secondary.http-address</name>
      <value>localhost:50090</value>
    </property>
    
    <property>
      <name>dfs.namenode.shared.edits.dir</name>
      <value>qjournal://node1.hde.h3c.com:8485;node2.hde.h3c.com:8485;node3.hde.h3c.com:8485/dev</value>
    </property>
    
    <property>
      <name>dfs.namenode.stale.datanode.interval</name>
      <value>30000</value>
    </property>
    
    <property>
      <name>dfs.namenode.startup.delay.block.deletion.sec</name>
      <value>3600</value>
    </property>
    
    <property>
      <name>dfs.namenode.write.stale.datanode.ratio</name>
      <value>1.0f</value>
    </property>
    
    <property>
      <name>dfs.nameservices</name>
      <value>dev</value>
    </property>
    
    <property>
      <name>dfs.permissions.enabled</name>
      <value>true</value>
    </property>
    
    <property>
      <name>dfs.permissions.superusergroup</name>
      <value>hdfs</value>
    </property>
    
    <property>
      <name>dfs.replication</name>
      <value>3</value>
    </property>
    
    <property>
      <name>dfs.replication.max</name>
      <value>50</value>
    </property>
    
    <property>
      <name>dfs.support.append</name>
      <value>true</value>
    </property>
    
    <property>
      <name>dfs.webhdfs.enabled</name>
      <value>true</value>
    </property>
    
    <property>
      <name>fs.permissions.umask-mode</name>
      <value>022</value>
    </property>
    
    <property>
      <name>nfs.exports.allowed.hosts</name>
      <value>* rw</value>
    </property>
    
    <property>
      <name>nfs.file.dump.dir</name>
      <value>/tmp/.hdfs-nfs</value>
    </property>
    
  </configuration>

+ 7 - 0
common-data-hbase-starter/src/main/resources/template.yml

@ -0,0 +1,7 @@
hadoop:
  hbase-properties:
    hbase.zookeeper.property.clientPort: 2181 ##端口号
    zookeeper.znode.parent: /hbase-unsecure
    hbase.zookeeper.quorum:  node1.hde.h3c.com,node2.hde.h3c.com,node3.hde.h3c.com ##地址
  user:
    name: root

+ 59 - 0
common-data-mysql-starter/pom.xml

@ -0,0 +1,59 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <groupId>com.yihu.jw</groupId>
        <artifactId>jkzl-start</artifactId>
        <version>1.0.0</version>
        <relativePath>../pom.xml</relativePath>
    </parent>
    <artifactId>common-data-mysql-starter</artifactId>
    <version>1.0.0</version>
    <dependencies>
        <dependency>
            <groupId>com.yihu.jw</groupId>
            <artifactId>common-util</artifactId>
        </dependency>
        <dependency>
            <groupId>com.yihu.jw</groupId>
            <artifactId>common-rest-model</artifactId>
        </dependency>
        <dependency>
            <groupId>mysql</groupId>
            <artifactId>mysql-connector-java</artifactId>
        </dependency>
        <dependency>
            <groupId>org.hibernate</groupId>
            <artifactId>hibernate-core</artifactId>
        </dependency>
        <dependency>
            <groupId>org.hibernate.javax.persistence</groupId>
            <artifactId>hibernate-jpa-2.1-api</artifactId>
        </dependency>
        <!-- NOTE(review): spring-data-commons was declared twice; the duplicate entry was removed. -->
        <dependency>
            <groupId>org.springframework.data</groupId>
            <artifactId>spring-data-commons</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework</groupId>
            <artifactId>spring-jdbc</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework</groupId>
            <artifactId>spring-orm</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.data</groupId>
            <artifactId>spring-data-jpa</artifactId>
        </dependency>
    </dependencies>
</project>

+ 289 - 0
common-data-mysql-starter/src/main/java/com/yihu/base/mysql/query/BaseJpaService.java

@ -0,0 +1,289 @@
package com.yihu.base.mysql.query;
import com.yihu.jw.restmodel.common.PageArg;
import com.yihu.jw.util.spring.SpringContext;
import org.hibernate.Query;
import org.hibernate.Session;
import org.springframework.beans.BeanUtils;
import org.springframework.data.domain.Sort;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.repository.PagingAndSortingRepository;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.StringUtils;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Predicate;
import javax.persistence.criteria.Root;
import javax.persistence.metamodel.EntityType;
import java.io.Serializable;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.text.ParseException;
import java.util.*;
/**
 * Service基础类。此类基于Spring Data JPA进行封装(Spring Data JPA又是基于JPA封装,EHR平台使用Hibernate作为JPA实现者)。
 * 需要注意的是,部分功能会跳过JPA接口而直接使用Hibernate接口,比如访问Hibernate的Session接口,因为它比JPA的EntityManager功能强大。
 *
 * @author lincl
 * @author Sand
 * @version 1.0
 * @created 2016.2.3
 */
@Transactional(propagation = Propagation.SUPPORTS)
public class BaseJpaService<T, R> {
    Class<R> repoClass;
    @PersistenceContext
    protected EntityManager entityManager;
    public BaseJpaService(){
        // Resolve the repository class (second type argument R) from the concrete
        // subclass declaration, e.g. FooService extends BaseJpaService<Foo, FooRepo>.
        // Left null when the subclass does not parameterize with exactly two arguments.
        Type genType = getClass().getGenericSuperclass();
        if ((genType instanceof ParameterizedType)) {
            Type[] params = ((ParameterizedType) genType).getActualTypeArguments();
            if (params.length==2) {
                repoClass = (Class) params[1];
            }
        }
    }
    /**
     * Converts a collection of entities into model objects, appending each
     * converted model to {@code targets}.
     *
     * @param sources   entities to convert
     * @param targets   destination list; also returned for call chaining
     * @param targetCls model class instantiated once per entity
     * @param <T>       model type (note: shadows the class-level type parameter T)
     * @return the {@code targets} list with the converted models appended
     */
    public <T> List<T> convertToModels(Collection sources, List<T> targets, Class<T> targetCls){
        for (Object source : sources) {
            T model = BeanUtils.instantiate(targetCls);
            BeanUtils.copyProperties(source, model);
            targets.add(model);
        }
        return targets;
    }
    /**
     * Converts a single entity into a model object of the given class.
     *
     * @param source    entity to convert; may be null
     * @param targetCls model class to instantiate
     * @param <T>       model type (note: shadows the class-level type parameter T)
     * @return the populated model, or {@code null} when {@code source} is null
     */
    public <T> T convertToModel(Object source, Class<T> targetCls) {
        if (source == null) {
            return null;
        }
        T target = BeanUtils.instantiate(targetCls);
        BeanUtils.copyProperties(source, target);
        return target;
    }
    /**
     * Generates a 32-character hex identifier: a random UUID with the dashes removed.
     *
     * @return a fresh random identifier, unique with UUID-level probability
     */
    public String getCode() {
        // replace() suffices: "-" is a literal, no regex (replaceAll) is needed.
        return UUID.randomUUID().toString().replace("-", "");
    }
    public T save(T entity) {
        // Persists (insert or update) via the Spring Data repository.
        return (T) getRepository().save(entity);
    }
    public T retrieve(Serializable id) {
        // Looks up a single entity by primary key (Spring Data findOne).
        return (T) getRepository().findOne(id);
    }
    public void delete(Serializable id) {
        // Deletes the entity with the given primary key.
        getRepository().delete(id);
    }
    public void delete(T entity) {
        // Deletes the given entity instance.
        getRepository().delete(entity);
    }
    public void delete(Iterable ids) {
        // Bulk delete by ids: fetch the matching entities first, then remove
        // them in a single repository call.
        Iterable list = getRepository().findAll(ids);
        getRepository().delete(list);
    }
    /**
     * Resolves the entity class (first type argument T) declared by the
     * concrete service subclass.
     */
    public Class<T> getEntityClass() {
        ParameterizedType superType = (ParameterizedType) this.getClass().getGenericSuperclass();
        return (Class) superType.getActualTypeArguments()[0];
    }
    /**
     * Paged query driven by URL-style expressions.
     *
     * @param fields  comma-separated projection fields (empty = whole entity)
     * @param filters filter expression string parsed by URLQueryParser
     * @param sorts   sort expression string
     * @param page    1-based page number; falls back to PageArg.DefaultPage when null or non-positive
     * @param size    page size; falls back to PageArg.DefaultSize when null, non-positive or above 10000
     * @return the matching page of results
     * @throws ParseException when a filter value cannot be parsed
     */
    public List search(String fields, String filters, String sorts, Integer page, Integer size) throws ParseException {
        // Dead commented-out "status<>-1" pre-filter removed; the single-argument
        // search(String) overload is the one that applies it.
        URLQueryParser queryParser = createQueryParser(fields, filters, sorts);
        CriteriaQuery query = queryParser.makeCriteriaQuery();
        if (page == null || page <= 0) page = PageArg.DefaultPage;
        if (size == null || size <= 0 || size > 10000) size = PageArg.DefaultSize;
        return entityManager
                .createQuery(query)
                .setFirstResult((page - 1) * size)
                .setMaxResults(size)
                .getResultList();
    }
    /**
     * Lists all entities matching the given URL-style query, without paging.
     *
     * @param fields  comma-separated return fields (currently the whole entity is returned)
     * @param filters URL-style filter expression
     * @param sorts   comma-separated sort spec
     * @return all matching entities
     * @throws ParseException when a date-valued filter cannot be parsed
     */
    public List search(String fields, String filters, String sorts) throws ParseException {
        CriteriaQuery query = createQueryParser(fields, filters, sorts).makeCriteriaQuery();
        return entityManager.createQuery(query).getResultList();
    }
    /**
     * Lists all entities matching the given filters, always excluding
     * logically-deleted rows (status == -1) in this overload.
     *
     * @param filters URL-style filter expression; may be null
     * @return all matching, non-deleted entities
     * @throws ParseException when a date-valued filter cannot be parsed
     */
    public List search(String filters) throws ParseException {
        String effective = (filters == null) ? "status<>-1;" : "status<>-1;" + filters;
        CriteriaQuery query = createQueryParser("", effective, "").makeCriteriaQuery();
        return entityManager.createQuery(query).getResultList();
    }
    /**
     * Lists all entities matching the given filters with the given sort order.
     *
     * @param filters URL-style filter expression
     * @param sorts   comma-separated sort spec
     * @return all matching entities
     * @throws ParseException when a date-valued filter cannot be parsed
     */
    public List search(String filters,String sorts) throws ParseException {
        CriteriaQuery query = createQueryParser("", filters, sorts).makeCriteriaQuery();
        return entityManager.createQuery(query).getResultList();
    }
    /**
     * Counts entities matching the given URL-style filter expression.
     *
     * @param filters URL-style filter expression
     * @return the number of matching entities
     * @throws ParseException when a date-valued filter cannot be parsed
     */
    public long getCount(String filters) throws ParseException {
        URLQueryParser queryParser = createQueryParser(filters);
        return (long) entityManager
                .createQuery(queryParser.makeCriteriaCountQuery())
                .getSingleResult();
    }
    /**
     * Builds a query parser bound to this service's entity manager and entity class.
     *
     * @param fields  comma-separated return fields
     * @param filters URL-style filter expression
     * @param orders  comma-separated sort spec
     * @return a configured {@link URLQueryParser}
     */
    protected <T> URLQueryParser createQueryParser(String fields, String filters, String orders) {
        return new URLQueryParser<T>(fields, filters, orders)
                .setEntityManager(entityManager)
                .setEntityClass(getEntityClass());
    }
    /**
     * Builds a filter-only query parser bound to this service's entity manager
     * and entity class.
     *
     * @param filters URL-style filter expression
     * @return a configured {@link URLQueryParser}
     */
    protected <T> URLQueryParser createQueryParser(String filters) {
        return new URLQueryParser<T>(filters)
                .setEntityManager(entityManager)
                .setEntityClass(getEntityClass());
    }
    /**
     * Parses a comma-separated sort spec into a Spring Data {@link Sort}:
     * "+field" sorts ascending, "-field" descending, a bare field name ascending.
     *
     * @param sorter e.g. {@code "+code,-createDate"}; may be null or empty
     * @return the parsed sort, or {@code null} when {@code sorter} is empty
     */
    protected Sort parseSorts(String sorter){
        if (StringUtils.isEmpty(sorter)) {
            return null;
        }
        String[] orderArray = sorter.split(",");
        List<Sort.Order> orderList = new ArrayList<>(orderArray.length);
        for (String elem : orderArray) {
            if (elem.startsWith("+")) {
                orderList.add(new Sort.Order(Sort.Direction.ASC, elem.substring(1)));
            } else if (elem.startsWith("-")) {
                orderList.add(new Sort.Order(Sort.Direction.DESC, elem.substring(1)));
            } else {
                // BUG FIX: a bare field name previously lost its first character and
                // sorted DESC (substring(1) applied unconditionally); treat it as
                // ascending on the full name instead.
                orderList.add(new Sort.Order(Sort.Direction.ASC, elem));
            }
        }
        return new Sort(orderList);
    }
    /** @return the Hibernate {@code Session} backing the JPA {@code EntityManager}. */
    protected Session currentSession() {
        Session session = entityManager.unwrap(Session.class);
        return session;
    }
    /** @return the Spring Data repository bean registered for {@code repoClass}. */
    public PagingAndSortingRepository getRepository() {
        Object bean = SpringContext.getService(repoClass);
        return (PagingAndSortingRepository) bean;
    }
    /** @return the same repository bean, viewed through the {@code JpaRepository} interface. */
    public JpaRepository getJpaRepository(){
        Object bean = SpringContext.getService(repoClass);
        return (JpaRepository) bean;
    }
    /**
     * Finds entities whose single field matches the given value.
     *
     * @param field entity attribute name
     * @param value value to match; an array expresses an IN query
     * @return all matching entities
     */
    public List<T> findByField(String field, Object value){
        String[] fields = {field};
        Object[] values = {value};
        return findByFields(fields, values);
    }
    /**
     * Finds entities matching all given field/value pairs. An array value is
     * translated into an {@code IN (...)} predicate, a scalar into an equality.
     *
     * @param fields entity attribute names
     * @param values matching values, parallel to {@code fields}; elements may be arrays
     * @return all matching entities
     */
    public List<T> findByFields(String[] fields, Object[] values){
        CriteriaBuilder criteriaBuilder = entityManager.getCriteriaBuilder();
        CriteriaQuery query = criteriaBuilder.createQuery(getEntityClass());
        Root<T> root = query.from(getEntityClass());
        List<Predicate> predicates = new ArrayList<>(fields.length);
        for (int i = 0; i < fields.length; i++) {
            if (values[i].getClass().isArray()) {
                // BUG FIX: Path.in(values) already yields the IN predicate; the original
                // additionally wrapped it in criteriaBuilder.in(...), producing a
                // value-less IN over a boolean expression (malformed predicate).
                predicates.add(root.get(fields[i]).in((Object[]) values[i]));
            } else {
                predicates.add(criteriaBuilder.equal(root.get(fields[i]), values[i]));
            }
        }
        query.where(predicates.toArray(new Predicate[predicates.size()]));
        return entityManager
                .createQuery(query)
                .getResultList();
    }
    /** @return the fully-qualified class name of the managed entity. */
    public String getClzName(){
        Class<T> entityClass = getEntityClass();
        return entityClass.getName();
    }
    /**
     * Resolves the name of the entity's id attribute via the JPA metamodel.
     *
     * @return the id attribute name
     */
    public String getEntityIdFiled(){
        EntityType entityType = entityManager.getMetamodel().entity(getEntityClass());
        return entityType.getId(entityType.getIdType().getJavaType()).getName();
    }
    /**
     * Bulk-deletes the entities whose ids are in {@code ids} with one HQL statement.
     * Entity and id names come from the JPA metamodel; ids are bound as parameters.
     *
     * @param ids primary key values to delete
     * @return the number of rows removed
     */
    public int delete(Object[] ids){
        String hql = " DELETE FROM " + getEntityClass().getName() + " WHERE " + getEntityIdFiled() + " in(:ids)";
        Query deleteQuery = currentSession().createQuery(hql);
        deleteQuery.setParameterList("ids", ids);
        return deleteQuery.executeUpdate();
    }
    /**
     * Persists a list of entities, flushing and clearing the persistence context
     * after every 30 entities to keep first-level-cache memory bounded.
     *
     * @param list entities to persist
     */
    public void batchInsert(List list) {
        final int batchSize = 30;
        for (int i = 0; i < list.size(); i++) {
            entityManager.persist(list.get(i));
            // Flush after each FULL batch. The original tested i % 30 == 0, which
            // flushed after the very first element and then mid-batch thereafter.
            if ((i + 1) % batchSize == 0) {
                entityManager.flush();
                entityManager.clear();
            }
        }
        // Push any trailing partial batch to the database instead of waiting for commit.
        entityManager.flush();
    }
}

+ 198 - 0
common-data-mysql-starter/src/main/java/com/yihu/base/mysql/query/FieldCondition.java

@ -0,0 +1,198 @@
package com.yihu.base.mysql.query;
import org.springframework.util.StringUtils;
import java.util.ArrayList;
import java.util.List;
/**
 * A single filter condition (column, operator, values, OR-group) used when
 * building SQL/JPA where clauses.
 *
 * @author lincl
 * @version 1.0
 * @created 2016.2.1
 */
public class FieldCondition {
    /**
     * Operators accepted by {@link #isLogicValid()} and {@link #getValMapping()}.
     * Single source of truth — the original duplicated this list in both methods.
     */
    private static final List<String> SUPPORTED_LOGIC = java.util.Arrays.asList(
            "=", "like", "sw", "ew", "<", ">", ">=", "<=", "in", "not in", "between");
    private String col;       // filter column; must not be empty
    private String logic;     // operator; defaults to "=": =, sw, ew, like, >, <, between, >=, <=, in, not in
    private List<Object> val; // filter values; nothing is filtered when empty
    private String group;     // conditions sharing a group name are OR-ed together
    private String tableCol;  // database column, resolved from the entity; user-set values are ignored
    public FieldCondition() {
    }
    public FieldCondition(String col, Object val) {
        this.col = col;
        this.addVal(val);
    }
    public FieldCondition(String col, String logic, Object ... vals) {
        this.col = col;
        this.logic = logic;
        this.addVal(vals);
    }
    public FieldCondition(String col, String logic, List<Object> val, String group) {
        this.col = col;
        this.logic = logic;
        this.val = val;
        this.group = group;
    }
    /**
     * Formats this condition as a where fragment with a named placeholder.
     *
     * @param modelName view/alias name to qualify the column with; may be empty
     * @param isSql true: use the database column (SQL form); false: use the entity property (JPA form)
     * @return the fragment prefixed with a space, or "" when the condition is incomplete
     */
    public String format(String modelName, boolean isSql){
        if(getCol()==null || getCol().equals("") || getVal()==null || getVal().size()==0)
            return "";
        String placeholder = getValMapping();
        if(placeholder==null)
            return "";
        String fragment = (isSql ? getTableCol() : getCol()) + " " + getLogic() + " " + placeholder;
        if(modelName.trim().equals(""))
            return " " + fragment;
        return " " + modelName + "." + fragment;
    }
    /**
     * Formats this condition in JPA form (entity property names).
     * @return the where fragment
     */
    public String format(){
        return format("", false);
    }
    /**
     * Formats this condition in SQL form (database column names).
     * @return the where fragment
     */
    public String formatSql(){
        return format("", true);
    }
    /**
     * @return true when this condition carries OR-group information
     */
    public boolean isGroup(){
        return !(getGroup()==null || "".equals(getGroup()));
    }
    /**
     * Appends values to this condition, creating the value list on first use.
     * @param vals values to append
     */
    public void addVal(Object ... vals){
        if(this.val==null)
            this.val = new ArrayList<>();
        java.util.Collections.addAll(this.val, vals);
    }
    /**
     * @return true when the table column is resolved, values are present, the
     *         filter column is set and the operator is supported
     */
    public boolean isValid() {
        return !StringUtils.isEmpty(getTableCol()) && !(getVal()==null || getVal().size()==0)
                 && !(getCol()==null || getCol().equals("")) && isLogicValid();
    }
    /**
     * @return true when the operator is one of the supported ones
     */
    public boolean isLogicValid(){
        return SUPPORTED_LOGIC.contains(getLogic());
    }
    /**
     * Builds the named-parameter placeholder for this condition.
     * @return the placeholder, or null for an unsupported operator
     */
    private String getValMapping(){
        String logic = getLogic();
        String placeholder = ":" + getCol();
        if(logic.equals("in") || logic.equals("not in"))
            return "("+placeholder+") ";
        if(logic.equals("between"))
            return placeholder + "1 and " + placeholder + "2 ";
        if(SUPPORTED_LOGIC.contains(logic))
            return placeholder;
        return null;
    }
    /**
     * Formats the bound value; "between" is not supported here (bind col+"1" and
     * col+"2" manually from getVal()).
     * NOTE(review): "sw" yields "%v" and "ew" yields "v%" — looks swapped versus
     * starts-with/ends-with semantics; preserved as-is, confirm with callers.
     *
     * @return the single value (wildcarded for sw/ew/like) or the whole list for in/not in
     */
    public Object formatVal(){
        if(getLogic().equals("sw"))
            return "%"+getVal().get(0);
        if (getLogic().equals("ew"))
            return getVal().get(0)+"%";
        if (getLogic().equals("like"))
            return "%"+getVal().get(0)+"%";
        if(getLogic().equals("in") || getLogic().equals("not in"))
            return getVal();
        return getVal().get(0);
    }
    /************************************************************************************/
    /***************            getter  &  setter                            ************/
    /***************                                                         ************/
    /************************************************************************************/
    public String getCol() {
        return col;
    }
    public void setCol(String col) {
        this.col = col;
    }
    /** @return the operator, defaulting to "=" when unset */
    public String getLogic() {
        if(logic==null || "".equals(logic))
            return "=";
        return logic;
    }
    public void setLogic(String logic) {
        this.logic = logic;
    }
    public List<Object> getVal() {
        return val;
    }
    public void setVal(List<Object> val) {
        this.val = val;
    }
    public String getGroup() {
        return group;
    }
    public void setGroup(String group) {
        this.group = group;
    }
    public String getTableCol() {
        return tableCol;
    }
    public void setTableCol(String tableCol) {
        this.tableCol = tableCol;
    }
}

+ 28 - 0
common-data-mysql-starter/src/main/java/com/yihu/base/mysql/query/ReturnIdPstCreator.java

@ -0,0 +1,28 @@
package com.yihu.base.mysql.query;
import org.springframework.jdbc.core.PreparedStatementCreator;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Statement;
/**
 * {@code PreparedStatementCreator} that prepares a statement configured to hand
 * back the keys the database generates (auto-increment ids).
 *
 * @author lincl
 * @version 1.0
 * @created 2016/5/6
 */
public class ReturnIdPstCreator implements PreparedStatementCreator {
    String sql;
    public ReturnIdPstCreator(String sql){
        this.sql = sql;
    }
    @Override
    public PreparedStatement createPreparedStatement(Connection connection) throws SQLException {
        // Ask the driver to expose generated keys so the caller can read the new id.
        return connection.prepareStatement(this.sql, Statement.RETURN_GENERATED_KEYS);
    }
}

+ 304 - 0
common-data-mysql-starter/src/main/java/com/yihu/base/mysql/query/URLHqlQueryParser.java

@ -0,0 +1,304 @@
package com.yihu.base.mysql.query;
import javafx.util.Pair;
import org.hibernate.Criteria;
import org.hibernate.Session;
import org.hibernate.criterion.*;
import org.hibernate.metadata.ClassMetadata;
import org.springframework.util.StringUtils;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * URL query-string parser (Hibernate Criteria flavour). Translates URL-style
 * filter/order expressions into a Hibernate {@link Criteria} over the entity class.
 *
 * @author lincl
 * @author Sand
 * @version 1.0
 * @created 2016.02.05 10:17
 */
public class URLHqlQueryParser<T> {
    private String fields;  // requested return columns (currently unused; see makeSelection)
    private String filters; // raw filter expression, conditions separated by ";"
    private String orders;  // raw sort expression, e.g. "+code,-createDate"
    Session session;        // Hibernate session the Criteria is created from
    Class<T> entityCls;     // entity type being queried
    public URLHqlQueryParser(String fields, String filters, String orders) {
        this.fields = fields;
        this.filters = filters;
        this.orders = orders;
    }
    public URLHqlQueryParser(String filters){
        this.filters = filters;
    }
    public URLHqlQueryParser setSession(Session session) {
        this.session = session;
        return this;
    }
    public URLHqlQueryParser setEntityClass(Class<T> cls) {
        this.entityCls = cls;
        return this;
    }
    /**
     * Builds the search Criteria: ordering plus where conditions.
     *
     * @return a Criteria ready to list results
     */
    public Criteria makeCriteriaQuery() {
        Criteria criteria = session.createCriteria(entityCls);
        ClassMetadata classMetadata = session.getSessionFactory().getClassMetadata(entityCls);
//        makeSelection(criteria, classMetadata);
        makeOrderBy(criteria, classMetadata);
        makeWhere(criteria, classMetadata);
        return criteria;
    }
    /**
     * Builds the count Criteria: a row-count projection over the same filters.
     *
     * @return a Criteria whose single result is the matching row count
     */
    public Criteria makeCriteriaCountQuery() {
        Criteria criteria = session.createCriteria(entityCls);
        ClassMetadata classMetadata = session.getSessionFactory().getClassMetadata(entityCls);
        criteria.setProjection(Projections.rowCount());
        makeWhere(criteria, classMetadata);
        return criteria;
    }
    /**
     * Would build the projection for the requested return fields; intentionally
     * empty — the whole entity is always returned (see makeCriteriaQuery, where
     * the call is also commented out).
     *
     * @param criteria
     * @param classMetadata
     */
    private void makeSelection(Criteria criteria, ClassMetadata classMetadata) {
    }
    /**
     * +code sorts ascending on "code", -code sorts descending.
     * Builds the order-by part.
     *
     * @param criteria
     * @param classMetadata
     */
    private void makeOrderBy(Criteria criteria, ClassMetadata classMetadata) {
        if (!StringUtils.isEmpty(orders)) {
            String[] orderArray = orders.split(",");
            for(String elem : orderArray){
//                try {
//                    classMetadata.getPropertyType(elem);
//                }catch (Exception e){
//                    throw new IllegalArgumentException("the property not found!");
//                }
                // NOTE(review): an element without a +/- prefix loses its first
                // character and sorts DESC; callers appear expected to always prefix.
                criteria = elem.startsWith("+") ?
                        criteria.addOrder(Order.asc(elem.substring(1)))
                        : criteria.addOrder(Order.desc(elem.substring(1)));
            }
        }
    }
    /**
     * Filter syntax:
     *   like:   "?", e.g. name?value
     *   not in: "<>" with comma-separated values, e.g. status<>2,3,4,5
     *   in:     "=" with comma-separated values, e.g. status=2,3,4,5
     *   =:      e.g. status=2
     *   >=, >:  e.g. createDate>2012
     *   <=, <:  e.g. createDate<=2015
     *   group:  append a space and a group name, e.g. "createDate>2012 g1";
     *           conditions sharing a group name are OR-ed together
     *   several conditions are joined with ";"
     * <p>
     * Builds the where conditions.
     *
     * @param criteria
     * @param classMetadata
     */
    private void makeWhere(Criteria criteria, ClassMetadata classMetadata) {
        if (StringUtils.isEmpty(filters)) return;
        Map<String, List<Criterion>> criterionMap = new HashMap<>();
        String[] filterArray = filters.split(";");
        List<Criterion> groupCriterion = new ArrayList<>();
        for (int i = 0; i < filterArray.length; ++i) {
            String[] tokens = filterArray[i].split(" ");
            // Re-join values that themselves contain spaces (e.g. datetimes), keeping
            // only the LAST token as the group name.
            // NOTE(review): a spaced value WITHOUT a trailing group name will have its
            // last fragment misread as a group — TODO confirm with callers.
            if (tokens.length > 2){
                for(int j=1; j<tokens.length; j++){
                    if(j==tokens.length-1)
                        tokens[1] = tokens[j];
                    else
                        tokens[0] += " " + tokens[j] ;
                }
            }
//            if (tokens.length > 2) throw new IllegalArgumentException("invalid filter parameter");
            String group = null;
            if (tokens.length >= 2) group = tokens[1];
            Criterion criterion = splitFilter(tokens[0], classMetadata);
            // Ungrouped conditions get a unique synthetic group so they are AND-ed.
            if (group == null)
                group = Integer.toString(i);
            criterionMap.put(group,
                    makeGroupCriterion(criterionMap.get(group), criterion));
        }
        addWhere(criteria, criterionMap);
    }
    // ORs criteria within a group; groups themselves are ANDed by Criteria.add().
    private void addWhere(Criteria criteria, Map<String, List<Criterion>> criterionMap) {
        List<Criterion> ls;
        for (String group : criterionMap.keySet()){
            ls = criterionMap.get(group);
            if(ls.size()>1)
                criteria.add(
                        Restrictions.or(ls.toArray(new Criterion[ls.size()]))
                );
            else
                criteria.add(
                        Restrictions.and(ls.toArray(new Criterion[ls.size()]))
                );
        }
    }
    // Appends the criterion to the group's list, creating the list on first use.
    protected List<Criterion> makeGroupCriterion(List<Criterion> ls, Criterion criterion){
        (ls = ls == null ? new ArrayList<>() : ls)
                .add(criterion);
        return ls;
    }
    /**
     * Parses one "field&lt;op&gt;value" condition into a Criterion. Operators are
     * tested in an order that keeps "&lt;=" from being consumed by "&lt;", etc.
     */
    protected Criterion splitFilter(String filter, ClassMetadata classMetadata) {
        Criterion criterion = null;
        if (filter.contains("?")) {
            Pair<Property, Object> pair = getPair(filter, "[?]", classMetadata);
            criterion = pair.getKey().like("%"+pair.getValue()+"%");
        } else if (filter.contains("<>")) {
            // Comma-separated values arrive as an array -> NOT IN, otherwise NOT EQUAL.
            Pair<Property, Object> pair = getPair(filter, "<>", classMetadata);
            if (pair.getValue().getClass().isArray()) {
                criterion = pair.getKey().in((Object[])pair.getValue());
            } else {
                criterion = pair.getKey().eq(pair.getValue());
            }
            criterion = Restrictions.not(criterion);
        }  else if (filter.contains(">=")) {
            Pair<Property, Object> pair = getPair(filter, ">=", classMetadata);
            criterion = pair.getKey().ge(pair.getValue());
        } else if (filter.contains(">")) {
            Pair<Property, Object> pair = getPair(filter, ">", classMetadata);
            // TODO: convert the value to the property's actual type
            criterion = pair.getKey().gt(pair.getValue());
        } else if (filter.contains("<=")) {
            Pair<Property, Object> pair = getPair(filter, "<=", classMetadata);
            criterion = pair.getKey().le(pair.getValue());
        } else if (filter.contains("<")) {
            Pair<Property, Object> pair = getPair(filter, "<", classMetadata);
            criterion = pair.getKey().lt(pair.getValue());
        } else if (filter.contains("=")) {
            // "=" with comma-separated values becomes IN, a single value becomes EQ.
            Pair<Property, Object> pair = getPair(filter, "=", classMetadata);
            if (pair.getValue().getClass().isArray()) {
                criterion = pair.getKey().in((Object[])pair.getValue());
            } else {
                criterion = pair.getKey().eq(pair.getValue());
            }
        }
        return criterion;
    }
    /**
     * Splits "field&lt;op&gt;value" and coerces the value to Long/Integer (or arrays
     * thereof for IN lists) when the entity field is numeric.
     */
    protected Pair<Property, Object> getPair(String filter, String splitter, ClassMetadata classMetadata) throws IllegalArgumentException {
        String[] tokens = filter.split(splitter);
        String valStr = tokens[1];
        Object val = tokens[1];
        try {
            if((splitter.equals("=") || splitter.equals("<>")) && valStr.contains(",")){
                val = formatVal(tokens[0], valStr, true);
            }
            else if(!splitter.equals("[?]")){
                val = formatVal(tokens[0], valStr, false);
            }
        } catch (NoSuchFieldException e) {
            // NOTE(review): swallowed — the raw string value is used as a fallback
            // when the field cannot be found on the entity. Confirm this is intended.
            e.printStackTrace();
        }
        return new Pair(Property.forName(tokens[0]), val);
    }
    // Converts the raw string to the field's numeric type (or array for IN lists);
    // non-numeric fields keep the raw string (or a String[] when isArr).
    private Object formatVal(String fileName, String valStr, boolean isArr) throws NoSuchFieldException {
        Object val = "";
        if(isLong(fileName)){
            if(isArr){
                val = strToLongArr(valStr);
            }else
                val = Long.parseLong(valStr);
        }else if(isInteger(fileName)){
            if(isArr){
                val = strToIntArr(valStr);
            }else
                val = Integer.parseInt(valStr);
        }else {
            if(isArr)
                val = valStr.split(",");
            else
                val = valStr;
        }
        return val;
    }
    // "1,2,3" -> {1L, 2L, 3L}
    private Long[] strToLongArr(String valStr){
        String[] strArr = valStr.split(",");
        Long[] longArr = new Long[strArr.length];
        for(int i=0; i<strArr.length; i++){
            longArr[i] = Long.parseLong(strArr[i]);
        }
        return longArr;
    }
    // "1,2,3" -> {1, 2, 3}
    private Integer[] strToIntArr(String valStr){
        String[] strArr = valStr.split(",");
        Integer[] intArr = new Integer[strArr.length];
        for(int i=0; i<strArr.length; i++){
            intArr[i] = Integer.parseInt(strArr[i]);
        }
        return intArr;
    }
    private boolean isInteger(String fieldName) throws NoSuchFieldException {
        Field field = getField(fieldName);
        return field.getType().equals(Integer.class) || field.getType().equals(Integer.TYPE);
    }
    private boolean isLong(String fieldName) throws NoSuchFieldException {
        Field field = getField(fieldName);
        return field.getType().equals(Long.class) || field.getType().equals(Long.TYPE);
    }
    // Looks the field up on the entity class, falling back to its direct superclass only.
    private Field getField(String fieldName) throws NoSuchFieldException {
        Field f;
        try {
            f = entityCls.getDeclaredField(fieldName);
        } catch (NoSuchFieldException e) {
            f = entityCls.getSuperclass().getDeclaredField(fieldName);
        }
        return f;
    }
}

+ 264 - 0
common-data-mysql-starter/src/main/java/com/yihu/base/mysql/query/URLQueryParser.java

@ -0,0 +1,264 @@
package com.yihu.base.mysql.query;
import com.yihu.jw.util.date.DateUtil;
import javafx.util.Pair;
import org.springframework.util.StringUtils;
import javax.persistence.EntityManager;
import javax.persistence.criteria.*;
import java.text.ParseException;
import java.util.*;
/**
 * URL query-string parser. Reverses the query strings produced by
 * {@link URLQueryBuilder} into JPA criteria queries.
 *
 * @author Sand
 * @version 1.0
 * @created 2016.02.05 10:17
 */
public class URLQueryParser<T> {
    private String fields;   // requested return columns (projection disabled; whole entity is selected)
    private String filters;  // raw filter expression, conditions separated by ";"
    private String orders;   // raw sort expression, e.g. "+code,-createDate"
    EntityManager entityManager;
    CriteriaBuilder builder;
    Class<T> entityCls;
    public URLQueryParser(String fields, String filters, String orders) {
        this.fields = fields;
        this.filters = filters;
        this.orders = orders;
    }
    public URLQueryParser(String filters) {
        this.filters = filters;
    }
    public URLQueryParser setEntityManager(EntityManager entityManager) {
        this.entityManager = entityManager;
        builder = entityManager.getCriteriaBuilder();
        return this;
    }
    public URLQueryParser setEntityClass(Class<T> cls) {
        this.entityCls = cls;
        return this;
    }
    /**
     * Builds the search query: selection, ordering and where clause.
     *
     * @return the assembled criteria query
     * @throws ParseException when a date-valued filter cannot be parsed
     */
    public CriteriaQuery makeCriteriaQuery() throws ParseException {
        CriteriaQuery query = builder.createQuery();
        Root<T> root = query.from(entityCls);
        makeSelection(builder, query, root);
        makeOrderBy(builder, query, root);
        makeWhere(builder, query, root);
        return query;
    }
    /**
     * Builds the count query over the same filters.
     *
     * @return a query whose single result is the matching row count
     * @throws ParseException when a date-valued filter cannot be parsed
     */
    public CriteriaQuery makeCriteriaCountQuery() throws ParseException {
        CriteriaQuery<Long> query = builder.createQuery(Long.class);
        Root<T> root = query.from(entityCls);
        query.select(builder.count(root));
        makeWhere(builder, query, root);
        return query;
    }
    /**
     * Selects the result columns. Field projection was permanently disabled in the
     * original (dead branch guarded by {@code if (false)}), so the whole entity is
     * always selected; the dead code has been removed.
     *
     * @param criteriaBuilder
     * @param query
     * @param root
     */
    private void makeSelection(CriteriaBuilder criteriaBuilder, CriteriaQuery query, Root<T> root) {
        query.select(root);
    }
    /**
     * +code sorts ascending on "code", -code sorts descending.
     * Builds the order-by part.
     *
     * @param criteriaBuilder
     * @param query
     * @param root
     */
    private void makeOrderBy(CriteriaBuilder criteriaBuilder, CriteriaQuery query, Root<T> root) {
        if (StringUtils.isEmpty(orders)) {
            return;
        }
        String[] orderArray = orders.split(",");
        List<Order> orderList = new ArrayList<>(orderArray.length);
        for (String elem : orderArray) {
            // NOTE(review): an element without a +/- prefix loses its first character
            // and sorts DESC; callers are expected to always prefix.
            if (elem.startsWith("+")) {
                orderList.add(criteriaBuilder.asc(root.get(elem.substring(1))));
            } else {
                orderList.add(criteriaBuilder.desc(root.get(elem.substring(1))));
            }
        }
        query.orderBy(orderList);
    }
    /**
     * Filter syntax:
     *   like:   "?", e.g. name?value
     *   in:     "=" with comma-separated values, e.g. status=2,3,4,5
     *   not in: "<>" with comma-separated values, e.g. status<>2,3,4,5
     *   =:      e.g. status=2
     *   >=, >:  e.g. createDate>2012
     *   <=, <:  e.g. createDate<=2015
     *   group:  append a space and a group name, e.g. "createDate>2012 g1";
     *           conditions sharing a group name are OR-ed together
     *   several conditions are joined with ";"
     * <p/>
     * Builds the where clause.
     *
     * @param criteriaBuilder
     * @param query
     * @param root
     * @throws ParseException when a date-valued filter cannot be parsed
     */
    private void makeWhere(CriteriaBuilder criteriaBuilder, CriteriaQuery query, Root<T> root) throws ParseException {
        if (StringUtils.isEmpty(filters)) {
            return;
        }
        Map<String, Predicate> predicateMap = new HashMap<>();
        String[] filterArray = filters.split(";");
        for (int i = 0; i < filterArray.length; ++i) {
            String[] tokens = filterArray[i].split(" ");
            // Re-join values that contain spaces (e.g. "yyyy-MM-dd HH:mm:ss" datetimes),
            // keeping only the LAST token as the group name.
            // NOTE(review): a spaced value WITHOUT a trailing group name will have its
            // last fragment misread as the group — TODO confirm with callers.
            if (tokens.length > 2) {
                for (int j = 1; j < tokens.length; j++) {
                    if (j == tokens.length - 1) {
                        tokens[1] = tokens[j];
                    } else {
                        tokens[0] += " " + tokens[j];
                    }
                }
            }
            String group = (tokens.length >= 2) ? tokens[1] : null;
            Predicate predicate = splitFilter(tokens[0], criteriaBuilder, root);
            if (group != null) {
                // OR together predicates that share a group name.
                Predicate existing = predicateMap.get(group);
                predicateMap.put(group, existing == null ? predicate : criteriaBuilder.or(existing, predicate));
            } else {
                // Ungrouped predicates get a unique synthetic key so they are AND-ed.
                predicateMap.put(Integer.toString(i), predicate);
            }
        }
        query.where(predicateMap.values().toArray(new Predicate[predicateMap.size()]));
    }
    /**
     * Parses one "field&lt;op&gt;value" condition into a Predicate. Operators are
     * tested in an order that keeps "&lt;=" from being consumed by "&lt;", etc.
     * Date-typed attributes have their values parsed with {@code DateUtil.dateTimeParse}.
     *
     * @throws ParseException when a date value cannot be parsed
     */
    protected Predicate splitFilter(String filter, CriteriaBuilder cb, Root<T> root) throws ParseException {
        Predicate predicate = null;
        if (filter.contains("?")) {
            Pair<Path, String> pair = getPair(filter, "[?]", root);
            predicate = cb.like(pair.getKey(), "%" + pair.getValue() + "%");
        } else if (filter.contains("<>")) {
            // Comma-separated values become NOT IN, a single value becomes NOT EQUAL.
            Pair<Path, String> pair = getPair(filter, "<>", root);
            if (pair.getValue().contains(",")) {
                predicate = cb.not(pair.getKey().in(pair.getValue().split(",")));
            } else {
                predicate = cb.notEqual(pair.getKey(), pair.getValue());
            }
        } else if (filter.contains(">=")) {
            Pair<Path, String> pair = getPair(filter, ">=", root);
            if (pair.getKey().getJavaType() == Date.class) {
                predicate = cb.greaterThanOrEqualTo(pair.getKey(), DateUtil.dateTimeParse(pair.getValue()));
            } else {
                predicate = cb.greaterThanOrEqualTo(pair.getKey(), pair.getValue());
            }
        } else if (filter.contains(">")) {
            Pair<Path, String> pair = getPair(filter, ">", root);
            if (pair.getKey().getJavaType() == Date.class) {
                predicate = cb.greaterThan(pair.getKey(), DateUtil.dateTimeParse(pair.getValue()));
            } else {
                predicate = cb.greaterThan(pair.getKey(), pair.getValue());
            }
        } else if (filter.contains("<=")) {
            Pair<Path, String> pair = getPair(filter, "<=", root);
            if (pair.getKey().getJavaType() == Date.class) {
                predicate = cb.lessThanOrEqualTo(pair.getKey(), DateUtil.dateTimeParse(pair.getValue()));
            } else {
                predicate = cb.lessThanOrEqualTo(pair.getKey(), pair.getValue());
            }
        } else if (filter.contains("<")) {
            Pair<Path, String> pair = getPair(filter, "<", root);
            if (pair.getKey().getJavaType() == Date.class) {
                predicate = cb.lessThan(pair.getKey(), DateUtil.dateTimeParse(pair.getValue()));
            } else {
                predicate = cb.lessThan(pair.getKey(), pair.getValue());
            }
        } else if (filter.contains("=")) {
            // "=" always becomes IN over the comma-separated values (one value -> IN of one).
            Pair<Path, String> pair = getPair(filter, "=", root);
            Set<Object> values = new HashSet<>();
            for (String value : pair.getValue().split(",")) {
                if (pair.getKey().getJavaType().isEnum()) {
                    values.add(Enum.valueOf(pair.getKey().getJavaType(), value));
                } else if (pair.getKey().getJavaType().equals(Boolean.class) ||
                        pair.getKey().getJavaType().equals(Boolean.TYPE)) {
                    values.add(Boolean.valueOf(value));
                } else if (pair.getKey().getJavaType() == Date.class) {
                    // BUG FIX: parse the individual token; the original re-parsed the whole
                    // raw value (pair.getValue()) on every iteration, which breaks
                    // multi-valued date filters such as "createDate=d1,d2".
                    values.add(DateUtil.dateTimeParse(value));
                } else {
                    values.add(value);
                }
            }
            predicate = pair.getKey().in(values);
        }
        return predicate;
    }
    /** Splits "field&lt;op&gt;value" into the attribute Path and the raw value string. */
    protected Pair<Path, String> getPair(String filter, String splitter, Root<T> root) {
        String[] tokens = filter.split(splitter);
        return new Pair<>(root.get(tokens[0]), tokens[1]);
    }
}

+ 63 - 0
common-data-mysql-starter/src/main/java/com/yihu/base/mysql/query/UpdatePstCallback.java

@ -0,0 +1,63 @@
package com.yihu.base.mysql.query;
import javafx.util.Pair;
import org.springframework.dao.DataAccessException;
import org.springframework.jdbc.core.PreparedStatementCallback;
import java.lang.reflect.Type;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.List;
/**
 * @author lincl
 * @version 1.0
 * @created 2016/5/6
 */
public class UpdatePstCallback implements PreparedStatementCallback<Integer> {

    // Parameter list: each pair holds the declared type (currently unused when
    // binding) and the value to bind, in positional order.
    List<Pair<Type, Object>> values;

    public UpdatePstCallback(List<Pair<Type, Object>> values) {
        this.values = values;
    }

    /**
     * Binds the parameters, executes the update and returns the generated key.
     *
     * NOTE(review): getGeneratedKeys() only returns rows when the statement was
     * prepared with Statement.RETURN_GENERATED_KEYS — confirm at the call site.
     *
     * @return the first auto-generated key, or -1 when none was produced
     */
    @Override
    public Integer doInPreparedStatement(PreparedStatement preparedStatement) throws SQLException, DataAccessException {
        try {
            // 设参 set parameters
            setParams(preparedStatement);
            // 执行语句 execute the update
            preparedStatement.executeUpdate();
            // 获取id fetch the generated id
            return getKey(preparedStatement);
        } finally {
            // Close on all paths (the original leaked the statement on exception).
            preparedStatement.close();
        }
    }

    /** Reads the first auto-generated key, or -1 when none was produced. */
    private int getKey(PreparedStatement preparedStatement) throws SQLException {
        // try-with-resources guarantees the ResultSet is closed even on error.
        try (ResultSet rs = preparedStatement.getGeneratedKeys()) {
            return rs.next() ? rs.getInt(1) : -1;
        }
    }

    /** Binds each value to its 1-based positional placeholder, in list order. */
    public PreparedStatement setParams(PreparedStatement pst) throws SQLException {
        int index = 1;
        for (Pair<Type, Object> pair : values) {
            pst.setObject(index++, pair.getValue());
        }
        return pst;
    }
}

+ 22 - 0
common-data-redis-starter/pom.xml

@ -0,0 +1,22 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <groupId>com.yihu.jw</groupId>
        <artifactId>jkzl-start</artifactId>
        <version>1.0.0</version>
        <relativePath>../pom.xml</relativePath>
    </parent>
    <artifactId>common-data-redis-starter</artifactId>
    <version>1.0.0</version>
    <dependencies>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-data-redis</artifactId>
        </dependency>
    </dependencies>
</project>

+ 15 - 0
common-data-redis-starter/src/main/resources/template.yml

@ -0,0 +1,15 @@
spring:
  redis:
    host: 10.95.22.142 # ip
    port: 6380 # 端口
    password: jkzlehr #密码
    database: 0 # 默认使用DB0
    timeout: 0 # 连接超时时间(毫秒)
      #sentinel:
      #  master: # Name of Redis server.
      #  nodes: # Comma-separated list of host:port pairs.
    pool: ##连接池配置
      max-active: 8 # 连接池最大连接数(使用负值表示没有限制)
      max-idle: 8 # 连接池中的最大空闲连接
      max-wait: -1 # 连接池最大阻塞等待时间(使用负值表示没有限制)
      min-idle: 1 # 连接池中的最小空闲连接

+ 37 - 0
common-data-solr-starter/pom.xml

@ -0,0 +1,37 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <groupId>com.yihu.jw</groupId>
        <artifactId>jkzl-start</artifactId>
        <version>1.0.0</version>
        <relativePath>../pom.xml</relativePath>
    </parent>
    <artifactId>common-data-solr-starter</artifactId>
    <version>1.0.0</version>
    <dependencies>
        <dependency>
            <groupId>joda-time</groupId>
            <artifactId>joda-time</artifactId>
        </dependency>
        <dependency>
            <groupId>org.apache.commons</groupId>
            <artifactId>commons-lang3</artifactId>
        </dependency>
        <dependency>
            <groupId>org.apache.solr</groupId>
            <artifactId>solr-core</artifactId>
        </dependency>
        <dependency>
            <groupId>org.apache.solr</groupId>
            <artifactId>solr-solrj</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.data</groupId>
            <artifactId>spring-data-solr</artifactId>
        </dependency>
    </dependencies>
</project>

+ 133 - 0
common-data-solr-starter/src/main/java/com/yihu/base/SolrAdmin.java

@ -0,0 +1,133 @@
package com.yihu.base;
import org.apache.commons.collections.map.HashedMap;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.client.solrj.response.UpdateResponse;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.SolrInputDocument;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.config.ConfigurableBeanFactory;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Service;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
 * Solr底层操作类
 *
 * @author hzp
 * @version 1.0
 * @created 2017.05.06
 */
@Service
@Scope(ConfigurableBeanFactory.SCOPE_PROTOTYPE)
public class SolrAdmin {

    @Autowired
    SolrPool pool;

    /************************* basic operations **************************************************/

    /**
     * Creates a single index document in the given core.
     * Date values must already be formatted the way the Solr schema expects.
     *
     * @return true when Solr reports success (status == 0)
     */
    public Boolean create(String core, Map<String, Object> map) throws Exception {
        SolrClient client = pool.getConnection(core);
        try {
            SolrInputDocument doc = new SolrInputDocument();
            for (Map.Entry<String, Object> entry : map.entrySet()) {
                doc.addField(entry.getKey(), entry.getValue());
            }
            UpdateResponse re = client.add(doc);
            client.commit();
            // SolrJ reports success with status 0; the original test was inverted.
            if (re.getStatus() == 0) {
                System.out.print("create index cost " + re.getQTime());
                return true;
            } else {
                System.out.print("create index failed!");
                return false;
            }
        } finally {
            pool.close(core); // 释放连接 release the connection even on error
        }
    }

    /**
     * Updates a single field of the documents matching uniqueKey:uniqueKeyValue.
     */
    public Boolean update(String core, String uniqueKey, String uniqueKeyValue, String key, Object value) throws Exception {
        Map<String, Object> map = new HashedMap();
        map.put(key, value);
        return update(core, uniqueKey + ":" + uniqueKeyValue, map);
    }

    /**
     * Updates multiple fields of every document matching keyQuery.
     * Documents are re-added keyed by their "rowkey" field.
     *
     * NOTE(review): assumes the schema's unique key field is "rowkey" — confirm.
     *
     * @return true on success or when no document matched; false when Solr
     *         reports a non-zero status
     */
    public Boolean update(String core, String keyQuery, Map<String, Object> map) throws Exception {
        SolrClient client = pool.getConnection(core);
        try {
            QueryResponse qr = client.query(new SolrQuery(keyQuery));
            SolrDocumentList docs = qr.getResults();
            if (docs != null && docs.size() > 0) {
                List<SolrInputDocument> solrList = new ArrayList<>();
                for (int i = 0; i < docs.size(); i++) {
                    SolrDocument doc = docs.get(i);
                    SolrInputDocument newItem = new SolrInputDocument();
                    newItem.addField("rowkey", doc.get("rowkey"));
                    for (Map.Entry<String, Object> entry : map.entrySet()) {
                        newItem.addField(entry.getKey(), entry.getValue());
                    }
                    solrList.add(newItem);
                }
                UpdateResponse re = client.add(solrList);
                client.commit();
                // Status 0 means success (original condition was inverted).
                if (re.getStatus() == 0) {
                    System.out.print("update index cost " + re.getQTime());
                    return true;
                } else {
                    System.out.print("update index failed!");
                    return false;
                }
            } else {
                System.out.print("Null result!");
            }
            return true;
        } finally {
            pool.close(core); // 释放连接 release the connection even on error
        }
    }

    /**
     * Deletes every document matching keyQuery.
     *
     * @return true when Solr reports success (status == 0)
     */
    public Boolean delete(String core, String keyQuery) throws Exception {
        SolrClient client = pool.getConnection(core);
        try {
            UpdateResponse de = client.deleteByQuery(keyQuery);
            client.commit();
            // Status 0 means success (original condition was inverted).
            if (de.getStatus() == 0) {
                System.out.print("delete index cost " + de.getQTime());
                return true;
            } else {
                System.out.print("delete index failed!");
                return false;
            }
        } finally {
            pool.close(core); // 释放连接 release the connection even on error
        }
    }
}

+ 31 - 0
common-data-solr-starter/src/main/java/com/yihu/base/SolrContext.java

@ -0,0 +1,31 @@
package com.yihu.base;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.solr.core.SolrTemplate;
/**
 * Solr配置。仅支持SolrCloud,不支持单核模式。
 *
 * @author Sand
 * @version 1.0
 * @created 2016.04.18 18:47
 */
@Configuration
public class SolrContext {

    /** ZooKeeper ensemble address for the SolrCloud cluster. */
    @Value("${spring.data.solr.zk-host}")
    String zkHost;

    /** Cloud-mode SolrJ client; standalone single-core mode is not supported. */
    @Bean
    public SolrClient solrClient() {
        CloudSolrClient cloudClient = new CloudSolrClient(zkHost);
        return cloudClient;
    }

    /** Spring Data Solr template backed by the cloud client above. */
    @Bean
    public SolrTemplate solrTemplate(SolrClient solrClient) throws Exception {
        SolrTemplate template = new SolrTemplate(solrClient);
        return template;
    }
}

+ 416 - 0
common-data-solr-starter/src/main/java/com/yihu/base/SolrHelper.java

@ -0,0 +1,416 @@
package com.yihu.base;
import org.apache.commons.lang3.StringUtils;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.response.*;
import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.params.FacetParams;
import org.apache.solr.common.util.NamedList;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.config.ConfigurableBeanFactory;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Service;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Solr底层查询类
 *
 * @author hzp
 * @version 1.0
 * @created 2016.04.26
 */
@Service
@Scope(ConfigurableBeanFactory.SCOPE_PROTOTYPE)
public class SolrHelper {

    @Autowired
    SolrPool pool;

    private final static String ASC = "asc";

    // QTime (ms) reported by Solr for the most recent query.
    private int qtime;

    private int getQtime() {
        return qtime;
    }

    /**
     * Builds a base query: main query q (falls back to *:* when blank) plus an
     * optional filter query fq. Shared by all public methods below.
     */
    private SolrQuery buildQuery(String q, String fq) {
        SolrQuery query = new SolrQuery();
        if (null != q && !q.equals("")) {
            query.setQuery(q);
        } else {
            query.setQuery("*:*");
        }
        if (null != fq && !fq.equals("")) {
            query.setFilterQueries(fq);
        }
        return query;
    }

    /************************** query *****************************************************/

    /**
     * Simple query without a filter query.
     */
    public SolrDocumentList query(String tablename, String q, Map<String, String> sort, long start, long rows) throws Exception {
        return query(tablename, q, null, sort, start, rows);
    }

    /**
     * Solr query.
     *
     * @param q     query string (blank means *:*)
     * @param fq    filter query, may be null
     * @param sort  field -> "asc"/"desc" sort directions
     * @param start first row to return
     * @param rows  number of rows to return
     * @return matching documents
     */
    public SolrDocumentList query(String core, String q, String fq, Map<String, String> sort, long start, long rows) throws Exception {
        SolrClient conn = pool.getConnection(core);
        try {
            SolrQuery query = buildQuery(q, fq);
            // Fails fast when start/rows exceed int range (the original's
            // Integer.parseInt(String.valueOf(...)) also threw in that case).
            query.setStart(Math.toIntExact(start));
            query.setRows(Math.toIntExact(rows));
            if (sort != null) {
                for (Map.Entry<String, String> entry : sort.entrySet()) {
                    // The original compared strings with ==, which was redundant
                    // next to equals(); a case-insensitive equals is sufficient.
                    if (ASC.equalsIgnoreCase(entry.getValue())) {
                        query.addSort(entry.getKey(), SolrQuery.ORDER.asc);
                    } else {
                        query.addSort(entry.getKey(), SolrQuery.ORDER.desc);
                    }
                }
            }
            QueryResponse rsp = conn.query(query);
            qtime = rsp.getQTime();
            System.out.print("Solr Query Time:" + qtime);
            return rsp.getResults();
        } finally {
            pool.close(core); // release the per-core client even on error
        }
    }

    /******************************* count ***********************************************/

    /**
     * Total number of documents matching q.
     */
    public long count(String core, String q) throws Exception {
        return count(core, q, null);
    }

    /**
     * Total number of documents matching q (+ optional fq).
     */
    public long count(String core, String q, String fq) throws Exception {
        SolrClient conn = pool.getConnection(core);
        try {
            SolrQuery query = buildQuery(q, fq);
            query.setStart(0);
            query.setRows(0); // rows=0: only numFound is needed, not documents
            QueryResponse rsp = conn.query(query);
            qtime = rsp.getQTime();
            System.out.print("Solr Count Time:" + qtime);
            // numFound is already exact on the first response; the original's
            // second query positioned at numFound was redundant.
            return rsp.getResults().getNumFound();
        } finally {
            pool.close(core);
        }
    }

    /**
     * Facet count over a single field (start is the 0-based facet offset).
     * Only buckets with a count greater than 0 are returned.
     */
    public Map<String, Long> groupCount(String core, String q, String fq, String groupField, int start, int rows) throws Exception {
        SolrClient conn = pool.getConnection(core);
        try {
            SolrQuery query = buildQuery(q, fq);
            query.setFacet(true); // facet=on
            query.setRows(0);
            query.addFacetField(groupField);
            query.setFacetLimit(rows); // cap buckets returned per request
            query.set(FacetParams.FACET_OFFSET, start);
            query.setFacetMissing(false); // do not count documents missing the field
            query.setFacetMinCount(0);
            QueryResponse rsp = conn.query(query);
            qtime = rsp.getQTime();
            System.out.print("Solr Group Time:" + qtime);
            Map<String, Long> rmap = new HashMap<String, Long>();
            for (FacetField.Count count : rsp.getFacetField(groupField).getValues()) {
                if (count.getCount() > 0) {
                    rmap.put(count.getName(), count.getCount());
                }
            }
            return rmap;
        } finally {
            pool.close(core);
        }
    }

    /**
     * Independent facet counts over several fields at once (up to 1000 buckets each).
     */
    public List<FacetField> groupCount(String core, String q, String fq, String[] groups) throws Exception {
        SolrClient conn = pool.getConnection(core);
        try {
            SolrQuery query = buildQuery(q, fq);
            query.setFacet(true); // facet=on
            query.setRows(0);
            query.addFacetField(groups);
            query.setFacetLimit(1000); // cap buckets returned per field
            query.set(FacetParams.FACET_OFFSET, 0);
            // true: also report a bucket for documents missing the field
            // (the original comment claimed the opposite of what the code does).
            query.setFacetMissing(true);
            query.setFacetMinCount(0);
            QueryResponse rsp = conn.query(query);
            qtime = rsp.getQTime();
            System.out.print("Solr Group Time:" + qtime);
            return rsp.getFacetFields();
        } finally {
            pool.close(core);
        }
    }

    /**
     * Pivot (nested) facet counts across comma-separated fields, e.g. "a,b".
     *
     * NOTE(review): the start parameter is accepted but never applied, exactly
     * as in the original — confirm whether a facet offset was intended.
     *
     * @return the first pivot's buckets, or null when Solr returns none
     */
    public List<PivotField> groupCountMult(String core, String q, String fq, String groupFields, int start, int rows) throws Exception {
        SolrClient conn = pool.getConnection(core);
        try {
            SolrQuery query = buildQuery(q, fq);
            query.setFacet(true); // facet=on
            query.setRows(0);
            query.addFacetPivotField(groupFields);
            query.setFacetLimit(rows); // cap buckets returned per request
            query.setFacetMissing(false); // do not count documents missing the field
            query.setFacetMinCount(0);
            QueryResponse rsp = conn.query(query);
            qtime = rsp.getQTime();
            System.out.print("Solr Group Time:" + qtime);
            NamedList<List<PivotField>> namedList = rsp.getFacetPivot();
            if (namedList != null && namedList.size() > 0) {
                return namedList.getVal(0);
            }
            return null;
        } finally {
            pool.close(core);
        }
    }

    /**************************** field statistics ******************************************/

    /**
     * Numeric statistics of statsField, grouped by groupField.
     *
     * @param core       core name
     * @param q          query string
     * @param statsField field to compute statistics for
     * @param groupField field to group the statistics by
     * @return per-bucket statistics, or null when Solr returns none
     */
    public List<FieldStatsInfo> getStats(String core, String q, String fq, String statsField, String groupField) throws Exception {
        SolrClient conn = pool.getConnection(core);
        try {
            SolrQuery query = buildQuery(q, fq);
            query.addGetFieldStatistics(statsField);
            query.addStatsFieldFacets(statsField, groupField);
            query.setRows(0);
            QueryResponse rsp = conn.query(query);
            qtime = rsp.getQTime();
            System.out.print("Solr Stats Time:" + qtime);
            Map<String, FieldStatsInfo> stats = rsp.getFieldStatsInfo();
            if (stats != null && stats.size() > 0) {
                Map<String, List<FieldStatsInfo>> map = stats.get(statsField).getFacets();
                if (map != null) {
                    return map.get(groupField);
                }
            }
            return null;
        } finally {
            pool.close(core);
        }
    }

    /**
     * Facet-query counts (facet.query).
     *
     * @param core       core name
     * @param facetQuery facet query expression
     */
    public Map<String, Integer> getFacetQuery(String core, String facetQuery) throws Exception {
        SolrClient conn = pool.getConnection(core);
        try {
            SolrQuery query = new SolrQuery();
            query.setQuery("*:*");
            query.setFacet(true);
            query.addFacetQuery(facetQuery);
            QueryResponse resp = conn.query(query);
            return resp.getFacetQuery();
        } finally {
            pool.close(core); // the original leaked the connection here
        }
    }

    /**
     * Single-field facet with paging, minimum-count and missing-bucket controls.
     */
    public FacetField getFacetField(String core, String facetField, String fq, int minCount, int start, int limit, boolean missing) throws Exception {
        SolrClient conn = pool.getConnection(core);
        try {
            SolrQuery query = new SolrQuery();
            query.setQuery("*:*");
            if (!StringUtils.isBlank(fq)) {
                query.setFilterQueries(fq);
            }
            query.setStart(start)
                    .setRows(0)
                    .setFacet(true)
                    .addFacetField(facetField)
                    .setFacetMinCount(minCount)
                    .setFacetLimit(limit)
                    .setFacetMissing(missing);
            QueryResponse resp = conn.query(query);
            return resp.getFacetField(facetField);
        } finally {
            pool.close(core); // the original leaked the connection here
        }
    }

    /**
     * Range facet over a date field between startTime and endTime with gap "grap".
     */
    public List<RangeFacet> getFacetDateRange(String core, String dateField, Date startTime, Date endTime, String grap, String fq) throws Exception {
        SolrClient conn = pool.getConnection(core);
        try {
            SolrQuery query = new SolrQuery();
            query.setQuery("*:*");
            if (!StringUtils.isBlank(fq)) {
                query.setFilterQueries(fq);
            }
            query.setRows(0)
                    .setFacet(true)
                    .addDateRangeFacet(dateField, startTime, endTime, grap);
            QueryResponse resp = conn.query(query);
            return resp.getFacetRanges();
        } finally {
            pool.close(core); // the original leaked the connection here
        }
    }

    /**
     * Range facet over a numeric field between start and end with gap "grap".
     */
    public List<RangeFacet> getFacetNumRange(String core, String numField, int start, int end, int grap, String fq) throws Exception {
        SolrClient conn = pool.getConnection(core);
        try {
            SolrQuery query = new SolrQuery();
            query.setQuery("*:*");
            if (!StringUtils.isBlank(fq)) {
                query.setFilterQueries(fq);
            }
            query.setRows(0)
                    .setFacet(true)
                    .addNumericRangeFacet(numField, start, end, grap);
            QueryResponse resp = conn.query(query);
            return resp.getFacetRanges();
        } finally {
            pool.close(core); // the original leaked the connection here
        }
    }
}

+ 52 - 0
common-data-solr-starter/src/main/java/com/yihu/base/SolrPool.java

@ -0,0 +1,52 @@
package com.yihu.base;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.beans.factory.config.ConfigurableBeanFactory;
import org.springframework.context.annotation.Scope;
import org.springframework.data.solr.server.support.MulticoreSolrClientFactory;
import org.springframework.stereotype.Service;
/**
 * Solr连接池
 * @author hzp
 * @version 1.0
 * @created 2016.04.26
 */
@Service
@Scope(ConfigurableBeanFactory.SCOPE_SINGLETON)
public class SolrPool {

    /** ZooKeeper ensemble address for the SolrCloud cluster. */
    @Value("${spring.data.solr.zk-host}")
    String zkHost;

    private MulticoreSolrClientFactory factory;

    /**
     * Lazily creates the shared client factory.
     *
     * Synchronized because this bean is a singleton used from multiple threads:
     * the original unsynchronized check-then-create could build two factories
     * concurrently and leak one CloudSolrClient.
     */
    protected synchronized MulticoreSolrClientFactory getFactory() {
        if (factory == null) {
            CloudSolrClient client = new CloudSolrClient(zkHost);
            factory = new MulticoreSolrClientFactory(client);
        }
        return factory;
    }

    /**
     * Obtains a client bound to the given core.
     */
    public SolrClient getConnection(String core) throws Exception {
        return getFactory().getSolrClient(core);
    }

    /**
     * Releases the per-core client from the factory.
     */
    public void close(String core) throws Exception {
        getFactory().removeSolrClient(core);
    }
}

+ 4 - 0
common-data-solr-starter/src/main/resources/template.yml

@ -0,0 +1,4 @@
spring:
  data:
    solr:
      zk-host: node1,node2,node3:2181/solr # ZooKeeper host address in the form HOST:PORT.

+ 31 - 0
common-logback-starter/pom.xml

@ -0,0 +1,31 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <groupId>com.yihu.jw</groupId>
        <artifactId>jkzl-start</artifactId>
        <version>1.0.0</version>
        <relativePath>../pom.xml</relativePath>
    </parent>
    <groupId>com.yihu.base</groupId>
    <artifactId>common-logback-starter</artifactId>
    <version>1.0.0</version>
    <dependencies>
        <dependency>
            <groupId>ch.qos.logback</groupId>
            <artifactId>logback-classic</artifactId>
        </dependency>
        <dependency>
            <groupId>ch.qos.logback</groupId>
            <artifactId>logback-core</artifactId>
        </dependency>
        <dependency>
            <groupId>ch.qos.logback</groupId>
            <artifactId>logback-access</artifactId>
        </dependency>
    </dependencies>
</project>

+ 34 - 0
common-logback-starter/src/main/resources/consoleAppender_logback_demo.xml

@ -0,0 +1,34 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- 这个是根配置文件,一定要有的
    scan:是当配置文件被修改后会被重新加载
    scanPeriod:设置监测配置文件是否有修改的时间间隔,如果没有给出时间单位,默认单位是毫秒。当scan为true时,此属性生效。默认的时间间隔为1分钟。
    debug:当此属性设置为true时,将打印出logback内部日志信息,实时查看logback运行状态。默认值为false。
-->
<configuration scan="true" scanPeriod="6000" debug="false">
    <root>
        <level value="INFO" />
    </root>
    <!--
    name:logger的名称
    level:输出级别是INFO
    additivity属性为false,表示此loger的打印信息不再向上级传递,是否继承父类的日志级别
    -->
    <logger name="demo1" level="INFO" additivity="false" >
        <appender-ref ref="dailyRollingFileAppender" />
    </logger>
    <!-- 演示按时间滚动的策略 -->
    <appender name="dailyRollingFileAppender" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <File>${logbase}/usercenter.log</File>
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            <!-- daily rollover -->
            <FileNamePattern>${logbase}/dailyRollingFileAppender.%d{yyyy-MM-dd}.log</FileNamePattern>
            <!-- 保留 30天数据,默认无限-->
            <maxHistory>30</maxHistory>
        </rollingPolicy>
        <encoder>
            <Pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{35} - %msg %n</Pattern>
        </encoder>
    </appender>
</configuration>

+ 28 - 0
common-logback-starter/src/main/resources/dailyRollingFileAppender_logback_demo.xml

@ -0,0 +1,28 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- 这个是根配置文件,一定要有的
    scan:是当配置文件被修改后会被重新加载
    scanPeriod:设置监测配置文件是否有修改的时间间隔,如果没有给出时间单位,默认单位是毫秒。当scan为true时,此属性生效。默认的时间间隔为1分钟。
    debug:当此属性设置为true时,将打印出logback内部日志信息,实时查看logback运行状态。默认值为false。
-->
<configuration scan="true" scanPeriod="6000" debug="false">
    <root>
        <level value="INFO"/>
    </root>
    <!--
    name:logger的名称
    level:输出级别是INFO
    additivity属性为false,表示此loger的打印信息不再向上级传递,是否继承父类的日志级别
    -->
    <logger name="demo1" level="INFO" additivity="false">
        <appender-ref ref="STDOUT"/>
    </logger>
    <!-- 输出到控制面板 -->
    <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
        <encoder>
            <pattern>%-4relative [%thread] %-5level %logger{35} - %msg %n</pattern>
        </encoder>
    </appender>
</configuration>

+ 35 - 0
common-quartz-starter/pom.xml

@ -0,0 +1,35 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <groupId>com.yihu.jw</groupId>
        <artifactId>jkzl-start</artifactId>
        <version>1.0.0</version>
        <relativePath>../pom.xml</relativePath>
    </parent>
    <artifactId>common-quartz-starter</artifactId>
    <version>1.0.0</version>
    <dependencies>
        <dependency>
            <groupId>org.springframework</groupId>
            <artifactId>spring-context</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework</groupId>
            <artifactId>spring-context-support</artifactId>
        </dependency>
        <dependency>
            <groupId>org.quartz-scheduler</groupId>
            <artifactId>quartz</artifactId>
        </dependency>
        <dependency>
            <groupId>org.quartz-scheduler</groupId>
            <artifactId>quartz-jobs</artifactId>
        </dependency>
    </dependencies>
</project>

+ 97 - 0
common-quartz-starter/readme.MD

@ -0,0 +1,97 @@
**项目必须是springboot工程
quartz 版本 2.3.0**
# 如何在项目中使用?
## 1.依赖common-quartz-starter工程
```
        <dependency>
            <groupId>com.yihu.jw</groupId>
            <artifactId>common-quartz-starter</artifactId>
            <version>${版本以项目中最新的版本为主}</version>
        </dependency>

```
## 2.在项目yml添加配置
    quartz:
      namespace: svr-demo ##quartz的命名空间,名称一样实现消费负载
      overwriteExistingJobs: true ##是否覆盖job
      
      
## 3.因为quartz使用的是数据库保存job的方式,所以spring Bean容器中必须要有javax.sql.DataSource数据源对象.同时库中要存在对应的表(建表sql在common-quartz 项目中)
## 4.以上配置均确认没问题后,可以在项目中用spring注入QuartzHelper工具类.
    
    @Autowired
    private QuartzHelper quartzHelper;
    
    
##     QuartzHelper方法说明
          根据cronString表达式添加任务到quartz框架中
        public void addJob(
            Class jobClass,  job类
            String cronString,    cronString表达式
            String jobKey, 任务ID
            Map<String, Object> params 需要传递的参数
            ) throws Exception {
            
        }
        
         删除quartz框架中任务
        public void removeJob(
            String jobKeyString  任务ID
        ) throws Exception {
        }
        
         根据任务ID判断任务是否存在
        public boolean isExistJob(
            String jobKey 任务ID
        ) throws SchedulerException {
          
        }
        
        立即执行任务。
        public void startNow(
            Class jobClass,  job类
            String id,  任务ID
            Map<String, Object> params 需要传递的参数
        ) throws Exception {
        }
        
        在指定时间点执行。
        public void startAt(
            Date time,  时间
            Class jobClass,job类
            String id, 任务ID
            Map<String, Object> params 需要传递的参数
        ) throws Exception {
        }
        
        
###         1.新增一个类实现 org.quartz.job接口
        例如:   
        @Component
        @Scope("prototype")
        @DisallowConcurrentExecution//防止到了执行时间点前一任务还在执行中,但是这时有空闲的线程,那么马上又会执行,这样一来就会存在同一job被并行执行
        public class FollowUpJob implements Job {
            @Autowired
            private WlyyQuotaResultDao wlyyQuotaResultDao;//指标结果Dao
            @Override
            public void execute(JobExecutionContext context)
                    throws JobExecutionException {
            }
        }
        
        2.在方法中注入QuartzHelper对象并且把例子中的任务添加到quartz框架中  
        
        
        quartzHelper.startNow(FollowUpJob.class, "followUpjobID", params);
    
    

+ 25 - 0
common-quartz-starter/src/main/java/com/yihu/base/config/quartz/DefaultJobFactory.java

@ -0,0 +1,25 @@
package com.yihu.base.config.quartz;
import org.quartz.spi.TriggerFiredBundle;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.config.AutowireCapableBeanFactory;
import org.springframework.scheduling.quartz.AdaptableJobFactory;
import org.springframework.stereotype.Component;
/**
 * Created by Administrator on 2016.10.12.
 * 為了讓quartz種可以使用Spring的注入
 */
@Component("jobFactory")
public class DefaultJobFactory extends AdaptableJobFactory {

    @Autowired
    private AutowireCapableBeanFactory capableBeanFactory;

    /**
     * Creates the Quartz job via the parent factory, then runs Spring's
     * autowiring over the fresh instance so that @Autowired fields inside
     * Job classes are populated.
     */
    @Override
    protected Object createJobInstance(TriggerFiredBundle bundle) throws Exception {
        final Object job = super.createJobInstance(bundle);
        capableBeanFactory.autowireBean(job);
        return job;
    }
}

+ 63 - 0
common-quartz-starter/src/main/java/com/yihu/base/config/quartz/DefaultSchedulerConfig.java

@ -0,0 +1,63 @@
package com.yihu.base.config.quartz;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.beans.factory.config.PropertiesFactoryBean;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.core.io.ClassPathResource;
import org.springframework.scheduling.quartz.SchedulerFactoryBean;
import javax.sql.DataSource;
import java.io.IOException;
import java.util.Properties;
/**
 * Created by chenweida on 2016/2/3.
 */
@Configuration
public class DefaultSchedulerConfig {

    @Autowired
    private ApplicationContext applicationContext;
    @Autowired
    private DefaultJobFactory jobFactory;
    @Autowired
    private DataSource dataSource;

    // Scheduler namespace; instances sharing a namespace load-balance jobs.
    // Default moved into the placeholder: the original field initializer was
    // always overwritten by @Value and startup failed when the property was absent.
    @Value("${quartz.namespace:default_namespace}")
    private String namespace;

    // Whether newly registered jobs replace existing ones with the same key.
    @Value("${quartz.overwriteExistingJobs:true}")
    private Boolean overwriteExistingJobs;

    /**
     * Builds the Quartz scheduler: Spring-aware job factory, JDBC job store
     * (jobs persisted via the injected DataSource) and quartz.properties settings.
     */
    @Bean("SchedulerFactoryBean")
    SchedulerFactoryBean schedulerFactoryBean() throws IOException {
        SchedulerFactoryBean bean = new SchedulerFactoryBean();
        bean.setJobFactory(jobFactory);
        bean.setApplicationContext(this.applicationContext);
        bean.setOverwriteExistingJobs(overwriteExistingJobs);
        bean.setStartupDelay(20); // delayed start so the context finishes booting first
        bean.setAutoStartup(true);
        bean.setDataSource(dataSource); // jobs are stored in the database
        bean.setBeanName(namespace);
        bean.setQuartzProperties(quartzProperties());
        return bean;
    }

    /**
     * Loads quartz.properties from the classpath.
     *
     * @return the parsed Quartz configuration
     * @throws IOException when the file cannot be read
     */
    @Bean
    public Properties quartzProperties() throws IOException {
        PropertiesFactoryBean propertiesFactoryBean = new PropertiesFactoryBean();
        propertiesFactoryBean.setLocation(new ClassPathResource("/quartz.properties"));
        propertiesFactoryBean.afterPropertiesSet();
        return propertiesFactoryBean.getObject();
    }
}

+ 110 - 0
common-quartz-starter/src/main/java/com/yihu/base/config/quartz/QuartzHelper.java

@ -0,0 +1,110 @@
package com.yihu.base.config.quartz;
import org.quartz.*;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.scheduling.quartz.SchedulerFactoryBean;
import org.springframework.stereotype.Component;
import javax.annotation.PostConstruct;
import java.util.Date;
import java.util.Map;
import static org.quartz.SimpleScheduleBuilder.simpleSchedule;
@Component("quartzHelper")
public class QuartzHelper {

    @Autowired
    private SchedulerFactoryBean schedulerFactoryBean;

    private Scheduler scheduler = null;

    /**
     * Obtains and starts the scheduler once this bean is constructed.
     */
    @PostConstruct
    public void init() {
        try {
            scheduler = schedulerFactoryBean.getScheduler();
            scheduler.start();
        } catch (SchedulerException e) {
            // NOTE(review): a failure here leaves `scheduler` null and every later
            // call will NPE — consider failing fast instead of only printing.
            e.printStackTrace();
        }
    }

    /**
     * Registers a cron-scheduled job.
     *
     * @param jobClass   job implementation class
     * @param cronString cron expression driving the trigger
     * @param jobKey     key used to build the job/trigger identities
     * @param params     values copied into the job's JobDataMap
     * @throws Exception if the cron expression is invalid or scheduling fails
     */
    public void addJob(Class jobClass, String cronString, String jobKey,
                       Map<String, Object> params) throws Exception {
        if (!CronExpression.isValidExpression(cronString)) {
            throw new Exception("cronExpression is not a valid Expression");
        }
        JobDetail job = JobBuilder.newJob(jobClass)
                .withIdentity("job-id:" + jobKey, "job-group:" + jobKey)
                .build();
        JobDataMap jobDataMap = job.getJobDataMap();
        jobDataMap.putAll(params);
        CronTrigger trigger = TriggerBuilder
                .newTrigger()
                .withIdentity("trigger-name:" + jobKey,
                        "trigger-group:" + jobKey)
                .withSchedule(CronScheduleBuilder.cronSchedule(cronString))
                .build();
        // Propagate SchedulerException to the caller: the method declares
        // `throws Exception`, and the previous catch-and-print left callers
        // believing the job was scheduled when it was not.
        scheduler.scheduleJob(job, trigger);
        scheduler.start();
    }

    /**
     * Removes the trigger and job registered under the given key.
     *
     * @param jobKeyString the key the job was registered with
     * @throws Exception if unscheduling/deleting fails
     */
    public void removeJob(String jobKeyString) throws Exception {
        TriggerKey triggerKey = new TriggerKey("trigger-name:" + jobKeyString,
                "trigger-group:" + jobKeyString);
        // Bug fix: JobKey(name, group) — the original swapped the arguments,
        // so deleteJob() never matched the identity created by addJob().
        JobKey jobKey = new JobKey("job-id:" + jobKeyString, "job-group:" + jobKeyString);
        scheduler.pauseTrigger(triggerKey);  // pause the trigger
        scheduler.unscheduleJob(triggerKey); // remove the trigger
        scheduler.deleteJob(jobKey);         // delete the job
    }

    /**
     * @param jobKey the key the job was registered with
     * @return true when a job created with the given key exists in the scheduler
     * @throws SchedulerException on scheduler access failure
     */
    public boolean isExistJob(String jobKey) throws SchedulerException {
        return scheduler.checkExists(new JobKey("job-id:" + jobKey, "job-group:" + jobKey));
    }

    /**
     * Runs the job immediately, once.
     *
     * @param jobClass job implementation class
     * @param id       key used to build the job/trigger identities
     * @param params   values copied into the job's JobDataMap (may be null)
     * @throws Exception if scheduling fails
     */
    public void startNow(Class jobClass, String id, Map<String, Object> params) throws Exception {
        startAt(new Date(), jobClass, id, params);
    }

    /**
     * Runs the job once at the given time.
     *
     * @param time     fire time
     * @param jobClass job implementation class
     * @param id       key used to build the job/trigger identities
     * @param params   values copied into the job's JobDataMap (may be null)
     * @throws Exception if scheduling fails
     */
    public void startAt(Date time, Class jobClass, String id, Map<String, Object> params) throws Exception {
        JobDetail job = JobBuilder.newJob(jobClass)
                .withIdentity("job-id:" + id, "job-group:" + id)
                .build();
        JobDataMap jobDataMap = job.getJobDataMap();
        if (null != params) {
            jobDataMap.putAll(params);
        }
        // Bug fix: use the same trigger-identity scheme as addJob()/removeJob().
        // The original "trigger-id:"/"group-group:" prefixes made one-shot
        // triggers impossible to remove through removeJob().
        SimpleTrigger trigger = TriggerBuilder.newTrigger()
                .withIdentity("trigger-name:" + id, "trigger-group:" + id)
                .startAt(time)
                .withSchedule(simpleSchedule().withIntervalInSeconds(10).withRepeatCount(0).withMisfireHandlingInstructionFireNow())
                .build();
        scheduler.scheduleJob(job, trigger);
        scheduler.start();
    }
}

+ 3 - 0
common-quartz-starter/src/main/resources/demo.yml

@ -0,0 +1,3 @@
quartz:
  namespace: svr-demo ##quartz的命名空间,名称一样实现消费负载
  overwriteExistingJobs: true ##是否覆盖job

+ 41 - 0
common-quartz-starter/src/main/resources/quartz.properties

@ -0,0 +1,41 @@
# Default Properties file for use by StdSchedulerFactory
# to create a Quartz Scheduler Instance, if a different
# properties file is not explicitly specified.
#
 
org.quartz.scheduler.instanceName: DefaultQuartzScheduler
org.quartz.scheduler.rmi.export: false
org.quartz.scheduler.rmi.proxy: false
org.quartz.scheduler.wrapJobExecutionInUserTransaction: false
 
org.quartz.threadPool.class: org.quartz.simpl.SimpleThreadPool
org.quartz.threadPool.threadCount: 20
org.quartz.threadPool.threadPriority: 5
org.quartz.threadPool.threadsInheritContextClassLoaderOfInitializingThread: true
 
org.quartz.jobStore.misfireThreshold: 60000
 
#============================================================================
# Configure JobStore
#============================================================================
 
# RAM
# org.quartz.jobStore.class: org.quartz.simpl.RAMJobStore
# Configure JobStore Cluster
org.quartz.jobStore.class:org.quartz.impl.jdbcjobstore.JobStoreTX
org.quartz.jobStore.driverDelegateClass:org.quartz.impl.jdbcjobstore.StdJDBCDelegate
# prefix of the quartz tables in the datasource
org.quartz.jobStore.tablePrefix:QRTZ_
#org.quartz.jobStore.dataSource:qzDS
#
##============================================================================
## Configure Datasources
##============================================================================
##datasource
#org.quartz.dataSource.qzDS.driver: com.mysql.jdbc.Driver
#org.quartz.dataSource.qzDS.URL: jdbc:mysql://172.19.103.85/wlyy?useUnicode=true&characterEncoding=utf-8&autoReconnect=true
#org.quartz.dataSource.qzDS.user: root
#org.quartz.dataSource.qzDS.password: 123456
org.quartz.jobGroupName = RS_JOBGROUP_NAME
org.quartz.triggerGroupName = RS_TRIGGERGROUP_NAME

+ 184 - 0
common-quartz-starter/src/main/sql/QRTZ_BLOB_TRIGGERS.sql

@ -0,0 +1,184 @@
/*
Navicat MySQL Data Transfer
Source Server         : 厦门i健康-开发库
Source Server Version : 50629
Source Host           : 172.19.103.77:3306
Source Database       : wlyy
Target Server Type    : MYSQL
Target Server Version : 50629
File Encoding         : 65001
Date: 2017-11-03 10:10:41
*/
SET FOREIGN_KEY_CHECKS=0;

-- Table names are upper-case throughout: the accompanying quartz.properties
-- configures tablePrefix QRTZ_, and on case-sensitive MySQL installations
-- (lower_case_table_names=0) the original lower-case CREATE names neither
-- matched Quartz's queries nor the upper-case DROP statements on re-runs.
-- Group/name columns use prefix indexes because utf8 varchar(200) keys
-- exceed InnoDB's 767-byte index limit.

-- ----------------------------
-- Table structure for QRTZ_BLOB_TRIGGERS
-- ----------------------------
DROP TABLE IF EXISTS `QRTZ_BLOB_TRIGGERS`;
CREATE TABLE `QRTZ_BLOB_TRIGGERS` (
  `SCHED_NAME` varchar(120) NOT NULL,
  `TRIGGER_NAME` varchar(100) NOT NULL,
  `TRIGGER_GROUP` varchar(100) NOT NULL,
  `BLOB_DATA` blob,
  PRIMARY KEY (`SCHED_NAME`,`TRIGGER_NAME`,`TRIGGER_GROUP`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT;

-- ----------------------------
-- Table structure for QRTZ_CALENDARS
-- ----------------------------
DROP TABLE IF EXISTS `QRTZ_CALENDARS`;
CREATE TABLE `QRTZ_CALENDARS` (
  `SCHED_NAME` varchar(120) NOT NULL,
  `CALENDAR_NAME` varchar(200) NOT NULL,
  `CALENDAR` blob NOT NULL,
  PRIMARY KEY (`SCHED_NAME`,`CALENDAR_NAME`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT;

-- ----------------------------
-- Table structure for QRTZ_CRON_TRIGGERS
-- ----------------------------
DROP TABLE IF EXISTS `QRTZ_CRON_TRIGGERS`;
CREATE TABLE `QRTZ_CRON_TRIGGERS` (
  `SCHED_NAME` varchar(120) NOT NULL,
  `TRIGGER_NAME` varchar(200) NOT NULL,
  `TRIGGER_GROUP` varchar(200) NOT NULL,
  `CRON_EXPRESSION` varchar(200) NOT NULL,
  `TIME_ZONE_ID` varchar(80) DEFAULT NULL,
  PRIMARY KEY (`SCHED_NAME`,`TRIGGER_NAME`,`TRIGGER_GROUP`(190))
) ENGINE=InnoDB DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT;

-- ----------------------------
-- Table structure for QRTZ_FIRED_TRIGGERS
-- ----------------------------
DROP TABLE IF EXISTS `QRTZ_FIRED_TRIGGERS`;
CREATE TABLE `QRTZ_FIRED_TRIGGERS` (
  `SCHED_NAME` varchar(120) NOT NULL,
  `ENTRY_ID` varchar(95) NOT NULL,
  `TRIGGER_NAME` varchar(200) NOT NULL,
  `TRIGGER_GROUP` varchar(200) NOT NULL,
  `INSTANCE_NAME` varchar(200) NOT NULL,
  `FIRED_TIME` bigint(13) NOT NULL,
  `PRIORITY` int(11) NOT NULL,
  `STATE` varchar(16) NOT NULL,
  `JOB_NAME` varchar(200) DEFAULT NULL,
  `JOB_GROUP` varchar(200) DEFAULT NULL,
  `IS_NONCONCURRENT` varchar(1) DEFAULT NULL,
  `REQUESTS_RECOVERY` varchar(1) DEFAULT NULL,
  `SCHED_TIME` bigint(13) DEFAULT NULL,
  PRIMARY KEY (`SCHED_NAME`,`ENTRY_ID`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT;

-- ----------------------------
-- Table structure for QRTZ_JOB_DETAILS
-- ----------------------------
DROP TABLE IF EXISTS `QRTZ_JOB_DETAILS`;
CREATE TABLE `QRTZ_JOB_DETAILS` (
  `SCHED_NAME` varchar(120) NOT NULL,
  `JOB_NAME` varchar(200) NOT NULL,
  `JOB_GROUP` varchar(200) NOT NULL,
  `DESCRIPTION` varchar(250) DEFAULT NULL,
  `JOB_CLASS_NAME` varchar(250) NOT NULL,
  `IS_DURABLE` varchar(1) NOT NULL,
  `IS_NONCONCURRENT` varchar(1) NOT NULL,
  `IS_UPDATE_DATA` varchar(1) NOT NULL,
  `REQUESTS_RECOVERY` varchar(1) NOT NULL,
  `JOB_DATA` blob,
  PRIMARY KEY (`SCHED_NAME`,`JOB_NAME`,`JOB_GROUP`(150))
) ENGINE=InnoDB DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT;

-- ----------------------------
-- Table structure for QRTZ_LOCKS
-- ----------------------------
DROP TABLE IF EXISTS `QRTZ_LOCKS`;
CREATE TABLE `QRTZ_LOCKS` (
  `SCHED_NAME` varchar(120) NOT NULL,
  `LOCK_NAME` varchar(40) NOT NULL,
  PRIMARY KEY (`SCHED_NAME`,`LOCK_NAME`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT;

-- ----------------------------
-- Table structure for QRTZ_PAUSED_TRIGGER_GRPS
-- ----------------------------
DROP TABLE IF EXISTS `QRTZ_PAUSED_TRIGGER_GRPS`;
CREATE TABLE `QRTZ_PAUSED_TRIGGER_GRPS` (
  `SCHED_NAME` varchar(120) NOT NULL,
  `TRIGGER_GROUP` varchar(200) NOT NULL,
  PRIMARY KEY (`SCHED_NAME`,`TRIGGER_GROUP`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT;

-- ----------------------------
-- Table structure for QRTZ_SCHEDULER_STATE
-- ----------------------------
DROP TABLE IF EXISTS `QRTZ_SCHEDULER_STATE`;
CREATE TABLE `QRTZ_SCHEDULER_STATE` (
  `SCHED_NAME` varchar(120) NOT NULL,
  `INSTANCE_NAME` varchar(200) NOT NULL,
  `LAST_CHECKIN_TIME` bigint(13) NOT NULL,
  `CHECKIN_INTERVAL` bigint(13) NOT NULL,
  PRIMARY KEY (`SCHED_NAME`,`INSTANCE_NAME`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT;

-- ----------------------------
-- Table structure for QRTZ_SIMPLE_TRIGGERS
-- ----------------------------
DROP TABLE IF EXISTS `QRTZ_SIMPLE_TRIGGERS`;
CREATE TABLE `QRTZ_SIMPLE_TRIGGERS` (
  `SCHED_NAME` varchar(120) NOT NULL,
  `TRIGGER_NAME` varchar(200) NOT NULL,
  `TRIGGER_GROUP` varchar(200) NOT NULL,
  `REPEAT_COUNT` bigint(7) NOT NULL,
  `REPEAT_INTERVAL` bigint(12) NOT NULL,
  `TIMES_TRIGGERED` bigint(10) NOT NULL,
  PRIMARY KEY (`SCHED_NAME`,`TRIGGER_NAME`,`TRIGGER_GROUP`(150))
) ENGINE=InnoDB DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT;

-- ----------------------------
-- Table structure for QRTZ_SIMPROP_TRIGGERS
-- ----------------------------
DROP TABLE IF EXISTS `QRTZ_SIMPROP_TRIGGERS`;
CREATE TABLE `QRTZ_SIMPROP_TRIGGERS` (
  `SCHED_NAME` varchar(120) NOT NULL,
  `TRIGGER_NAME` varchar(200) NOT NULL,
  `TRIGGER_GROUP` varchar(200) NOT NULL,
  `STR_PROP_1` varchar(512) DEFAULT NULL,
  `STR_PROP_2` varchar(512) DEFAULT NULL,
  `STR_PROP_3` varchar(512) DEFAULT NULL,
  `INT_PROP_1` int(11) DEFAULT NULL,
  `INT_PROP_2` int(11) DEFAULT NULL,
  `LONG_PROP_1` bigint(20) DEFAULT NULL,
  `LONG_PROP_2` bigint(20) DEFAULT NULL,
  `DEC_PROP_1` decimal(13,4) DEFAULT NULL,
  `DEC_PROP_2` decimal(13,4) DEFAULT NULL,
  `BOOL_PROP_1` varchar(1) DEFAULT NULL,
  `BOOL_PROP_2` varchar(1) DEFAULT NULL,
  PRIMARY KEY (`SCHED_NAME`,`TRIGGER_NAME`,`TRIGGER_GROUP`(150))
) ENGINE=InnoDB DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT;

-- ----------------------------
-- Table structure for QRTZ_TRIGGERS
-- ----------------------------
DROP TABLE IF EXISTS `QRTZ_TRIGGERS`;
CREATE TABLE `QRTZ_TRIGGERS` (
  `SCHED_NAME` varchar(120) NOT NULL,
  `TRIGGER_NAME` varchar(200) NOT NULL,
  `TRIGGER_GROUP` varchar(200) NOT NULL,
  `JOB_NAME` varchar(200) NOT NULL,
  `JOB_GROUP` varchar(200) NOT NULL,
  `DESCRIPTION` varchar(250) DEFAULT NULL,
  `NEXT_FIRE_TIME` bigint(13) DEFAULT NULL,
  `PREV_FIRE_TIME` bigint(13) DEFAULT NULL,
  `PRIORITY` int(11) DEFAULT NULL,
  `TRIGGER_STATE` varchar(16) NOT NULL,
  `TRIGGER_TYPE` varchar(8) NOT NULL,
  `START_TIME` bigint(13) NOT NULL,
  `END_TIME` bigint(13) DEFAULT NULL,
  `CALENDAR_NAME` varchar(200) DEFAULT NULL,
  `MISFIRE_INSTR` smallint(2) DEFAULT NULL,
  `JOB_DATA` blob,
  PRIMARY KEY (`SCHED_NAME`,`TRIGGER_NAME`,`TRIGGER_GROUP`(150)),
  KEY `SCHED_NAME` (`SCHED_NAME`,`JOB_NAME`,`JOB_GROUP`(150)) USING BTREE
) ENGINE=InnoDB DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT;

+ 98 - 0
common-security-starter/pom.xml

@ -0,0 +1,98 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <groupId>com.yihu.jw</groupId>
        <artifactId>jkzl-start</artifactId>
        <version>1.0.0</version>
        <relativePath>../pom.xml</relativePath>
    </parent>
    <groupId>com.yihu.base</groupId>
    <artifactId>common-security-starter</artifactId>
    <version>1.0.0</version>
    <dependencies>
        <dependency>
            <groupId>org.springframework.cloud</groupId>
            <artifactId>spring-cloud-starter-oauth2</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-configuration-processor</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-security</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-autoconfigure</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-data-redis</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-aop</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.security</groupId>
            <artifactId>spring-security-core</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.security</groupId>
            <artifactId>spring-security-web</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.security</groupId>
            <artifactId>spring-security-config</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.security.oauth</groupId>
            <artifactId>spring-security-oauth2</artifactId>
        </dependency>
        <dependency>
            <groupId>commons-collections</groupId>
            <artifactId>commons-collections</artifactId>
        </dependency>
        <dependency>
            <groupId>commons-codec</groupId>
            <artifactId>commons-codec</artifactId>
        </dependency>
        <dependency>
            <groupId>net.sf.json-lib</groupId>
            <artifactId>json-lib</artifactId>
        </dependency>
        <dependency>
            <groupId>commons-lang</groupId>
            <artifactId>commons-lang</artifactId>
            <version>2.6</version>
        </dependency>
        <dependency>
            <groupId>cglib</groupId>
            <artifactId>cglib</artifactId>
            <version>3.2.5</version>
        </dependency>
        <dependency>
            <groupId>org.springframework.social</groupId>
            <artifactId>spring-social-config</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.social</groupId>
            <artifactId>spring-social-core</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.social</groupId>
            <artifactId>spring-social-security</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.social</groupId>
            <artifactId>spring-social-web</artifactId>
        </dependency>
    </dependencies>
</project>

+ 154 - 0
common-security-starter/readme.MD

@ -0,0 +1,154 @@
**授权码模式:(一般开放API用)**
注:
    1. 获取客户端的实现逻辑结合自己的客户端用户体系实现 (com.yihu.base.security.rbas.ClientServiceProvider)
第一步
获取code
http://localhost:8060/oauth/authorize?response_type=code&client_id=cwd&redirect_uri=http://example.com&scope=app
参数说明:
response_type=code 固定
scope=app 固定
client_id=cwd 根据用户表中自己定义的填写
redirect_uri=http://example.com 根据用户表中自己定义的填写
第二步 
获取token post请求
http://localhost:8060/oauth/token
header:  Basic base64({appid}:{appsecret})  Base64编码  例如 Basic Y3dkOmN3ZA==
  
{
     "grant_type":"authorization_code", 授权模式固定
     "client_id":"cwd",
     "code":"第一步请求获取的code",
     "redirect_uri":"http://example.com",
     "scope":"app"
}
返回值
{
    "access_token":"bd677e24-2de5-4862-a5e1-8f90a074db42",   默认2小时过期时间 可以配置 ,由于每次请求都需要验证access_token,所以access_token存储在redis
    "token_type":"bearer",
    "refresh_token":"1427b997-ef94-4061-8940-c71da6549acd",  默认2小时过期时间 可以配置 
    "expires_in":43199,
    "scope":"app"
}
**密码模式(一般自己公司系统用)**
注:  
    1. 获取用户的实现逻辑结合自己的用户体系实现 (org.springframework.security.core.userdetails.UserDetailsService)
    
获取token post请求
http://localhost:8060/oauth/token
header:  Basic base64({appid}:{appsecret})  Base64编码  例如 Basic Y3dkOmN3ZA==
  
{
     "grant_type":"password",  
     "username":"admin",
     "password":"123456",
     "scope":"app"
}
返回值
{
    "access_token":"630e2ccc-a5ce-4486-a855-ba755eb3d0d2",
    "token_type":"bearer",
    "refresh_token":"bbb36b54-61b2-4d86-aed3-91c5135174c3",
    "expires_in":43199,
    "scope":"app"
}
**刷新token**
获取token post请求
http://localhost:8060/oauth/token
header:  Basic base64({appid}:{appsecret})  Base64编码  例如 Basic Y3dkOmN3ZA==
  
{
     "grant_type":"refresh_token",
     "refresh_token":"bbb36b54-61b2-4d86-aed3-91c5135174c3"
}
返回值
{
    "access_token":"630e2ccc-a5ce-4486-a855-ba755eb3d0d2",
    "token_type":"bearer",
    "refresh_token":"bbb36b54-61b2-4d86-aed3-91c5135174c3",
    "expires_in":43199,
    "scope":"app"
}
**自定义账号密码登陆**
POST
http://localhost:8060/authentication/form
header:  Basic base64({appid}:{appsecret})  Base64编码  例如 Basic Y3dkOmN3ZA==
注:  
    1. 获取用户的实现逻辑结合自己的用户体系实现 (org.springframework.security.core.userdetails.UserDetailsService)
body
{ 
    "username":"test",
    "password":"123456"
}
返回值
{
    "access_token":"630e2ccc-a5ce-4486-a855-ba755eb3d0d2",
    "token_type":"bearer",
    "refresh_token":"bbb36b54-61b2-4d86-aed3-91c5135174c3",
    "expires_in":43199,
    "scope":"all"
}
{
    "access_token":"4ce54971-91f9-4c19-bf76-a3c1d3e8c495",
    "token_type":"bearer",
    "refresh_token":"37e3e3bc-8897-4eb4-b43b-4b8382f0efdf",
    "scope":"app"
}
**自定义手机号短信验证码登陆**
获取短信
注:  
    1. 短信验证码默认存在redis中(不可配置)
   2. 短信超时时间1分钟(可配置)
   3. 手机号码验证规则可自定义(实现接口com.yihu.base.security.sms.mobile.MobileCheck)
   4. 短信发送器必须自己实现(实现接口com.yihu.base.security.sms.sender.SmsCodeSender)
GET
http://localhost:8060/code/sms
body
{
    "mobile":"13612345678"
}
POST
http://localhost:8060/authentication/mobile
注:  
    1. 短信登陆成功验证码会删除
header:  Basic base64({appid}:{appsecret})  Base64编码  例如 Basic Y3dkOmN3ZA==
body
{
    "mobile":"13612345678",
    "sms":"246053"
}
返回值
{
    "access_token":"630e2ccc-a5ce-4486-a855-ba755eb3d0d2",
    "token_type":"bearer",
    "refresh_token":"bbb36b54-61b2-4d86-aed3-91c5135174c3",
    "expires_in":43199,
    "scope":"app"
}
**访问方式**
http://localhost:8060/user
header 
{
"Authorization":"bearer 5fe6b2c3-f69c-4ddc-a36a-367cdf9479a3"      即 bearer accesstoken
}

+ 55 - 0
common-security-starter/src/main/java/com.yihu.base.security/SercurityConfig.java

@ -0,0 +1,55 @@
package com.yihu.base.security;
import com.yihu.base.security.sms.mobile.DefaultMobileCheck;
import com.yihu.base.security.sms.sender.DefaultSmsCodeSender;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.security.access.PermissionEvaluator;
import org.springframework.security.access.expression.method.MethodSecurityExpressionHandler;
import org.springframework.security.authentication.AuthenticationTrustResolver;
import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder;
import org.springframework.security.crypto.password.PasswordEncoder;
import org.springframework.security.oauth2.provider.expression.OAuth2MethodSecurityExpressionHandler;
import java.util.ArrayList;
import java.util.List;
/**
 * Created by chenweida on 2017/12/4.
 */
@Configuration
public class SercurityConfig {
    private Logger logger = LoggerFactory.getLogger(SercurityConfig.class);
    @Autowired(required = false)
    List<AuthenticationTrustResolver> trustResolvers = new ArrayList<>();
    @Autowired(required = false)
    List<PermissionEvaluator> permissionEvaluators = new ArrayList<>();
    @Bean
    public PasswordEncoder passwordEncoder() {
        return new BCryptPasswordEncoder();
    }
    @Bean
    @ConditionalOnMissingBean
    public DefaultSmsCodeSender defaultSmsCodeSender() {
        logger.info("使用默认的短信发送DefaultSmsCodeSender");
        return new DefaultSmsCodeSender();
    }
    @Bean
    @ConditionalOnMissingBean
    public DefaultMobileCheck defaultMobileCheck() {
        logger.info("使用默认的手机号验证规则");
        return new DefaultMobileCheck();
    }
}

+ 108 - 0
common-security-starter/src/main/java/com.yihu.base.security/config/AuthorizationServerConfig.java

@ -0,0 +1,108 @@
package com.yihu.base.security.config;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.yihu.base.security.properties.AccessTokenPorperties;
import com.yihu.base.security.properties.SecurityProperties;
import com.yihu.base.security.rbas.ClientServiceProvider;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.data.redis.connection.RedisConnectionFactory;
import org.springframework.security.core.userdetails.UserDetailsService;
import org.springframework.security.crypto.password.PasswordEncoder;
import org.springframework.security.oauth2.config.annotation.configurers.ClientDetailsServiceConfigurer;
import org.springframework.security.oauth2.config.annotation.web.configuration.AuthorizationServerConfigurerAdapter;
import org.springframework.security.oauth2.config.annotation.web.configuration.EnableAuthorizationServer;
import org.springframework.security.oauth2.config.annotation.web.configurers.AuthorizationServerEndpointsConfigurer;
import org.springframework.security.oauth2.config.annotation.web.configurers.AuthorizationServerSecurityConfigurer;
import org.springframework.security.oauth2.provider.authentication.OAuth2AuthenticationManager;
import org.springframework.security.oauth2.provider.token.DefaultTokenServices;
import org.springframework.security.oauth2.provider.token.TokenStore;
import org.springframework.security.oauth2.provider.token.store.redis.RedisTokenStore;
import javax.annotation.Resource;
import javax.sql.DataSource;
/**
 * Created by chenweida on 2017/12/4.
 */
@Configuration
@EnableAuthorizationServer  // enable the OAuth2 authorization server
public class AuthorizationServerConfig extends AuthorizationServerConfigurerAdapter {

    @Autowired
    private UserDetailsService userDetailsService;

    @Autowired
    private ClientServiceProvider clientDetailsService;

    @Autowired
    private RedisConnectionFactory redisConnectionFactory;

    @Autowired
    private PasswordEncoder passwordEncoder;

    @Autowired
    private AccessTokenPorperties accessTokenPorperties;

    @Override
    public void configure(AuthorizationServerSecurityConfigurer security) throws Exception {
        security.passwordEncoder(passwordEncoder);
    }

    @Override
    public void configure(AuthorizationServerEndpointsConfigurer endpoints) throws Exception {
        // To replace the authorization-code confirmation page, add:
        // .pathMapping("/oauth/confirm_access", "/extenal/oauth/confirm_access")
        endpoints.authenticationManager(oAuth2AuthenticationManager())
                .tokenStore(tokenStore())
                .userDetailsService(userDetailsService)
                .tokenServices(defaultTokenServices());
    }

    @Override
    public void configure(ClientDetailsServiceConfigurer clients) throws Exception {
        // Client details come from the application-provided ClientServiceProvider;
        // a JDBC alternative would be clients.jdbc(dataSource).passwordEncoder(passwordEncoder).
        clients.withClientDetails(clientDetailsService);
    }

    @Bean
    ObjectMapper objectMapper() {
        return new ObjectMapper();
    }

    /** Authentication manager that validates requests against stored tokens. */
    @Bean
    @Primary
    OAuth2AuthenticationManager oAuth2AuthenticationManager() {
        OAuth2AuthenticationManager oAuth2AuthenticationManager = new OAuth2AuthenticationManager();
        oAuth2AuthenticationManager.setClientDetailsService(clientDetailsService);
        oAuth2AuthenticationManager.setTokenServices(defaultTokenServices());
        return oAuth2AuthenticationManager;
    }

    //========================== token configuration =================================

    /** Token services with configurable access/refresh token lifetimes (default 2h). */
    @Bean
    @Primary
    DefaultTokenServices defaultTokenServices() {
        DefaultTokenServices defaultTokenServices = new DefaultTokenServices();
        defaultTokenServices.setTokenStore(tokenStore());
        defaultTokenServices.setAccessTokenValiditySeconds(60 * 60 * accessTokenPorperties.getAccessTokenValidityHours());
        defaultTokenServices.setRefreshTokenValiditySeconds(60 * 60 * accessTokenPorperties.getRefreshTokenValidityHours());
        defaultTokenServices.setClientDetailsService(clientDetailsService);
        return defaultTokenServices;
    }

    /** Tokens are kept in Redis so every request can validate access tokens cheaply. */
    @Bean
    @Primary
    TokenStore tokenStore() {
        RedisTokenStore redisTokenStore = new RedisTokenStore(redisConnectionFactory);
        redisTokenStore.setPrefix(SecurityProperties.prefix_accesstoken);
        return redisTokenStore;
    }
}

+ 89 - 0
common-security-starter/src/main/java/com.yihu.base.security/config/ResourceServerConfig.java

@ -0,0 +1,89 @@
package com.yihu.base.security.config;
import com.yihu.base.security.properties.SecurityProperties;
import com.yihu.base.security.rbas.provider.AuthorizeConfigProviderManager;
import com.yihu.base.security.sms.SmsCodeAuthenticationSecurityConfig;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.security.config.annotation.method.configuration.EnableGlobalMethodSecurity;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder;
import org.springframework.security.oauth2.config.annotation.web.configuration.EnableResourceServer;
import org.springframework.security.oauth2.config.annotation.web.configuration.ResourceServerConfigurerAdapter;
import org.springframework.security.oauth2.config.annotation.web.configurers.ResourceServerSecurityConfigurer;
import org.springframework.security.oauth2.provider.authentication.OAuth2AuthenticationManager;
import org.springframework.security.oauth2.provider.expression.OAuth2MethodSecurityExpressionHandler;
import org.springframework.security.oauth2.provider.expression.OAuth2WebSecurityExpressionHandler;
import org.springframework.security.oauth2.provider.token.TokenStore;
import org.springframework.security.web.access.expression.DefaultWebSecurityExpressionHandler;
import org.springframework.security.web.authentication.AuthenticationFailureHandler;
import org.springframework.security.web.authentication.AuthenticationSuccessHandler;
import org.springframework.util.Base64Utils;
/**
 * Created by chenweida on 2017/12/4.
 */
@Configuration
@EnableResourceServer  // enable the OAuth2 resource server
public class ResourceServerConfig extends ResourceServerConfigurerAdapter {

    @Autowired
    protected AuthenticationSuccessHandler authenticationSuccessHandler;

    @Autowired
    protected AuthenticationFailureHandler authenticationFailureHandler;

    @Autowired
    private OAuth2AuthenticationManager authenticationManager;

    @Autowired
    private TokenStore redisTokenStore;

    @Autowired
    private SmsCodeAuthenticationSecurityConfig smsCodeAuthenticationSecurityConfig;

    @Autowired
    private AuthorizeConfigProviderManager authorizeConfigProviderManager;

    @Autowired
    private OAuth2WebSecurityExpressionHandler oAuth2WebSecurityExpressionHandler;

    /**
     * HTTP security: CSRF disabled, form login with custom success/failure
     * handlers, plus the custom SMS-code login flow; per-URL authorization
     * rules are delegated to the pluggable AuthorizeConfigProviderManager.
     */
    @Override
    public void configure(HttpSecurity http) throws Exception {
        http.csrf().disable();
        http.formLogin()
                .loginPage(SecurityProperties.formLoginPage)
                .loginProcessingUrl(SecurityProperties.formLogin)
                .successHandler(authenticationSuccessHandler)
                .failureHandler(authenticationFailureHandler)
                .and()
                .apply(smsCodeAuthenticationSecurityConfig); // custom SMS-code login
        // request-path authorization rules
        authorizeConfigProviderManager.config(http.authorizeRequests());
    }

    @Override
    public void configure(ResourceServerSecurityConfigurer resources) throws Exception {
        resources.authenticationManager(authenticationManager)
                .tokenStore(redisTokenStore)
                .expressionHandler(oAuth2WebSecurityExpressionHandler);
    }

    /**
     * Works around "Failed to evaluate expression '#oauth2.throwOnError' /
     * No bean resolver registered in the context to resolve access to bean":
     * the expression handler needs the ApplicationContext to resolve beans.
     *
     * @param applicationContext current application context
     * @return expression handler wired to the context
     */
    @Bean
    @Primary
    public OAuth2WebSecurityExpressionHandler oAuth2WebSecurityExpressionHandler(ApplicationContext applicationContext) {
        OAuth2WebSecurityExpressionHandler handler = new OAuth2WebSecurityExpressionHandler();
        handler.setApplicationContext(applicationContext);
        return handler;
    }
}

+ 140 - 0
common-security-starter/src/main/java/com.yihu.base.security/hander/BaseAuthenticationSuccessHandler.java

@ -0,0 +1,140 @@
/**
 *
 */
package com.yihu.base.security.hander;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.yihu.base.security.properties.SecurityProperties;
import com.yihu.base.security.rbas.ClientServiceProvider;
import com.yihu.base.security.sms.process.SmsValidateCodeProcessor;
import org.apache.commons.codec.binary.StringUtils;
import org.apache.commons.collections.MapUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.security.authentication.BadCredentialsException;
import org.springframework.security.core.Authentication;
import org.springframework.security.crypto.codec.Base64;
import org.springframework.security.crypto.password.PasswordEncoder;
import org.springframework.security.oauth2.common.OAuth2AccessToken;
import org.springframework.security.oauth2.common.exceptions.UnapprovedClientAuthenticationException;
import org.springframework.security.oauth2.provider.*;
import org.springframework.security.oauth2.provider.token.AuthorizationServerTokenServices;
import org.springframework.security.oauth2.provider.token.DefaultTokenServices;
import org.springframework.security.web.authentication.SavedRequestAwareAuthenticationSuccessHandler;
import org.springframework.stereotype.Component;
import org.springframework.util.AntPathMatcher;
import org.springframework.web.context.request.ServletWebRequest;
import javax.annotation.Resource;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
/**
 * Login success handler: exchanges the Basic client credentials carried in the
 * request's {@code Authorization} header for an OAuth2 access token and writes
 * the token back to the client as JSON.
 * <p>
 * Password/SMS login requests must therefore send
 * {@code Authorization: Basic base64(clientId:clientSecret)}.
 *
 * @author chenweida
 */
@Component("BaseAuthenticationSuccessHandler")
public class BaseAuthenticationSuccessHandler extends SavedRequestAwareAuthenticationSuccessHandler {
    private Logger logger = LoggerFactory.getLogger(getClass());
    /**
     * Matches the request URI against the configured login paths.
     */
    private AntPathMatcher pathMatcher = new AntPathMatcher();
    @Autowired
    private ObjectMapper objectMapper;
    @Autowired
    private ClientServiceProvider clientDetailsService;
    @Autowired
    private AuthorizationServerTokenServices defaultTokenServices;
    @Autowired
    private SmsValidateCodeProcessor smsValidateCodeProcessor;
    @Autowired
    private PasswordEncoder passwordEncoder;

    /**
     * Validates the client from the Basic header, creates an OAuth2 access
     * token for the authenticated user and writes it to the response as JSON.
     * For SMS logins the spent SMS code is removed afterwards.
     *
     * @param request        current request; must carry a Basic Authorization header
     * @param response       response the JSON token is written to
     * @param authentication the successful user authentication
     * @throws IOException      if writing the response fails
     * @throws ServletException declared by the interface
     */
    @Override
    public void onAuthenticationSuccess(HttpServletRequest request, HttpServletResponse response,
                                        Authentication authentication) throws IOException, ServletException {
        String header = request.getHeader("Authorization");
        if (org.springframework.util.StringUtils.isEmpty(header) || (!header.startsWith("Basic "))) {
            throw new UnapprovedClientAuthenticationException("请求头没有client信息");
        }
        // Decode the Basic header into [clientId, clientSecret].
        String[] tokens = extractAndDecodeHeader(header, request);
        // FIX: was a bare `assert tokens.length == 2`, which is a no-op unless
        // the JVM runs with -ea; fail explicitly on malformed credentials.
        if (tokens.length != 2) {
            throw new BadCredentialsException("Basic 信息不合法");
        }
        String clientId = tokens[0];
        String clientSecurity = tokens[1];
        // Look up the registered client and verify its secret.
        ClientDetails clientDetails = clientDetailsService.loadClientByClientId(clientId);
        if (clientDetails == null) {
            throw new UnapprovedClientAuthenticationException("clientId不存在 client:" + clientId);
        } else if (!passwordEncoder.matches(clientSecurity, clientDetails.getClientSecret())) {
            throw new UnapprovedClientAuthenticationException("clientSecurity 不匹配 client:" + clientId);
        }
        // Build an OAuth2 authentication for this client + user and issue a token.
        TokenRequest tokenRequest = new TokenRequest(MapUtils.EMPTY_MAP, clientId, clientDetails.getScope(), "custom_password");
        OAuth2Request oAuth2Request = tokenRequest.createOAuth2Request(clientDetails);
        OAuth2Authentication oAuth2Authentication = new OAuth2Authentication(oAuth2Request, authentication);
        OAuth2AccessToken token = defaultTokenServices.createAccessToken(oAuth2Authentication);
        if (pathMatcher.match(SecurityProperties.mobileLogin, request.getRequestURI())) {
            // SMS login succeeded: the one-time code is spent, remove it.
            smsValidateCodeProcessor.reomve(new ServletWebRequest(request, response));
        }
        response.setContentType("application/json;charset=UTF-8");
        response.getWriter().write(objectMapper.writeValueAsString(token));
    }

    /**
     * Decodes a {@code Basic base64(clientId:clientSecret)} header.
     *
     * @param header  the raw Authorization header (starts with "Basic ")
     * @param request current request (unused, kept for the original signature)
     * @return a two-element array: {clientId, clientSecret}
     * @throws IOException             on encoding problems
     * @throws BadCredentialsException if the token is not valid Base64 or has no ':'
     */
    private String[] extractAndDecodeHeader(String header, HttpServletRequest request)
            throws IOException {
        byte[] base64Token = header.substring(6).getBytes("UTF-8");
        byte[] decoded;
        try {
            decoded = Base64.decode(base64Token);
        } catch (IllegalArgumentException e) {
            throw new BadCredentialsException(
                    "Failed to decode basic authentication token");
        }
        String token = new String(decoded, "UTF-8");
        int delim = token.indexOf(":");
        if (delim == -1) {
            throw new BadCredentialsException("Basic 信息不合法");
        }
        return new String[]{token.substring(0, delim), token.substring(delim + 1)};
    }
}

+ 44 - 0
common-security-starter/src/main/java/com.yihu.base.security/hander/BseAuthenctiationFailureHandler.java

@ -0,0 +1,44 @@
/**
 *
 */
package com.yihu.base.security.hander;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.yihu.base.security.vo.SimpleResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.security.core.AuthenticationException;
import org.springframework.security.web.authentication.SimpleUrlAuthenticationFailureHandler;
import org.springframework.stereotype.Component;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
/**
 * Login failure handler: logs the failure and returns the error message to the
 * client as JSON.
 *
 * @author chenweida
 */
@Component("BseAuthenctiationFailureHandler")
public class BseAuthenctiationFailureHandler extends SimpleUrlAuthenticationFailureHandler {
    private Logger logger = LoggerFactory.getLogger(getClass());
    @Autowired
    private ObjectMapper objectMapper;

    /**
     * Writes the authentication failure reason to the client as a JSON body.
     *
     * @param request   current request
     * @param response  response the error body is written to
     * @param exception the authentication failure
     */
    @Override
    public void onAuthenticationFailure(HttpServletRequest request, HttpServletResponse response,
                                        AuthenticationException exception) throws IOException, ServletException {
        logger.info("登录失败");
        // FIX: an authentication failure is a client-side error, not a server
        // fault; was HttpStatus.INTERNAL_SERVER_ERROR (500), which misleads
        // clients and server-side error monitoring.
        response.setStatus(HttpStatus.UNAUTHORIZED.value());
        response.setContentType("application/json;charset=UTF-8");
        response.getWriter().write(objectMapper.writeValueAsString(new SimpleResponse(exception.getMessage())));
    }
}

+ 49 - 0
common-security-starter/src/main/java/com.yihu.base.security/properties/AccessTokenPorperties.java

@ -0,0 +1,49 @@
package com.yihu.base.security.properties;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.stereotype.Component;
/**
 * Created by chenweida on 2017/12/5.
 * Access/refresh token settings, bound from the
 * {@code security.oauth2.token.*} properties (values in hours).
 */
@Component
public class AccessTokenPorperties {
    /** Access-token validity in hours (default 2). */
    @Value("${security.oauth2.token.accessTokenValidityHours:2}")
    private Integer accessTokenValidityHours;

    /** Refresh-token validity in hours (default 2). */
    @Value("${security.oauth2.token.refreshTokenValidityHours:2}")
    private Integer refreshTokenValidityHours;

    /** Token type label (default "accessToken"). */
    @Value("${security.oauth2.token.tokenType:accessToken}")
    private String tokenType;

    public Integer getAccessTokenValidityHours() {
        return this.accessTokenValidityHours;
    }

    public void setAccessTokenValidityHours(Integer accessTokenValidityHours) {
        this.accessTokenValidityHours = accessTokenValidityHours;
    }

    public Integer getRefreshTokenValidityHours() {
        return this.refreshTokenValidityHours;
    }

    public void setRefreshTokenValidityHours(Integer refreshTokenValidityHours) {
        this.refreshTokenValidityHours = refreshTokenValidityHours;
    }

    public String getTokenType() {
        return this.tokenType;
    }

    public void setTokenType(String tokenType) {
        this.tokenType = tokenType;
    }
}

+ 21 - 0
common-security-starter/src/main/java/com.yihu.base.security/properties/SecurityProperties.java

@ -0,0 +1,21 @@
package com.yihu.base.security.properties;
/**
 * Created by chenweida on 2017/12/4.
 * Security framework configuration constants: Redis key prefixes and the
 * login/SMS endpoint paths shared by the filters, handlers and providers.
 */
public class SecurityProperties {
    // Redis key prefixes.
    public final static String prefix_accesstoken = "security:oauth2:";  // OAuth2 token prefix
    public final static String prefix_sms = "security:oauth2:smsLogin:";  // SMS validation-code prefix
    // Form (username/password) login.
    public final static String formLogin = "/authentication/form";// form-login processing path
    public final static String formLoginPage = "/denglu.html";
    // SMS (mobile) login.
    public final static String mobileLogin = "/authentication/mobile"; // mobile SMS-login processing path
    public final static String mobileLoginAccountKey = "mobile";
    public final static String mobileLoginSmsKey = "sms";
    public final static String mobileSendSms = "/code/sms";     // send-SMS endpoint path

    // FIX: constants holder — prevent accidental instantiation.
    private SecurityProperties() {
    }
}

+ 34 - 0
common-security-starter/src/main/java/com.yihu.base.security/properties/SmsValidateProperties.java

@ -0,0 +1,34 @@
package com.yihu.base.security.properties;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.stereotype.Component;
/**
 * Created by chenweida on 2017/12/5.
 * SMS validation-code settings, bound from the {@code security.oauth2.sms.*}
 * properties.
 */
@Component
public class SmsValidateProperties {
    @Value("${security.oauth2.sms.expireIn:1}")
    private Integer expireIn = 1; // code time-to-live (default 1; units defined by the consumer — TODO confirm)
    @Value("${security.oauth2.sms.length:6}")
    private Integer length = 6; // number of digits in the code (original comment wrongly said "expire time")
    public Integer getExpireIn() {
        return expireIn;
    }
    public void setExpireIn(Integer expireIn) {
        this.expireIn = expireIn;
    }
    public Integer getLength() {
        return length;
    }
    public void setLength(Integer length) {
        this.length = length;
    }
}

+ 9 - 0
common-security-starter/src/main/java/com.yihu.base.security/rbas/ClientServiceProvider.java

@ -0,0 +1,9 @@
package com.yihu.base.security.rbas;
import org.springframework.security.oauth2.provider.ClientDetailsService;
/**
 * Created by chenweida on 2017/12/5.
 * Marker extension of Spring's {@code ClientDetailsService}; applications
 * implement this to supply the registered OAuth2 clients that
 * BaseAuthenticationSuccessHandler looks up by clientId.
 */
public interface ClientServiceProvider extends ClientDetailsService {
}

+ 15 - 0
common-security-starter/src/main/java/com.yihu.base.security/rbas/IRbasService.java

@ -0,0 +1,15 @@
package com.yihu.base.security.rbas;
import org.springframework.security.core.Authentication;
import javax.servlet.http.HttpServletRequest;
/**
 * Created by chenweida on 2017/12/1.
 * Implemented by applications that need request-level permission checks; the
 * bean (named "rbasService") is invoked from SpEL as
 * {@code @rbasService.hasPerssion(request, authentication)} for every
 * non-whitelisted request.
 */
public interface IRbasService {
     Boolean hasPerssion(HttpServletRequest request, Authentication authentication) ;
}

+ 11 - 0
common-security-starter/src/main/java/com.yihu.base.security/rbas/provider/AuthorizeConfigProvider.java

@ -0,0 +1,11 @@
package com.yihu.base.security.rbas.provider;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.config.annotation.web.configurers.ExpressionUrlAuthorizationConfigurer;
/**
 * Created by chenweida on 2017/12/5.
 * Contributes URL authorization rules to the shared registry; implementations
 * are collected and applied by {@code AuthorizeConfigProviderManager}, in
 * {@code @Order} order.
 */
public interface AuthorizeConfigProvider {
    void config(ExpressionUrlAuthorizationConfigurer<HttpSecurity>.ExpressionInterceptUrlRegistry expressionInterceptUrlRegistry);
}

+ 24 - 0
common-security-starter/src/main/java/com.yihu.base.security/rbas/provider/AuthorizeConfigProviderManager.java

@ -0,0 +1,24 @@
package com.yihu.base.security.rbas.provider;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.config.annotation.web.configurers.ExpressionUrlAuthorizationConfigurer;
import org.springframework.stereotype.Component;
import java.util.Set;
/**
 * Created by chenweida on 2017/12/5.
 * Collects every {@link AuthorizeConfigProvider} bean and applies their URL
 * authorization rules in turn.
 */
@Component
public class AuthorizeConfigProviderManager {
    // FIX: was package-private; an injected field should not be wider than needed.
    // NOTE(review): iteration order relies on Spring honoring @Order when
    // injecting the Set — confirm with the Spring version in use.
    @Autowired
    private Set<AuthorizeConfigProvider> authorizeConfigProviders;

    /**
     * Applies each registered provider's rules to the given registry.
     *
     * @param config the URL authorization registry being configured
     */
    public void config(ExpressionUrlAuthorizationConfigurer<HttpSecurity>.ExpressionInterceptUrlRegistry config) {
        for (AuthorizeConfigProvider authorizeConfigProvider : authorizeConfigProviders) {
            authorizeConfigProvider.config(config);
        }
    }
}

+ 29 - 0
common-security-starter/src/main/java/com.yihu.base.security/rbas/provider/PerssionAllAuthorizeConfigProvider.java

@ -0,0 +1,29 @@
package com.yihu.base.security.rbas.provider;
import com.yihu.base.security.properties.SecurityProperties;
import org.springframework.core.annotation.Order;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.config.annotation.web.configurers.ExpressionUrlAuthorizationConfigurer;
import org.springframework.stereotype.Component;
/**
 * Created by chenweida on 2017/12/5.
 * Whitelists the login-related endpoints (form login page and processing URL,
 * SMS login and SMS-send paths) so they can be reached unauthenticated.
 * Ordered first (Integer.MIN_VALUE) so these rules are registered before the
 * catch-all rule.
 */
@Component
@Order(Integer.MIN_VALUE)
public class PerssionAllAuthorizeConfigProvider implements AuthorizeConfigProvider {
    /**
     * Permits unauthenticated access to the configured login/SMS paths.
     */
    @Override
    public void config(ExpressionUrlAuthorizationConfigurer<HttpSecurity>.ExpressionInterceptUrlRegistry expressionInterceptUrlRegistry) {
        expressionInterceptUrlRegistry
                .antMatchers(
                        SecurityProperties.formLogin,
                        SecurityProperties.formLoginPage,
                        SecurityProperties.mobileLogin,
                        SecurityProperties.mobileSendSms
                ).permitAll();
    }
}

+ 25 - 0
common-security-starter/src/main/java/com.yihu.base.security/rbas/provider/RbasAuthorizeConfigProvider.java

@ -0,0 +1,25 @@
package com.yihu.base.security.rbas.provider;
import com.yihu.base.security.rbas.IRbasService;
import org.springframework.core.annotation.Order;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.config.annotation.web.configurers.ExpressionUrlAuthorizationConfigurer;
import org.springframework.stereotype.Component;
import javax.annotation.Resource;
/**
 * Created by chenweida on 2017/12/5.
 * Catch-all authorization rule: every remaining request is checked via SpEL
 * against the application-supplied bean named "rbasService".
 * Ordered last (Integer.MAX_VALUE) so the whitelist provider runs first.
 */
@Component
@Order(Integer.MAX_VALUE)
public class RbasAuthorizeConfigProvider implements AuthorizeConfigProvider {
    // Must be a bean named exactly "rbasService" — the SpEL expression below
    // resolves it by that name (and by the literally-spelled method name).
    @Resource(name="rbasService")
    private IRbasService rbasService;
    @Override
    public void config(ExpressionUrlAuthorizationConfigurer<HttpSecurity>.ExpressionInterceptUrlRegistry expressionInterceptUrlRegistry) {
        expressionInterceptUrlRegistry.anyRequest().access("@rbasService.hasPerssion(request,authentication)");
    }
}

+ 113 - 0
common-security-starter/src/main/java/com.yihu.base.security/sms/SmsCodeAuthenticationFilter.java

@ -0,0 +1,113 @@
/**
 * 
 */
package com.yihu.base.security.sms;
import com.yihu.base.security.properties.SecurityProperties;
import org.springframework.security.authentication.AuthenticationServiceException;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.AuthenticationException;
import org.springframework.security.web.authentication.AbstractAuthenticationProcessingFilter;
import org.springframework.security.web.util.matcher.AntPathRequestMatcher;
import org.springframework.util.Assert;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/**
 * @author chenweida
 *
 */
public class SmsCodeAuthenticationFilter extends AbstractAuthenticationProcessingFilter {
	// ~ Static fields/initializers
	// =====================================================================================
	private String mobileParameter = SecurityProperties.mobileLoginAccountKey;
	private boolean postOnly = true;
	// ~ Constructors
	// ===================================================================================================
	public SmsCodeAuthenticationFilter() {
		super(new AntPathRequestMatcher(SecurityProperties.mobileLogin, "POST"));
	}
	// ~ Methods
	// ========================================================================================================
	public Authentication attemptAuthentication(HttpServletRequest request, HttpServletResponse response)
			throws AuthenticationException {
		if (postOnly && !request.getMethod().equals("POST")) {
			throw new AuthenticationServiceException("Authentication method not supported: " + request.getMethod());
		}
		String mobile = obtainMobile(request);
		if (mobile == null) {
			mobile = "";
		}
		mobile = mobile.trim();
		SmsCodeAuthenticationToken authRequest = new SmsCodeAuthenticationToken(mobile);
		// Allow subclasses to set the "details" property
		setDetails(request, authRequest);
		return this.getAuthenticationManager().authenticate(authRequest);
	}
	/**
	 * 获取手机号
	 */
	protected String obtainMobile(HttpServletRequest request) {
		return request.getParameter(mobileParameter);
	}
	/**
	 * Provided so that subclasses may configure what is put into the
	 * authentication request's details property.
	 *
	 * @param request
	 *            that an authentication request is being created for
	 * @param authRequest
	 *            the authentication request object that should have its details
	 *            set
	 */
	protected void setDetails(HttpServletRequest request, SmsCodeAuthenticationToken authRequest) {
		authRequest.setDetails(authenticationDetailsSource.buildDetails(request));
	}
	/**
	 * Sets the parameter name which will be used to obtain the username from
	 * the login request.
	 *
	 * @param usernameParameter
	 *            the parameter name. Defaults to "username".
	 */
	public void setMobileParameter(String usernameParameter) {
		Assert.hasText(usernameParameter, "Username parameter must not be empty or null");
		this.mobileParameter = usernameParameter;
	}
	/**
	 * Defines whether only HTTP POST requests will be allowed by this filter.
	 * If set to true, and an authentication request is received which is not a
	 * POST request, an exception will be raised immediately and authentication
	 * will not be attempted. The <tt>unsuccessfulAuthentication()</tt> method
	 * will be called as if handling a failed authentication.
	 * <p>
	 * Defaults to <tt>true</tt> but may be overridden by subclasses.
	 */
	public void setPostOnly(boolean postOnly) {
		this.postOnly = postOnly;
	}
	public final String getMobileParameter() {
		return mobileParameter;
	}
}

+ 64 - 0
common-security-starter/src/main/java/com.yihu.base.security/sms/SmsCodeAuthenticationProvider.java

@ -0,0 +1,64 @@
/**
 * 
 */
package com.yihu.base.security.sms;
import org.springframework.security.authentication.AuthenticationProvider;
import org.springframework.security.authentication.InternalAuthenticationServiceException;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.AuthenticationException;
import org.springframework.security.core.userdetails.UserDetails;
import org.springframework.security.core.userdetails.UserDetailsService;
/**
 * AuthenticationProvider for SMS login: resolves the principal (a mobile
 * number) to a user via the configured UserDetailsService and returns a
 * fully-authenticated token carrying the user's authorities.
 *
 * @author chenweida
 */
public class SmsCodeAuthenticationProvider implements AuthenticationProvider {
	private UserDetailsService userDetailsService;

	/**
	 * Looks up the user for the token's mobile-number principal and returns an
	 * authenticated {@link SmsCodeAuthenticationToken}.
	 */
	@Override
	public Authentication authenticate(Authentication authentication) throws AuthenticationException {
		SmsCodeAuthenticationToken smsToken = (SmsCodeAuthenticationToken) authentication;
		String mobile = (String) smsToken.getPrincipal();
		// Resolve the user by mobile number.
		UserDetails loadedUser = userDetailsService.loadUserByUsername(mobile);
		if (loadedUser == null) {
			throw new InternalAuthenticationServiceException("无法获取用户信息");
		}
		SmsCodeAuthenticationToken result = new SmsCodeAuthenticationToken(loadedUser, loadedUser.getAuthorities());
		result.setDetails(smsToken.getDetails());
		return result;
	}

	/**
	 * Only handles {@link SmsCodeAuthenticationToken} requests.
	 */
	@Override
	public boolean supports(Class<?> authentication) {
		return SmsCodeAuthenticationToken.class.isAssignableFrom(authentication);
	}

	public UserDetailsService getUserDetailsService() {
		return userDetailsService;
	}

	public void setUserDetailsService(UserDetailsService userDetailsService) {
		this.userDetailsService = userDetailsService;
	}
}

+ 53 - 0
common-security-starter/src/main/java/com.yihu.base.security/sms/SmsCodeAuthenticationSecurityConfig.java

@ -0,0 +1,53 @@
/**
 *
 */
package com.yihu.base.security.sms;
import com.yihu.base.security.sms.filter.SmsvalidateCodeFilter;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.authentication.AuthenticationManager;
import org.springframework.security.config.annotation.SecurityConfigurerAdapter;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.core.userdetails.UserDetailsService;
import org.springframework.security.web.DefaultSecurityFilterChain;
import org.springframework.security.web.authentication.AuthenticationFailureHandler;
import org.springframework.security.web.authentication.AuthenticationSuccessHandler;
import org.springframework.security.web.authentication.UsernamePasswordAuthenticationFilter;
import org.springframework.stereotype.Component;
/**
 * Wires the SMS-code login into the HTTP security chain: registers the
 * SMS-code validation filter, the SMS authentication filter and its
 * authentication provider.
 *
 * @author chenweida
 */
@Component
public class SmsCodeAuthenticationSecurityConfig extends SecurityConfigurerAdapter<DefaultSecurityFilterChain, HttpSecurity> {
    @Autowired
    private AuthenticationSuccessHandler authenticationSuccessHandler;
    @Autowired
    private AuthenticationFailureHandler authenticationFailureHandler;
    @Autowired
    private UserDetailsService userDetailsService;
    @Autowired
    private SmsvalidateCodeFilter smsvalidateCodeFilter;

    /**
     * Builds the SMS login filter, hooks it up with the shared
     * AuthenticationManager and the success/failure handlers, and inserts the
     * code-validation filter in front of it.
     */
    @Override
    public void configure(HttpSecurity http) throws Exception {
        // SMS login filter: processes POST requests to the SMS-login path.
        SmsCodeAuthenticationFilter smsCodeAuthenticationFilter = new SmsCodeAuthenticationFilter();
        smsCodeAuthenticationFilter.setAuthenticationManager(http.getSharedObject(AuthenticationManager.class));
        smsCodeAuthenticationFilter.setAuthenticationSuccessHandler(authenticationSuccessHandler);
        smsCodeAuthenticationFilter.setAuthenticationFailureHandler(authenticationFailureHandler);
        SmsCodeAuthenticationProvider smsCodeAuthenticationProvider = new SmsCodeAuthenticationProvider();
        smsCodeAuthenticationProvider.setUserDetailsService(userDetailsService);
        // FIX: dropped a stray trailing ';' (dead empty statement).
        http.authenticationProvider(smsCodeAuthenticationProvider)
                .addFilterAfter(smsCodeAuthenticationFilter, UsernamePasswordAuthenticationFilter.class) // after the username/password filter
                .addFilterBefore(smsvalidateCodeFilter, SmsCodeAuthenticationFilter.class); // code check runs before SMS login
    }
}

+ 81 - 0
common-security-starter/src/main/java/com.yihu.base.security/sms/SmsCodeAuthenticationToken.java

@ -0,0 +1,81 @@
/**
 * 
 */
package com.yihu.base.security.sms;
import org.springframework.security.authentication.AbstractAuthenticationToken;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.core.SpringSecurityCoreVersion;
import java.util.Collection;
/**
 * Authentication token for SMS login. Unauthenticated instances carry the
 * mobile number as principal; authenticated instances carry the resolved user.
 *
 * @author chenweida
 */
public class SmsCodeAuthenticationToken extends AbstractAuthenticationToken {
	private static final long serialVersionUID = SpringSecurityCoreVersion.SERIAL_VERSION_UID;
	// ~ Instance fields
	// ================================================================================================
	/** Mobile number before authentication; the resolved user afterwards. */
	private final Object principal;
	// ~ Constructors
	// ===================================================================================================
	/**
	 * This constructor can be safely used by any code that wishes to create an
	 * unauthenticated <code>SmsCodeAuthenticationToken</code>, as the
	 * {@link #isAuthenticated()} will return <code>false</code>.
	 * FIX: javadoc previously named UsernamePasswordAuthenticationToken.
	 *
	 * @param mobile the mobile number being authenticated
	 */
	public SmsCodeAuthenticationToken(String mobile) {
		super(null);
		this.principal = mobile;
		setAuthenticated(false);
	}
	/**
	 * This constructor should only be used by <code>AuthenticationManager</code> or
	 * <code>AuthenticationProvider</code> implementations that are satisfied with
	 * producing a trusted (i.e. {@link #isAuthenticated()} = <code>true</code>)
	 * authentication token.
	 * FIX: dropped the stale {@code @param credentials} — no such parameter.
	 *
	 * @param principal   the authenticated user
	 * @param authorities the granted authorities
	 */
	public SmsCodeAuthenticationToken(Object principal,
			Collection<? extends GrantedAuthority> authorities) {
		super(authorities);
		this.principal = principal;
		super.setAuthenticated(true); // must use super, as we override
	}
	// ~ Methods
	// ========================================================================================================
	/** SMS login carries no credentials. */
	@Override // FIX: @Override was missing on the overridden methods below
	public Object getCredentials() {
		return null;
	}
	@Override
	public Object getPrincipal() {
		return this.principal;
	}
	/**
	 * Guards against marking a token trusted after construction; use the
	 * authorities-taking constructor instead.
	 */
	@Override
	public void setAuthenticated(boolean isAuthenticated) throws IllegalArgumentException {
		if (isAuthenticated) {
			throw new IllegalArgumentException(
					"Cannot set this token to trusted - use constructor which takes a GrantedAuthority list instead");
		}
		super.setAuthenticated(false);
	}
	@Override
	public void eraseCredentials() {
		super.eraseCredentials();
	}
}

+ 56 - 0
common-security-starter/src/main/java/com.yihu.base.security/sms/controller/SmsController.java

@ -0,0 +1,56 @@
package com.yihu.base.security.sms.controller;
import com.yihu.base.security.properties.SecurityProperties;
import com.yihu.base.security.sms.mobile.MobileCheck;
import com.yihu.base.security.sms.process.SmsValidateCodeProcessor;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.context.request.ServletWebRequest;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.OutputStream;
import java.io.PrintWriter;
/**
 * Created by chenweida on 2017/12/5.
 * Endpoint that sends an SMS validation code to a mobile number.
 */
@RestController
public class SmsController {
    @Autowired
    private SmsValidateCodeProcessor smsValidateCodeProcessor;
    @Autowired
    private MobileCheck mobileCheck;

    /**
     * Validates the mobile number and, if valid, creates and sends an SMS code
     * (the processor also stores it in Redis). Invalid numbers get a JSON
     * error body.
     *
     * @param request  carries the "mobile" request parameter
     * @param response target for the error body on invalid numbers
     * @throws Exception if code creation/sending fails
     */
    @GetMapping(SecurityProperties.mobileSendSms)
    public void createCode(
            HttpServletRequest request,
            HttpServletResponse response)
            throws Exception {
        // Mobile number to send the code to.
        String mobile = request.getParameter(SecurityProperties.mobileLoginAccountKey);
        if (!mobileCheck.checkMobile(mobile)) {
            // Force UTF-8 so the Chinese error message renders correctly.
            // NOTE(review): body is JSON but the declared type is text/html — consider aligning.
            response.setHeader("content-type", "text/html;charset=UTF-8");
            // FIX: a malformed mobile number is a client error; was 501
            // NOT_IMPLEMENTED (the original comment itself said "参数错误"/parameter error).
            response.setStatus(HttpStatus.BAD_REQUEST.value());
            PrintWriter pw = response.getWriter();
            // FIX: dropped the redundant `new String(...)` wrapper.
            pw.write("{\"content\":\"电话号码格式错误\"}");
            pw.flush();
        } else {
            // Generate the code, send it via SMS and cache it in Redis.
            smsValidateCodeProcessor.create(new ServletWebRequest(request, response));
        }
    }
}

+ 23 - 0
common-security-starter/src/main/java/com.yihu.base.security/sms/exception/ValidateCodeException.java

@ -0,0 +1,23 @@
/**
 * 
 */
package com.yihu.base.security.sms.exception;
import org.springframework.security.core.AuthenticationException;
/**
 * Thrown when an SMS validation code fails to validate; caught by
 * SmsvalidateCodeFilter, which reports it through the configured
 * AuthenticationFailureHandler.
 *
 * @author chenweida
 */
public class ValidateCodeException extends AuthenticationException {
	/**
	 * 
	 */
	private static final long serialVersionUID = -7285211528095468156L;
	public ValidateCodeException(String msg) {
		super(msg);
	}
}

+ 65 - 0
common-security-starter/src/main/java/com.yihu.base.security/sms/filter/SmsvalidateCodeFilter.java

@ -0,0 +1,65 @@
/**
 *
 */
package com.yihu.base.security.sms.filter;
import com.yihu.base.security.properties.SecurityProperties;
import com.yihu.base.security.sms.exception.ValidateCodeException;
import com.yihu.base.security.sms.process.SmsValidateCodeProcessor;
import org.apache.commons.lang.StringUtils;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.web.authentication.AuthenticationFailureHandler;
import org.springframework.stereotype.Component;
import org.springframework.util.AntPathMatcher;
import org.springframework.web.context.request.ServletWebRequest;
import org.springframework.web.filter.OncePerRequestFilter;
import javax.servlet.FilterChain;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
/**
 * Filter that checks the SMS validation code on POST requests to the SMS-login
 * path before the authentication filter runs; on failure it delegates to the
 * configured AuthenticationFailureHandler and stops the chain.
 *
 * @author chenweida
 */
@Component
public class SmsvalidateCodeFilter extends OncePerRequestFilter implements InitializingBean {
    /**
     * Handler invoked when validation of the SMS code fails.
     */
    @Autowired
    private AuthenticationFailureHandler authenticationFailureHandler;
    /**
     * Matches the request URI against the configured SMS-login path.
     */
    private AntPathMatcher pathMatcher = new AntPathMatcher();
    @Autowired
    private SmsValidateCodeProcessor smsValidateCodeProcessor;
    /*
     * Validates the SMS code for POST requests to the SMS-login path; all
     * other requests pass through untouched.
     */
    @Override
    protected void doFilterInternal(HttpServletRequest request, HttpServletResponse response, FilterChain chain)
            throws ServletException, IOException {
        if (StringUtils.equalsIgnoreCase(request.getMethod(), "post")) {
            if (pathMatcher.match(SecurityProperties.mobileLogin, request.getRequestURI())) {
                logger.info("校验请求(" + request.getRequestURI() + ")中的验证码");
                try {
                    smsValidateCodeProcessor.validate(new ServletWebRequest(request, response));
                    logger.info("验证码校验通过");
                } catch (ValidateCodeException exception) {
                    // Validation failed: report via the failure handler and stop the chain.
                    authenticationFailureHandler.onAuthenticationFailure(request, response, exception);
                    return;
                }
            }
        }
        chain.doFilter(request, response);
    }
}

+ 41 - 0
common-security-starter/src/main/java/com.yihu.base.security/sms/generator/SmsValidateCodeGenerator.java

@ -0,0 +1,41 @@
package com.yihu.base.security.sms.generator;
import com.yihu.base.security.properties.SmsValidateProperties;
import com.yihu.base.security.sms.vo.ValidateCode;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.springframework.web.context.request.ServletWebRequest;
import java.util.Random;
/**
 * Created by chenweida on 2017/12/5.
 * SMS validation-code generator: produces a random numeric code of the
 * configured length together with the configured expiry.
 */
@Component
public class SmsValidateCodeGenerator implements ValidateCodeGenerator {
    @Autowired
    private SmsValidateProperties smsValidateProperties;

    /**
     * Generates a random numeric code of the configured length and wraps it
     * with the configured expiry.
     */
    @Override
    public ValidateCode generate(ServletWebRequest request) {
        String code = getFixLenthString(smsValidateProperties.getLength());
        ValidateCode validateCode = new ValidateCode(code, smsValidateProperties.getExpireIn());
        return validateCode;
    }

    /**
     * Returns a random numeric string of exactly {@code strLength} digits.
     * <p>
     * FIX: the previous implementation stringified a double in
     * [10^len, 2*10^len) and took a substring; for len &gt;= 7 the double
     * prints in scientific notation ("1.2345678E7"), so the "numeric" code
     * contained '.'/'E' characters. Generate the digits directly instead.
     */
    private String getFixLenthString(int strLength) {
        Random rm = new Random();
        StringBuilder digits = new StringBuilder(strLength);
        for (int i = 0; i < strLength; i++) {
            digits.append(rm.nextInt(10));
        }
        return digits.toString();
    }
}

+ 17 - 0
common-security-starter/src/main/java/com.yihu.base.security/sms/generator/ValidateCodeGenerator.java

@ -0,0 +1,17 @@
/**
 * 
 */
package com.yihu.base.security.sms.generator;
import com.yihu.base.security.sms.vo.ValidateCode;
import org.springframework.web.context.request.ServletWebRequest;
/**
 * Strategy for producing a validation code for a given request; the SMS
 * implementation returns a random numeric code with an expiry.
 *
 * @author chenweida
 */
public interface ValidateCodeGenerator {
	ValidateCode generate(ServletWebRequest request);
	
}

+ 0 - 0
common-security-starter/src/main/java/com.yihu.base.security/sms/mobile/DefaultMobileCheck.java


Some files were not shown because too many files changed in this diff