
Add upload logic for unstructured archives

demon committed 9 years ago
parent commit 8d35fabf1b
47 changed files with 3,593 additions and 7 deletions
  1. +20 -0   Hos-Framework/src/main/java/com/yihu/ehr/framework/util/file/FileUtil.java
  2. +14 -0   Hos-Framework/src/main/java/com/yihu/ehr/framework/util/operator/DateUtil.java
  3. +98 -0   Hos-resource/src/main/java/com/yihu/ehr/common/mongo/IMongoDBAdminer.java
  4. +376 -0  Hos-resource/src/main/java/com/yihu/ehr/common/mongo/IMongoDBRunner.java
  5. +172 -0  Hos-resource/src/main/java/com/yihu/ehr/common/mongo/MongoDB.java
  6. +65 -0   Hos-resource/src/main/java/com/yihu/ehr/common/mongo/MongoDBConfig.java
  7. +57 -0   Hos-resource/src/main/java/com/yihu/ehr/common/mongo/MongoDBKit.java
  8. +92 -0   Hos-resource/src/main/java/com/yihu/ehr/common/mongo/MongoDBOperator.java
  9. +566 -0  Hos-resource/src/main/java/com/yihu/ehr/common/mongo/MongoDBPro.java
  10. +95 -0  Hos-resource/src/main/java/com/yihu/ehr/crawler/format/AdapterBase.java
  11. +86 -0  Hos-resource/src/main/java/com/yihu/ehr/crawler/format/AdapterScheme.java
  12. +196 -0 Hos-resource/src/main/java/com/yihu/ehr/crawler/format/DataSetTransformer.java
  13. +61 -0  Hos-resource/src/main/java/com/yihu/ehr/crawler/format/DocumentTransformer.java
  14. +23 -0  Hos-resource/src/main/java/com/yihu/ehr/crawler/format/IDataTransformer.java
  15. +17 -3  Hos-resource/src/main/java/com/yihu/ehr/crawler/model/adapter/AdapterDict.java
  16. +22 -0  Hos-resource/src/main/java/com/yihu/ehr/crawler/model/adapter/AdapterMetaData.java
  17. +49 -0  Hos-resource/src/main/java/com/yihu/ehr/crawler/model/config/SysConfig.java
  18. +153 -0 Hos-resource/src/main/java/com/yihu/ehr/crawler/model/patient/PatientCDAUpload.java
  19. +110 -0 Hos-resource/src/main/java/com/yihu/ehr/crawler/model/patient/PatientIndex.java
  20. +4 -3   Hos-resource/src/main/java/com/yihu/ehr/crawler/model/transform/MetaDataVerify.java
  21. +235 -0 Hos-resource/src/main/java/com/yihu/ehr/crawler/origin/FileSystemOrigin.java
  22. +47 -0  Hos-resource/src/main/java/com/yihu/ehr/crawler/origin/IDataOrigin.java
  23. +1 -1   Hos-resource/src/main/java/com/yihu/ehr/crawler/service/DataSetTransformer.java
  24. +50 -0  Hos-resource/src/main/java/com/yihu/ehr/crawler/service/adapter/AdapterDict.java
  25. +41 -0  Hos-resource/src/main/java/com/yihu/ehr/crawler/service/standard/StdDict.java
  26. +38 -0  Hos-resource/src/main/java/com/yihu/ehr/crawler/service/standard/StdMetaData.java
  27. +37 -0  Hos-resource/src/main/java/com/yihu/ehr/crawler/storage/DataSetStorage.java
  28. +242 -0 Hos-resource/src/main/java/com/yihu/ehr/crawler/storage/DocumentStorage.java
  29. +41 -0  Hos-resource/src/main/java/com/yihu/ehr/crawler/storage/IDataStorage.java
  30. +408 -0 Hos-resource/src/main/java/com/yihu/ehr/crawler/storage/MongodbStorage.java
  31. +17 -0  Hos-resource/src/main/java/com/yihu/ehr/standard/service/adapter/AdapterDatasetService.java
  32. +27 -0  Hos-resource/src/main/java/com/yihu/ehr/standard/service/adapter/AdapterDictEntryService.java
  33. +3 -0   Hos-resource/src/main/java/com/yihu/ehr/standard/service/adapter/AdapterDictService.java
  34. +12 -0  Hos-resource/src/main/java/com/yihu/ehr/standard/service/adapter/AdapterMetadataService.java
  35. +19 -0  Hos-resource/src/main/java/com/yihu/ehr/standard/service/standard/StdDictEntryService.java
  36. +14 -0  Hos-resource/src/main/java/com/yihu/ehr/standard/service/standard/StdDictService.java
  37. +13 -0  Hos-resource/src/main/java/com/yihu/ehr/standard/service/standard/StdMetadataService.java
  38. +11 -0  Hos-resource/src/main/java/com/yihu/ehr/system/dao/DatasourceDao.java
  39. +13 -0  Hos-resource/src/main/java/com/yihu/ehr/system/dao/OrganizationDao.java
  40. +4 -0   Hos-resource/src/main/java/com/yihu/ehr/system/dao/intf/IDatasourceDao.java
  41. +2 -0   Hos-resource/src/main/java/com/yihu/ehr/system/dao/intf/IOrganizationDao.java
  42. +11 -0  Hos-resource/src/main/java/com/yihu/ehr/system/service/DatasourceManager.java
  43. +5 -0   Hos-resource/src/main/java/com/yihu/ehr/system/service/OrganizationManager.java
  44. +7 -0   Hos-resource/src/main/java/com/yihu/ehr/system/service/intf/IDatasourceManager.java
  45. +2 -0   Hos-resource/src/main/java/com/yihu/ehr/system/service/intf/IOrganizationManager.java
  46. +15 -0  Hos-resource/src/main/resources/config/archive.properties
  47. +2 -0   Hos-resource/src/main/resources/config/sys.config.xml

+ 20 - 0
Hos-Framework/src/main/java/com/yihu/ehr/framework/util/file/FileUtil.java

@@ -89,6 +89,26 @@ public class FileUtil {
        return true;
    }
    public static boolean writeFile(String filePath, byte[] bytes, String encoding) throws IOException {
        File file = new File(filePath);
        if (!file.getParentFile().exists()) {
            if (!file.getParentFile().mkdirs()) {
                return false;
            }
        }
        FileOutputStream fileOutputStream = new FileOutputStream(file);
        byte[] bbuf = new byte[1024];
        InputStream fis = new ByteArrayInputStream(bytes);
        int hasRead = 0;
        // read data from the input stream in a loop
        while ((hasRead = fis.read(bbuf)) > 0) {
            fileOutputStream.write(bbuf, 0, hasRead);
        }
        fileOutputStream.close();
        return true;
    }
    /**
     * Convert an InputStream to a byte[]
     *

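A minimal usage sketch of the new writeFile overload; the target path and payload below are hypothetical, and note that this overload writes the raw bytes as-is without consulting the encoding argument:

    // Hypothetical caller; FileUtil.writeFile throws IOException and returns false
    // when the parent directories cannot be created.
    public static void saveArchive(byte[] payload) throws IOException {
        boolean ok = FileUtil.writeFile("/data/archive/demo.xml", payload, "UTF-8");
        if (!ok) {
            throw new IOException("could not create parent directories for the archive file");
        }
    }
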
+ 14 - 0
Hos-Framework/src/main/java/com/yihu/ehr/framework/util/operator/DateUtil.java

@@ -1398,4 +1398,18 @@ public class DateUtil {
        return ts;
    }
    /**
     * Adds or subtracts a number of days from a date (could be optimized).
     * @param date the base date
     * @param days the number of days to add (negative to subtract)
     * @return the adjusted date
     */
    public static java.util.Date setDateTime(java.util.Date date,int days){
        Calendar cal = Calendar.getInstance();
        cal.setTime(date);
        cal.set(Calendar.DATE, cal.get(Calendar.DATE) +(days));
        return  cal.getTime();
    }
}
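
A short sketch of the new setDateTime helper (the dates are illustrative only):

    java.util.Date today = new java.util.Date();
    java.util.Date oneWeekLater = DateUtil.setDateTime(today, 7);   // add seven days
    java.util.Date yesterday = DateUtil.setDateTime(today, -1);     // negative values subtract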

+ 98 - 0
Hos-resource/src/main/java/com/yihu/ehr/common/mongo/IMongoDBAdminer.java

@@ -0,0 +1,98 @@
package com.yihu.ehr.common.mongo;
import java.util.List;
/**
 * @created Airhead 2016/2/17.
 */
public interface IMongoDBAdminer {
    /**
     * Drops this collection from the Database.
     *
     * @mongodb.driver.manual reference/command/drop/ Drop Collection
     */
    void drop(String collectionName);
    /**
     * Create an index with the given keys.
     *
     * @param keys an object describing the index key(s), which may not be null.
     * @return the index name
     * @mongodb.driver.manual reference/command/createIndexes Create indexes
     */
    String createIndex(String collectionName, String keys);
    /**
     * Create an index with the given keys and options.
     *
     * @param keys         an object describing the index key(s), which may not be null.
     * @param indexOptions the options for the index
     * @return the index name
     * @mongodb.driver.manual reference/command/createIndexes Create indexes
     */
    String createIndex(String collectionName, String keys, String indexOptions);
    /**
     * Create multiple indexes.
     *
     * @param indexes the list of indexes
     * @return the list of index names
     * @mongodb.driver.manual reference/command/createIndexes Create indexes
     * @mongodb.server.release 2.6
     */
//    List<String> createIndexes(List<IndexModel> indexes);
    /**
     * Get all the indexes in this collection.
     *
     * @return the list indexes iterable interface
     * @mongodb.driver.manual reference/command/listIndexes/ List indexes
     */
    List<String> listIndexes(String collectionName);
    /**
     * Get all the indexes in this collection.
     *
     * @param resultClass the class to decode each document into
     * @param <TResult>   the target document type of the iterable.
     * @return the list indexes iterable interface
     * @mongodb.driver.manual reference/command/listIndexes/ List indexes
     */
//    <TResult> ListIndexesIterable<TResult> listIndexes(Class<TResult> resultClass);
    /**
     * Drops the index given its name.
     *
     * @param indexName the name of the index to remove
     * @mongodb.driver.manual reference/command/dropIndexes/ Drop indexes
     */
    void dropIndex(String collectionName, String indexName);
    /**
     * Drop all the indexes on this collection, except for the default on _id.
     *
     * @mongodb.driver.manual reference/command/dropIndexes/ Drop indexes
     */
    void dropIndexes(String collectionName);
    /**
     * Rename the collection with oldCollectionName to the newCollectionName.
     *
     * @param newCollectionName the namespace the collection will be renamed to
     * @throws com.mongodb.MongoServerException if you provide a newCollectionName that is the name of an existing collection, or if the
     *                                          oldCollectionName is the name of a collection that doesn't exist
     * @mongodb.driver.manual reference/commands/renameCollection Rename collection
     */
    void renameCollection(String collectionName, String newCollectionName);
    /**
     * Rename the collection with oldCollectionName to the newCollectionName.
     *
     * @param newCollectionName       the name the collection will be renamed to
     * @param renameCollectionOptions the options for renaming a collection
     * @throws com.mongodb.MongoServerException if you provide a newCollectionName that is the name of an existing collection and dropTarget
     *                                          is false, or if the oldCollectionName is the name of a collection that doesn't exist
     * @mongodb.driver.manual reference/commands/renameCollection Rename collection
     */
    void renameCollection(String collectionName, String newCollectionName, String renameCollectionOptions);
}
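
The administrative operations also take JSON strings for their key and option documents. A hedged sketch of how they might be called through the static MongoDB facade added later in this commit (the collection and key names are hypothetical):

    // Create a compound index, list the collection's indexes, then drop the new one.
    String indexName = MongoDB.createIndex("patientArchive", "{\"patientId\": 1, \"eventDate\": -1}");
    java.util.List<String> indexes = MongoDB.listIndexes("patientArchive");
    MongoDB.dropIndex("patientArchive", indexName);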

+ 376 - 0
Hos-resource/src/main/java/com/yihu/ehr/common/mongo/IMongoDBRunner.java

@@ -0,0 +1,376 @@
package com.yihu.ehr.common.mongo;
import java.util.List;
/**
 * CRUD interface for MongoDB; these methods are adapted from MongoCollection.
 *
 * @created Airhead 2016/2/17.
 */
public interface IMongoDBRunner {
    long count(String collectionName);
    /**
     * Counts the number of documents in the collection according to the given options.
     *
     * @param filter the query filter
     * @return the number of documents in the collection
     */
    long count(String collectionName, String filter);
    /**
     * Counts the number of documents in the collection according to the given options.
     *
     * @param filter  the query filter
     * @param options the options describing the count
     * @return the number of documents in the collection
     */
    long count(String collectionName, String filter, String options);
    /**
     * Gets the distinct values of the specified field name.
     *
     * @param fieldName   the field name
     * @param resultClass the class to cast any distinct items into.
     * @param <TResult>   the target type of the iterable.
     * @return an iterable of distinct values
     * @mongodb.driver.manual reference/command/distinct/ Distinct
     */
//    <TResult> DistinctIterable<TResult> distinct(String fieldName, Class<TResult> resultClass);
    /**
     * Gets the distinct values of the specified field name.
     *
     * @param fieldName   the field name
     * @param filter      the query filter
     * @param resultClass the class to cast any distinct items into.
     * @param <TResult>   the target type of the iterable.
     * @return an iterable of distinct values
     * @mongodb.driver.manual reference/command/distinct/ Distinct
     */
//    <TResult> DistinctIterable<TResult> distinct(String fieldName, Bson filter, Class<TResult> resultClass);
    /**
     * Finds all documents in the collection.
     *
     * @return the find iterable interface
     * @mongodb.driver.manual tutorial/query-documents/ Find
     */
    List<String> find(String collectionName);
    /**
     * Finds all documents in the collection.
     *
     * @param resultClass the class to decode each document into
     * @param <TResult>   the target document type of the iterable.
     * @return the find iterable interface
     * @mongodb.driver.manual tutorial/query-documents/ Find
     */
//    <TResult> FindIterable<TResult> find(Class<TResult> resultClass);
    /**
     * Finds all documents in the collection.
     *
     * @param filter the query filter
     * @return the find iterable interface
     * @mongodb.driver.manual tutorial/query-documents/ Find
     */
    List<String> find(String collectionName, String filter);
    /**
     * Finds all documents in the collection.
     *
     * @param filter the query filter
     * @return the find iterable interface
     * @mongodb.driver.manual tutorial/query-documents/ Find
     */
    List<String> find(String collectionName, String filter, String projection);
    /**
     * Finds all documents in the collection.
     *
     * @param filter the query filter
     * @return the find iterable interface
     * @mongodb.driver.manual tutorial/query-documents/ Find
     */
    List<String> find(String collectionName, String filter, String projection, String options);
    /**
     * Finds all documents in the collection.
     *
     * @param filter      the query filter
     * @param resultClass the class to decode each document into
     * @param <TResult>   the target document type of the iterable.
     * @return the find iterable interface
     * @mongodb.driver.manual tutorial/query-documents/ Find
     */
//    <TResult> FindIterable<TResult> find(Bson filter, Class<TResult> resultClass);
    /**
     * Aggregates documents according to the specified aggregation pipeline.
     *
     * @param pipeline the aggregate pipeline
     * @return an iterable containing the result of the aggregation operation
     * @mongodb.driver.manual aggregation/ Aggregation
     * @mongodb.server.release 2.2
     */
//    List<String> aggregate(String collectionName, List<? extends String> pipeline);
    /**
     * Aggregates documents according to the specified aggregation pipeline.
     *
     * @param pipeline    the aggregate pipeline
     * @param resultClass the class to decode each document into
     * @param <TResult>   the target document type of the iterable.
     * @return an iterable containing the result of the aggregation operation
     * @mongodb.driver.manual aggregation/ Aggregation
     * @mongodb.server.release 2.2
     */
//    <TResult> AggregateIterable<TResult> aggregate(List<? extends Bson> pipeline, Class<TResult> resultClass);
    /**
     * Aggregates documents according to the specified map-reduce function.
     *
     * @param mapFunction    A JavaScript function that associates or "maps" a value with a key and emits the key and value pair.
     * @param reduceFunction A JavaScript function that "reduces" to a single object all the values associated with a particular key.
     * @return an iterable containing the result of the map-reduce operation
     * @mongodb.driver.manual reference/command/mapReduce/ map-reduce
     */
//    List<String> mapReduce(String collectionName, String mapFunction, String reduceFunction);
    /**
     * Aggregates documents according to the specified map-reduce function.
     *
     * @param mapFunction    A JavaScript function that associates or "maps" a value with a key and emits the key and value pair.
     * @param reduceFunction A JavaScript function that "reduces" to a single object all the values associated with a particular key.
     * @param resultClass    the class to decode each resulting document into.
     * @param <TResult>      the target document type of the iterable.
     * @return an iterable containing the result of the map-reduce operation
     * @mongodb.driver.manual reference/command/mapReduce/ map-reduce
     */
//    <TResult> MapReduceIterable<TResult> mapReduce(String mapFunction, String reduceFunction, Class<TResult> resultClass);
    /**
     * Executes a mix of inserts, updates, replaces, and deletes.
     *
     * @param requests the writes to execute
     * @return the result of the bulk write
     * @throws com.mongodb.MongoBulkWriteException if there's an exception in the bulk write operation
     * @throws com.mongodb.MongoException          if there's an exception running the operation
     */
//    BulkWriteResult bulkWrite(List<? extends WriteModel<? extends TDocument>> requests);
    /**
     * Executes a mix of inserts, updates, replaces, and deletes.
     *
     * @param requests the writes to execute
     * @param options  the options to apply to the bulk write operation
     * @return the result of the bulk write
     * @throws com.mongodb.MongoBulkWriteException if there's an exception in the bulk write operation
     * @throws com.mongodb.MongoException          if there's an exception running the operation
     */
//    BulkWriteResult bulkWrite(List<? extends WriteModel<? extends TDocument>> requests, BulkWriteOptions options);
    /**
     * Inserts the provided document. If the document is missing an identifier, the driver should generate one.
     *
     * @param document the document to insert
     * @throws com.mongodb.MongoWriteException        if the write failed due to some other failure specific to the insert command
     * @throws com.mongodb.MongoWriteConcernException if the write failed due to being unable to fulfil the write concern
     * @throws com.mongodb.MongoException             if the write failed due to some other failure
     */
    void insertOne(String collectionName, String document);
    /**
     * Inserts one or more documents.  A call to this method is equivalent to a call to the {@code bulkWrite} method
     *
     * @param documents the documents to insert
     * @throws com.mongodb.MongoBulkWriteException if there's an exception in the bulk write operation
     * @throws com.mongodb.MongoException          if the write failed due to some other failure
     * @see com.mongodb.client.MongoCollection#bulkWrite
     */
    void insertMany(String collectionName, List<String> documents);
    /**
     * Inserts one or more documents.  A call to this method is equivalent to a call to the {@code bulkWrite} method
     *
     * @param documents the documents to insert
     * @param options   the options to apply to the operation
     * @throws com.mongodb.DuplicateKeyException if the write failed due to a duplicate unique key
     * @throws com.mongodb.WriteConcernException if the write failed due to being unable to fulfil the write concern
     * @throws com.mongodb.MongoException        if the write failed due to some other failure
     */
    void insertMany(String collectionName, List<String> documents, String options);
    /**
     * Removes at most one document from the collection that matches the given filter.  If no documents match, the collection is not
     * modified.
     *
     * @param filter the query filter to apply to the delete operation
     * @return the result of the remove one operation
     * @throws com.mongodb.MongoWriteException        if the write failed due to some other failure specific to the delete command
     * @throws com.mongodb.MongoWriteConcernException if the write failed due to being unable to fulfil the write concern
     * @throws com.mongodb.MongoException             if the write failed due to some other failure
     */
    long deleteOne(String collectionName, String filter);
    /**
     * Removes all documents from the collection that match the given query filter.  If no documents match, the collection is not modified.
     *
     * @param filter the query filter to apply to the delete operation
     * @return the result of the remove many operation
     * @throws com.mongodb.MongoWriteException        if the write failed due to some other failure specific to the delete command
     * @throws com.mongodb.MongoWriteConcernException if the write failed due to being unable to fulfil the write concern
     * @throws com.mongodb.MongoException             if the write failed due to some other failure
     */
    long deleteMany(String collectionName, String filter);
    /**
     * Replace a document in the collection according to the specified arguments.
     *
     * @param filter      the query filter to apply to the replace operation
     * @param replacement the replacement document
     * @return the result of the replace one operation
     * @throws com.mongodb.MongoWriteException        if the write failed due to some other failure specific to the replace command
     * @throws com.mongodb.MongoWriteConcernException if the write failed due to being unable to fulfil the write concern
     * @throws com.mongodb.MongoException             if the write failed due to some other failure
     * @mongodb.driver.manual tutorial/modify-documents/#replace-the-document Replace
     */
    long replaceOne(String collectionName, String filter, String replacement);
    /**
     * Replace a document in the collection according to the specified arguments.
     *
     * @param filter        the query filter to apply to the replace operation
     * @param replacement   the replacement document
     * @param updateOptions the options to apply to the replace operation
     * @return the result of the replace one operation
     * @throws com.mongodb.MongoWriteException        if the write failed due to some other failure specific to the replace command
     * @throws com.mongodb.MongoWriteConcernException if the write failed due to being unable to fulfil the write concern
     * @throws com.mongodb.MongoException             if the write failed due to some other failure
     * @mongodb.driver.manual tutorial/modify-documents/#replace-the-document Replace
     */
    long replaceOne(String collectionName, String filter, String replacement, String updateOptions);
    /**
     * Update a single document in the collection according to the specified arguments.
     *
     * @param filter a document describing the query filter, which may not be null.
     * @param update a document describing the update, which may not be null. The update to apply must include only update operators.
     * @return the result of the update one operation
     * @throws com.mongodb.MongoWriteException        if the write failed due to some other failure specific to the update command
     * @throws com.mongodb.MongoWriteConcernException if the write failed due to being unable to fulfil the write concern
     * @throws com.mongodb.MongoException             if the write failed due to some other failure
     * @mongodb.driver.manual tutorial/modify-documents/ Updates
     * @mongodb.driver.manual reference/operator/update/ Update Operators
     */
    long updateOne(String collectionName, String filter, String update);
    /**
     * Update a single document in the collection according to the specified arguments.
     *
     * @param filter        a document describing the query filter, which may not be null.
     * @param update        a document describing the update, which may not be null. The update to apply must include only update operators.
     * @param updateOptions the options to apply to the update operation
     * @return the result of the update one operation
     * @throws com.mongodb.MongoWriteException        if the write failed due to some other failure specific to the update command
     * @throws com.mongodb.MongoWriteConcernException if the write failed due to being unable to fulfil the write concern
     * @throws com.mongodb.MongoException             if the write failed due to some other failure
     * @mongodb.driver.manual tutorial/modify-documents/ Updates
     * @mongodb.driver.manual reference/operator/update/ Update Operators
     */
    long updateOne(String collectionName, String filter, String update, String updateOptions);
    /**
     * Update all documents in the collection according to the specified arguments.
     *
     * @param filter a document describing the query filter, which may not be null.
     * @param update a document describing the update, which may not be null. The update to apply must include only update operators.
     * @return the result of the update one operation
     * @throws com.mongodb.MongoWriteException        if the write failed due to some other failure specific to the update command
     * @throws com.mongodb.MongoWriteConcernException if the write failed due to being unable to fulfil the write concern
     * @throws com.mongodb.MongoException             if the write failed due to some other failure
     * @mongodb.driver.manual tutorial/modify-documents/ Updates
     * @mongodb.driver.manual reference/operator/update/ Update Operators
     */
    long updateMany(String collectionName, String filter, String update);
    /**
     * Update all documents in the collection according to the specified arguments.
     *
     * @param filter        a document describing the query filter, which may not be null.
     * @param update        a document describing the update, which may not be null. The update to apply must include only update operators.
     * @param updateOptions the options to apply to the update operation
     * @return the result of the update one operation
     * @throws com.mongodb.MongoWriteException        if the write failed due to some other failure specific to the update command
     * @throws com.mongodb.MongoWriteConcernException if the write failed due to being unable to fulfil the write concern
     * @throws com.mongodb.MongoException             if the write failed due to some other failure
     * @mongodb.driver.manual tutorial/modify-documents/ Updates
     * @mongodb.driver.manual reference/operator/update/ Update Operators
     */
    long updateMany(String collectionName, String filter, String update, String updateOptions);
    /**
     * Atomically find a document and remove it.
     *
     * @param filter the query filter to find the document with
     * @return the document that was removed.  If no documents matched the query filter, then null will be returned
     */
    String findOneAndDelete(String collectionName, String filter);
    /**
     * Atomically find a document and remove it.
     *
     * @param filter  the query filter to find the document with
     * @param options the options to apply to the operation
     * @return the document that was removed.  If no documents matched the query filter, then null will be returned
     */
    String findOneAndDelete(String collectionName, String filter, String options);
    /**
     * Atomically find a document and replace it.
     *
     * @param filter      the query filter to apply to the replace operation
     * @param replacement the replacement document
     * @return the document that was replaced.  Depending on the value of the {@code returnOriginal} property, this will either be the
     * document as it was before the update or as it is after the update.  If no documents matched the query filter, then null will be
     * returned
     */
    String findOneAndReplace(String collectionName, String filter, String replacement);
    /**
     * Atomically find a document and replace it.
     *
     * @param filter      the query filter to apply to the replace operation
     * @param replacement the replacement document
     * @param options     the options to apply to the operation
     * @return the document that was replaced.  Depending on the value of the {@code returnOriginal} property, this will either be the
     * document as it was before the update or as it is after the update.  If no documents matched the query filter, then null will be
     * returned
     */
    String findOneAndReplace(String collectionName, String filter, String replacement, String options);
    /**
     * Atomically find a document and update it.
     *
     * @param filter a document describing the query filter, which may not be null.
     * @param update a document describing the update, which may not be null. The update to apply must include only update operators.
     * @return the document that was updated before the update was applied.  If no documents matched the query filter, then null will be
     * returned
     */
    String findOneAndUpdate(String collectionName, String filter, String update);
    /**
     * Atomically find a document and update it.
     *
     * @param filter  a document describing the query filter, which may not be null.
     * @param update  a document describing the update, which may not be null. The update to apply must include only update operators.
     * @param options the options to apply to the operation
     * @return the document that was updated.  Depending on the value of the {@code returnOriginal} property, this will either be the
     * document as it was before the update or as it is after the update.  If no documents matched the query filter, then null will be
     * returned
     */
    String findOneAndUpdate(String collectionName, String filter, String update, String options);
}
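
Every filter, document, and option in this interface is passed as a JSON string, so a typical call sequence through the MongoDB facade might look like the following sketch (the collection name and documents are hypothetical):

    // Insert a record, query it back with a projection, then clean up.
    MongoDB.insertOne("patientArchive", "{\"patientId\": \"P001\", \"orgCode\": \"H01\"}");
    java.util.List<String> docs = MongoDB.find("patientArchive", "{\"orgCode\": \"H01\"}", "{\"patientId\": 1}");
    long removed = MongoDB.deleteMany("patientArchive", "{\"orgCode\": \"H01\"}");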

+ 172 - 0
Hos-resource/src/main/java/com/yihu/ehr/common/mongo/MongoDB.java

@@ -0,0 +1,172 @@
package com.yihu.ehr.common.mongo;
import com.mongodb.client.MongoCollection;
import org.bson.Document;
import java.util.List;
/**
 * A thin wrapper around MongoDB that simplifies its use,
 * mainly by flattening the call hierarchy and hiding resource cleanup.
 * Note:
 * for performance-critical paths the native driver is still recommended;
 * getCollection() returns the native MongoCollection<Document>.
 * Usage:
 * 1. register a config with MongoDBKit.addConfig()
 * 2. call MongoDBKit.start()
 * 3. query through MongoDB
 * 4. call use() to switch to another connection
 *
 * @created Airhead 2016/2/17.
 */
public class MongoDB {
    private static MongoDBPro mongoDBPro;
    static void init() {
        mongoDBPro = MongoDBPro.use();
    }
    public static MongoDBPro use(String configName) {
        return MongoDBPro.use(configName);
    }
    public static MongoDBPro db(String databaseName) {
        return mongoDBPro.db(databaseName);
    }
    public static long count(String collectionName) {
        return mongoDBPro.count(collectionName);
    }
    public static long count(String collectionName, String filter) {
        return mongoDBPro.count(collectionName, filter);
    }
    public static long count(String collectionName, String filter, String options) {
        return mongoDBPro.count(collectionName, filter, options);
    }
    public static List<String> find(String collectionName) {
        return mongoDBPro.find(collectionName);
    }
    public static List<String> find(String collectionName, String filter) {
        return mongoDBPro.find(collectionName, filter);
    }
    public static List<String> find(String collectionName, String filter, String projection) {
        return mongoDBPro.find(collectionName, filter, projection);
    }
    public static List<String> find(String collectionName, String filter, String projection, String options) {
        return mongoDBPro.find(collectionName, filter, projection, options);
    }
    public static void insertOne(String collectionName, String document) {
        mongoDBPro.insertOne(collectionName, document);
    }
    public static void insertMany(String collectionName, List<String> documents) {
        mongoDBPro.insertMany(collectionName, documents);
    }
    public static void insertMany(String collectionName, List<String> documents, String options) {
        mongoDBPro.insertMany(collectionName, documents, options);
    }
    public static long deleteOne(String collectionName, String filter) {
        return mongoDBPro.deleteOne(collectionName, filter);
    }
    public static long deleteMany(String collectionName, String filter) {
        return mongoDBPro.deleteMany(collectionName, filter);
    }
    public static long replaceOne(String collectionName, String filter, String replacement) {
        return mongoDBPro.replaceOne(collectionName, filter, replacement);
    }
    public static long replaceOne(String collectionName, String filter, String replacement, String updateOptions) {
        return mongoDBPro.replaceOne(collectionName, filter, replacement, updateOptions);
    }
    public static long updateOne(String collectionName, String filter, String update) {
        return mongoDBPro.updateOne(collectionName, filter, update);
    }
    public static long updateOne(String collectionName, String filter, String update, String updateOptions) {
        return mongoDBPro.updateOne(collectionName, filter, update, updateOptions);
    }
    public static long updateMany(String collectionName, String filter, String update) {
        return mongoDBPro.updateMany(collectionName, filter, update);
    }
    public static long updateMany(String collectionName, String filter, String update, String updateOptions) {
        return mongoDBPro.updateMany(collectionName, filter, update, updateOptions);
    }
    public static String findOneAndDelete(String collectionName, String filter) {
        return mongoDBPro.findOneAndDelete(collectionName, filter);
    }
    public static String findOneAndDelete(String collectionName, String filter, String options) {
        return mongoDBPro.findOneAndDelete(collectionName, filter, options);
    }
    public static String findOneAndReplace(String collectionName, String filter, String replacement) {
        return mongoDBPro.findOneAndReplace(collectionName, filter, replacement);
    }
    public static String findOneAndReplace(String collectionName, String filter, String replacement, String options) {
        return mongoDBPro.findOneAndReplace(collectionName, filter, replacement, options);
    }
    public static String findOneAndUpdate(String collectionName, String filter, String update, String options) {
        return mongoDBPro.findOneAndUpdate(collectionName, filter, update, options);
    }
    public static String findOneAndUpdate(String collectionName, String filter, String update) {
        return mongoDBPro.findOneAndUpdate(collectionName, filter, update);
    }
    public static void drop(String collectionName) {
        mongoDBPro.drop(collectionName);
    }
    public static String createIndex(String collectionName, String keys) {
        return mongoDBPro.createIndex(collectionName, keys);
    }
    public static String createIndex(String collectionName, String keys, String indexOptions) {
        return mongoDBPro.createIndex(collectionName, keys, indexOptions);
    }
    public static List<String> listIndexes(String collectionName) {
        return mongoDBPro.listIndexes(collectionName);
    }
    public static void dropIndex(String collectionName, String indexName) {
        mongoDBPro.dropIndex(collectionName, indexName);
    }
    public static void dropIndexes(String collectionName) {
        mongoDBPro.dropIndexes(collectionName);
    }
    public static void renameCollection(String collectionName, String newCollectionName) {
        mongoDBPro.renameCollection(collectionName, newCollectionName);
    }
    public static void renameCollection(String collectionName, String newCollectionName, String renameCollectionOptions) {
        mongoDBPro.renameCollection(collectionName, newCollectionName, renameCollectionOptions);
    }
    public static MongoCollection<Document> getCollection(String collectionName) {
        return mongoDBPro.getCollection(collectionName);
    }
    public static List<String> listCollectionNames(){
        return mongoDBPro.listCollectionNames();
    }
}
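
A minimal sketch of the four-step usage sequence described in the class comment (the connection URI, database, and collection names are hypothetical):

    // 1. register a config, 2. start the kit, 3. query through the static facade.
    MongoDBKit.addConfig(new MongoDBConfig(MongoDBKit.MAIN_CONFIG_NAME,
            "mongodb://localhost:27017", "healthArchive"));
    MongoDBKit.start();
    long total = MongoDB.count("patientArchive");
    java.util.List<String> pending = MongoDB.find("patientArchive", "{\"uploaded\": false}");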

+ 65 - 0
Hos-resource/src/main/java/com/yihu/ehr/common/mongo/MongoDBConfig.java

@@ -0,0 +1,65 @@
package com.yihu.ehr.common.mongo;
import com.mongodb.MongoClient;
import com.mongodb.MongoClientOptions;
import com.mongodb.MongoClientURI;
import com.mongodb.client.MongoDatabase;
/**
 * @created Airhead 2016/2/17.
 */
public class MongoDBConfig {
    String name;
    String uri;
    String defaultDatabaseName;
    static MongoClient mongoClient;
    MongoClientOptions mongoClientOptions;      // not used yet; Mongo's default options apply
    MongoDatabase mongoDatabase;
    public MongoDBConfig(String uri) {
        this.name = MongoDBKit.MAIN_CONFIG_NAME;
        this.uri = uri;
        this.defaultDatabaseName = MongoDBKit.DEFAULT_DB_NAME;
    }
    public MongoDBConfig(String name, String uri) {
        this.name = name;
        this.uri = uri;
        this.defaultDatabaseName = MongoDBKit.DEFAULT_DB_NAME;
    }
    public MongoDBConfig(String name, String uri, String databaseName) {
        this.name = name;
        this.uri = uri;
        this.defaultDatabaseName = databaseName;
    }
    public String getName() {
        return name;
    }
    public MongoDatabase getDatabase(String databaseName) {
        if (mongoClient == null) {
            MongoClientURI mongoClientURI = new MongoClientURI(uri);
            mongoClient = new MongoClient(mongoClientURI);
        }
        if (mongoDatabase != null) {
            if (mongoDatabase.getName().equals(databaseName)) {
                return mongoDatabase;
            }
        }
        mongoDatabase = mongoClient.getDatabase(databaseName);
        return mongoDatabase;
    }
    public MongoDatabase getDatabase() {
        if (mongoDatabase != null) {
            return mongoDatabase;
        }
        return getDatabase(defaultDatabaseName);
    }
}
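
Several named configurations can be registered and then selected with use(); a hedged sketch (the config name, URI, and collection are hypothetical):

    // Register a second, named connection and query it explicitly.
    MongoDBKit.addConfig(new MongoDBConfig("archiveStore", "mongodb://10.0.0.2:27017", "archive"));
    MongoDBPro archive = MongoDB.use("archiveStore");
    long queued = archive.count("uploadQueue", "{\"status\": 0}");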

+ 57 - 0
Hos-resource/src/main/java/com/yihu/ehr/common/mongo/MongoDBKit.java

@@ -0,0 +1,57 @@
package com.yihu.ehr.common.mongo;
import java.util.HashMap;
import java.util.Map;
/**
 * @created Airhead 2016/2/17.
 */
public class MongoDBKit {
    public static final String MAIN_CONFIG_NAME = "main";
    public static final String DEFAULT_DB_NAME = "test";
    static MongoDBConfig config = null;
    private static Map<String, MongoDBConfig> configNameToConfig = new HashMap<>();
    public static void start() {
        MongoDB.init();
    }
    public static MongoDBConfig getConfig() {
        return config;
    }
    public static MongoDBConfig getConfig(String configName) {
        return configNameToConfig.get(configName);
    }
    /**
     * Add Config object
     *
     * @param config the Config contains Mongodb uri and MongoClientOptions etc.
     */
    public static void addConfig(MongoDBConfig config) {
        if (config == null) {
            throw new IllegalArgumentException("Config can not be null");
        }
        if (configNameToConfig.containsKey(config.getName())) {
            throw new IllegalArgumentException("Config already exists: " + config.getName());
        }
        configNameToConfig.put(config.getName(), config);
        /**
         * Replace the main config if the current config name is MAIN_CONFIG_NAME
         */
        if (MAIN_CONFIG_NAME.equals(config.getName())) {
            MongoDBKit.config = config;
        }
        /**
         * The configName may not be MAIN_CONFIG_NAME;
         * if the main config is still null, fall back to the first config added.
         */
        if (MongoDBKit.config == null) {
            MongoDBKit.config = config;
        }
    }
}

+ 92 - 0
Hos-resource/src/main/java/com/yihu/ehr/common/mongo/MongoDBOperator.java

@ -0,0 +1,92 @@
package com.yihu.ehr.common.mongo;
/**
 * Query Selectors
 * $eq	Matches values that are equal to a specified value.
 * $gt	Matches values that are greater than a specified value.
 * $gte	Matches values that are greater than or equal to a specified value.
 * $lt	Matches values that are less than a specified value.
 * $lte	Matches values that are less than or equal to a specified value.
 * $ne	Matches all values that are not equal to a specified value.
 * $in	Matches any of the values specified in an array.
 * $nin	Matches none of the values specified in an array.
 *
 * Logical
 * $or	Joins query clauses with a logical OR returns all documents that match the conditions of either clause.
 * $and	Joins query clauses with a logical AND returns all documents that match the conditions of both clauses.
 * $not	Inverts the effect of a query expression and returns documents that do not match the query expression.
 * $nor	Joins query clauses with a logical NOR returns all documents that fail to match both clauses.
 *
 * Element
 * $exists	Matches documents that have the specified field.
 * $type	Selects documents if a field is of the specified type.
 *
 * Evaluation
 * $mod	Performs a modulo operation on the value of a field and selects documents with a specified result.
 * $regex	Selects documents where values match a specified regular expression.
 * $text	Performs text search.
 * $where	Matches documents that satisfy a JavaScript expression.
 *
 * Geospatial
 * $geoWithin	Selects geometries within a bounding GeoJSON geometry. The 2dsphere and 2d indexes support $geoWithin.
 * $geoIntersects	Selects geometries that intersect with a GeoJSON geometry. The 2dsphere index supports $geoIntersects.
 * $near	Returns geospatial objects in proximity to a point. Requires a geospatial index. The 2dsphere and 2d indexes support $near.
 * $nearSphere	Returns geospatial objects in proximity to a point on a sphere. Requires a geospatial index. The 2dsphere and 2d indexes support $nearSphere.
 *
 * Array
 * $all	Matches arrays that contain all elements specified in the query.
 * $elemMatch	Selects documents if element in the array field matches all the specified $elemMatch conditions.
 * $size	Selects documents if the array field is a specified size.
 *
 * Bitwise
 * $bitsAllSet	Matches numeric or binary values in which a set of bit positions all have a value of 1.
 * $bitsAnySet	Matches numeric or binary values in which any bit from a set of bit positions has a value of 1.
 * $bitsAllClear	Matches numeric or binary values in which a set of bit positions all have a value of 0.
 * $bitsAnyClear	Matches numeric or binary values in which any bit from a set of bit positions has a value of 0.
 *
 * Comments
 * $comment	Adds a comment to a query predicate.
 *
 * Projection Operators
 * $	Projects the first element in an array that matches the query condition.
 * $elemMatch	Projects the first element in an array that matches the specified $elemMatch condition.
 * $meta	Projects the document’s score assigned during $text operation.
 * $slice	Limits the number of elements projected from an array. Supports skip and limit slices.
 *
 * Update Operators
 * $inc	Increments the value of the field by the specified amount.
 * $mul	Multiplies the value of the field by the specified amount.
 * $rename	Renames a field.
 * $setOnInsert	Sets the value of a field if an update results in an insert of a document. Has no effect on update operations that modify existing documents.
 * $set	Sets the value of a field in a document.
 * $unset	Removes the specified field from a document.
 * $min	Only updates the field if the specified value is less than the existing field value.
 * $max	Only updates the field if the specified value is greater than the existing field value.
 * $currentDate	Sets the value of a field to current date, either as a Date or a Timestamp.
 *
 * Array
 * $	Acts as a placeholder to update the first element that matches the query condition in an update.
 * $addToSet	Adds elements to an array only if they do not already exist in the set.
 * $pop	Removes the first or last item of an array.
 * $pullAll	Removes all matching values from an array.
 * $pull	Removes all array elements that match a specified query.
 * $pushAll	Deprecated. Adds several items to an array.
 * $push	Adds an item to an array.
 *
 * Modifiers
 * $each	Modifies the $push and $addToSet operators to append multiple items for array updates.
 * $slice	Modifies the $push operator to limit the size of updated arrays.
 * $sort	Modifies the $push operator to reorder documents stored in an array.
 * $position	Modifies the $push operator to specify the position in the array to add elements.
 *
 * Bitwise
 * $bit	Performs bitwise AND, OR, and XOR updates of integer values.
 *
 * Isolation
 * $isolated	Modifies the behavior of a write operation to increase the isolation of the operation.
 * @created Airhead 2016/2/17.
 */
public class MongoDBOperator {
}
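
The operators listed above appear verbatim inside the JSON update documents passed to the string-based API. A hedged sketch using $set and $inc (the collection and field names are hypothetical; the options document follows the {"upsert": <boolean>} format documented on MongoDBPro's update methods below):

    long modified = MongoDB.updateMany("patientArchive",
            "{\"orgCode\": \"H01\"}",
            "{\"$set\": {\"uploaded\": true}, \"$inc\": {\"retryCount\": 1}}",
            "{\"upsert\": false}");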

+ 566 - 0
Hos-resource/src/main/java/com/yihu/ehr/common/mongo/MongoDBPro.java

@@ -0,0 +1,566 @@
package com.yihu.ehr.common.mongo;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.mongodb.MongoNamespace;
import com.mongodb.client.*;
import com.mongodb.client.model.*;
import com.mongodb.client.result.DeleteResult;
import com.mongodb.client.result.UpdateResult;
import org.apache.commons.lang3.StringUtils;
import org.bson.Document;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * MongoDBPro. Professional database CRUD and management tool.
 *
 * @created Airhead 2016/2/17.
 */
public class MongoDBPro implements IMongoDBRunner, IMongoDBAdminer {
    private static final Map<String, MongoDBPro> map = new HashMap<String, MongoDBPro>();
    private final MongoDBConfig config;
    public MongoDBPro() {
        if (MongoDBKit.config == null) {
            throw new RuntimeException("The main config is null, initialize MongoDBKit first");
        }
        this.config = MongoDBKit.config;
    }
    public MongoDBPro(String configName) {
        this.config = MongoDBKit.getConfig(configName);
        if (this.config == null) {
            throw new IllegalArgumentException("Config not found by configName: " + configName);
        }
    }
    public static MongoDBPro use() {
        return use(MongoDBKit.config.name);
    }
    public static MongoDBPro use(String configName) {
        MongoDBPro result = map.get(configName);
        if (result == null) {
            result = new MongoDBPro(configName);
            map.put(configName, result);
        }
        return result;
    }
    public MongoDBPro db(String databaseName) {
        config.getDatabase(databaseName);
        return this;
    }
    @Override
    public long count(String collectionName) {
        return count(collectionName, null);
    }
    @Override
    public long count(String collectionName, String filter) {
        return count(collectionName, filter, null);
    }
    /**
     * @param collectionName
     * @param filter         the query filter
     * @param options        the options describing the count
     *                       <p>
     *                       {
     *                       limit: <integer>,
     *                       skip: <integer>,
     *                       hint: <hint>
     *                       }
     * @return
     */
    @Override
    public long count(String collectionName, String filter, String options) {
        MongoCollection<Document> collection = getCollection(collectionName);
        Document filterDocument = new Document();
        if (filter != null) {
            filterDocument = Document.parse(filter);
        }
        CountOptions countOptions = new CountOptions();
        if (options != null) {
            ObjectMapper mapper = new ObjectMapper();
            try {
                JsonNode rootNode = mapper.readValue(options, JsonNode.class);
                String hintString = rootNode.path("hint").toString();
                if (!StringUtils.isEmpty(hintString)) {
                    Document hint = Document.parse(hintString);
                    countOptions.hint(hint);
                } else {
                    countOptions.hint(new Document());
                }
                countOptions.limit(rootNode.path("limit").asInt());
                countOptions.skip(rootNode.path("skip").asInt());
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        return collection.count(filterDocument, countOptions);
    }
    @Override
    public List<String> find(String collectionName) {
        return find(collectionName, null);
    }
    @Override
    public List<String> find(String collectionName, String filter) {
        return find(collectionName, filter, null);
    }
    @Override
    public List<String> find(String collectionName, String filter, String projection) {
        return find(collectionName, filter, projection, null);
    }
    @Override
    public List<String> find(String collectionName, String filter, String projection, String options) {
        MongoCollection<Document> collection = getCollection(collectionName);
        Document filterDocument = new Document();
        if (filter != null) {
            filterDocument = Document.parse(filter);
        }
        Document projectionDocument = new Document();
        if (projection != null) {
            projectionDocument = Document.parse(projection);
        }
        FindIterable<Document> documents = collection.find(filterDocument).projection(projectionDocument);
        List<String> list = new ArrayList<>();
        try (MongoCursor<Document> cursor = documents.iterator()) {
            while (cursor.hasNext()) {
                Document doc = cursor.next();
                list.add(doc.toJson());
            }
        }
        return list;
    }
//    @Override
//    public List<String> aggregate(String collectionName, List<? extends String> pipeline) {
//        return null;
//    }
//    @Override
//    public List<String> mapReduce(String collectionName, String mapFunction, String reduceFunction) {
//        return null;
//    }
    @Override
    public void insertOne(String collectionName, String document) {
        MongoCollection<Document> collection = getCollection(collectionName);
        Document doc = Document.parse(document);
        collection.insertOne(doc);
    }
    @Override
    public void insertMany(String collectionName, List<String> documents) {
        insertMany(collectionName, documents, null);
    }
    /**
     * @param collectionName
     * @param documents      the documents to insert
     * @param options        the options to apply to the operation
     *                       {
     *                       ordered:<ordered>
     *                       }
     */
    @Override
    public void insertMany(String collectionName, List<String> documents, String options) {
        MongoCollection<Document> collection = getCollection(collectionName);
        List<Document> list = new ArrayList<>();
        for (String document : documents) {
            Document doc = Document.parse(document);
            list.add(doc);
        }
        InsertManyOptions insertManyOptions = new InsertManyOptions();
        if (options != null) {
            ObjectMapper mapper = new ObjectMapper();
            try {
                JsonNode rootNode = mapper.readValue(options, JsonNode.class);
                insertManyOptions.ordered(rootNode.path("ordered").asBoolean());
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        collection.insertMany(list, insertManyOptions);
    }
    @Override
    public long deleteOne(String collectionName, String filter) {
        MongoCollection<Document> collection = getCollection(collectionName);
        Document filterDocument = Document.parse(filter);
        DeleteResult deleteResult = collection.deleteOne(filterDocument);
        return deleteResult.getDeletedCount();
    }
    @Override
    public long deleteMany(String collectionName, String filter) {
        MongoCollection<Document> collection = getCollection(collectionName);
        Document filterDocument = Document.parse(filter);
        DeleteResult deleteResult = collection.deleteMany(filterDocument);
        return deleteResult.getDeletedCount();
    }
    @Override
    public long replaceOne(String collectionName, String filter, String replacement) {
        return replaceOne(collectionName, filter, replacement, null);
    }
    /**
     * @param collectionName
     * @param filter         the query filter to apply to the replace operation
     * @param replacement    the replacement document
     * @param updateOptions  the options to apply to the replace operation
     *                       {
     *                       upsert:<upsert>
     *                       }
     * @return
     */
    @Override
    public long replaceOne(String collectionName, String filter, String replacement, String updateOptions) {
        MongoCollection<Document> collection = getCollection(collectionName);
        Document filterDocument = Document.parse(filter);
        Document document = Document.parse(replacement);
        UpdateOptions options = new UpdateOptions();
        if (updateOptions != null) {
            ObjectMapper mapper = new ObjectMapper();
            try {
                JsonNode rootNode = mapper.readValue(updateOptions, JsonNode.class);
                options.upsert(rootNode.path("upsert").asBoolean());
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        UpdateResult updateResult = collection.replaceOne(filterDocument, document, options);
        return updateResult.getModifiedCount();
    }
    @Override
    public long updateOne(String collectionName, String filter, String update) {
        return updateOne(collectionName, filter, update, null);
    }
    /**
     * @param collectionName
     * @param filter         a document describing the query filter, which may not be null.
     * @param update         a document describing the update, which may not be null. The update to apply must include only update operators.
     * @param updateOptions  the options to apply to the update operation
     *                       {
     *                       upsert:<upsert>
     *                       }
     * @return
     */
    @Override
    public long updateOne(String collectionName, String filter, String update, String updateOptions) {
        MongoCollection<Document> collection = getCollection(collectionName);
        Document filterDocument = Document.parse(filter);
        Document document = Document.parse(update);
        UpdateOptions options = new UpdateOptions();
        if (updateOptions != null) {
            ObjectMapper mapper = new ObjectMapper();
            try {
                JsonNode rootNode = mapper.readValue(updateOptions, JsonNode.class);
                options.upsert(rootNode.path("upsert").asBoolean());
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        UpdateResult updateResult = collection.updateOne(filterDocument, document, options);
        return updateResult.getModifiedCount();
    }
    @Override
    public long updateMany(String collectionName, String filter, String update) {
        return updateMany(collectionName, filter, update, null);
    }
    /**
     * @param collectionName
     * @param filter         a document describing the query filter, which may not be null.
     * @param update         a document describing the update, which may not be null. The update to apply must include only update operators.
     * @param updateOptions  the options to apply to the update operation
     *                       {
     *                       upsert:<upsert>
     *                       }
     * @return
     */
    @Override
    public long updateMany(String collectionName, String filter, String update, String updateOptions) {
        MongoCollection<Document> collection = getCollection(collectionName);
        Document filterDocument = Document.parse(filter);
        Document document = Document.parse(update);
        UpdateOptions options = new UpdateOptions();
        if (updateOptions != null) {
            ObjectMapper mapper = new ObjectMapper();
            try {
                JsonNode rootNode = mapper.readValue(updateOptions, JsonNode.class);
                options.upsert(rootNode.path("upsert").asBoolean());
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        UpdateResult updateResult = collection.updateMany(filterDocument, document, options);
        return updateResult.getModifiedCount();
    }
    @Override
    public String findOneAndDelete(String collectionName, String filter) {
        return findOneAndDelete(collectionName, filter, null);
    }
    /**
     * @param collectionName
     * @param filter         the query filter to find the document with
     * @param options        the options to apply to the operation
     *                       {
     *                       projection:<document>,
     *                       sort:<document>
     *                       }
     * @return
     */
    @Override
    public String findOneAndDelete(String collectionName, String filter, String options) {
        MongoCollection<Document> collection = getCollection(collectionName);
        Document filterDocument = Document.parse(filter);
        FindOneAndDeleteOptions findOneAndDeleteOptions = new FindOneAndDeleteOptions();
        if (options != null) {
            ObjectMapper mapper = new ObjectMapper();
            try {
                JsonNode rootNode = mapper.readValue(options, JsonNode.class);
                String projection = rootNode.path("projection").toString();
                Document projectionDoc = new Document();
                if (!StringUtils.isEmpty(projection)) {
                    projectionDoc = Document.parse(projection);
                }
                String sort = rootNode.path("sort").toString();
                Document sortDoc = new Document();
                if (!StringUtils.isEmpty(sort)) {
                    sortDoc = Document.parse(sort);
                }
                findOneAndDeleteOptions.projection(projectionDoc);
                findOneAndDeleteOptions.sort(sortDoc);
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        Document document = collection.findOneAndDelete(filterDocument, findOneAndDeleteOptions);
        return document == null ? "{}" : document.toJson();
    }
    @Override
    public String findOneAndReplace(String collectionName, String filter, String replacement) {
        return findOneAndReplace(collectionName, filter, replacement, null);
    }
    /**
     * @param collectionName
     * @param filter         the query filter to apply the the replace operation
     * @param replacement    the replacement document
     * @param options        the options to apply to the operation
     *                       {
     *                       projection:<document>,
     *                       sort:<document>,
     *                       upsert:<upsert>
     *                       }
     * @return
     */
    @Override
    public String findOneAndReplace(String collectionName, String filter, String replacement, String options) {
        MongoCollection<Document> collection = getCollection(collectionName);
        Document filterDocument = Document.parse(filter);
        Document replacementDocument = Document.parse(replacement);
        FindOneAndReplaceOptions findOneAndReplaceOptions = new FindOneAndReplaceOptions();
        if (options != null) {
            ObjectMapper mapper = new ObjectMapper();
            try {
                JsonNode rootNode = mapper.readValue(options, JsonNode.class);
                String projection = rootNode.path("projection").toString();
                Document projectionDoc = new Document();
                if (!StringUtils.isEmpty(projection)) {
                    projectionDoc = Document.parse(projection);
                }
                String sort = rootNode.path("sort").toString();
                Document sortDoc = new Document();
                if (!StringUtils.isEmpty(sort)) {
                    sortDoc = Document.parse(sort);
                }
                findOneAndReplaceOptions.projection(projectionDoc);
                findOneAndReplaceOptions.sort(sortDoc);
                findOneAndReplaceOptions.upsert(rootNode.path("upsert").asBoolean());
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        Document document = collection.findOneAndReplace(filterDocument, replacementDocument, findOneAndReplaceOptions);
        return document == null ? "{}" : document.toJson();
    }
    @Override
    public String findOneAndUpdate(String collectionName, String filter, String update) {
        return findOneAndUpdate(collectionName, filter, update, null);
    }
    /**
     * @param collectionName
     * @param filter         a document describing the query filter, which may not be null.
     * @param update         a document describing the update, which may not be null. The update to apply must include only update operators.
     * @param options        the options to apply to the operation
     *                       {
     *                       projection:<document>,
     *                       sort:<document>,
     *                       upsert:<upsert>
     *                       }
     * @return
     */
    @Override
    public String findOneAndUpdate(String collectionName, String filter, String update, String options) {
        MongoCollection<Document> collection = getCollection(collectionName);
        Document filterDocument = Document.parse(filter);
        Document updateDocument = Document.parse(update);
        FindOneAndUpdateOptions findOneAndUpdateOptions = new FindOneAndUpdateOptions();
        if (options != null) {
            ObjectMapper mapper = new ObjectMapper();
            try {
                JsonNode rootNode = mapper.readValue(options, JsonNode.class);
                String projection = rootNode.path("projection").toString();
                Document projectionDoc = new Document();
                if (!StringUtils.isEmpty(projection)) {
                    projectionDoc = Document.parse(projection);
                }
                String sort = rootNode.path("sort").toString();
                Document sortDoc = new Document();
                if (!StringUtils.isEmpty(sort)) {
                    sortDoc = Document.parse(sort);
                }
                findOneAndUpdateOptions.projection(projectionDoc);
                findOneAndUpdateOptions.sort(sortDoc);
                findOneAndUpdateOptions.upsert(rootNode.path("upsert").asBoolean());
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        Document document = collection.findOneAndUpdate(filterDocument, updateDocument, findOneAndUpdateOptions);
        return document == null ? "{}" : document.toJson();
    }
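    // Illustrative usage sketch (names and values are made up, "mongo" stands for an
    // instance of this class): the options JSON combines projection, sort and upsert as
    // described in the Javadoc above; by default the returned JSON is the matched
    // document as it was before the update was applied.
    //   String before = mongo.findOneAndUpdate("HDSC01_02",
    //           "{\"patient_id\": \"P001\"}",
    //           "{\"$inc\": {\"visit_count\": 1}}",
    //           "{\"projection\": {\"_id\": 0}, \"sort\": {\"event_no\": -1}, \"upsert\": false}");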
    @Override
    public void drop(String collectionName) {
        getCollection(collectionName).drop();
    }
    @Override
    public String createIndex(String collectionName, String keys) {
        MongoCollection<Document> collection = getCollection(collectionName);
        Document keysDocument = Document.parse(keys);
        return collection.createIndex(keysDocument);
    }
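    // Illustrative usage sketch (collection and field names are made up, "mongo" stands
    // for an instance of this class): the keys document follows the standard MongoDB
    // index specification, here a compound ascending index; the returned String is the
    // generated index name.
    //   String indexName = mongo.createIndex("HDSC01_02", "{\"patient_id\": 1, \"event_no\": 1}");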
    @Override
    public String createIndex(String collectionName, String keys, String indexOptions) {
        MongoCollection<Document> collection = getCollection(collectionName);
        Document keysDocument = Document.parse(keys);
        IndexOptions options = new IndexOptions();
//TODO: parse indexOptions (e.g. unique, name, background) into IndexOptions
//        try {
//            ObjectMapper mapper = new ObjectMapper();
//            JsonNode rootNode = mapper.readValue(indexOptions, JsonNode.class);
//            ...
//        } catch (IOException e) {
//            e.printStackTrace();
//        }
        return collection.createIndex(keysDocument, options);
    }
    @Override
    public List<String> listIndexes(String collectionName) {
        MongoCollection<Document> collection = getCollection(collectionName);
        ListIndexesIterable<Document> indexes = collection.listIndexes();
        List<String> list = new ArrayList<>();
        try (MongoCursor<Document> cursor = indexes.iterator()) {
            while (cursor.hasNext()) {
                Document doc = cursor.next();
                list.add(doc.toJson());
            }
        }
        return list;
    }
    @Override
    public void dropIndex(String collectionName, String indexName) {
        getCollection(collectionName).dropIndex(indexName);
    }
    @Override
    public void dropIndexes(String collectionName) {
        getCollection(collectionName).dropIndexes();
    }
    @Override
    public void renameCollection(String collectionName, String newCollectionName) {
        MongoCollection<Document> collection = getCollection(collectionName);
        MongoNamespace namespace = collection.getNamespace();
        collection.renameCollection(new MongoNamespace(namespace.getDatabaseName(), newCollectionName));
    }
    @Override
    public void renameCollection(String collectionName, String newCollectionName, String renameCollectionOptions) {
        MongoCollection<Document> collection = getCollection(collectionName);
        MongoNamespace namespace = collection.getNamespace();
        RenameCollectionOptions options = new RenameCollectionOptions();
        try {
            ObjectMapper mapper = new ObjectMapper();
            JsonNode rootNode = mapper.readValue(renameCollectionOptions, JsonNode.class);
            options.dropTarget(rootNode.path("dropTarget").asBoolean());
        } catch (IOException e) {
            e.printStackTrace();
        }
        collection.renameCollection(new MongoNamespace(namespace.getDatabaseName(), newCollectionName), options);
    }
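    // Illustrative usage sketch (collection names are made up, "mongo" stands for an
    // instance of this class): the options JSON currently only honours dropTarget, which
    // drops an existing collection with the target name before renaming.
    //   mongo.renameCollection("dataset_tmp", "dataset", "{\"dropTarget\": true}");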
    public MongoCollection<Document> getCollection(String collectionName) {
        MongoDatabase database = config.getDatabase();
        return database.getCollection(collectionName);
    }
    public List<String> listCollectionNames() {
        MongoDatabase database = config.getDatabase();
        MongoIterable<String> listCollectionNames = database.listCollectionNames();
        List<String> list = new ArrayList<>();
        for (String collectionName : listCollectionNames) {
            list.add(collectionName);
        }
        return list;
    }
}

+ 95 - 0
Hos-resource/src/main/java/com/yihu/ehr/crawler/format/AdapterBase.java

@ -0,0 +1,95 @@
package com.yihu.ehr.crawler.format;
import com.yihu.ehr.framework.util.springutil.SpringBeanUtil;
import com.yihu.ehr.standard.model.adapter.AdapterSchemeVersionModel;
import com.yihu.ehr.standard.service.adapter.*;
import com.yihu.ehr.standard.service.bo.AdapterVersion;
import com.yihu.ehr.standard.service.standard.StdDictEntryService;
import com.yihu.ehr.standard.service.standard.StdDictService;
import javax.annotation.Resource;
/**
 * Adapter base class that lazily caches the adapter/standard DAO services.
 *
 * @created Created by Air on 2015/6/10.
 */
public class AdapterBase {
    @Resource(name = AdapterDatasetService.BEAN_ID)
    private AdapterDatasetService adapterDatasetService;
    @Resource(name = AdapterSchemeVersionService.BEAN_ID)
    private AdapterSchemeVersionService adapterSchemeVersionService;
    @Resource(name = AdapterMetadataService.BEAN_ID)
    private AdapterMetadataService adapterMetadataService;
    @Resource(name = AdapterDictEntryService.BEAN_ID)
    private AdapterDictEntryService adapterDictEntryService;
    @Resource(name = AdapterDictService.BEAN_ID)
    private AdapterDictService adapterDictService;
    @Resource(name =StdDictEntryService.BEAN_ID )
    private StdDictEntryService stdDictEntryService;
    @Resource(name =StdDictService.BEAN_ID )
    private StdDictService stdDictService;
    protected AdapterVersion adapterVersion;
    public AdapterVersion getAdapterVersion() {
        AdapterSchemeVersionModel adapterSchemeVersionModel = getAdapterSchemeVersionService().getEhrAdapterVersionLasted();
        if (adapterSchemeVersionModel == null) {
            adapterSchemeVersionModel=new AdapterSchemeVersionModel();
        }
        String version = adapterSchemeVersionModel.getVersion();
         adapterVersion = new AdapterVersion(version);
        return adapterVersion;
    }
    public AdapterDatasetService getAdapterDatasetService() {
        if (adapterDatasetService == null) {
            adapterDatasetService = SpringBeanUtil.getService(AdapterDatasetService.BEAN_ID);
        }
        return adapterDatasetService;
    }
    public AdapterSchemeVersionService getAdapterSchemeVersionService() {
        if (adapterSchemeVersionService == null) {
            adapterSchemeVersionService = SpringBeanUtil.getService(AdapterSchemeVersionService.BEAN_ID);
        }
        return adapterSchemeVersionService;
    }
    public AdapterMetadataService getAdapterMetadataService() {
        if (adapterMetadataService == null) {
            adapterMetadataService = SpringBeanUtil.getService(AdapterMetadataService.BEAN_ID);
        }
        return adapterMetadataService;
    }
    public AdapterDictService getAdapterDictService() {
        if (adapterDictService == null) {
            adapterDictService = SpringBeanUtil.getService(AdapterDictService.BEAN_ID);
        }
        return adapterDictService;
    }
    public AdapterDictEntryService getAdapterDictEntryService() {
        if (adapterDictEntryService == null) {
            adapterDictEntryService = SpringBeanUtil.getService(AdapterDictEntryService.BEAN_ID);
        }
        return adapterDictEntryService;
    }
    public StdDictEntryService getStdDictEntryService() {
        if (stdDictEntryService == null) {
            stdDictEntryService = SpringBeanUtil.getService(StdDictEntryService.BEAN_ID);
        }
        return stdDictEntryService;
    }
    public StdDictService getStdDictService() {
        if (stdDictService == null) {
            stdDictService = SpringBeanUtil.getService(StdDictService.BEAN_ID);
        }
        return stdDictService;
    }
}

+ 86 - 0
Hos-resource/src/main/java/com/yihu/ehr/crawler/format/AdapterScheme.java

@ -0,0 +1,86 @@
package com.yihu.ehr.crawler.format;
import com.yihu.ehr.crawler.model.adapter.AdapterMetaData;
import com.yihu.ehr.standard.model.adapter.AdapterDatasetModel;
import com.yihu.ehr.standard.model.adapter.AdapterMetadataModel;
import com.yihu.ehr.standard.model.adapter.AdapterSchemeVersionModel;
import com.yihu.ehr.standard.service.adapter.AdapterDatasetService;
import com.yihu.ehr.standard.service.bo.AdapterVersion;
import net.sf.json.JSONObject;
import javax.annotation.Resource;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Adapter scheme.
 * One adapter scheme may cover one or more organizations whose data is to be collected.
 *
 * @created Created by Air on 2015/6/3.
 */
public class AdapterScheme  extends AdapterBase{
    @Resource(name = AdapterDatasetService.BEAN_ID)
    private AdapterDatasetService adapterDatasetService;
    private HashMap<String, List<AdapterMetadataModel>> adapterMetaDataMap;
    public AdapterScheme() {
    }
    public synchronized HashMap<String, List<AdapterMetadataModel>> getAdapterMetaDataMap() {
        if (adapterMetaDataMap ==null){
            AdapterSchemeVersionModel adapterSchemeVersionModel = getAdapterSchemeVersionService().getEhrAdapterVersionLasted();
            if (adapterSchemeVersionModel == null) {
                adapterSchemeVersionModel=new AdapterSchemeVersionModel();
            }
            String version = adapterSchemeVersionModel.getVersion();
            AdapterVersion adapterVersion = new AdapterVersion(version);
            adapterMetaDataMap =new HashMap<>();
            Map<String, String> condition = new HashMap<>();
            condition.put("column", "adapter_dataset_code");
            JSONObject jsonpObject = JSONObject.fromObject(condition);
            List<AdapterDatasetModel> adapterDataSets = adapterDatasetService.getAdapterDatasetNotNullList(adapterVersion, jsonpObject.toString());
            for (AdapterDatasetModel adapterDataSet:adapterDataSets){
                List<AdapterMetadataModel> adapterMetaDataTList = getAdapterMetadataService().getAdapterMetadataByDataset(version, adapterDataSet.getId());
                if (adapterMetaDataTList!=null && adapterMetaDataTList.size()>0){
                    adapterMetaDataMap.put(adapterDataSet.getStdDatasetCode(), adapterMetaDataTList);
                }
            }
        }
        return adapterMetaDataMap;
    }
    public List<AdapterMetadataModel> getAdapterMetaDatas(String dataSetCode){
        if (dataSetCode==null){
            return new ArrayList<>();
        }
        return getAdapterMetaDataMap().get(dataSetCode);
    }
    public AdapterMetadataModel getAdapterMetaData(AdapterDatasetModel adapterDataSet, String stdMetadataCode) {
        List<AdapterMetadataModel> adapterMetaDataList = getAdapterMetaDatas(adapterDataSet.getStdDatasetCode());
        for (AdapterMetadataModel adapterMetaData : adapterMetaDataList) {
            if (adapterMetaData.getAdapterMetadataCode().equals(stdMetadataCode)) {
                return adapterMetaData;
            }
        }
        return null;
    }
    public AdapterMetaData getAdapterMetaData(String dataSetCode, String metaDataCode) {
        AdapterDatasetModel adapterDataSetT = getAdapterDatasetService().getAdapterDatasetByCode(adapterVersion.getVersion(),dataSetCode);
        AdapterMetadataModel adapterMetaDataT = getAdapterMetaData(adapterDataSetT, metaDataCode);
        if (adapterMetaDataT == null) {
            return null;
        }
        AdapterMetaData adapterMetaData = new AdapterMetaData(adapterMetaDataT,adapterVersion);
        return adapterMetaData;
    }
}

+ 196 - 0
Hos-resource/src/main/java/com/yihu/ehr/crawler/format/DataSetTransformer.java

@ -0,0 +1,196 @@
package com.yihu.ehr.crawler.format;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.yihu.common.util.log.BusinessLogger;
import com.yihu.common.util.log.DebugLogger;
import com.yihu.ehr.crawler.model.adapter.AdapterDict;
import com.yihu.ehr.crawler.model.adapter.AdapterMetaData;
import com.yihu.ehr.crawler.model.config.SysConfig;
import com.yihu.ehr.crawler.model.patient.Patient;
import com.yihu.ehr.crawler.model.transform.DictDataType;
import com.yihu.ehr.crawler.model.transform.MetaDataVerify;
import com.yihu.ehr.crawler.model.transform.TransformType;
import com.yihu.ehr.crawler.service.EsbHttp;
import com.yihu.ehr.crawler.service.standard.StdMetaData;
import com.yihu.ehr.framework.constrant.Constants;
import com.yihu.ehr.standard.model.adapter.AdapterSchemeVersionModel;
import com.yihu.ehr.standard.model.standard.StdMetaDataModel;
import java.io.IOException;
import java.util.Iterator;
import java.util.Map;
/**
 * Currently only the JSON format is handled.
 * <p>
 * JSON format:
 * {
 * "inner_version":"xxxxx",
 * "patient_id":"xxxx",
 * "event_no":"xxxx",
 * "code":"dataset_code",
 * "org_code":"xxxx",
 * "data":
 * [{"metadata_code1":"5","metadata_code2":"6"},
 * {"metadata_code1":"1","metadata_code2":"2"}]}
 * <p>
 * Created by Air on 2015/6/4.
 */
public class DataSetTransformer implements IDataTransformer {
    private ObjectNode jsonObject;
    protected AdapterScheme adapterScheme;
    protected Patient patient;
    public DataSetTransformer(AdapterScheme adapterScheme) {
        this.adapterScheme = adapterScheme;
    }
    @Override
    public Patient getPatient() {
        return patient;
    }
    public ObjectNode getJsonObject() {
        return jsonObject;
    }
    @Override
    public boolean transfer() {
        if (jsonObject != null) {
            String dataSetCode = jsonObject.get("code").asText();
            ArrayNode jsonArray = (ArrayNode) jsonObject.get("data");
            boolean transfer = transferJson(jsonArray, dataSetCode);
            return transfer;
        }
        return false;
    }
    @Override
    public String getData() {
        // Make sure the document carries version information
        String version = EsbHttp.getRemoteVersion(SysConfig.getInstance().getOrgCode());
        jsonObject.put("inner_version", version);   //TODO: fetch the remote version number
        return jsonObject.toString();
    }
    @Override
    public void setData(String data) {
        try {
            ObjectMapper objectMapper = new ObjectMapper();
            jsonObject = objectMapper.readValue(data, ObjectNode.class);
        } catch (IOException e) {
            DebugLogger.fatal("", e);
        }
        setPatient();
    }
    @Override
    public TransformType getTransformType() {
        return TransformType.DATA_SET_JSON;
    }
    /**
     * JSON format:
     * {
     * "inner_version":"xxxxx",
     * "patient_id":"xxxx",
     * "event_no":"xxxx",
     * "code":"dataset_code",
     * "data":
     * [{"metadata_code1":"5","metadata_code2":"6"},
     * {"metadata_code1":"1","metadata_code2":"2"}]}
     *
     * @param jsonArray
     * @param dataSetCode
     * @return
     */
    public boolean transferJson(ArrayNode jsonArray, String dataSetCode) {
        for (JsonNode objectRow : jsonArray) {
            if (objectRow instanceof ObjectNode) {
                transferJsonRow((ObjectNode) objectRow, dataSetCode);
            }
        }
        return true;
    }
    public void transferJsonRow(ObjectNode jsonObject, String dataSetCode) {
        Iterator<Map.Entry<String, JsonNode>> fields = jsonObject.fields();
        while (fields.hasNext()) {
            Map.Entry<String, JsonNode> next = fields.next();
            String key = next.getKey();
            JsonNode jsonNode = next.getValue();
            String value = jsonNode.asText();
            String stdValue = transferElem(dataSetCode, key, value);
            if (jsonNode.isValueNode()) {
                jsonObject.put(key, stdValue);
            }
        }
    }
    /**
     * @param dataSetCode data set code
     * @param code        metadata (data element) code
     * @param data        the raw value
     * @return String the standardized value
     * @modify 2015.09.16 airhead added conversion between dictionary values and codes
     */
    public String transferElem(String dataSetCode, String code, String data) {
        AdapterMetaData adapterMetaData = adapterScheme.getAdapterMetaData(dataSetCode, code);
        if (adapterMetaData == null) {
            BusinessLogger.fatal("获取数据元适配错误,数据集编码:" + dataSetCode + "数据元编码:" + code);
            return Constants.EMPTY;
        }
        AdapterDict adapterDict = adapterMetaData.getAdapterDict();
        StdMetaData metaData = adapterMetaData.getStdMetaData();
        if (!adapterDict.isValidAdapterDict()) {
            MetaDataVerify metaDataVerify = new MetaDataVerify(metaData, data);
            boolean check = metaDataVerify.check();
            if (!check) {
                BusinessLogger.fatal("保存:数据元校验错误." + metaDataVerify.getErrorInfo());
                return Constants.EMPTY;    // validation failed, clear the value
            }
            return data;
        }
        String stdData = Constants.EMPTY;
        DictDataType stdDictDataType = metaData.getDictDataType();
        DictDataType orgDictDataType = adapterMetaData.getAdapterDictDataType();
        if (stdDictDataType == DictDataType.VALUE) {
            if (orgDictDataType == DictDataType.VALUE) {
                stdData = adapterDict.getEhrDictEntryValueByCode(data);
            } else if (orgDictDataType == DictDataType.CODE) {
                String stdDictItemCode = adapterDict.getEhrDictEntryCodeByValue(data);
                stdData = adapterDict.getStdDict().toValue(stdDictItemCode);
            }
        } else if (stdDictDataType == DictDataType.CODE) {
            if (orgDictDataType == DictDataType.VALUE) {
                String stdDictItemValue = adapterDict.getEhrDictEntryValueByCode(data);
                stdData = adapterDict.getStdDict().toCode(stdDictItemValue);
            } else if (orgDictDataType == DictDataType.CODE) {
                stdData = adapterDict.getEhrDictEntryCodeByValue(data);
            }
        }
        return stdData;
    }
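    // Illustrative sketch (dataset, metadata and dictionary values are made up): if the
    // source system delivers gender as a local dictionary code while the standard
    // metadata expects the standard dictionary representation, a call such as
    //   transferElem("HDSC01_02", "GENDER", "1")
    // resolves the adapter dictionary mapping and returns the corresponding standard
    // value/code, or Constants.EMPTY when validation or the lookup fails.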
    /**
     * Build the Patient from the data set information
     */
    private void setPatient() {
        patient=new Patient();
        patient.setPatientId(jsonObject.get("patient_id").asText());
        patient.setEventNo(jsonObject.get("event_no").asText());
        patient.setOrgCode(jsonObject.get("org_code").asText());
    }
}

+ 61 - 0
Hos-resource/src/main/java/com/yihu/ehr/crawler/format/DocumentTransformer.java

@ -0,0 +1,61 @@
package com.yihu.ehr.crawler.format;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.yihu.common.util.log.DebugLogger;
import com.yihu.ehr.crawler.model.patient.Patient;
import com.yihu.ehr.crawler.model.transform.TransformType;
import java.io.IOException;
/**
 * Created by Administrator on 2015/10/15.
 */
public class DocumentTransformer implements IDataTransformer  {
    private ObjectNode jsonObject;
    protected AdapterScheme adapterScheme;
    protected Patient patient;
    public DocumentTransformer(AdapterScheme adapterScheme) {
        this.adapterScheme = adapterScheme;
    }
    @Override
    public Patient getPatient() {
        return patient;
    }
    /**
     * Unstructured documents need no transformation
     *
     * @return
     */
    @Override
    public boolean transfer() {
        return true;
    }
    public ObjectNode getJsonObject() {
        return jsonObject;
    }
    @Override
    public String getData() {
        return jsonObject.toString();
    }
    @Override
    public void setData(String data) {
        try {
            ObjectMapper objectMapper = new ObjectMapper();
            jsonObject = objectMapper.readValue(data, ObjectNode.class);
        } catch (IOException e) {
            DebugLogger.fatal("", e);
        }
    }
    @Override
    public TransformType getTransformType() {
        return TransformType.DOCUMENT;
    }
}

+ 23 - 0
Hos-resource/src/main/java/com/yihu/ehr/crawler/format/IDataTransformer.java

@ -0,0 +1,23 @@
package com.yihu.ehr.crawler.format;
import com.yihu.ehr.crawler.model.patient.Patient;
import com.yihu.ehr.crawler.model.transform.TransformType;
/**
 * @author Airhead
 * @version 1.0
 * @created 22-5月-2015 11:24:26
 */
public interface IDataTransformer {
    boolean transfer();
    String getData();
    void setData(String data);
    Patient getPatient();
    TransformType getTransformType();
}

+ 17 - 3
Hos-resource/src/main/java/com/yihu/ehr/crawler/model/adapter/AdapterDict.java

@ -1,10 +1,13 @@
package com.yihu.ehr.crawler.model.adapter;
import com.yihu.ehr.crawler.format.AdapterBase;
import com.yihu.ehr.crawler.service.standard.StdDict;
import com.yihu.ehr.framework.util.operator.CollectionUtil;
import com.yihu.ehr.framework.util.operator.StringUtil;
import com.yihu.ehr.framework.util.springutil.SpringBeanUtil;
import com.yihu.ehr.standard.model.adapter.AdapterDictEntryModel;
import com.yihu.ehr.standard.model.adapter.AdapterDictModel;
import com.yihu.ehr.standard.model.standard.StdDictionaryModel;
import com.yihu.ehr.standard.service.adapter.AdapterDictEntryService;
import com.yihu.ehr.standard.service.bo.AdapterVersion;
import net.sf.json.JSONObject;
@ -18,20 +21,26 @@ import java.util.Map;
 * @version 1.0
 * @created 2015.08.11 11:31
 */
public class AdapterDict {
public class AdapterDict extends AdapterBase {
    public static final String INVALID_ADAPTER_DICT = "";   //"Invalid Adapter";
    private AdapterDictModel adapterDictModel;
    private AdapterVersion adapterVersion;
    private AdapterDictModel adapterDictT;
    public AdapterDict(AdapterDictModel adapterDictModel, AdapterVersion adapterVersion) {
        this.adapterDictModel = adapterDictModel;
        this.adapterVersion = adapterVersion;
    }
    public AdapterDict(AdapterDictModel adapterDictT) {
        this.adapterDictT = adapterDictT;
    }
    public Boolean isValidAdapterDict() {
        return adapterDictModel != null;
        return adapterDictT != null;
    }
    public String getEhrDictEntryCodeByValue(String esbDictEntryValue) {
        Map<String,String> condition = new HashMap<String,String>();
        condition.put("stdDictId", StringUtil.toString(adapterDictModel.getStdDictId()));
@ -48,7 +57,7 @@ public class AdapterDict {
        return new String(INVALID_ADAPTER_DICT);
    }
    public String getEhrDictEntryCodeByCode(String esbDictEntryCode) {
    public String getEhrDictEntryValueByCode(String esbDictEntryCode) {
        Map<String,String> condition = new HashMap<String,String>();
        condition.put("stdDictId", StringUtil.toString(adapterDictModel.getStdDictId()));
        condition.put("stdEntryCode", esbDictEntryCode);
@ -63,4 +72,9 @@ public class AdapterDict {
        }
        return new String(INVALID_ADAPTER_DICT);
    }
    public StdDict getStdDict(){
        StdDictionaryModel stdDictT = getStdDictService().getAdapterDictByCode(adapterVersion.getVersion(), adapterDictT.getStdDictCode());
        return new StdDict(stdDictT);
    }
}

+ 22 - 0
Hos-resource/src/main/java/com/yihu/ehr/crawler/model/adapter/AdapterMetaData.java

@ -1,13 +1,19 @@
package com.yihu.ehr.crawler.model.adapter;
import com.yihu.ehr.crawler.model.transform.DictDataType;
import com.yihu.ehr.crawler.service.standard.StdMetaData;
import com.yihu.ehr.framework.constrant.ErrorCode;
import com.yihu.ehr.framework.util.operator.NumberUtil;
import com.yihu.ehr.framework.util.springutil.SpringBeanUtil;
import com.yihu.ehr.standard.model.adapter.AdapterDictModel;
import com.yihu.ehr.standard.model.adapter.AdapterMetadataModel;
import com.yihu.ehr.standard.model.standard.StdMetaDataModel;
import com.yihu.ehr.standard.service.adapter.AdapterDictService;
import com.yihu.ehr.standard.service.adapter.AdapterMetadataService;
import com.yihu.ehr.standard.service.bo.AdapterVersion;
import com.yihu.ehr.standard.service.standard.StdMetadataService;
import javax.annotation.Resource;
/**
 * @author Air
@ -19,6 +25,14 @@ public class AdapterMetaData {
    private AdapterMetadataModel adapterMetadataModel;
    private AdapterDict adapterDict;
    private AdapterVersion adapterVersion;
    private StdMetaData stdMetaData;
    @Resource(name=AdapterMetadataService.BEAN_ID)
    private AdapterMetadataService adapterMetadataService;
    @Resource(name=StdMetadataService.BEAN_ID)
    private StdMetadataService stdMetadataService;
    public AdapterMetaData(AdapterMetadataModel adapterMetadataModel, AdapterVersion adapterVersion) {
        this.adapterMetadataModel = adapterMetadataModel;
        this.adapterVersion = adapterVersion;
@ -42,6 +56,14 @@ public class AdapterMetaData {
        return adapterDict;
    }
    public StdMetaData getStdMetaData() {
        if (stdMetaData == null) {
            StdMetaDataModel stdMetaDataT = stdMetadataService.getStdMetadata(adapterVersion.getVersion(), adapterMetadataModel.getStdMetadataId());
            stdMetaData = new StdMetaData(stdMetaDataT);
        }
        return stdMetaData;
    }
    public DictDataType getAdapterDictDataType() {
        Integer orgDictDataType = adapterMetadataModel.getAdapterDataType();
        if (orgDictDataType == null){

+ 49 - 0
Hos-resource/src/main/java/com/yihu/ehr/crawler/model/config/SysConfig.java

@ -1,6 +1,8 @@
package com.yihu.ehr.crawler.model.config;
import com.yihu.ehr.crawler.model.patient.PatientIdentity;
import com.yihu.ehr.crawler.model.patient.PatientIndex;
import com.yihu.ehr.crawler.storage.IDataStorage;
import com.yihu.ehr.framework.util.log.LogService;
import com.yihu.ehr.framework.util.operator.StringUtil;
import org.dom4j.Document;
@ -11,6 +13,7 @@ import org.dom4j.io.SAXReader;
import java.io.InputStream;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class SysConfig {
    public static final String HOS_RESOURCES_CONFIG = "/config/sys.config.xml";
@ -21,9 +24,12 @@ public class SysConfig {
    public static String registerIdCardNo;
    private static volatile SysConfig instance = null;
    private HashMap<String, PatientIdentity> patientIdentityHashMap;
    private String storagePattern;
    private Map<String, PatientIndex> patientIndexMap;   // patient summary index configuration
    private SysConfig() {
        patientIdentityHashMap = new HashMap<>();
        patientIndexMap = new HashMap<>();
        init();
    }
@ -48,6 +54,14 @@ public class SysConfig {
    }
    public IDataStorage.StorageMode getStoragePattern() {
        if ("0".equals(storagePattern)) {
            return IDataStorage.StorageMode.DISTRIBUTE;
        }
        return IDataStorage.StorageMode.CENTRALIZATION;
    }
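    // Illustrative note (element name taken from init() below, values are examples):
    // <storage_pattern>0</storage_pattern> in sys.config.xml selects DISTRIBUTE; any
    // other value, or a missing element, falls back to CENTRALIZATION.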
    public String getTempFile() {
        return this.tempFile;
    }
@ -80,6 +94,10 @@ public class SysConfig {
        return registerIdCardNo;
    }
    public Map<String, PatientIndex> getPatientIndexMap() {
        return patientIndexMap;
    }
    private Document getDocument() throws DocumentException {
        SAXReader reader = new SAXReader();
        Document document = null;
@ -107,6 +125,11 @@ public class SysConfig {
            this.initCrawler(rootElement);
            this.initEventNo(rootElement);
            this.initPatientIndex(rootElement);
            String pattern = rootElement.elementTextTrim("storage_pattern");
            if (!StringUtil.isEmpty(pattern)) {
                this.storagePattern = pattern;
            }
        } catch (Exception e) {
            LogService.getLogger().error(e.getCause().toString());
        }
@ -150,6 +173,32 @@ public class SysConfig {
        }
    }
    private void initPatientIndex(Element rootElement) {
        List queueDataSets = rootElement.element("patient_index").elements("dataset");
        for (Object obj : queueDataSets) {
            if (obj instanceof Element) {
                PatientIndex patientIndex = new PatientIndex();
                Element element = (Element) obj;
                String dataSetCode = element.attributeValue("code");
                patientIndex.setEventNoCode(element.elementTextTrim("event_no"));
                patientIndex.setRefTimeCode(element.elementTextTrim("ref_time"));
                patientIndex.setOfficeCode(element.elementTextTrim("office_code"));
                patientIndex.setOfficeName(element.elementTextTrim("office_name"));
                if ("HDSC02_09".equals(dataSetCode)) {
                    patientIndex.setLeaveTime(element.elementTextTrim("leave_time"));
                }
                Element diagnostic = element.element("diagnostic");
                patientIndex.setDiagDataSet(diagnostic.attributeValue("dataset"));
                patientIndex.setDiagType(diagnostic.elementTextTrim("type"));
                patientIndex.setDiagCode(diagnostic.elementTextTrim("code"));
                patientIndex.setDiagName(diagnostic.elementTextTrim("name"));
                patientIndexMap.put(dataSetCode, patientIndex);
            }
        }
    }
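    // Illustrative sketch of the XML fragment this method expects (element and attribute
    // names are taken from the parsing code above; the dataset codes and field codes are
    // made up):
    //   <patient_index>
    //     <dataset code="HDSC02_09">
    //       <event_no>EVENT_NO</event_no>
    //       <ref_time>REF_TIME</ref_time>
    //       <office_code>OFFICE_CODE</office_code>
    //       <office_name>OFFICE_NAME</office_name>
    //       <leave_time>LEAVE_TIME</leave_time>
    //       <diagnostic dataset="HDSC02_10">
    //         <type>DIAG_TYPE</type>
    //         <code>DIAG_CODE</code>
    //         <name>DIAG_NAME</name>
    //       </diagnostic>
    //     </dataset>
    //   </patient_index>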
    public void setOrgCode(String orgCode) {
        this.orgCode = orgCode;
    }

+ 153 - 0
Hos-resource/src/main/java/com/yihu/ehr/crawler/model/patient/PatientCDAUpload.java

@ -0,0 +1,153 @@
package com.yihu.ehr.crawler.model.patient;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.yihu.common.constant.LogAttribute;
import com.yihu.common.util.log.BusinessLogger;
import com.yihu.common.util.log.DebugLogger;
import com.yihu.common.util.operator.ConfigureUtil;
import com.yihu.ehr.crawler.model.config.SysConfig;
import com.yihu.ehr.crawler.service.PatientCDAIndex;
import com.yihu.ehr.framework.util.compress.Zipper;
import com.yihu.ehr.framework.util.encrypt.MD5;
import com.yihu.ehr.framework.util.encrypt.RSA;
import com.yihu.ehr.framework.util.file.FileUtil;
import com.yihu.ehr.framework.util.http.HOPClient;
import com.yihu.ehr.framework.util.http.Response;
import com.yihu.ehr.framework.util.httpclient.HttpHelper;
import com.yihu.ehr.framework.util.httpclient.HttpResponse;
import org.apache.http.NameValuePair;
import org.apache.http.message.BasicNameValuePair;
import java.io.File;
import java.io.IOException;
import java.security.Key;
import java.util.*;
/**
 * Patient record (CDA) upload
 *
 * @author Air
 * @version 1.0
 * @created 2015.07.06 15:58
 */
public class PatientCDAUpload {
    public static String uploadMethod;
    /**
     * @param patient
     * @param token
     * @return
     * @modify 2015.09.15 airhead revised deletion of the temp directory
     * @modify 2015.09.19 airhead fixed the missing-document case and its error message
     */
    public boolean upload(Patient patient,String token) {
        ZipFile zipFile = zip(patient);
        try {
            if (zipFile == null || zipFile.file == null) {
                BusinessLogger.fatal( "压缩病人档案失败,病人文档未生成,patient_id:" + patient.getPatientId() + ",event_no:" + patient.getEventNo(), LogAttribute.FAIL, patient.getJobTimeStamp());
                return false;
            }
            boolean result = upload(patient, zipFile,token);
            if (!result) {
                BusinessLogger.fatal("上传病人档案失败,patient_id:" + patient.getPatientId() + ",event_no:" + patient.getEventNo());
                FileUtil.deleteDirectory(new File(zipFile.directory));
                return false;
            }
            DebugLogger.trace(zipFile.directory);
            result = FileUtil.deleteDirectory(new File(zipFile.directory));
            if (!result) {
                BusinessLogger.fatal("删除临时文件失败,patient_id:" + patient.getPatientId() + ",event_no:" + patient.getEventNo());
            }
        } catch (Exception e) {
            FileUtil.deleteDirectory(new File(zipFile.directory));
        }
        return true;
    }
    /**
     * @param patient
     * @return
     * @modify generate the zip data from the data directory
     */
    public ZipFile zip(Patient patient) {
        try {
            PatientCDAIndex patientCDAIndex = new PatientCDAIndex(patient);
            String dataDirectory = patientCDAIndex.getDataDirectory();
            String filePath = patientCDAIndex.createIndex(PatientCDAIndex.IndexType.ZIP, PatientCDAIndex.FileType.ZIP);
            UUID uuidPwd = UUID.randomUUID();
            String pwd = uuidPwd.toString();
            Key key = RSA.genPublicKey(SysConfig.getInstance().getPublicKey());
            if (key == null) {
                BusinessLogger.fatal("压缩文件错误,无公钥信息.");
                FileUtil.deleteDirectory(new File( patientCDAIndex.getDirectory()));
                return null;
            }
            ZipFile zipFile = new ZipFile();
            zipFile.encryptPwd = RSA.encrypt(pwd, key);
            Zipper zipper = new Zipper();
            zipFile.file = zipper.zipFile(new File(dataDirectory), filePath, pwd);
            zipFile.dataDirectory = dataDirectory;
            zipFile.directory = patientCDAIndex.getDirectory();
            return zipFile;
        } catch (Exception e) {
            e.printStackTrace();
            DebugLogger.fatal("从data目录生成zip数据时,压缩文件异常", e);
        }
        return null;
    }
    private boolean upload(Patient patient,  ZipFile zipFile, String token) {
        try {
            String uploadMethod = HttpHelper.defaultHttpUrl + "/packages";
            String fileMd5= MD5.getMd5ByFile(zipFile.file);
            List<NameValuePair> formParams = new ArrayList<>();
            formParams.add(new BasicNameValuePair("md5", fileMd5));
            formParams.add(new BasicNameValuePair("package_crypto", zipFile.encryptPwd));
            formParams.add(new BasicNameValuePair("org_code", SysConfig.getInstance().getOrgCode()));
            formParams.add(new BasicNameValuePair("token", token));
            Map<String, Object> header = new HashMap<>();
            header.put("Authorization", "Basic " + HttpHelper.clientKey);
            HttpResponse response = HttpHelper.postFile(uploadMethod, formParams, zipFile.file.getAbsolutePath(), header);
            if (response == null) {
                BusinessLogger.fatal( "上传病人档案请求失败,patient_id:" + patient.getPatientId() + ",event_no:" + patient.getEventNo(), LogAttribute.FAIL, patient.getJobTimeStamp());
                return false;
            }
            if (response.getStatusCode() != 200) {
                BusinessLogger.fatal( "上传病人档案请求失败,错误代码:" + response.getStatusCode() + ",patient_id:" + patient.getPatientId() + ",event_no:" + patient.getEventNo(), LogAttribute.FAIL, patient.getJobTimeStamp());
                return false;
            }
            ObjectMapper mapper = new ObjectMapper();
            JsonNode rootNode = mapper.readValue(response.getBody(), JsonNode.class);
            JsonNode codeNode = rootNode.get("code");
            String result = codeNode.asText();
            if (!result.equals("0")) {
                BusinessLogger.fatal("上传病人档案失败,错误代码:" + result + ",patient_id:" + patient.getPatientId() + ",event_no:" + patient.getEventNo(), LogAttribute.FAIL, patient.getJobTimeStamp());
                return false;
            } else {
                BusinessLogger.info( "上传病人档案成功,patient_id:" + patient.getPatientId() + ",event_no:" + patient.getEventNo(), LogAttribute.SUCCESS, patient.getJobTimeStamp());
                return true;
            }
        } catch (Exception e) {
            e.printStackTrace();
            BusinessLogger.fatal( "上传病人档案异常,patient_id:" + patient.getPatientId() + ",event_no:" + patient.getEventNo(), LogAttribute.FAIL, patient.getJobTimeStamp());
            return false;
        }
    }
    private class ZipFile {
        public File file;
        public String encryptPwd;
        public String directory;
        public String dataDirectory;
    }
}

+ 110 - 0
Hos-resource/src/main/java/com/yihu/ehr/crawler/model/patient/PatientIndex.java

@ -0,0 +1,110 @@
package com.yihu.ehr.crawler.model.patient;
/**
 * Patient summary index (field codes used to build the patient summary)
 *
 *
 * @author HXY
 * @version 1.0
 * @created 2016.03.01 13:50
 */
public class PatientIndex {
    public static final String PATIENT_ID = "PATIENT_ID";
    private String eventNoCode;
    private String refTimeCode;
//    private String organization;
    private String officeCode;
    private String officeName;
    private String leaveTime;
    private String diagDataSet;
    private String diagCode;
    private String diagName;
    private String diagType;
    public PatientIndex() {
    }
    public String getDiagType() {
        return diagType;
    }
    public void setDiagType(String diagType) {
        this.diagType = diagType;
    }
    public static String getPatientId() {
        return PATIENT_ID;
    }
    public String getEventNoCode() {
        return eventNoCode;
    }
    public void setEventNoCode(String eventNoCode) {
        this.eventNoCode = eventNoCode;
    }
    public String getRefTimeCode() {
        return refTimeCode;
    }
    public void setRefTimeCode(String refTimeCode) {
        this.refTimeCode = refTimeCode;
    }
//    public String getOrganization() {
//        return organization;
//    }
//
//    public void setOrganization(String organization) {
//        this.organization = organization;
//    }
    public String getOfficeCode() {
        return officeCode;
    }
    public void setOfficeCode(String officeCode) {
        this.officeCode = officeCode;
    }
    public String getOfficeName() {
        return officeName;
    }
    public void setOfficeName(String officeName) {
        this.officeName = officeName;
    }
    public String getLeaveTime() {
        return leaveTime;
    }
    public void setLeaveTime(String leaveTime) {
        this.leaveTime = leaveTime;
    }
    public String getDiagDataSet() {
        return diagDataSet;
    }
    public void setDiagDataSet(String diagDataSet) {
        this.diagDataSet = diagDataSet;
    }
    public String getDiagCode() {
        return diagCode;
    }
    public void setDiagCode(String diagCode) {
        this.diagCode = diagCode;
    }
    public String getDiagName() {
        return diagName;
    }
    public void setDiagName(String diagName) {
        this.diagName = diagName;
    }
}

+ 4 - 3
Hos-resource/src/main/java/com/yihu/ehr/crawler/model/transform/MetaDataVerify.java

@ -1,5 +1,6 @@
package com.yihu.ehr.crawler.model.transform;
import com.yihu.ehr.crawler.service.standard.StdMetaData;
import com.yihu.ehr.standard.model.standard.StdMetaDataModel;
/**
@ -9,12 +10,12 @@ import com.yihu.ehr.standard.model.standard.StdMetaDataModel;
 */
public class MetaDataVerify implements IVerifier {
    private StdMetaDataModel stdMetaDataModel;
    private StdMetaData stdMetaData;
    private String value;
    private String errorInfo;
    public MetaDataVerify(StdMetaDataModel stdMetaDataModel, String value) {
        this.stdMetaDataModel = stdMetaDataModel;
    public MetaDataVerify(StdMetaData stdMetaData, String value) {
        this.stdMetaData = stdMetaData;
        this.value = value;
    }

+ 235 - 0
Hos-resource/src/main/java/com/yihu/ehr/crawler/origin/FileSystemOrigin.java

@ -0,0 +1,235 @@
package com.yihu.ehr.crawler.origin;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.yihu.common.util.file.FtpFileUtil;
import com.yihu.common.util.log.BusinessLogger;
import com.yihu.common.util.log.DebugLogger;
import com.yihu.ehr.common.Services;
import com.yihu.ehr.crawler.format.AdapterScheme;
import com.yihu.ehr.crawler.model.adapter.AdapterDataSet;
import com.yihu.ehr.crawler.model.config.SysConfig;
import com.yihu.ehr.crawler.model.patient.Patient;
import com.yihu.ehr.crawler.model.transform.LogicValues;
import com.yihu.ehr.crawler.service.EsbHttp;
import com.yihu.ehr.framework.util.operator.DateUtil;
import com.yihu.ehr.framework.util.operator.StringUtil;
import com.yihu.ehr.framework.util.springutil.SpringBeanUtil;
import com.yihu.ehr.system.model.SystemDatasource;
import com.yihu.ehr.system.model.SystemOrganization;
import com.yihu.ehr.system.service.OrganizationManager;
import com.yihu.ehr.system.service.SystemManager;
import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
/**
 * File system (FTP) data origin.
 *
 * @author Airhead
 * @version 1.0
 * @created 22-5-2015 11:24:24
 */
public class FileSystemOrigin implements IDataOrigin {
    public static String dirHear = "/home/test/patient/";        // root directory of the patient data files
    public static String fileType = "/image/";                    // sub-folder of the collected file type
    protected AdapterScheme adapterScheme;
    public FileSystemOrigin(AdapterScheme adapterScheme) {
        this.adapterScheme=adapterScheme;
    }
    /**
     * Collect data over FTP.
     * For unstructured records the key_words format is tentatively "dataset-metadata"; when the upload file is
     * generated it is converted to "dataset.metadata" (mainly because MongoDB keys do not allow the "." character).
     *
     * @param patient         the patient
     * @param orgAgencyOrigin the data source
     * @param adapterDataSet  the adapter data set
     * @return
     */
    @Override
    public String fecthData(Patient patient, SystemDatasource orgAgencyOrigin, AdapterDataSet adapterDataSet) {
        try {
            String data = null;
            String innerVersion= EsbHttp.getRemoteVersion(SysConfig.getInstance().getOrgCode());
            List<String> datas = null;
            String agencyCode = patient.getOrgCode();
            String ftpConfig = orgAgencyOrigin.getConfig();
            FtpFileUtil ftp = genFtpUtil(ftpConfig);
            String filePath = "";//远程ftp文件路径
            ObjectNode jsonObject = null;
            boolean patientId = true;
            boolean eventNo = true;
            if (patient.getPatientId() != null && !"".equals(patient.getPatientId())) {
                if (patient.getEventNo() != null && !"".equals(patient.getEventNo())) {
                    // build the remote file path
                    filePath = dirHear + agencyCode + "/" + patient.getPatientId() + "/" + patient.getEventNo() +  fileType;
                } else {
                    eventNo = false;
                }
            } else {
                patientId = false;
            }
            if (!patientId || !eventNo) {
                throw new Exception("采集病人数据集必须要有病人ID,事件号,数据集名:" + adapterDataSet.getAdapterDataSetT().getStdDatasetCode());
            }
            datas = ftp.readFileData(filePath);
            if (datas != null && datas.size() > 0) {
                data = datas.get(0);
            }
            //TODO "data"内容实现,主要包括key_words和content,
            //json生成
            jsonObject.put("patient_id", patient.getPatientId());
            jsonObject.put("event_no", patient.getEventNo());
            jsonObject.put("org_code", agencyCode);
            jsonObject.put("inner_version", innerVersion);
            jsonObject.put("create_date", DateUtil.toString(new Date(), DateUtil.DEFAULT_YMDHMSDATE_FORMAT));
            jsonObject.put("event_time", patient.getReferenceTime());//DateUtil.toString(patient.getReferenceTime(), DateUtil.DEFAULT_YMDHMSDATE_FORMAT)
            if (StringUtil.isEmpty(patient.getReUploadFlg())) {
                jsonObject.put("reUploadFlg", LogicValues.LOGIC_FALSE);
            } else {
                jsonObject.put("reUploadFlg", patient.getReUploadFlg());
            }
            return jsonObject.toString();
        } catch (SQLException e) {
//            e.printStackTrace();
            DebugLogger.fatal("", e);
        } catch (Exception e) {
//            e.printStackTrace();
            DebugLogger.fatal("", e);
        }
        return null;
    }
    /**
     * Get the patient list.
     *
     * @param orgAgencyOrigin the data source
     * @param adapterDataSet  the adapter data set
     * @param condition       query conditions
     * @return the list of patients
     */
    @Override
    public List<Patient> getPatientList(SystemDatasource orgAgencyOrigin, AdapterDataSet adapterDataSet, Map<String, Object> condition) {
        ArrayList<Patient> patientList = new ArrayList<>();
        try {
            String ftpConfig = orgAgencyOrigin.getConfig();
            FtpFileUtil ftp = genFtpUtil(ftpConfig);
//			StdDataSet stdDataSet = adapterDataSet.getStdDataSet();
            OrganizationManager organizationManager= SpringBeanUtil.getService(Services.Organization);
            SystemOrganization orgAgency =organizationManager.getOrgById(orgAgencyOrigin.getOrgId());
            String agencyCode =orgAgency.getCode();
            List<Map<String, String>> patientMaps = ftp.getPatientList(dirHear, agencyCode);
            if (patientMaps != null && patientMaps.size() > 0) {
                for (Map<String, String> patientMap : patientMaps) {
                    Patient patient = new Patient();
                    String patientId = patientMap.get("patient_id");
                    String eventNo = patientMap.get("event_no");
                    if (orgAgency == null) {
                        BusinessLogger.fatal("获取病人列表错误,无法获取机构代码.");
                        continue;
                    }
                    patient.setPatientId(patientId);
                    patient.setEventNo(eventNo);
                    patient.setReferenceTime(DateUtil.toString(new Date(), DateUtil.DEFAULT_YMDHMSDATE_FORMAT));   // temporarily set to the current time
                    patient.setOrgCode(orgAgency.getCode());
                    patientList.add(patient);
                }
            }
        } catch (Exception e) {
//            e.printStackTrace();
            DebugLogger.fatal("", e);
        }
        return patientList;
    }
    /**
     * Clear the FTP data.
     *
     * @param patient
     * @param orgAgencyOrigin
     * @param adapterDataSet
     * @return
     */
    @Override
    public boolean clearData(Patient patient, SystemDatasource orgAgencyOrigin, AdapterDataSet adapterDataSet) {
        try {
            boolean clear = false;
            String agencyCode = patient.getOrgCode();
            String ftpConfig = orgAgencyOrigin.getConfig();
            FtpFileUtil ftp = genFtpUtil(ftpConfig);
            String filePath = "";//远程ftp文件路径
            boolean patientId = true;
            boolean eventNo = true;
            if (patient.getPatientId() != null && !"".equals(patient.getPatientId())) {
                if (patient.getEventNo() != null && !"".equals(patient.getEventNo())) {
                    // build the remote file path
                    filePath = dirHear + agencyCode + "/" + patient.getPatientId() + "/" + patient.getEventNo()  + fileType;
                } else {
                    eventNo = false;
                }
            } else {
                patientId = false;
            }
            if (!patientId || !eventNo) {
                throw new Exception("清除病人数据集必须要有病人ID,事件号,数据集名:" + adapterDataSet.getAdapterDataSetT().getStdDatasetCode());
            }
            ftp.connect();
            clear = ftp.removeData(filePath);
            ftp.closeConnect();
            return clear;
        } catch (SQLException e) {
            e.printStackTrace();
            DebugLogger.fatal("", e);
        } catch (Exception e) {
            e.printStackTrace();
            DebugLogger.fatal("", e);
        }
        return false;
    }
    @Override
    public Date getServerDateTime(SystemDatasource orgAgencyOrigin) {
        return null;
    }
    public void finalize() throws Throwable {
    }
    public FtpFileUtil genFtpUtil(String ftpConfig) {
        ObjectMapper mapper = new ObjectMapper();
        FtpFileUtil ftpUtil = null;
        JsonNode rootNode = null;
        try {
            rootNode = mapper.readValue(ftpConfig, JsonNode.class);
            String username = rootNode.path("username").asText();
            String password = rootNode.path("password").asText();
            String host = rootNode.path("host").asText();
            int port = rootNode.path("port").asInt();
            ftpUtil = new FtpFileUtil(username, password, host, port);
        } catch (IOException e) {
            DebugLogger.fatal("获取Ftp服务器配置失败", e);
            e.printStackTrace();
        }
        return ftpUtil;
    }
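    // Illustrative sketch of the ftpConfig JSON parsed above (field names taken from the
    // code; the values are made up):
    //   {"username": "ftpuser", "password": "secret", "host": "192.168.1.100", "port": 21}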
}//end FileSystemOrigin

+ 47 - 0
Hos-resource/src/main/java/com/yihu/ehr/crawler/origin/IDataOrigin.java

@ -0,0 +1,47 @@
package com.yihu.ehr.crawler.origin;
import com.yihu.ehr.crawler.model.adapter.AdapterDataSet;
import com.yihu.ehr.crawler.model.patient.Patient;
import com.yihu.ehr.system.model.SystemDatasource;
import java.util.Date;
import java.util.List;
import java.util.Map;
/**
 * @author Airhead
 * @version 1.0
 * @created 22-5-2015 11:24:26
 */
public interface IDataOrigin {
    String fecthData(Patient patient, SystemDatasource orgAgencyOrigin, AdapterDataSet adapterDataSet);
    List<Patient> getPatientList(SystemDatasource orgAgencyOrigin, AdapterDataSet adapterDataSet, Map<String, Object> condition);
    boolean clearData(Patient patient, SystemDatasource orgAgencyOrigin, AdapterDataSet adapterDataSet);
    Date getServerDateTime(SystemDatasource orgAgencyOrigin);
    enum OriginType {
        /**
         * 数据库
         */
        DB,
        /**
         * RESTful Web Service
         */
        REST,
        /**
         * FileSystem
         */
        FS,
        /**
         * SOAP Web Service
         */
        SOAP
    }
}

+ 1 - 1
Hos-resource/src/main/java/com/yihu/ehr/crawler/service/DataSetTransformer.java

@ -132,7 +132,7 @@ public class DataSetTransformer {
        if (adapterDictDataType == DictDataType.VALUE) {
            ehrData = adapterDict.getEhrDictEntryCodeByValue(esbData);
        } else if (adapterDictDataType == DictDataType.CODE) {
            ehrData = adapterDict.getEhrDictEntryCodeByCode(esbData);
            ehrData = adapterDict.getEhrDictEntryValueByCode(esbData);
        }
        return ehrData;
    }

+ 50 - 0
Hos-resource/src/main/java/com/yihu/ehr/crawler/service/adapter/AdapterDict.java

@ -0,0 +1,50 @@
package com.yihu.ehr.crawler.service.adapter;
import com.yihu.ehr.crawler.format.AdapterBase;
import com.yihu.ehr.crawler.service.standard.StdDict;
import com.yihu.ehr.standard.model.adapter.AdapterDictEntryModel;
import com.yihu.ehr.standard.model.adapter.AdapterDictModel;
import com.yihu.ehr.standard.model.standard.StdDictionaryModel;
/**
 * @author Air
 * @version 1.0
 * @created 2015.08.11 11:31
 */
public class AdapterDict extends AdapterBase {
    public static final String INVALID_ADAPTER_DICT = "";   //"Invalid Adapter";
    private AdapterDictModel adapterDictT;
    public AdapterDict(AdapterDictModel adapterDictT) {
        this.adapterDictT = adapterDictT;
    }
    public Boolean isValidAdapterDict() {
        return adapterDictT != null;
    }
    /**
     * @param version     the adapter version
     * @param orgItemCode the dictionary item code of the source system
     * @return the standard dictionary entry code, or an empty string when no mapping exists
     */
    public String getStdDictItemCode(String version,String orgItemCode) {
        AdapterDictEntryModel adapterDictItemT = getAdapterDictEntryService().getAdapterDictItemBydictCode(version,adapterDictT, orgItemCode);
        if (adapterDictItemT == null) {
            return new String(INVALID_ADAPTER_DICT);
        }
        return adapterDictItemT.getStdEntryCode();
    }
    public String getStdDictItemValue(String version,String orgItemValue) {
        AdapterDictEntryModel adapterDictItemT = getAdapterDictEntryService().getAdapterDictItemBydictValue(version, adapterDictT, orgItemValue);
        if (adapterDictItemT == null) {
            return new String(INVALID_ADAPTER_DICT);
        }
        return adapterDictItemT.getStdEntryValue();
    }
    public StdDict getStdDict(){
        StdDictionaryModel stdDictT = getStdDictService().getAdapterDictByCode(adapterVersion.getVersion(), adapterDictT.getStdDictCode());
        return new StdDict(stdDictT);
    }
}

+ 41 - 0
Hos-resource/src/main/java/com/yihu/ehr/crawler/service/standard/StdDict.java

@ -0,0 +1,41 @@
package com.yihu.ehr.crawler.service.standard;
import com.yihu.ehr.crawler.format.AdapterBase;
import com.yihu.ehr.standard.model.standard.StdDictionaryEntryModel;
import com.yihu.ehr.standard.model.standard.StdDictionaryModel;
/**
 * @author Air
 * @version 1.0
 * @created 2015.08.11 15:25
 */
public class StdDict extends AdapterBase {
    private StdDictionaryModel stdDictT;
    public StdDict(StdDictionaryModel stdDictT) {
        this.stdDictT = stdDictT;
    }
    public StdDictionaryModel getStdDictT() {
        return stdDictT;
    }
    public String toValue(String code) {
        StdDictionaryEntryModel entryModel = getStdDictEntryService().getStEntryValueByCode(adapterVersion.getVersion(), stdDictT.getId(), code);
        if (entryModel == null) {
            return null;
        }
        return entryModel.getValue();
    }
    public String toCode(String value) {
        // NOTE: the entry service only exposes a lookup by entry code, so the given value is
        // passed where a code is expected; a lookup-by-value query is still needed for a true
        // value-to-code conversion.
        StdDictionaryEntryModel entryModel = getStdDictEntryService().getStEntryValueByCode(adapterVersion.getVersion(), stdDictT.getId(), value);
        if (entryModel == null) {
            return null;
        }
        return entryModel.getCode();
    }
}

+ 38 - 0
Hos-resource/src/main/java/com/yihu/ehr/crawler/service/standard/StdMetaData.java

@ -0,0 +1,38 @@
package com.yihu.ehr.crawler.service.standard;
import com.yihu.ehr.crawler.format.AdapterBase;
import com.yihu.ehr.crawler.model.transform.DictDataType;
import com.yihu.ehr.framework.util.operator.StringUtil;
import com.yihu.ehr.standard.model.standard.StdMetaDataModel;
import static com.yihu.ehr.crawler.model.transform.MetaDataType.S2;
import static com.yihu.ehr.crawler.model.transform.MetaDataType.S3;
/**
 * @author Air
 * @version 1.0
 * @created 2015.08.11 15:25
 */
public class StdMetaData extends AdapterBase {
    private StdMetaDataModel stdMetaDataT;
    public StdMetaData(StdMetaDataModel stdMetaDataT) {
        this.stdMetaDataT = stdMetaDataT;
    }
    public StdMetaDataModel getStdMetaDataT() {
        return stdMetaDataT;
    }
    public DictDataType getDictDataType() {
        Integer dict = stdMetaDataT.getDictId();
        String type = stdMetaDataT.getType();
        // dictionary-bound metadata of type S2/S3 carries dictionary codes; everything else carries values
        if (dict != null && (S2.name().equals(type) || S3.name().equals(type))) {
            return DictDataType.CODE;
        }
        return DictDataType.VALUE;
    }
}

+ 37 - 0
Hos-resource/src/main/java/com/yihu/ehr/crawler/storage/DataSetStorage.java

@ -0,0 +1,37 @@
package com.yihu.ehr.crawler.storage;
import com.yihu.ehr.crawler.format.IDataTransformer;
import com.yihu.ehr.crawler.model.patient.Patient;
import com.yihu.ehr.crawler.format.AdapterScheme;
/**
 * Created by Administrator on 2015/10/15.
 */
public class DataSetStorage extends MongodbStorage {
    public static final String KEY_CODE = "code";
    public DataSetStorage(AdapterScheme adapterScheme, String dbName) {
        super(adapterScheme, dbName);
    }
    @Override
    public boolean save(IDataTransformer dataTransformer) {
        return true;
    }
    /**
     * @param patient
     * @return
     * @modify generate the archive into the data directory
     */
    @Override
    public boolean toFile(Patient patient) {
        return true;
    }
    @Override
    public String getKey(){
        return KEY_CODE;
    }
}

+ 242 - 0
Hos-resource/src/main/java/com/yihu/ehr/crawler/storage/DocumentStorage.java

@ -0,0 +1,242 @@
package com.yihu.ehr.crawler.storage;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.mongodb.client.FindIterable;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoCursor;
import com.mongodb.client.model.UpdateOptions;
import com.yihu.common.util.log.DebugLogger;
import com.yihu.ehr.common.mongo.MongoDB;
import com.yihu.ehr.crawler.format.IDataTransformer;
import com.yihu.ehr.crawler.model.config.SysConfig;
import com.yihu.ehr.crawler.model.patient.Patient;
import com.yihu.ehr.crawler.format.AdapterScheme;
import com.yihu.ehr.crawler.format.DocumentTransformer;
import com.yihu.ehr.crawler.service.EsbHttp;
import com.yihu.ehr.crawler.service.PatientCDAIndex;
import com.yihu.ehr.framework.util.encode.Base64;
import com.yihu.ehr.framework.util.file.FileUtil;
import com.yihu.ehr.framework.util.operator.DateUtil;
import org.bson.Document;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Iterator;
import static com.mongodb.client.model.Filters.and;
import static com.mongodb.client.model.Filters.eq;
import static com.mongodb.client.model.Projections.excludeId;
/**
 * Created by Administrator on 2015/10/15.
 */
public class DocumentStorage extends MongodbStorage {
    public static final String KEY_CODE = "catalog";
    public DocumentStorage(AdapterScheme adapterScheme, String dbName) {
        super(adapterScheme, dbName);
    }
    @Override
    public boolean save(IDataTransformer dataTransformer) {
        DocumentTransformer documentTransformer = (DocumentTransformer) dataTransformer;
        ObjectNode jsonObject = documentTransformer.getJsonObject();
        StorageMode storagePattern = SysConfig.getInstance().getStoragePattern();   // storage mode
        if (jsonObject != null) {
            String dataSetCode = jsonObject.get(getKey()).asText();
            String patientId = jsonObject.get(PATIENT_ID).asText();
            String eventNo = jsonObject.get(EVENT_NO).asText();
            try {
                MongoCollection<Document> collection = MongoDB.db(dbName).getCollection(dataSetCode);
                createIndex(collection);    // create indexes
                Document filter = new Document();
                filter.append(PATIENT_ID, patientId);
                filter.append(EVENT_NO, eventNo);
                collection.deleteMany(filter);
                UpdateOptions updateOptions = new UpdateOptions();
                updateOptions.upsert(true);
                collection.replaceOne(filter, Document.parse(jsonObject.toString()), updateOptions);
                String url = createUrl(dataSetCode, patientId, eventNo);
                if (storagePattern == StorageMode.DISTRIBUTE) {
                    Document updateDoc = new Document();
                    updateDoc.put("resource.url", url);
                    updateDoc.put("resource.expiry_date", null);
                    collection.updateOne(filter, new Document("$set", updateDoc));
                } else {
                    Date expiryDate = DateUtil.setDateTime(new Date(), getExpireDays().intValue());
                    SimpleDateFormat sdf = new SimpleDateFormat(DateUtil.DEFAULT_YMDHMSDATE_FORMAT);
                    String date = sdf.format(expiryDate);
                    Document updateDoc = new Document(CREATE_AT, new Date());
                    updateDoc.put("resource.url", url);
                    updateDoc.put("resource.expiry_date", date);
                    collection.updateMany(filter, new Document("$set", updateDoc));
                }
            } catch (Exception e) {
                DebugLogger.fatal("保存病人档案信息至MongoDB异常:", e);
                return false;
            }
            return true;
        }
        return false;
    }
    /**
     * @param patient
     * @return
     * @modify generate the archive into the data directory
     */
    @Override
    public boolean toFile(Patient patient) {
        boolean result = true;
        PatientCDAIndex patientCDAIndex = new PatientCDAIndex(patient);
        ObjectMapper mapper = new ObjectMapper();
        ArrayNode arrayNode=mapper.createArrayNode();
        ObjectNode resultNode=mapper.createObjectNode();
        try {
            for (String name : MongoDB.db(dbName).listCollectionNames()) {
                MongoCollection<Document> collection = MongoDB.db(dbName).getCollection(name);
                FindIterable<Document> documents = collection.find(and(eq("patient_id", patient.getPatientId()), eq(EVENT_NO, patient.getEventNo()))).projection(excludeId());
                try (MongoCursor<Document> cursor = documents.iterator()) {
                    while (cursor.hasNext()) {
                        try {
                            String document = cursor.next().toJson();
                            ObjectNode rootNode = mapper.readValue(document, ObjectNode.class);
                            JsonNode jsonNode = rootNode.get("data");
                            boolean array = jsonNode.isArray();
                            if (!array) {
                                continue;
                            }
                            arrayNode.addAll(genunStructureData(jsonNode, patientCDAIndex));   // accumulate results across collections
                        } catch (IOException e) {
                            e.printStackTrace();
                            DebugLogger.fatal("存储临时文件失败.");
                            result = false;
                        }
                    }
                } catch (Exception e) {
                    DebugLogger.fatal("", e);
                    result = false;
                }
            }
            String innerVersion =  EsbHttp.getRemoteVersion(patient.getOrgCode());
            for (int i = 0; i != arrayNode.size(); ++i) {
                JsonNode keyWordsNode = arrayNode.get(i).path("key_words");
                ObjectNode newNode=mapper.createObjectNode();
                JsonNode jsonNode= transformKeyWords(keyWordsNode, newNode);
                ((ObjectNode) arrayNode.get(i)).set("key_words", jsonNode);
            }
            resultNode.set("data", arrayNode);
            resultNode.put("patient_id", patient.getPatientId());
            resultNode.put("event_no",patient.getEventNo());
            resultNode.put("org_code",patient.getOrgCode());
            resultNode.put("event_time",patient.getReferenceTime());
            resultNode.put("inner_version",innerVersion);
            String indexPath = patientCDAIndex.getDataDirectory()+"/"+"meta.json";
            boolean writeFile = FileUtil.writeFile(indexPath, mapper.writeValueAsString(resultNode), "UTF-8");
        } catch (Exception e) {
            DebugLogger.fatal("", e);
            result = false;
        }
        return result;
    }
    @Override
    public String getDataSet(Patient patient, String dataSetCode) {
        return null;
    }
    @Override
    public String getKey(){
        return KEY_CODE;
    }
    /**
     * Generate the unstructured meta.json data.
     * @param jsonNode
     * @param patientCDAIndex
     * @return
     * @throws IOException
     */
    public ArrayNode genunStructureData(JsonNode jsonNode,PatientCDAIndex patientCDAIndex) throws IOException {
        ObjectMapper mapper=new ObjectMapper();
        ArrayNode arrayNode=mapper.createArrayNode();
        for (int i = 0; i != jsonNode.size(); ++i) {
            JsonNode documentNode = jsonNode.get(i);
            JsonNode contentNode=documentNode.path("content");
            if (contentNode.isArray()){
                for (int j = 0; j< contentNode.size(); j++) {
                    JsonNode fileArr = contentNode.get(j);
//                    String mimeType = fileArr.path("mime_type").asText();   // mime type
                    String names = fileArr.path("name").asText();
                    int dotIndex = names.lastIndexOf(".");
                    String fileType = dotIndex >= 0 ? names.substring(dotIndex) : "";   // file extension (empty when the name has none)
                    JsonNode file = fileArr.path("file_content");   // file content
                    Iterator<String> fileNames = file.fieldNames();
                    StringBuilder stringBuilder=new StringBuilder();
                    while (fileNames.hasNext()){
                        String key=fileNames.next();
                        String content =file.path(key).asText();
                        String filePath = patientCDAIndex.createDataIndex(dbName, fileType);
                        String fileName = filePath.substring(filePath.lastIndexOf("/")+1);
                        byte[]  fileContent = Base64.decode(content);
                        boolean writeFile = FileUtil.writeFile(filePath, fileContent, "UTF-8");
                        if (!writeFile) {
                            DebugLogger.fatal("存储临时文件失败.");
                        } else {
                            stringBuilder.append(fileName).append(",");
                        }
                    }
                    if (file.isObject()) {
                        ((ObjectNode) fileArr).put("name", stringBuilder.toString());
                        ((ObjectNode) fileArr).remove("file_content");
                    }
                }
            }
            arrayNode.add(documentNode);
        }
        return arrayNode;
    }
    /**
     * Replace every "-" in the keys of key_words with "."
     * @param keyWordsNode
     * @param newObjectNode
     * @return
     */
    public ObjectNode transformKeyWords(JsonNode keyWordsNode, ObjectNode newObjectNode){
        Iterator<String> iterator = keyWordsNode.fieldNames();
        while (iterator.hasNext()){
            String key=iterator.next();
            String value =keyWordsNode.path(key).asText();
            String newKey=key.replaceAll("-",".");
            newObjectNode.put(newKey,value);
        }
        return newObjectNode;
    }
}
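A standalone illustration of the key_words transformation above; the field name is made up, and the adapter scheme is not needed for this method, so null is passed.

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.yihu.ehr.crawler.storage.DocumentStorage;

public class KeyWordsTransformSketch {
    public static void main(String[] args) {
        ObjectMapper mapper = new ObjectMapper();
        ObjectNode keyWords = mapper.createObjectNode();
        keyWords.put("HDSD00-01-457", "demo");                        // hypothetical metadata key containing "-"
        DocumentStorage storage = new DocumentStorage(null, "unstructured");
        ObjectNode transformed = storage.transformKeyWords(keyWords, mapper.createObjectNode());
        System.out.println(transformed);                              // {"HDSD00.01.457":"demo"}
    }
}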

+ 41 - 0
Hos-resource/src/main/java/com/yihu/ehr/crawler/storage/IDataStorage.java

@ -0,0 +1,41 @@
package com.yihu.ehr.crawler.storage;
import com.yihu.ehr.crawler.format.IDataTransformer;
import com.yihu.ehr.crawler.model.patient.Patient;
import java.util.Map;
/**
 * Storage interface; it should only be concerned with saving and retrieving data.
 * The current definition has the following issues and needs revision:
 * 1. no retrieval interface
 * 2. toFile goes beyond this interface's responsibility
 *
 * @author Airhead
 * @version 1.0
 * @created 22-5-2015 11:24:26
 */
public interface IDataStorage {
    boolean save(IDataTransformer dataTransformer);
    boolean toFile(Patient patient);
    String getDataSet(Patient patient, String dataSetCode);
    String getArchive(String dataSetCode, Map<String, Object> params);
    Boolean isStored(String orgCode, String patientID, String eventNo);
    enum StorageType {
        MYSQL_DB,
        MONGODB,
        FILE_SYSTEM
    }
    enum StorageMode {
        DISTRIBUTE,
        CENTRALIZATION
    }
}
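Purely as an illustration of how the two Mongo-backed implementations added in this commit fit behind this interface (no such helper exists in the commit itself):

import com.yihu.ehr.crawler.format.AdapterScheme;
import com.yihu.ehr.crawler.storage.DataSetStorage;
import com.yihu.ehr.crawler.storage.DocumentStorage;
import com.yihu.ehr.crawler.storage.IDataStorage;

public class StorageSelectionSketch {
    /** Illustrative only: choose a storage for the two kinds of archive data handled here. */
    public static IDataStorage forData(boolean structured, AdapterScheme scheme, String dbName) {
        return structured
                ? new DataSetStorage(scheme, dbName)      // structured: data sets
                : new DocumentStorage(scheme, dbName);    // unstructured: documents (pictures, PDF, ...)
    }
}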

+ 408 - 0
Hos-resource/src/main/java/com/yihu/ehr/crawler/storage/MongodbStorage.java

@ -0,0 +1,408 @@
package com.yihu.ehr.crawler.storage;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.mongodb.BasicDBObject;
import com.mongodb.client.FindIterable;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoCursor;
import com.mongodb.client.model.IndexOptions;
import com.mongodb.client.model.UpdateOptions;
import com.yihu.common.util.log.DebugLogger;
import com.yihu.common.util.operator.ConfigureUtil;
import com.yihu.ehr.common.mongo.MongoDB;
import com.yihu.ehr.crawler.format.DataSetTransformer;
import com.yihu.ehr.crawler.format.IDataTransformer;
import com.yihu.ehr.crawler.model.config.SysConfig;
import com.yihu.ehr.crawler.model.patient.Patient;
import com.yihu.ehr.crawler.model.patient.PatientIdentity;
import com.yihu.ehr.crawler.model.patient.PatientIndex;
import com.yihu.ehr.crawler.format.AdapterScheme;
import com.yihu.ehr.crawler.service.PatientCDAIndex;
import com.yihu.ehr.framework.util.file.FileUtil;
import com.yihu.ehr.framework.util.operator.DateUtil;
import com.yihu.ehr.framework.util.operator.NumberUtil;
import org.bson.Document;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.concurrent.TimeUnit;
import static com.mongodb.client.model.Filters.and;
import static com.mongodb.client.model.Filters.eq;
import static com.mongodb.client.model.Projections.excludeId;
/**
 * Archive data is stored in MongoDB only.
 * At this stage there are only two kinds of data:
 * 1. structured: data sets
 * 2. unstructured: documents (pictures, Word, PDF, video, etc.)
 *
 * @author Air
 * @version 1.0
 * @created 2015.07.06 10:38
 */
public class MongodbStorage implements IDataStorage {
    public static final String KEY = "code";
    public static final String PATIENT_ID = "patient_id";
    public static final String EVENT_NO = "event_no";
    public static final String CREATE_AT = "create_at";
    public static final String CREATE_TIME = "create_time";
    public static final String ORG_CODE = "org_code";
    public static final String TTL_INDEX = "ceate_at_1";   // TTL index name (expiry index)
    public static final String TTL_INDEX_EXPIRED = "create_time_1"; // old TTL index name, deprecated; kept only so the old index can be dropped
    public static final String INNER_VERSION = "inner_version";
    public static final String EVENT_TIME = "event_time";
    protected String dbName;
    protected AdapterScheme adapterScheme;
    public MongodbStorage(AdapterScheme adapterScheme, String dbName) {
        this.adapterScheme = adapterScheme;
        this.dbName = dbName;
    }
    @Override
    public boolean save(IDataTransformer dataTransformer) {
        DataSetTransformer dataSetTransformer = (DataSetTransformer) dataTransformer;
        ObjectNode jsonObject = dataSetTransformer.getJsonObject();
        StorageMode storagePattern = SysConfig.getInstance().getStoragePattern();   // storage mode
        if (jsonObject != null) {
            String dataSetCode = jsonObject.get(getKey()).asText();
            String patientId = jsonObject.get(PATIENT_ID).asText();
            String eventNo = jsonObject.get(EVENT_NO).asText();
            try {
                MongoCollection<Document> collection = MongoDB.db(dbName).getCollection(dataSetCode);
                createIndex(collection);    // create indexes
                Document filter = new Document();
                filter.append(PATIENT_ID, patientId);
                filter.append(EVENT_NO, eventNo);
                collection.deleteMany(filter);
                UpdateOptions updateOptions = new UpdateOptions();
                updateOptions.upsert(true);
                collection.replaceOne(filter, Document.parse(jsonObject.toString()), updateOptions);
                String url = createUrl(dataSetCode, patientId, eventNo);
                if (storagePattern == StorageMode.DISTRIBUTE) {
                    Document updateDoc = new Document();
                    updateDoc.put("resource.url", url);
                    updateDoc.put("resource.expiry_date", null);
                    collection.updateOne(filter, new Document("$set", updateDoc));
                } else {
                    Date expiryDate = DateUtil.setDateTime(new Date(), getExpireDays().intValue());
                    SimpleDateFormat sdf = new SimpleDateFormat(DateUtil.DEFAULT_YMDHMSDATE_FORMAT);
                    String date = sdf.format(expiryDate);
                    Document updateDoc = new Document(CREATE_AT, new Date());
                    updateDoc.put("resource.url", url);
                    updateDoc.put("resource.expiry_date", date);
                    collection.updateMany(filter, new Document("$set", updateDoc));
                }
            } catch (Exception e) {
                DebugLogger.fatal("保存病人档案信息至MongoDB异常:", e);
                return false;
            }
            return true;
        }
        return false;
    }
    /**
     * @param patient
     * @return
     * @modify generate the archive into the data directory
     */
    @Override
    public boolean toFile(Patient patient) {
        boolean result = true;
        PatientCDAIndex patientCDAIndex = new PatientCDAIndex(patient);
        StorageMode storagePattern = SysConfig.getInstance().getStoragePattern();
        Document datasetDoc = new Document();
        Document resultDoc = new Document();
        try {
            // generate files; in lightweight mode the data section must be cleared
            for (String name : MongoDB.db(dbName).listCollectionNames()) {
                MongoCollection<Document> collection = MongoDB.db(dbName).getCollection(name);
                FindIterable<Document> documents = collection.find(and(eq(PATIENT_ID, patient.getPatientId()), eq(EVENT_NO, patient.getEventNo()))).projection(excludeId());
                try (MongoCursor<Document> cursor = documents.iterator()) {
                    while (cursor.hasNext()) {
                        String filePath = patientCDAIndex.createDataIndex(dbName, PatientCDAIndex.FileType.JSON);
                        try {
                            Document doc = cursor.next();
                            if ("HDSC01_02".equals(name) || "HDSC02_09".equals(name)) {
                                resultDoc.put(PATIENT_ID, doc.get(PATIENT_ID));
                                resultDoc.put(EVENT_NO, doc.get(EVENT_NO));
                                resultDoc.put(ORG_CODE, doc.get(ORG_CODE));
                                resultDoc.put(INNER_VERSION, doc.get(INNER_VERSION));
                                resultDoc.put(EVENT_TIME, doc.get(EVENT_TIME));
                                if ("HDSC01_02".equals(name)) {
                                    resultDoc.put("visit_type", "1");
                                } else {
                                    resultDoc.put("visit_type", "2");//临时约定,后续从字典中获取
                                }
                            }
                            Map<String, String> resource = (Map<String, String>) doc.get("resource");
                            if (storagePattern == StorageMode.DISTRIBUTE) {
                                // distributed mode: generate the data-set retrieval URL
                                datasetDoc.put(name, resource.get("url"));
                                resultDoc.put("expiry_date", "");
                            } else {
                                datasetDoc.put(name, "");
                                resultDoc.put("expiry_date", resource.get("expiry_date"));
                                boolean writeFile = FileUtil.writeFile(filePath, doc.toJson(), "UTF-8");
                                if (!writeFile) {
                                    DebugLogger.fatal("存储临时文件失败:" + cursor.next().toJson());
                                    result = false;
                                }
                            }
                        } catch (IOException e) {
                            DebugLogger.fatal("存储临时文件失败.", e);
                            result = false;
                        }
                    }
                } catch (Exception e) {
                    DebugLogger.fatal("", e);
                }
            }
            // generate the summary information
            Document indexData = genPatientIndexData(patient);
            if (indexData != null) {
                resultDoc.put("dataset", datasetDoc);
                resultDoc.put("sumary", indexData);
                String indexPath = patientCDAIndex.createDataSetIndex("index", PatientCDAIndex.FileType.JSON);
                boolean writeFile = FileUtil.writeFile(indexPath, resultDoc.toJson(), "UTF-8");
                if (!writeFile) {
                    DebugLogger.fatal("存储索引临时文件失败:" + resultDoc.toJson());
                    result = false;
                }
            }
        } catch (Exception e) {
            DebugLogger.fatal("", e);
            result = false;
        }
        return result;
    }
    @Override
    public String getDataSet(Patient patient, String dataSetCode) {
        try {
            MongoCollection<Document> collection = MongoDB.db(dbName).getCollection(dataSetCode);
            FindIterable<Document> documents = collection.find(and(eq(PATIENT_ID, patient.getPatientId()), eq(EVENT_NO, patient.getEventNo()), eq(ORG_CODE, patient.getOrgCode()))).projection(excludeId());
            Document document = documents.first();
            if (document != null) {
                return document.toJson();
            }
        } catch (Exception e) {
            DebugLogger.fatal("", e);
        }
        return null;
    }
    /**
     * Retrieve data-set records matching the given conditions.
     *
     * @param dataSetCode data set code
     * @param params      map of query parameters
     * @return
     */
    @Override
    public String getArchive(String dataSetCode, Map<String, Object> params) {
        String data = null;
        boolean result = true;
        try {
            MongoCollection<Document> collection = MongoDB.db(dbName).getCollection(dataSetCode);
            BasicDBObject basicDBObject = new BasicDBObject();
            for (Map.Entry<String, Object> entry : params.entrySet()) {
                basicDBObject.put(entry.getKey(), entry.getValue());
            }
            FindIterable<Document> documents = collection.find(basicDBObject);
            try (MongoCursor<Document> cursor = documents.iterator()) {
                while (cursor.hasNext()) {
                    data = cursor.next().toJson();
                    DebugLogger.fatal("存储临时文件:" + data);
                }
            } catch (Exception e) {
                DebugLogger.fatal("", e);
            }
        } catch (Exception e) {
            DebugLogger.fatal("", e);
        }
        return data;
    }
    @Override
    public Boolean isStored(String orgCode, String patientID, String eventNo) {
        HashMap<String, PatientIdentity> patientIdentityHashMap = SysConfig.getInstance().getPatientIdentityHashMap();
        Set<Map.Entry<String, PatientIdentity>> entries = patientIdentityHashMap.entrySet();
        Iterator<Map.Entry<String, PatientIdentity>> iterator = entries.iterator();
        try {
            while (iterator.hasNext()) {
                Map.Entry<String, PatientIdentity> next = iterator.next();
                String datasetCode = next.getKey();
                MongoCollection<Document> collection = MongoDB.db(dbName).getCollection(datasetCode);
                Document document = new Document();
                document.append(ORG_CODE, orgCode);
                document.append(PATIENT_ID, patientID);
                document.append(EVENT_NO, eventNo);
                Document findDoc = collection.find(document).first();
                if (findDoc != null) {
                    return true;
                }
            }
        } catch (Exception e) {
            DebugLogger.fatal("", e);
        }
        return false;
    }
    protected void createIndex(MongoCollection<Document> collection) {
        for (final Document index : collection.listIndexes()) {
            if (index.get("name").equals(TTL_INDEX_EXPIRED)) {
                collection.dropIndex(TTL_INDEX_EXPIRED);  // drop the old TTL index
            } else if (index.get("name").equals(TTL_INDEX)) {
                return;
            }
        }
        Document createTimeIndex = new Document(CREATE_AT, 1);
        IndexOptions indexOptions = new IndexOptions();
        indexOptions.expireAfter(getExpireDays(), TimeUnit.DAYS);
        indexOptions.name(TTL_INDEX);
        collection.createIndex(createTimeIndex, indexOptions);
        Document patientIndex = new Document();
        patientIndex.append(PATIENT_ID, 1);
        patientIndex.append(EVENT_NO, 1);
        collection.createIndex(patientIndex);
    }
    /**
     * Build the archive retrieval URL.
     *
     * @param dataSetCode data set code
     * @param patientId   patient ID
     * @param eventNo     event number
     * @return
     */
    protected String createUrl(String dataSetCode, String patientId, String eventNo) {
        String requestPath = ConfigureUtil.getProValue("archive.properties","hos.archives.request.url");
        return requestPath + dataSetCode + "/" + patientId + "/" + eventNo;
    }
    protected String getKey() {
        return KEY;
    }
    protected Long getExpireDays() {
        final Long expireDay = 30L;
        String value = ConfigureUtil.getProValue("archive.properties","hos.archives.expiry.days");
        Long days = NumberUtil.toLong(value);
        return days == null ? expireDay : days;
    }
    /**
     * Generate the patient summary information.
     * The required summary fields are read from the configuration in sys.config.
     *
     * @param patient
     * @return
     */
    protected Document genPatientIndexData(Patient patient) {
        Map<String, PatientIndex> patientIndexMap = SysConfig.getInstance().getPatientIndexMap();
        PatientIndex patientIndex = null;
        List<Document> arrayNode = null;
        Document objectNode = null;
        Document result = new Document();
        MongoCursor<Document> cursor = null;
        MongoCursor<Document> diagCursor = null;
        try {
            for (Map.Entry<String, PatientIndex> entry : patientIndexMap.entrySet()) {
                String dataSetCode = entry.getKey();
                patientIndex = entry.getValue();
                arrayNode = new ArrayList<>();
                MongoCollection<Document> collection = MongoDB.db(dbName).getCollection(dataSetCode);
                FindIterable<Document> documents = collection.find(and(eq(KEY, dataSetCode), eq(PATIENT_ID, patient.getPatientId()), eq(EVENT_NO, patient.getEventNo()))).projection(excludeId());
                cursor = documents.iterator();
                if (cursor.hasNext()) {
                    while (cursor.hasNext()) {
                        Document document = cursor.next();
                        List<Document> list = document.get("data", List.class);
                        for (Document doc : list) {
                            objectNode = new Document();
                            objectNode.put(patientIndex.getPatientId(), patient.getPatientId());
                            objectNode.put(patientIndex.getEventNoCode(), patient.getEventNo());
                            objectNode.put(patientIndex.getRefTimeCode(), doc.get(patientIndex.getRefTimeCode()) == null ? null : (String) doc.get(patientIndex.getRefTimeCode()));
                            objectNode.put("orgCode", patient.getOrgCode());
                            objectNode.put(patientIndex.getOfficeCode(), doc.get(patientIndex.getOfficeCode()) == null ? null : (String) doc.get(patientIndex.getOfficeCode()));
                            objectNode.put(patientIndex.getOfficeName(), doc.get(patientIndex.getOfficeName()) == null ? null : (String) doc.get(patientIndex.getOfficeName()));
                            if ("HDSC02_09".equals(dataSetCode)) {
                                objectNode.put(patientIndex.getLeaveTime(), doc.get(patientIndex.getLeaveTime()) == null ? null : (String) doc.get(patientIndex.getLeaveTime()));
                            }
                            arrayNode.add(objectNode);
                        }
                    }
                    if (arrayNode != null && arrayNode.size() > 0) {
                        result.put(dataSetCode, arrayNode);
                    } else {
                        continue;
                    }
                    String diagDataSet = patientIndex.getDiagDataSet();
                    MongoCollection<Document> diagCollection = MongoDB.db(dbName).getCollection(diagDataSet);
                    FindIterable<Document> diags = diagCollection.find(and(eq(KEY, diagDataSet), eq(PATIENT_ID, patient.getPatientId()), eq(EVENT_NO, patient.getEventNo()))).projection(excludeId());
                    diagCursor = diags.iterator();
                    arrayNode = new ArrayList<>();
                    while (diagCursor.hasNext()) {
                        Document document = diagCursor.next();
                        List<Document> list = document.get("data", List.class);
                        for (Document doc : list) {
                            objectNode = new Document();
                            objectNode.put(patientIndex.getDiagType(), doc.get(patientIndex.getDiagType()) == null ? null : (String) doc.get(patientIndex.getDiagType()));
                            objectNode.put(patientIndex.getDiagCode(), doc.get(patientIndex.getDiagCode()) == null ? null : (String) doc.get(patientIndex.getDiagCode()));
                            objectNode.put(patientIndex.getDiagName(), doc.get(patientIndex.getDiagName()) == null ? null : (String) doc.get(patientIndex.getDiagName()));
                            arrayNode.add(objectNode);
                        }
                    }
                    if (arrayNode != null && arrayNode.size() > 0) {
                        result.put(diagDataSet, arrayNode);
                    }
                } else {
                    continue;
                }
            }
            return result;
        } catch (Exception e) {
            DebugLogger.fatal("", e);
        } finally {
            if (cursor != null) {
                cursor.close();
            }
            if (diagCursor != null) {
                diagCursor.close();
            }
        }
        return null;
    }
}
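For reference, the index setup performed by createIndex(...) is roughly equivalent to the standalone sketch below; the database name, collection name and 3-day expiry are assumptions (the expiry value is taken from archive.properties further down).

import com.mongodb.client.MongoCollection;
import com.mongodb.client.model.IndexOptions;
import com.yihu.ehr.common.mongo.MongoDB;
import com.yihu.ehr.crawler.storage.MongodbStorage;
import org.bson.Document;
import java.util.concurrent.TimeUnit;

public class TtlIndexSketch {
    public static void main(String[] args) {
        // "archive" as the database name and "HDSC01_02" as the collection are assumptions for this sketch
        MongoCollection<Document> collection = MongoDB.db("archive").getCollection("HDSC01_02");
        collection.createIndex(new Document(MongodbStorage.CREATE_AT, 1),
                new IndexOptions().name(MongodbStorage.TTL_INDEX).expireAfter(3L, TimeUnit.DAYS));   // documents expire N days after create_at
        collection.createIndex(new Document(MongodbStorage.PATIENT_ID, 1)
                .append(MongodbStorage.EVENT_NO, 1));                                                // query index on patient_id + event_no
    }
}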

+ 17 - 0
Hos-resource/src/main/java/com/yihu/ehr/standard/service/adapter/AdapterDatasetService.java

@ -462,4 +462,21 @@ public class AdapterDatasetService extends SQLGeneralDAO {
            return null;
        }
    }
    public AdapterDatasetModel getAdapterDatasetByCode(String version,String datasetCode) {
        try {
            if (StringUtil.isEmpty(datasetCode)) {
                return null;
            }
            SqlCreator sqlCreator = new SqlCreator(AdapterDatasetModel.class);
            sqlCreator.equalCondition("stdDatasetCode", datasetCode);
            String sql = sqlCreator.selectData("adapter_dataset_"+version);
            Query query = getQuery(sqlCreator, sql);
            return (AdapterDatasetModel)query.uniqueResult();
        } catch (Exception e) {
            e.printStackTrace();
            return null;
        }
    }
}

+ 27 - 0
Hos-resource/src/main/java/com/yihu/ehr/standard/service/adapter/AdapterDictEntryService.java

@ -327,4 +327,31 @@ public class AdapterDictEntryService extends SQLGeneralDAO {
            return Result.error("删除适配字典项失败");
        }
    }
    public AdapterDictEntryModel getAdapterDictItemBydictCode(String version,AdapterDictModel adapterDictT, String dictCode) {
        try {
            SqlCreator sqlCreator = new SqlCreator(AdapterDictEntryModel.class);
            sqlCreator.equalCondition("stdDictId", adapterDictT.getStdDictId());
            sqlCreator.equalCondition("stdEntryCode", dictCode);
            String sql = sqlCreator.selectData("std_dictionary_entry_"+version);
            Query query = getQuery(sqlCreator, sql);
            return (AdapterDictEntryModel)query.uniqueResult();
        } catch (Exception e) {
            return null;
        }
    }
    public AdapterDictEntryModel getAdapterDictItemBydictValue(String version,AdapterDictModel adapterDictT, String dictValue) {
        try {
            SqlCreator sqlCreator = new SqlCreator(AdapterDictEntryModel.class);
            sqlCreator.equalCondition("stdDictId", adapterDictT.getStdDictId());
            sqlCreator.equalCondition("stdEntryValue", dictValue);
            String sql = sqlCreator.selectData("std_dictionary_entry_"+version);
            Query query = getQuery(sqlCreator, sql);
            return (AdapterDictEntryModel)query.uniqueResult();
        } catch (Exception e) {
            return null;
        }
    }
}

+ 3 - 0
Hos-resource/src/main/java/com/yihu/ehr/standard/service/adapter/AdapterDictService.java

@ -175,4 +175,7 @@ public class AdapterDictService extends SQLGeneralDAO {
        Integer count = Integer.parseInt(StringUtil.toString(query.list().get(0)));
        return count;
    }
}

+ 12 - 0
Hos-resource/src/main/java/com/yihu/ehr/standard/service/adapter/AdapterMetadataService.java

@ -390,4 +390,16 @@ public class AdapterMetadataService extends SQLGeneralDAO {
        return metadataModelList;
    }
    public AdapterMetadataModel getAdapterMetadata(String version, Integer adapterMetadataId) {
        try {
            SqlCreator sqlCreator = new SqlCreator(AdapterMetadataModel.class);
            sqlCreator.equalCondition("id", adapterMetadataId);
            String sql = sqlCreator.selectData("adapter_metadata_"+version);
            Query query = getQuery(sqlCreator, sql);
            return (AdapterMetadataModel)query.uniqueResult();
        } catch (Exception e) {
            return null;
        }
    }
}

+ 19 - 0
Hos-resource/src/main/java/com/yihu/ehr/standard/service/standard/StdDictEntryService.java

@ -319,4 +319,23 @@ public class StdDictEntryService extends SQLGeneralDAO {
           return DetailModelResult.error("获取标准字典项下拉列表失败");
        }
    }
    public StdDictionaryEntryModel getStEntryValueByCode(String version,Integer dictId, String entryCode) {
        try {
            SqlCreator sqlCreator = new SqlCreator(StdDictionaryEntryModel.class);
            sqlCreator.equalCondition("dictId", dictId);
            sqlCreator.equalCondition("code", entryCode);
            String sql = sqlCreator.selectData("std_dictionary_entry_"+version);
            Query query = getQuery(sqlCreator, sql);
            List<StdDictionaryEntryModel> list=query.list();
            if (list ==null || list.size()==0){
                return null;
            }else {
                return list.get(0);
            }
        } catch (Exception e) {
            return null;
        }
    }
}//end DictEntryManager

+ 14 - 0
Hos-resource/src/main/java/com/yihu/ehr/standard/service/standard/StdDictService.java

@ -17,6 +17,7 @@ import com.yihu.ehr.framework.util.sql.BeanTransformer;
import com.yihu.ehr.framework.util.sql.RequestParamTransformer;
import com.yihu.ehr.framework.util.sql.SqlCreator;
import com.yihu.ehr.standard.model.Select2;
import com.yihu.ehr.standard.model.adapter.AdapterDictModel;
import com.yihu.ehr.standard.model.standard.StdDataSetModel;
import com.yihu.ehr.standard.model.standard.StdDictionaryModel;
import com.yihu.ehr.standard.service.bo.StandardVersion;
@ -354,4 +355,17 @@ public class StdDictService extends SQLGeneralDAO {
            return DetailModelResult.error("获取字典下来列表失败");
        }
    }
    public StdDictionaryModel getAdapterDictByCode(String version, String dictCode) {
        try {
            SqlCreator sqlCreator = new SqlCreator(StdDictionaryModel.class);
            sqlCreator.equalCondition("code", dictCode);
            String sql = sqlCreator.selectData("std_dictentry_"+version);
            Query query = getQuery(sqlCreator, sql);
            return (StdDictionaryModel)query.uniqueResult();
        } catch (Exception e) {
            return null;
        }
    }
}

+ 13 - 0
Hos-resource/src/main/java/com/yihu/ehr/standard/service/standard/StdMetadataService.java

@ -15,6 +15,7 @@ import com.yihu.ehr.framework.util.sql.BeanTransformer;
import com.yihu.ehr.framework.util.sql.RequestParamTransformer;
import com.yihu.ehr.framework.util.sql.SqlCreator;
import com.yihu.ehr.standard.model.Select2;
import com.yihu.ehr.standard.model.adapter.AdapterMetadataModel;
import com.yihu.ehr.standard.model.standard.StandardVersionModel;
import com.yihu.ehr.standard.model.standard.StdDataSetModel;
import com.yihu.ehr.standard.model.standard.StdDictionaryEntryModel;
@ -391,4 +392,16 @@ public class StdMetadataService extends SQLGeneralDAO {
            throw new ApiException(ErrorCode.GetDataSetListFailed);
        }
    }
    public StdMetaDataModel getStdMetadata(String version, Integer stdMetadataId) {
        try {
            SqlCreator sqlCreator = new SqlCreator(StdMetaDataModel.class);   // map std_metadata rows to StdMetaDataModel, matching the cast below
            sqlCreator.equalCondition("id", stdMetadataId);
            String sql = sqlCreator.selectData("std_metadata_"+version);
            Query query = getQuery(sqlCreator, sql);
            return (StdMetaDataModel)query.uniqueResult();
        } catch (Exception e) {
            return null;
        }
    }
}

+ 11 - 0
Hos-resource/src/main/java/com/yihu/ehr/system/dao/DatasourceDao.java

@ -102,4 +102,15 @@ public class DatasourceDao extends SQLGeneralDAO implements IDatasourceDao {
        String sql = "update System_Datasource set activity_Flag='"+valid+"' where Id='"+id+"'";
        super.execute(sql);
    }
    @Override
    public List<SystemDatasource> getDatasources(String orgId) throws Exception {
        String sql = "select * from System_Datasource";
        if(orgId!=null&&orgId.length()>0){
            sql += " where org_id ='"+orgId+"'";
        }
        List<SystemDatasource> list = super.queryListBySql(sql,SystemDatasource.class);
        return list;
    }
}

+ 13 - 0
Hos-resource/src/main/java/com/yihu/ehr/system/dao/OrganizationDao.java

@ -64,4 +64,17 @@ public class OrganizationDao extends SQLGeneralDAO implements IOrganizationDao {
        String sql = "update System_Organization set activity_Flag='"+activityFlag+"' where Id='"+orgId+"'";
        super.execute(sql);
    }
    public SystemOrganization getOrgByIde(String orgId) throws Exception {
        String sql = "select * from system_organization where id = '"+orgId+"' and activity_flag = '1'";
        List<SystemOrganization> list = super.queryListBySql(sql, SystemOrganization.class);
        if (list!=null && list.size()>0){
            return list.get(0);
        }else {
            return null;
        }
    }
}

+ 4 - 0
Hos-resource/src/main/java/com/yihu/ehr/system/dao/intf/IDatasourceDao.java

@ -2,7 +2,9 @@ package com.yihu.ehr.system.dao.intf;
import com.yihu.ehr.framework.common.dao.XSQLGeneralDAO;
import com.yihu.ehr.framework.model.DataGridResult;
import com.yihu.ehr.system.model.SystemDatasource;
import java.util.List;
import java.util.Map;
/**
@ -31,4 +33,6 @@ public interface IDatasourceDao extends XSQLGeneralDAO {
     * 修改数据源状态(暂弃)
     */
    void validDatasource(String id, String valid) throws Exception;
    List<SystemDatasource> getDatasources(String orgId) throws Exception;
}

+ 2 - 0
Hos-resource/src/main/java/com/yihu/ehr/system/dao/intf/IOrganizationDao.java

@ -27,4 +27,6 @@ public interface IOrganizationDao extends XSQLGeneralDAO {
     * 修改组织状态
     */
    void activityOrg(String orgId, String activityFlag) throws Exception;
    SystemOrganization getOrgByIde(String orgId) throws Exception;
}

+ 11 - 0
Hos-resource/src/main/java/com/yihu/ehr/system/service/DatasourceManager.java

@ -11,6 +11,7 @@ import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import javax.annotation.Resource;
import java.util.List;
import java.util.Map;
/**
@ -142,4 +143,14 @@ public class DatasourceManager implements IDatasourceManager {
            return new ActionResult(false,"删除失败!");
        }
    }
    @Override
    public List<SystemDatasource> getDatasources(String orgId) {
        try {
            return datasourceDao.getDatasources(orgId);
        } catch (Exception e) {
            e.printStackTrace();
        }
        return null;
    }
}

+ 5 - 0
Hos-resource/src/main/java/com/yihu/ehr/system/service/OrganizationManager.java

@ -117,6 +117,11 @@ public class OrganizationManager implements IOrganizationManager {
        }
    }
    @Override
    public SystemOrganization getOrgByIde(String orgId) throws Exception {
        return organizationDao.getOrgByIde(orgId);
    }
    /**
     * 修改组织状态
     * @param orgId

+ 7 - 0
Hos-resource/src/main/java/com/yihu/ehr/system/service/intf/IDatasourceManager.java

@ -55,4 +55,11 @@ public interface IDatasourceManager {
     * @return
     */
    ActionResult deleteDatasource(String id);
    /**
     * Get the list of data sources.
     * @param orgId
     * @return
     */
    List<SystemDatasource> getDatasources(String orgId);
}

+ 2 - 0
Hos-resource/src/main/java/com/yihu/ehr/system/service/intf/IOrganizationManager.java

@ -44,4 +44,6 @@ public interface IOrganizationManager {
     * 修改组织状态
     *//*
    ActionResult activityOrg(String orgId,String activityFlag) throws Exception;*/
    SystemOrganization getOrgByIde(String orgId) throws Exception;
}

+ 15 - 0
Hos-resource/src/main/resources/config/archive.properties

@ -0,0 +1,15 @@
# Lightweight mode: archive browsing URL
hos.archives.host=https://192.168.131.11:8443
hos.archives.request.url=/api/v1.0/archives/patient/
hos.archives.expiry.days=3
# Main support platform API endpoints
ha.url.standard.getVersion=/v1.0/adapter-dispatcher/versionplan
ha.url.standard.packageDown=/v1.0/adapter-dispatcher/schema
ha.url.patient.register=/v1.0/patient/registration
ha.url.patient.upload=/v1.0/json_package/
ha.url.security.publicKey.get=/v1.0/security/user_key/
ha.url.security.token.get=/v1.0/security/token
ha.url.standard.schemaMappingPlan=/v1.0/adapter-dispatcher/schemaMappingPlan
ha.url.standard.allSchemaMappingPlan=/v1.0/adapter-dispatcher/allSchemaMappingPlan
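These hos.archives.* settings feed MongodbStorage.createUrl and getExpireDays; with the values above, a stored data set resolves to a retrieval URL like the following (identifiers are made up):

// Illustrative only: how MongodbStorage.createUrl assembles the retrieval URL from these settings
String requestPath = "/api/v1.0/archives/patient/";                          // hos.archives.request.url
String url = requestPath + "HDSC01_02" + "/" + "P0001" + "/" + "E20151015";  // dataSetCode/patientId/eventNo
// -> /api/v1.0/archives/patient/HDSC01_02/P0001/E20151015, valid for hos.archives.expiry.days = 3 days in centralized mode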

+ 2 - 0
Hos-resource/src/main/resources/config/sys.config.xml

@ -1,6 +1,8 @@
<?xml version="1.0" encoding="UTF-8" ?>
<config>
    <temp_file>D:\temp</temp_file>
    <!-- Storage mode: 0 = lightweight, 1 = traditional -->
    <storage_pattern>0</storage_pattern>
    <event_no>
        <item>HDSD03_01_031</item>
        <item>HDSD00_01_579</item>