Parcourir la source

Merge branch 'master' of luofaqiang/esb into master

罗发强 il y a 8 ans
Parent
commit
7f5d9e588e
44 fichiers modifiés avec 787 ajouts et 4372 suppressions
  1. 0 20
      sdk/java/hos-client/hos-client.iml
  2. 3 3
      sdk/java/hos-client/pom.xml
  3. 38 27
      sdk/java/hos-client/src/main/java/com/yihu/hos/client/BrokerServer.java
  4. 18 30
      sdk/java/hos-client/src/main/java/com/yihu/hos/client/BrokerServerClient.java
  5. 1 1
      sdk/java/hos-client/src/main/java/com/yihu/hos/client/Request.java
  6. 1 1
      sdk/java/hos-client/src/main/java/com/yihu/hos/client/Response.java
  7. 580 0
      sdk/websocket/hos-client.js
  8. 91 0
      sdk/websocket/test.htm
  9. 0 6
      src/main/java/com/yihu/hos/ESBApplication.java
  10. 2 2
      src/main/java/com/yihu/hos/ServletInitializer.java
  11. 0 117
      src/main/java/com/yihu/hos/common/ActiveMqUtil.java
  12. 0 13
      src/main/java/com/yihu/hos/common/activeMq/ActiveMqConstants.java
  13. 0 29
      src/main/java/com/yihu/hos/common/activeMq/ActivemqConfiguration.java
  14. 0 98
      src/main/java/com/yihu/hos/common/mongo/IMongoDBAdminer.java
  15. 0 376
      src/main/java/com/yihu/hos/common/mongo/IMongoDBRunner.java
  16. 0 172
      src/main/java/com/yihu/hos/common/mongo/MongoDB.java
  17. 0 65
      src/main/java/com/yihu/hos/common/mongo/MongoDBConfig.java
  18. 0 57
      src/main/java/com/yihu/hos/common/mongo/MongoDBKit.java
  19. 0 92
      src/main/java/com/yihu/hos/common/mongo/MongoDBOperator.java
  20. 0 566
      src/main/java/com/yihu/hos/common/mongo/MongoDBPro.java
  21. 0 27
      src/main/java/com/yihu/hos/config/WebConfig.java
  22. 6 30
      src/main/java/com/yihu/hos/crawler/controller/CrawlerController.java
  23. 0 159
      src/main/java/com/yihu/hos/crawler/format/DataSetTransformer.java
  24. 0 54
      src/main/java/com/yihu/hos/crawler/format/DocumentTransformer.java
  25. 0 27
      src/main/java/com/yihu/hos/crawler/format/IDataTransformer.java
  26. 0 235
      src/main/java/com/yihu/hos/crawler/origin/FileSystemOrigin.java
  27. 0 47
      src/main/java/com/yihu/hos/crawler/origin/IDataOrigin.java
  28. 0 323
      src/main/java/com/yihu/hos/crawler/service/CrawlerFlowManager.java
  29. 0 243
      src/main/java/com/yihu/hos/crawler/service/CrawlerManager.java
  30. 5 5
      src/main/java/com/yihu/hos/crawler/service/CrawlerService.java
  31. 0 381
      src/main/java/com/yihu/hos/crawler/service/DataCollectDispatcher.java
  32. 33 8
      src/main/java/com/yihu/hos/crawler/service/EsbHttp.java
  33. 0 148
      src/main/java/com/yihu/hos/crawler/service/OldPatientCDAUpload.java
  34. 0 83
      src/main/java/com/yihu/hos/crawler/service/PatientCDAIndex.java
  35. 0 113
      src/main/java/com/yihu/hos/crawler/service/PatientCDAUpload.java
  36. 0 37
      src/main/java/com/yihu/hos/crawler/storage/DataSetStorage.java
  37. 0 231
      src/main/java/com/yihu/hos/crawler/storage/DocumentStorage.java
  38. 0 36
      src/main/java/com/yihu/hos/crawler/storage/IDataStorage.java
  39. 0 390
      src/main/java/com/yihu/hos/crawler/storage/MongodbStorage.java
  40. 1 1
      src/main/java/com/yihu/hos/common/CheckLoginFilter.java
  41. 1 1
      src/main/java/com/yihu/hos/common/SessionOutTimeFilter.java
  42. 0 55
      src/main/java/com/yihu/hos/services/ArchiveUploadJob.java
  43. 0 56
      src/main/java/com/yihu/hos/services/CrawlerFlowUploadJob.java
  44. 7 7
      src/main/webapp/WEB-INF/ehr/jsp/common/indexJs.jsp

+ 0 - 20
sdk/java/hos-client/hos-client.iml

@ -1,20 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<module org.jetbrains.idea.maven.project.MavenProjectsManager.isMavenModule="true" type="JAVA_MODULE" version="4">
  <component name="NewModuleRootManager" LANGUAGE_LEVEL="JDK_1_8" inherit-compiler-output="false">
    <output url="file://$MODULE_DIR$/target/classes" />
    <output-test url="file://$MODULE_DIR$/target/test-classes" />
    <content url="file://$MODULE_DIR$">
      <sourceFolder url="file://$MODULE_DIR$/src/main/java" isTestSource="false" />
      <excludeFolder url="file://$MODULE_DIR$/target" />
    </content>
    <orderEntry type="inheritedJdk" />
    <orderEntry type="sourceFolder" forTests="false" />
    <orderEntry type="library" name="Maven: org.apache.httpcomponents:httpclient:4.5.1" level="project" />
    <orderEntry type="library" name="Maven: org.apache.httpcomponents:httpcore:4.4.3" level="project" />
    <orderEntry type="library" name="Maven: commons-logging:commons-logging:1.2" level="project" />
    <orderEntry type="library" name="Maven: commons-codec:commons-codec:1.9" level="project" />
    <orderEntry type="library" name="Maven: com.fasterxml.jackson.core:jackson-databind:2.6.4" level="project" />
    <orderEntry type="library" name="Maven: com.fasterxml.jackson.core:jackson-annotations:2.6.0" level="project" />
    <orderEntry type="library" name="Maven: com.fasterxml.jackson.core:jackson-core:2.6.4" level="project" />
  </component>
</module>

+ 3 - 3
sdk/java/hos-client/pom.xml

@ -9,9 +9,9 @@
    <version>1.0-SNAPSHOT</version>
    <dependencies>
        <dependency>
            <groupId>org.apache.httpcomponents</groupId>
            <artifactId>httpclient</artifactId>
            <version>4.5.1</version>
            <groupId>com.squareup.okhttp3</groupId>
            <artifactId>okhttp</artifactId>
            <version>3.4.1</version>
        </dependency>
        <dependency>
            <groupId>com.fasterxml.jackson.core</groupId>

+ 38 - 27
sdk/java/hos-client/src/main/java/com/yihu/hos/client/BrokerServer.java

@ -1,14 +1,13 @@
package com.yihu.hos.client;
import org.apache.http.HttpEntity;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.util.EntityUtils;
import okhttp3.Call;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.Response;
import java.io.IOException;
import java.util.Arrays;
import java.util.concurrent.*;
/**
 * @created Airhead 2016/8/16.
@ -43,6 +42,19 @@ class BrokerServer {
        this.port = port;
    }
    private class AsyncCall implements Callable<ClientResponse> {
        private ClientRequest clientRequest;
        AsyncCall(ClientRequest clientRequest) {
            this.clientRequest = clientRequest;
        }
        @Override
        public ClientResponse call() throws Exception {
            return invokeSync(clientRequest);
        }
    }
    Boolean getEnable() {
        return enable;
    }
@ -55,46 +67,45 @@ class BrokerServer {
        return this.hostAddress + ":" + this.port;
    }
    Response invokeSync(Request request) {
        CloseableHttpClient httpclient = HttpClients.createDefault();
        String method = request.getMethod();
        if (request.getMethod().startsWith("/")) {
            method = request.getMethod().substring(1);
    ClientResponse invokeSync(ClientRequest clientRequest) {
        String method = clientRequest.getMethod();
        if (clientRequest.getMethod().startsWith("/")) {
            method = clientRequest.getMethod().substring(1);
        }
        HttpPost httpPost = new HttpPost(hostAddress + ":" + port + method);
        CloseableHttpResponse response = null;
        Response result = new Response();
        ClientResponse result = new ClientResponse();
        try {
            response = httpclient.execute(httpPost);
            if (response.getStatusLine().getStatusCode() != 200) {
            OkHttpClient okHttpClient = new OkHttpClient();
            Request request = new Request.Builder().url(hostAddress + ":" + port + method).build();
            Call call = okHttpClient.newCall(request);
            Response response = call.execute();
            if (!response.isSuccessful()) {
                EsbException esbException = new EsbException("Can not connect the server.");
                result.setError(esbException);
                result.setStackTrace(Arrays.toString(esbException.getStackTrace()));
                return result;
            }
            HttpEntity entity = response.getEntity();
            String body = EntityUtils.toString(entity, "UTF-8");
            result.setResult(body);
            result.setResult(response.body().toString());
        } catch (IOException e) {
            e.printStackTrace();
            result.setError(e);
            result.setStackTrace(Arrays.toString(e.getStackTrace()));
        } finally {
            assert response != null;
            try {
                response.close();
                httpclient.close();
            } catch (IOException ignored) {
            }
        }
        return result;
    }
    void invokeAsync(Request request, final ResultCallback<Response> callback) {
    void invokeAsync(ClientRequest clientRequest, final ResultCallback<ClientResponse> callback) {
        try {
            ExecutorService executorService = Executors.newCachedThreadPool();
            Future<ClientResponse> future = executorService.submit(new AsyncCall(clientRequest));
            ClientResponse clientResponse = future.get();
            callback.onReturn(clientResponse);
        } catch (InterruptedException | ExecutionException e) {
            e.printStackTrace();
        }
    }
}

+ 18 - 30
sdk/java/hos-client/src/main/java/com/yihu/hos/client/BrokerServerClient.java

@ -2,12 +2,10 @@ package com.yihu.hos.client;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.http.HttpEntity;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.util.EntityUtils;
import okhttp3.Call;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.Response;
import java.io.IOException;
import java.util.HashMap;
@ -32,25 +30,25 @@ public class BrokerServerClient {
        }
    }
    public Response invokeSync(Request request) {
        Response response = new Response();
    public ClientResponse invokeSync(ClientRequest clientRequest) {
        ClientResponse clientResponse = new ClientResponse();
        try {
            BrokerServer brokerServer = selectBrokerServer();
            return brokerServer.invokeSync(request);
            return brokerServer.invokeSync(clientRequest);
        } catch (Exception e) {
            e.printStackTrace();
            response.setError(e);
            response.setStackTrace(e.getStackTrace().toString());
            clientResponse.setError(e);
            clientResponse.setStackTrace(e.getStackTrace().toString());
        }
        return response;
        return clientResponse;
    }
    public void invokeAsync(Request request, final ResultCallback<Response> callback) {
    public void invokeAsync(ClientRequest clientRequest, final ResultCallback<ClientResponse> callback) {
        BrokerServer brokerServer = selectBrokerServer();
        brokerServer.invokeAsync(request, callback);
        brokerServer.invokeAsync(clientRequest, callback);
    }
    private BrokerServer selectBrokerServer() {
@ -58,19 +56,17 @@ public class BrokerServerClient {
            return mapBrokerServer.entrySet().iterator().next().getValue();
        }
        CloseableHttpClient httpclient = HttpClients.createDefault();
        HttpGet httpGet = new HttpGet(host + ":10135/brokerServer");
        CloseableHttpResponse response = null;
        try {
            response = httpclient.execute(httpGet);
            if (response.getStatusLine().getStatusCode() != 200) {
            OkHttpClient okHttpClient = new OkHttpClient();
            Request request = new Request.Builder().url(host + ":10135/brokerServer").build();
            Call call = okHttpClient.newCall(request);
            Response response = call.execute();
            if (!response.isSuccessful()) {
                throw new IOException("Can not connect the server.");
            }
            HttpEntity entity = response.getEntity();
            String body = EntityUtils.toString(entity, "UTF-8");
            ObjectMapper objectMapper = new ObjectMapper();
            JsonNode node = objectMapper.readTree(body);
            JsonNode node = objectMapper.readTree(response.body().toString());
            String hostAddress = node.path("hostAddress").asText();
            String hostName = node.path("hostName").asText();
            String port = node.path("port").asText();
@ -84,14 +80,6 @@ public class BrokerServerClient {
        } catch (IOException e) {
            e.printStackTrace();
            throw new EsbException(e.getMessage(), e.getCause());
        } finally {
            assert response != null;
            try {
                response.close();
                httpclient.close();
            } catch (IOException e) {
                ;
            }
        }
        return mapBrokerServer.entrySet().iterator().next().getValue();

+ 1 - 1
sdk/java/hos-client/src/main/java/com/yihu/hos/client/Request.java

@ -6,7 +6,7 @@ import java.util.Map;
/**
 * @created Airhead 2016/8/5.
 */
public class Request {
public class ClientRequest {
    private String module;
    private String method;
    private Map<String, String> args;

+ 1 - 1
sdk/java/hos-client/src/main/java/com/yihu/hos/client/Response.java

@ -6,7 +6,7 @@ import java.io.StringWriter;
/**
 * @created Airhead 2016/8/5.
 */
public class Response {
public class ClientResponse {
    public static final String KEY_RESULT = "result";
    public static final String KEY_STACK_TRACE = "stackTrace";
    public static final String KEY_ERROR = "error";

+ 580 - 0
sdk/websocket/hos-client.js

@ -0,0 +1,580 @@
// Count the number of own (non-inherited) enumerable properties on obj.
function hashSize(obj) {
    var count = 0;
    for (var prop in obj) {
        if (obj.hasOwnProperty(prop)) {
            count += 1;
        }
    }
    return count;
}
// Generate a random RFC 4122 version-4 UUID string.
// Technique from http://stackoverflow.com/questions/105034/how-to-create-a-guid-uuid-in-javascript
function uuid() {
    var template = 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx';
    return template.replace(/[xy]/g, function (ch) {
        var rand = Math.random() * 16 | 0;
        var nibble = (ch == 'x') ? rand : ((rand & 0x3) | 0x8);
        return nibble.toString(16);
    });
}
// Encode a JS string into an Int16Array, one UTF-16 code unit per slot.
// The inverse operation is buffer2String.
function string2Buffer(str) {
    var raw = new ArrayBuffer(str.length * 2); // 2 bytes for each char
    var view = new Int16Array(raw);
    var i = str.length;
    while (i--) {
        view[i] = str.charCodeAt(i);
    }
    return view;
}
// Decode a buffer of UTF-16 code units back into a string (inverse of
// string2Buffer). Char-by-char to sidestep apply()'s argument limit on
// very large buffers; result is identical.
function buffer2String(buf) {
    var out = '';
    for (var i = 0; i < buf.length; i++) {
        out += String.fromCharCode(buf[i]);
    }
    return out;
}
// Install a C#-style String.prototype.format polyfill: "{0} {1}" placeholders
// are replaced by positional arguments. Only installed if no implementation
// exists yet; unmatched placeholders are left untouched.
if (!String.prototype.format) {
    String.prototype.format = function () {
        var supplied = arguments;
        return this.replace(/{(\d+)}/g, function (whole, index) {
            if (typeof supplied[index] == 'undefined') {
                return whole;
            }
            return supplied[index];
        });
    };
}
// Classic prototypal inheritance helper (node.js util.inherits convention):
// makes ctor extend superCtor and records the parent on ctor.super_.
function inherits(ctor, superCtor) {
    ctor.super_ = superCtor;
    var ctorDescriptor = {
        value: ctor,
        enumerable: false,
        writable: true,
        configurable: true
    };
    ctor.prototype = Object.create(superCtor.prototype, { constructor: ctorDescriptor });
}
// /////////////////////////////////////////////////////////////////
// Parse the first line of an HTTP message into structured form:
//   "GET /a?b=c HTTP/1.1"  -> method/url/path/params populated
//   "HTTP/1.1 200 OK"      -> status populated (first token is not a method)
// With no argument, yields a default "GET /" request meta.
function Meta(meta) {
    this.status = null;   // set only for response (status) lines
    this.method = "GET";
    this.url = "/";
    this.path = null;
    this.params = null;
    if (!meta || meta == "") return;
    var blocks = meta.split(" ");
    var method = blocks[0];
    if (Meta.HttpMethod.indexOf(method) == -1) {
        // Not a request line: treat as a status line, second token is the code.
        this.status = blocks[1];
        return;
    }
    // FIX: the parsed method was previously dropped, leaving this.method
    // stuck at "GET" for POST/PUT/DELETE/... request lines.
    this.method = method;
    this.url = blocks[1];
    this.decodeUrl(this.url);
}
// Request methods recognized by the Meta() parser; a first token outside
// this list makes the line parse as a status line instead.
Meta.HttpMethod = ["GET", "POST", "PUT", "DELETE", "HEAD", "OPTIONS"];
// Reason phrases for the status codes this client renders (see Meta.prototype.toString).
Meta.HttpStatus = {
    "200": "OK",
    "201": "Created",
    "202": "Accepted",
    "204": "No Content",
    "206": "Partial Content",
    "301": "Moved Permanently",
    "304": "Not Modified",
    "400": "Bad Request",
    "401": "Unauthorized",
    "403": "Forbidden",
    "404": "Not Found",
    "405": "Method Not Allowed",
    "416": "Requested Range Not Satisfiable",
    "500": "Internal Server Error"
};
// Render the meta back into an HTTP start line: a status line when a status
// is set, otherwise a request line.
Meta.prototype.toString = function () {
    if (!this.status) {
        return "{0} {1} HTTP/1.1".format(this.method, this.url);
    }
    var reason = Meta.HttpStatus[this.status] || "Unknown Status";
    return "HTTP/1.1 {0} {1}".format(this.status, reason);
};
// Read a query-string parameter; undefined when none have been parsed/set.
Meta.prototype.getParam = function (key) {
    return this.params ? this.params[key] : undefined;
};
// Store a query-string parameter, lazily creating the table on first use.
Meta.prototype.setParam = function (key, val) {
    if (this.params == null) {
        this.params = {};
    }
    this.params[key] = val;
};
// Replace the url and re-derive path/params from it.
Meta.prototype.setUrl = function (url) {
    this.url = url;
    this.decodeUrl(url);
};
// Split a request url into this.path (leading '/' stripped) and this.params
// (a key->value table decoded from the query string, if present).
// Key-only query fragments ("?flag&k=v") are skipped with a log line.
Meta.prototype.decodeUrl = function (cmdStr) {
    var idx = cmdStr.indexOf("?");
    if (idx < 0) {
        this.path = cmdStr;
    } else {
        this.path = cmdStr.substring(0, idx);
    }
    if (this.path.charAt(0) == '/') {
        this.path = this.path.substring(1);
    }
    if (idx < 0) return;
    var paramStr = cmdStr.substring(idx + 1);
    this.params = {};
    var kvs = paramStr.split("&");
    for (var i in kvs) {
        var kv = kvs[i];
        idx = kv.indexOf("=");
        if (idx < 0) {
            // FIX: `util` is not defined anywhere in this file, so the old
            // util.debug(...) call threw a ReferenceError whenever a query
            // fragment had no '='. Log and skip instead.
            console.log("omit: " + kv);
            continue;
        }
        var key = kv.substring(0, idx);
        var val = kv.substring(idx + 1);
        this.params[key] = val;
    }
};
//HTTP Message
// An HTTP-framed message: a start line (this.meta), a header table
// (this.head) and an optional body (stored as an Int16Array; see setBody).
function Message(body) {
    this.meta = new Meta();
    this.head = {};
    this.setBody(body);
}
// Well-known header names exchanged between client and broker.
Message.HEARTBEAT = "heartbeat";
Message.REMOTE_ADDR = "remote-addr";
Message.ENCODING = "encoding";
Message.CMD = "cmd";
Message.BROKER = "broker";
Message.TOPIC = "topic";
Message.MQ = "mq";
Message.ID = "id";
Message.ACK = "ack";
Message.SENDER = "sender";
Message.RECVER = "recver";
// Headers that tunnel the original request context through the broker
// (restored by Consumer.prototype.take before the message is surfaced).
Message.ORIGIN_URL = "origin_url";
Message.ORIGIN_ID = "rawid";
Message.ORIGIN_STATUS = "reply_code"
// ---- header accessors --------------------------------------------------
// Generic access to the header table, keyed by the Message.* constants.
Message.prototype.getHead = function (key) {
    return this.head[key];
};
Message.prototype.setHead = function (key, val) {
    this.head[key] = val;
};
Message.prototype.removeHead = function (key) {
    delete this.head[key];
};
// Install the getXxx/setXxx convenience pairs for the plain string headers.
// Each pair simply delegates to getHead/setHead with the bound header name.
(function () {
    var simple = {
        Mq: Message.MQ,
        Id: Message.ID,
        Topic: Message.TOPIC,
        Encoding: Message.ENCODING,
        Cmd: Message.CMD,
        Sender: Message.SENDER,
        Recver: Message.RECVER,
        OriginUrl: Message.ORIGIN_URL,
        OriginStatus: Message.ORIGIN_STATUS,
        OriginId: Message.ORIGIN_ID
    };
    for (var name in simple) {
        (function (headKey, accessor) {
            Message.prototype["get" + accessor] = function () {
                return this.getHead(headKey);
            };
            Message.prototype["set" + accessor] = function (val) {
                this.setHead(headKey, val);
            };
        })(simple[name], name);
    }
})();
// The ack flag is stored as '1'/'0' in the header but defaults to true
// when the header is absent.
Message.prototype.isAck = function () {
    var ack = this.getHead(Message.ACK);
    if (!ack) return true; //default to true
    return ack == '1';
};
Message.prototype.setAck = function (val) {
    this.setHead(Message.ACK, val);
};
// Delegating accessors onto the start line (this.meta).
Message.prototype.getPath = function () {
    return this.meta.path;
};
Message.prototype.getUrl = function () {
    return this.meta.url;
};
Message.prototype.setUrl = function (url) {
    // Giving the message a url turns it into a request, so clear any status.
    this.meta.status = null;
    return this.meta.setUrl(url);
};
Message.prototype.getStatus = function () {
    return this.meta.status;
};
Message.prototype.setStatus = function (val) {
    this.meta.status = val;
};
// The body is held as an Int16Array of UTF-16 code units (see string2Buffer).
Message.prototype.getBodyString = function () {
    return this.body ? buffer2String(this.body) : null;
};
Message.prototype.getBody = function () {
    return this.body || null;
};
// Accepts either a ready-made Int16Array or a string to encode; keeps the
// content-length header in sync. An undefined argument leaves the body alone.
Message.prototype.setBody = function (val) {
    if (val === undefined) return;
    this.body = (val instanceof Int16Array) ? val : string2Buffer(val);
    this.setHead('content-length', this.body.length);
};
// Set a JSON payload and tag the content-type accordingly.
Message.prototype.setJsonBody = function (json) {
    this.setBody(json);
    this.setHead('content-type', 'application/json');
};
// Set the body from a "{0} {1}"-style template plus positional arguments;
// unmatched placeholders are kept verbatim.
Message.prototype.setBodyFormat = function (format) {
    var extra = Array.prototype.slice.call(arguments, 1);
    var rendered = format.replace(/{(\d+)}/g, function (whole, index) {
        return (typeof extra[index] == 'undefined') ? whole : extra[index];
    });
    this.setBody(rendered);
};
// Convenience predicates for the reply codes the client branches on.
Message.prototype.isStatus200 = function () {
    return this.getStatus() == "200";
};
Message.prototype.isStatus404 = function () {
    return this.getStatus() == "404";
};
Message.prototype.isStatus500 = function () {
    return this.getStatus() == "500";
};
// Serialize to HTTP wire format: start line, headers, blank line, body.
// A content-length header is appended only if the caller never set one
// (setBody normally keeps it current).
Message.prototype.toString = function () {
    var out = [this.meta.toString()];
    for (var name in this.head) {
        out.push("{0}: {1}".format(name, this.head[name]));
    }
    var bodyLen = this.body ? this.body.length : 0;
    var lenKey = "content-length";
    if (!(lenKey in this.head)) {
        out.push("{0}: {1}".format(lenKey, bodyLen));
    }
    // The blank separator line and the body travel as one joined chunk.
    var tail = "\r\n";
    if (bodyLen > 0) {
        tail += buffer2String(this.body);
    }
    out.push(tail);
    return out.join("\r\n");
};
// Parse an HTTP-framed wire string back into a Message.
// NOTE(review): blank lines are discarded up front, so the body is recovered
// purely by position (last line) plus a non-zero content-length header. A
// body that itself contains "\r\n" would not survive a round trip — confirm
// against the broker's framing before relying on multi-line bodies.
Message.parse = function (str) {
    var blocks = str.split("\r\n");
    var lines = [];
    for (var i in blocks) {
        var line = blocks[i];
        if (line == '') continue;
        lines.push(line);
    }
    var lenKey = "content-length";
    var lenVal = 0;
    var msg = new Message();
    // First non-empty line is the HTTP start line (request or status).
    msg.meta = new Meta(lines[0]);
    for (var i = 1; i < lines.length; i++) {
        var line = lines[i];
        if (i == lines.length - 1) {
            // Last line doubles as the body once a length header was seen.
            if (lenVal > 0) {
                msg.setBody(line);
                continue;
            }
        }
        var p = line.indexOf(":");
        if (p == -1) continue; // not a "key: value" header; skip it
        var key = line.substring(0, p).trim().toLowerCase();
        var val = line.substring(p + 1).trim();
        if (key == lenKey) {
            lenVal = val; // kept as a string; the "> 0" compare coerces
        }
        msg.setHead(key, val);
    }
    return msg;
};
// Correlates an outgoing request with its eventual response: a fresh uuid
// becomes the ticket id and is also stamped onto the request's id header so
// the broker's reply can be matched back (see MessageClient's onmessage).
function Ticket(reqMsg, callback) {
    var ticketId = uuid();
    this.id = ticketId;
    this.request = reqMsg;
    this.response = null;
    this.callback = callback;
    reqMsg.setId(ticketId);
}
// Resolve the browser WebSocket constructor; old Firefox shipped it under
// the Moz prefix.
var WebSocket = window.WebSocket;
if (!WebSocket) {
    WebSocket = window.MozWebSocket;
}
// A reconnecting WebSocket client that matches replies to requests through
// a table of Tickets keyed by message id.
// NOTE(review): autoReconnect is initialized here but connect()'s onclose
// handler retries unconditionally — confirm whether the flag should gate it.
function MessageClient(address) {
    this.address = address;
    this.autoReconnect = true;
    this.reconnectInterval = 3000; // ms between reconnect attempts
    this.ticketTable = {};         // ticket.id -> Ticket
}
// Open the WebSocket and wire up its lifecycle:
//  - onopen: notify connectedHandler and start a heartbeat every 5 minutes
//  - onclose: stop the heartbeat and retry after reconnectInterval
//    (NOTE(review): retries unconditionally; this.autoReconnect is ignored)
//  - onmessage: parse the frame and dispatch it to the Ticket matching its id
MessageClient.prototype.connect = function (connectedHandler) {
    console.log("Trying to connect to " + this.address);
    this.socket = new WebSocket(this.address);
    var client = this;
    this.socket.onopen = function (event) {
        console.log("Connected to " + client.address);
        if (connectedHandler) {
            connectedHandler(event);
        }
        // Keep the connection alive with a heartbeat command every 300 s.
        client.heartbeatInterval = setInterval(function () {
            var msg = new Message();
            msg.setCmd(Message.HEARTBEAT);
            client.invokeAsync(msg);
        }, 300 * 1000);
    };
    this.socket.onclose = function (event) {
        clearInterval(client.heartbeatInterval);
        setTimeout(function () {
            try {
                client.connect(connectedHandler);
            } catch (e) {
            }//ignore
        }, client.reconnectInterval);
    };
    this.socket.onmessage = function (event) {
        var msg = Message.parse(event.data);
        var msgid = msg.getId();
        var ticket = client.ticketTable[msgid];
        if (ticket) {
            ticket.response = msg;
            if (ticket.callback) {
                ticket.callback(msg);
            }
            // One-shot correlation: the ticket is consumed by its reply.
            delete client.ticketTable[msgid];
        } else {
            console.log("Warn: drop message\n" + msg.toString());
        }
    }
    this.socket.onerror = function (data) {
        console.log("Error: " + data);
    }
}
// Send msg over the open socket. When a callback is supplied, a Ticket is
// registered first so the matching reply (by id) can be routed to it; with
// no callback the message is fire-and-forget.
MessageClient.prototype.invokeAsync = function (msg, callback) {
    var ready = this.socket.readyState == WebSocket.OPEN;
    if (!ready) {
        console.log("socket is not open, invalid");
        return;
    }
    if (callback) {
        var ticket = new Ticket(msg, callback);
        this.ticketTable[ticket.id] = ticket;
    }
    // send() stringifies the Message via its toString (HTTP wire format).
    this.socket.send(msg);
};
// Command names understood by the broker (carried in the "cmd" header).
function Proto() {
}
Proto.Produce = "produce";
Proto.Consume = "consume";
Proto.Route = "route";
Proto.Heartbeat = "heartbeat";
Proto.Admin = "admin";
Proto.CreateMQ = "create_mq";
// Queue mode bit flags, OR-ed together into MqAdmin's mode mask.
function MqMode() {
}
MqMode.MQ = 1 << 0;
MqMode.PubSub = 1 << 1;
MqMode.Memory = 1 << 2;
// A Broker is simply a MessageClient connection.
var Broker = MessageClient;
//define more brokers
// Base class for queue operations: remembers the broker connection, the
// queue name, and a mode bitmask OR-ed together from any extra arguments
// (see the MqMode flags).
function MqAdmin(broker, mq) {
    this.broker = broker;
    this.mq = mq;
    this.mode = 0;
    var extras = Array.prototype.slice.call(arguments, 2);
    for (var i = 0; i < extras.length; i++) {
        this.mode |= extras[i];
    }
}
// Ask the broker to create this queue with the configured mode bitmask;
// the broker's reply is delivered to callback.
// FIX: removed a dead local `params` object that was built and never used —
// the values are sent via the message headers below.
MqAdmin.prototype.createMq = function (callback) {
    var msg = new Message();
    msg.setCmd(Proto.CreateMQ);
    msg.setHead("mq_name", this.mq);
    msg.setHead("mq_mode", "" + this.mode);
    this.broker.invokeAsync(msg, callback);
};
// A producer publishes messages onto a named queue.
function Producer(broker, mq) {
    MqAdmin.call(this, broker, mq);
}
inherits(Producer, MqAdmin);
// Publish msg to this producer's queue; the broker reply goes to callback.
Producer.prototype.sendAsync = function (msg, callback) {
    msg.setCmd(Proto.Produce);
    msg.setMq(this.mq);
    this.broker.invokeAsync(msg, callback);
};
// A consumer pulls messages from a named queue (long-poll style; see take).
function Consumer(broker, mq) {
    MqAdmin.call(this, broker, mq);
}
inherits(Consumer, MqAdmin);
// Long-poll one message from the queue and hand it to callback, then
// immediately re-arm: take() schedules itself again on every broker reply,
// so a single call starts a continuous consume loop.
Consumer.prototype.take = function (callback) {
    var msg = new Message();
    msg.setCmd(Proto.Consume);
    msg.setMq(this.mq);
    if (this.topic) msg.setTopic(this.topic);
    var consumer = this;
    this.broker.invokeAsync(msg, function (res) {
        if (res.isStatus404()) {
            // Queue does not exist yet: create it, then retry the take.
            consumer.createMq(function (res) {
                if (res.isStatus200()) {
                    console.log(consumer.mq + " created");
                }
                consumer.take(callback);
            });
            return;
        }
        if (res.isStatus200()) {
            // Restore the original request context the broker tunneled
            // through the origin_* headers before surfacing the message.
            var originUrl = res.getOriginUrl();
            var id = res.getOriginId();
            res.removeHead(Message.ORIGIN_ID);
            if (originUrl == null) {
                originUrl = "/";
            } else {
                res.removeHead(Message.ORIGIN_URL);
            }
            res.setId(id);
            res.setUrl(originUrl);
            try {
                // Shield the consume loop from exceptions in user code.
                callback(res);
            } catch (error) {
                console.log(error);
            }
        }
        return consumer.take(callback);
    });
};
// Route a reply message back to its origin without expecting an ack.
Consumer.prototype.route = function (msg) {
    msg.setCmd(Proto.Route);
    msg.setAck(false);
    this.broker.invokeAsync(msg);
};

+ 91 - 0
sdk/websocket/test.htm

@ -0,0 +1,91 @@
<!DOCTYPE html>
<!-- Smoke-test page for hos-client.js: connects to a local zbus-style broker
     over WebSocket, creates queue "MyMQ", then dumps every consumed message
     into the output textarea below. -->
<html>
<head>
<meta http-equiv="Content-type" content="text/html; charset=utf-8" />
<title>ZBUS Monitor</title>
<script type="text/javascript" src="hos-client.js"></script>
<style>
table {
	font-family: "Helvetica Neue", Helvetica, sans-serif
}
caption {
	text-align: left;
	color: silver;
	font-weight: bold;
	text-transform: uppercase;
	padding: 5px;
}
thead {
	background: SteelBlue;
	color: white;
}
th, td {
	padding: 5px 10px;
}
tbody tr:nth-child(even) {
	background: WhiteSmoke;
}
tbody tr td:nth-child(2) {
	text-align: center;
}
tbody tr td:nth-child(3), tbody tr td:nth-child(4) {
	text-align: right;
	font-family: monospace;
}
tfoot {
	background: SeaGreen;
	color: white;
	text-align: right;
}
tfoot tr th:last-child {
	font-family: monospace;
}
#slogan {
	text-align: center;
	font: bold 24px arial, sans-serif;
	margin: 30px 30px;
}
</style>
</head>
<body>
	<h2 id="slogan" align="center">
		ZBUS = MQ + RPC <label id="broker" style="color: orange;"></label>
	</h2>
	<script type="text/javascript"> 
	// Connect to the broker, then start a consume loop that appends each
	// received message (rendered via its toString) to the output box.
	var broker = new Broker("ws://localhost:15555/");
	function cbConsumer(){
		var c = new Consumer(broker, "MyMQ");
		
		c.createMq(function(data){ 
			c.take(function(res){
				var ta = document.getElementById('responseText');
				if(ta.value){
					ta.value = ta.value + "\n\n";
				} 
				ta.value = ta.value + res;
				ta.scrollTop = ta.scrollHeight;
			});
		}); 
	}
	broker.connect(cbConsumer);
	
	</script>
	<div align="center">
		<h3>Output</h3>
		<textarea id="responseText" style="width: 900px; height: 400px;"></textarea>
	</div>
	
</body>
</html>

+ 0 - 6
src/main/java/com/yihu/hos/ESBApplication.java

@ -9,12 +9,6 @@ import javax.jms.Queue;
@SpringBootApplication
public class ESBApplication {
//    @Override
//    protected SpringApplicationBuilder configure(SpringApplicationBuilder application) {
//        return application.sources(ESBApplication.class);
//    }
    public static void main(String[] args) throws Exception {
        SpringApplication application = new SpringApplication(ESBApplication.class);
        application.run(args);

+ 2 - 2
src/main/java/com/yihu/hos/ServletInitializer.java

@ -1,7 +1,7 @@
package com.yihu.hos;
import org.springframework.boot.builder.SpringApplicationBuilder;
import org.springframework.boot.context.web.SpringBootServletInitializer;
import org.springframework.boot.web.support.SpringBootServletInitializer;
public class ServletInitializer extends SpringBootServletInitializer {
@ -10,4 +10,4 @@ public class ServletInitializer extends SpringBootServletInitializer {
		return application.sources(ESBApplication.class);
	}
}
}

+ 0 - 117
src/main/java/com/yihu/hos/common/ActiveMqUtil.java

@ -1,117 +0,0 @@
package com.yihu.hos.common;
import com.yihu.hos.common.activeMq.ActiveMqConstants;
import com.yihu.hos.common.activeMq.ActivemqConfiguration;
import org.apache.activemq.ActiveMQConnectionFactory;
import javax.jms.*;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
 * Static helper around a single shared ActiveMQ connection and session,
 * caching one {@link MessageProducer} and one {@link MessageConsumer} per
 * queue name.
 *
 * NOTE(review): a JMS {@code Session} is not thread-safe; this class shares
 * one static session across all callers — confirm single-threaded use.
 *
 * @author HZY
 * @version 1.0
 * Created at 2016/8/22.
 */
//@Component
public class ActiveMqUtil {
    static ConnectionFactory connectionFactory;
    static Connection connection = null;
    static Session session;
    // Per-queue caches; ConcurrentHashMap keeps concurrent lookups safe.
    static Map<String, MessageProducer> sendQueues = new ConcurrentHashMap<String, MessageProducer>();
    static Map<String, MessageConsumer> getQueues = new ConcurrentHashMap<String, MessageConsumer>();
    static {
        // Credentials/URI are populated by ActivemqConfiguration at startup.
        connectionFactory = new ActiveMQConnectionFactory(
                ActiveMqConstants.ACTIVE_MQ_USER,
                ActiveMqConstants.ACTIVE_MQ_PASS,
                ActiveMqConstants.ACTIVE_MQ_URI);
        try
        {
            connection = connectionFactory.createConnection();
            connection.start();
            // Non-transacted session with auto-acknowledge (was the magic
            // number 1, which is Session.AUTO_ACKNOWLEDGE).
            session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
        }
        catch (Exception e) {
            e.printStackTrace();
        }
    }
    /**
     * Returns the cached producer for {@code name}, creating it on first use.
     * Returns null if the queue/producer could not be created.
     */
    static MessageProducer getMessageProducer(String name) {
        MessageProducer producer = sendQueues.get(name);
        if (producer != null)
            return producer;
        try
        {
            Destination destination = session.createQueue(name);
            producer = session.createProducer(destination);
            sendQueues.put(name, producer);
            return producer;
        } catch (JMSException e) {
            e.printStackTrace();
        }
        return sendQueues.get(name);
    }
    /**
     * Returns the cached consumer for {@code name}, creating it on first use.
     * Returns null if the queue/consumer could not be created.
     */
    static MessageConsumer getMessageConsumer(String name) {
        MessageConsumer consumer = getQueues.get(name);
        if (consumer != null)
            return consumer;
        try
        {
            Destination destination = session.createQueue(name);
            consumer = session.createConsumer(destination);
            getQueues.put(name, consumer);
            return consumer;
        } catch (JMSException e) {
            e.printStackTrace();
        }
        return getQueues.get(name);
    }
    /**
     * Sends {@code text} to the named queue. JMS errors are logged and
     * swallowed (best-effort, matching the original contract).
     */
    public static void sendMessage(String queue, String text) {
        try {
            TextMessage message = session.createTextMessage(text);
            getMessageProducer(queue).send(message);
            // log.info("sendMessage " + queue + "\t\t" + text);
        }
        catch (JMSException e) {
            e.printStackTrace();
        }
    }
    /**
     * Receives one text message from the named queue, waiting up to 10s.
     *
     * @return the message body, or null on timeout or JMS error
     */
    public static String getMessage(String queue)
    {
        try {
            TextMessage message = (TextMessage) getMessageConsumer(queue).receive(10000L);
            if (message != null)
                return message.getText();
        } catch (JMSException e) {
            e.printStackTrace();
        }
        return null;
    }
    /** Closes the shared session and connection; errors are logged only. */
    public static void close() {
        try {
            session.close();
        } catch (JMSException e) {
            e.printStackTrace();
        }
        try {
            connection.close();
        } catch (JMSException e) {
            e.printStackTrace();
        }
    }
}

+ 0 - 13
src/main/java/com/yihu/hos/common/activeMq/ActiveMqConstants.java

@ -1,13 +0,0 @@
package com.yihu.hos.common.activeMq;
/**
 * Holder for ActiveMQ connection settings, written once at startup by
 * {@link ActivemqConfiguration#init()} and read by {@code ActiveMqUtil}.
 *
 * @author HZY
 * @version 1.0
 * Created at 2016/8/22.
 */
public class ActiveMqConstants {
    // Deliberately mutable: populated from Spring configuration at startup.
    public static String ACTIVE_MQ_USER = "";
    public static String ACTIVE_MQ_PASS = "";
    public static String ACTIVE_MQ_URI = "";
}

+ 0 - 29
src/main/java/com/yihu/hos/common/activeMq/ActivemqConfiguration.java

@ -1,29 +0,0 @@
package com.yihu.hos.common.activeMq;
import org.springframework.beans.factory.annotation.Value;
import javax.annotation.PostConstruct;
/**
 * Configuration-file parameters — static initialization.
 * Copies the {@code spring.activemq.*} properties into the static
 * {@link ActiveMqConstants} holder once this bean has been constructed.
 *
 * @author HZY
 * @version 1.0
 * Created at 2016/8/21.
 */
//@Configuration
public class ActivemqConfiguration {
    // Injected only when this class is managed by Spring; a plain
    // `new ActivemqConfiguration()` leaves these fields null.
    @Value("${spring.activemq.broker-url}")
    private String brokerURL;
    @Value("${spring.activemq.user}")
    private String user;
    @Value("${spring.activemq.password}")
    private String password;
    /** Publishes the injected property values to the static constants holder. */
    @PostConstruct
    public void init() {
        ActiveMqConstants.ACTIVE_MQ_USER = user;
        ActiveMqConstants.ACTIVE_MQ_PASS = password;
        ActiveMqConstants.ACTIVE_MQ_URI = brokerURL;
    }
}

+ 0 - 98
src/main/java/com/yihu/hos/common/mongo/IMongoDBAdminer.java

@ -1,98 +0,0 @@
package com.yihu.hos.common.mongo;
import java.util.List;
/**
 * Administrative operations on MongoDB collections: dropping, index
 * management and renaming. String parameters carrying keys or options are
 * JSON documents interpreted by the implementation.
 *
 * @created Airhead 2016/2/17.
 */
public interface IMongoDBAdminer {
    /**
     * Drops the named collection from the database.
     *
     * @param collectionName the collection to drop
     * @mongodb.driver.manual reference/command/drop/ Drop Collection
     */
    void drop(String collectionName);
    /**
     * Create an index with the given keys.
     *
     * @param collectionName the collection to index
     * @param keys an object describing the index key(s), which may not be null.
     * @return the index name
     * @mongodb.driver.manual reference/command/createIndexes Create indexes
     */
    String createIndex(String collectionName, String keys);
    /**
     * Create an index with the given keys and options.
     *
     * @param collectionName the collection to index
     * @param keys         an object describing the index key(s), which may not be null.
     * @param indexOptions the options for the index
     * @return the index name
     * @mongodb.driver.manual reference/command/createIndexes Create indexes
     */
    String createIndex(String collectionName, String keys, String indexOptions);
    /**
     * Create multiple indexes — not yet exposed through this interface.
     *
     * @mongodb.driver.manual reference/command/createIndexes Create indexes
     * @mongodb.server.release 2.6
     */
//    List<String> createIndexes(List<IndexModel> indexes);
    /**
     * Get all the indexes in the named collection.
     *
     * @param collectionName the collection to inspect
     * @return the index descriptions
     * @mongodb.driver.manual reference/command/listIndexes/ List indexes
     */
    List<String> listIndexes(String collectionName);
    /**
     * Typed variant of listIndexes — not yet exposed through this interface.
     */
//    <TResult> ListIndexesIterable<TResult> listIndexes(Class<TResult> resultClass);
    /**
     * Drops the index given its name.
     *
     * @param collectionName the collection holding the index
     * @param indexName the name of the index to remove
     * @mongodb.driver.manual reference/command/dropIndexes/ Drop indexes
     */
    void dropIndex(String collectionName, String indexName);
    /**
     * Drop all the indexes on the named collection, except the default on _id.
     *
     * @param collectionName the collection whose indexes are dropped
     * @mongodb.driver.manual reference/command/dropIndexes/ Drop indexes
     */
    void dropIndexes(String collectionName);
    /**
     * Rename the collection named collectionName to newCollectionName.
     *
     * @param collectionName    the collection to rename
     * @param newCollectionName the name the collection will be renamed to
     * @throws com.mongodb.MongoServerException if newCollectionName names an existing collection, or if
     *                                          collectionName names a collection that doesn't exist
     * @mongodb.driver.manual reference/commands/renameCollection Rename collection
     */
    void renameCollection(String collectionName, String newCollectionName);
    /**
     * Rename the collection named collectionName to newCollectionName.
     *
     * @param collectionName          the collection to rename
     * @param newCollectionName       the name the collection will be renamed to
     * @param renameCollectionOptions the options for renaming a collection
     * @throws com.mongodb.MongoServerException if newCollectionName names an existing collection and dropTarget
     *                                          is false, or if collectionName names a collection that doesn't exist
     * @mongodb.driver.manual reference/commands/renameCollection Rename collection
     */
    void renameCollection(String collectionName, String newCollectionName, String renameCollectionOptions);
}

+ 0 - 376
src/main/java/com/yihu/hos/common/mongo/IMongoDBRunner.java

@ -1,376 +0,0 @@
package com.yihu.hos.common.mongo;
import java.util.List;
/**
 * CRUD operations for MongoDB; the method set mirrors the driver's
 * {@code MongoCollection}. Filters, projections, updates and options are
 * passed as JSON document strings interpreted by the implementation.
 *
 * @created Airhead 2016/2/17.
 */
public interface IMongoDBRunner {
    /**
     * Counts all documents in the named collection.
     *
     * @param collectionName the collection to count
     * @return the number of documents
     */
    long count(String collectionName);
    /**
     * Counts the documents matching the given filter.
     *
     * @param filter the query filter
     * @return the number of matching documents
     */
    long count(String collectionName, String filter);
    /**
     * Counts the documents matching the given filter and count options.
     *
     * @param filter  the query filter
     * @param options the options describing the count
     * @return the number of matching documents
     */
    long count(String collectionName, String filter, String options);
    /**
     * Distinct values of a field — not yet exposed through this interface.
     *
     * @mongodb.driver.manual reference/command/distinct/ Distinct
     */
//    <TResult> DistinctIterable<TResult> distinct(String fieldName, Class<TResult> resultClass);
    /**
     * Filtered distinct values — not yet exposed through this interface.
     *
     * @mongodb.driver.manual reference/command/distinct/ Distinct
     */
//    <TResult> DistinctIterable<TResult> distinct(String fieldName, Bson filter, Class<TResult> resultClass);
    /**
     * Finds all documents in the named collection.
     *
     * @return the matching documents as JSON strings
     * @mongodb.driver.manual tutorial/query-documents/ Find
     */
    List<String> find(String collectionName);
    /**
     * Typed find — not yet exposed through this interface.
     */
//    <TResult> FindIterable<TResult> find(Class<TResult> resultClass);
    /**
     * Finds the documents matching the given filter.
     *
     * @param filter the query filter
     * @return the matching documents as JSON strings
     * @mongodb.driver.manual tutorial/query-documents/ Find
     */
    List<String> find(String collectionName, String filter);
    /**
     * Finds the documents matching the filter, shaped by the projection.
     *
     * @param filter     the query filter
     * @param projection the fields to include/exclude
     * @return the matching documents as JSON strings
     * @mongodb.driver.manual tutorial/query-documents/ Find
     */
    List<String> find(String collectionName, String filter, String projection);
    /**
     * Finds the documents matching the filter, shaped by the projection, with
     * extra find options (sort, limit, ...).
     *
     * @param filter     the query filter
     * @param projection the fields to include/exclude
     * @param options    additional find options
     * @return the matching documents as JSON strings
     * @mongodb.driver.manual tutorial/query-documents/ Find
     */
    List<String> find(String collectionName, String filter, String projection, String options);
    /**
     * Typed filtered find — not yet exposed through this interface.
     */
//    <TResult> FindIterable<TResult> find(Bson filter, Class<TResult> resultClass);
    /**
     * Aggregation pipeline — not yet exposed through this interface.
     *
     * @mongodb.driver.manual aggregation/ Aggregation
     */
//    List<String> aggregate(String collectionName, List<? extends String> pipeline);
    /**
     * Typed aggregation — not yet exposed through this interface.
     */
//    <TResult> AggregateIterable<TResult> aggregate(List<? extends Bson> pipeline, Class<TResult> resultClass);
    /**
     * Map-reduce — not yet exposed through this interface.
     *
     * @mongodb.driver.manual reference/command/mapReduce/ map-reduce
     */
//    List<String> mapReduce(String collectionName, String mapFunction, String reduceFunction);
    /**
     * Typed map-reduce — not yet exposed through this interface.
     */
//    <TResult> MapReduceIterable<TResult> mapReduce(String mapFunction, String reduceFunction, Class<TResult> resultClass);
    /**
     * Bulk writes — not yet exposed through this interface.
     */
//    BulkWriteResult bulkWrite(List<? extends WriteModel<? extends TDocument>> requests);
    /**
     * Bulk writes with options — not yet exposed through this interface.
     */
//    BulkWriteResult bulkWrite(List<? extends WriteModel<? extends TDocument>> requests, BulkWriteOptions options);
    /**
     * Inserts one document. If it has no identifier, the driver generates one.
     *
     * @param document the document to insert, as a JSON string
     * @throws com.mongodb.MongoException if the write fails
     */
    void insertOne(String collectionName, String document);
    /**
     * Inserts one or more documents.
     *
     * @param documents the documents to insert, as JSON strings
     * @throws com.mongodb.MongoBulkWriteException if part of the bulk write fails
     */
    void insertMany(String collectionName, List<String> documents);
    /**
     * Inserts one or more documents with insert options.
     *
     * @param documents the documents to insert, as JSON strings
     * @param options   the options to apply to the operation
     * @throws com.mongodb.MongoException if the write fails
     */
    void insertMany(String collectionName, List<String> documents, String options);
    /**
     * Removes at most one document matching the filter; no match leaves the
     * collection unmodified.
     *
     * @param filter the query filter to apply to the delete operation
     * @return the number of documents deleted (0 or 1)
     * @throws com.mongodb.MongoException if the write fails
     */
    long deleteOne(String collectionName, String filter);
    /**
     * Removes all documents matching the filter.
     *
     * @param filter the query filter to apply to the delete operation
     * @return the number of documents deleted
     * @throws com.mongodb.MongoException if the write fails
     */
    long deleteMany(String collectionName, String filter);
    /**
     * Replaces a single document matching the filter with the replacement.
     *
     * @param filter      the query filter to apply to the replace operation
     * @param replacement the replacement document
     * @return the number of documents modified
     * @mongodb.driver.manual tutorial/modify-documents/#replace-the-document Replace
     */
    long replaceOne(String collectionName, String filter, String replacement);
    /**
     * Replaces a single document matching the filter, with replace options.
     *
     * @param filter        the query filter to apply to the replace operation
     * @param replacement   the replacement document
     * @param updateOptions the options to apply to the replace operation
     * @return the number of documents modified
     * @mongodb.driver.manual tutorial/modify-documents/#replace-the-document Replace
     */
    long replaceOne(String collectionName, String filter, String replacement, String updateOptions);
    /**
     * Updates a single matching document; the update document must contain
     * only update operators.
     *
     * @param filter a document describing the query filter; may not be null
     * @param update a document describing the update; may not be null
     * @return the number of documents modified
     * @mongodb.driver.manual reference/operator/update/ Update Operators
     */
    long updateOne(String collectionName, String filter, String update);
    /**
     * Updates a single matching document with update options.
     *
     * @param filter        a document describing the query filter; may not be null
     * @param update        a document describing the update; may not be null
     * @param updateOptions the options to apply to the update operation
     * @return the number of documents modified
     * @mongodb.driver.manual reference/operator/update/ Update Operators
     */
    long updateOne(String collectionName, String filter, String update, String updateOptions);
    /**
     * Updates all matching documents; the update document must contain only
     * update operators.
     *
     * @param filter a document describing the query filter; may not be null
     * @param update a document describing the update; may not be null
     * @return the number of documents modified
     * @mongodb.driver.manual reference/operator/update/ Update Operators
     */
    long updateMany(String collectionName, String filter, String update);
    /**
     * Updates all matching documents with update options.
     *
     * @param filter        a document describing the query filter; may not be null
     * @param update        a document describing the update; may not be null
     * @param updateOptions the options to apply to the update operation
     * @return the number of documents modified
     * @mongodb.driver.manual reference/operator/update/ Update Operators
     */
    long updateMany(String collectionName, String filter, String update, String updateOptions);
    /**
     * Atomically finds a document and removes it.
     *
     * @param filter the query filter to find the document with
     * @return the removed document, or null if nothing matched
     */
    String findOneAndDelete(String collectionName, String filter);
    /**
     * Atomically finds a document and removes it, with options.
     *
     * @param filter  the query filter to find the document with
     * @param options the options to apply to the operation
     * @return the removed document, or null if nothing matched
     */
    String findOneAndDelete(String collectionName, String filter, String options);
    /**
     * Atomically finds a document and replaces it.
     *
     * @param filter      the query filter to apply to the replace operation
     * @param replacement the replacement document
     * @return the document before or after the replace (depending on the
     *         returnOriginal setting), or null if nothing matched
     */
    String findOneAndReplace(String collectionName, String filter, String replacement);
    /**
     * Atomically finds a document and replaces it, with options.
     *
     * @param filter      the query filter to apply to the replace operation
     * @param replacement the replacement document
     * @param options     the options to apply to the operation
     * @return the document before or after the replace (depending on the
     *         returnOriginal setting), or null if nothing matched
     */
    String findOneAndReplace(String collectionName, String filter, String replacement, String options);
    /**
     * Atomically finds a document and updates it.
     *
     * @param filter a document describing the query filter; may not be null
     * @param update a document describing the update; may not be null
     * @return the document as it was before the update, or null if nothing matched
     */
    String findOneAndUpdate(String collectionName, String filter, String update);
    /**
     * Atomically finds a document and updates it, with options.
     *
     * @param filter  a document describing the query filter; may not be null
     * @param update  a document describing the update; may not be null
     * @param options the options to apply to the operation
     * @return the document before or after the update (depending on the
     *         returnOriginal setting), or null if nothing matched
     */
    String findOneAndUpdate(String collectionName, String filter, String update, String options);
}

+ 0 - 172
src/main/java/com/yihu/hos/common/mongo/MongoDB.java

@ -1,172 +0,0 @@
package com.yihu.hos.common.mongo;
import com.mongodb.client.MongoCollection;
import org.bson.Document;
import java.util.List;
/**
 * Static facade over {@link MongoDBPro} that flattens the MongoDB API and
 * hides resource management.
 * Note: for performance-critical paths prefer the native driver; the raw
 * {@code MongoCollection<Document>} is available via {@link #getCollection}.
 * Usage:
 * 1. MongoDBKit.addConfig()
 * 2. MongoDBKit.start()
 * 3. query through this class
 * 4. use() to switch to another configured connection
 *
 * @created Airhead 2016/2/17.
 */
public class MongoDB {
    private static MongoDBPro mongoDBPro;
    /** Binds the facade to the default MongoDBPro instance. */
    static void init() {
        mongoDBPro = MongoDBPro.use();
    }
    /** Returns the MongoDBPro bound to the named configuration. */
    public static MongoDBPro use(String configName) {
        return MongoDBPro.use(configName);
    }
    /** Switches the default instance to the named database. */
    public static MongoDBPro db(String databaseName) {
        return mongoDBPro.db(databaseName);
    }
    // ---- count -------------------------------------------------------------
    public static long count(String collectionName) {
        return mongoDBPro.count(collectionName);
    }
    public static long count(String collectionName, String filter) {
        return mongoDBPro.count(collectionName, filter);
    }
    public static long count(String collectionName, String filter, String options) {
        return mongoDBPro.count(collectionName, filter, options);
    }
    // ---- find --------------------------------------------------------------
    public static List<String> find(String collectionName) {
        return mongoDBPro.find(collectionName);
    }
    public static List<String> find(String collectionName, String filter) {
        return mongoDBPro.find(collectionName, filter);
    }
    public static List<String> find(String collectionName, String filter, String projection) {
        return mongoDBPro.find(collectionName, filter, projection);
    }
    public static List<String> find(String collectionName, String filter, String projection, String options) {
        return mongoDBPro.find(collectionName, filter, projection, options);
    }
    // ---- insert ------------------------------------------------------------
    public static void insertOne(String collectionName, String document) {
        mongoDBPro.insertOne(collectionName, document);
    }
    public static void insertMany(String collectionName, List<String> documents) {
        mongoDBPro.insertMany(collectionName, documents);
    }
    public static void insertMany(String collectionName, List<String> documents, String options) {
        mongoDBPro.insertMany(collectionName, documents, options);
    }
    // ---- delete ------------------------------------------------------------
    public static long deleteOne(String collectionName, String filter) {
        return mongoDBPro.deleteOne(collectionName, filter);
    }
    public static long deleteMany(String collectionName, String filter) {
        return mongoDBPro.deleteMany(collectionName, filter);
    }
    // ---- replace / update --------------------------------------------------
    public static long replaceOne(String collectionName, String filter, String replacement) {
        return mongoDBPro.replaceOne(collectionName, filter, replacement);
    }
    public static long replaceOne(String collectionName, String filter, String replacement, String updateOptions) {
        return mongoDBPro.replaceOne(collectionName, filter, replacement, updateOptions);
    }
    /**
     * Applies the update operators in {@code update} to a single matching
     * document.
     * Fixed: previously delegated to replaceOne(), which replaces the whole
     * document instead of applying update operators.
     */
    public static long updateOne(String collectionName, String filter, String update) {
        return mongoDBPro.updateOne(collectionName, filter, update);
    }
    /** Single-document update with options (fixed: was delegating to replaceOne). */
    public static long updateOne(String collectionName, String filter, String update, String updateOptions) {
        return mongoDBPro.updateOne(collectionName, filter, update, updateOptions);
    }
    public static long updateMany(String collectionName, String filter, String update) {
        return mongoDBPro.updateMany(collectionName, filter, update);
    }
    public static long updateMany(String collectionName, String filter, String update, String updateOptions) {
        return mongoDBPro.updateMany(collectionName, filter, update, updateOptions);
    }
    // ---- atomic find-and-modify -------------------------------------------
    public static String findOneAndDelete(String collectionName, String filter) {
        return mongoDBPro.findOneAndDelete(collectionName, filter);
    }
    public static String findOneAndDelete(String collectionName, String filter, String options) {
        return mongoDBPro.findOneAndDelete(collectionName, filter, options);
    }
    public static String findOneAndReplace(String collectionName, String filter, String replacement) {
        return mongoDBPro.findOneAndReplace(collectionName, filter, replacement);
    }
    public static String findOneAndReplace(String collectionName, String filter, String replacement, String options) {
        return mongoDBPro.findOneAndReplace(collectionName, filter, replacement, options);
    }
    public static String findOneAndUpdate(String collectionName, String filter, String update, String options) {
        return mongoDBPro.findOneAndUpdate(collectionName, filter, update, options);
    }
    public static String findOneAndUpdate(String collectionName, String filter, String update) {
        return mongoDBPro.findOneAndUpdate(collectionName, filter, update);
    }
    // ---- administration ----------------------------------------------------
    public static void drop(String collectionName) {
        mongoDBPro.drop(collectionName);
    }
    public static String createIndex(String collectionName, String keys) {
        return mongoDBPro.createIndex(collectionName, keys);
    }
    public static String createIndex(String collectionName, String keys, String indexOptions) {
        return mongoDBPro.createIndex(collectionName, keys, indexOptions);
    }
    public static List<String> listIndexes(String collectionName) {
        return mongoDBPro.listIndexes(collectionName);
    }
    public static void dropIndex(String collectionName, String indexName) {
        mongoDBPro.dropIndex(collectionName, indexName);
    }
    public static void dropIndexes(String collectionName) {
        mongoDBPro.dropIndexes(collectionName);
    }
    public static void renameCollection(String collectionName, String newCollectionName) {
        mongoDBPro.renameCollection(collectionName, newCollectionName);
    }
    public static void renameCollection(String collectionName, String newCollectionName, String renameCollectionOptions) {
        mongoDBPro.renameCollection(collectionName, newCollectionName, renameCollectionOptions);
    }
    /** Escape hatch to the native driver collection. */
    public static MongoCollection<Document> getCollection(String collectionName) {
        return mongoDBPro.getCollection(collectionName);
    }
    public static List<String> listCollectionNames(){
        return mongoDBPro.listCollectionNames();
    }
}

+ 0 - 65
src/main/java/com/yihu/hos/common/mongo/MongoDBConfig.java

@ -1,65 +0,0 @@
package com.yihu.hos.common.mongo;
import com.mongodb.MongoClient;
import com.mongodb.MongoClientOptions;
import com.mongodb.MongoClientURI;
import com.mongodb.client.MongoDatabase;
/**
 * @created Airhead 2016/2/17.
 */
public class MongoDBConfig {
    String name;
    String uri;
    String defaultDatabaseName;
    // NOTE(review): the client is static, so it is built once from the uri of whichever
    // config first calls getDatabase(); later configs with a different uri silently
    // reuse that client. Kept as-is for compatibility — confirm before relying on
    // multiple distinct uris.
    static MongoClient mongoClient;
    MongoClientOptions mongoClientOptions;      // not used yet; Mongo driver defaults apply
    MongoDatabase mongoDatabase;                // cache of the most recently requested database

    /** Creates the main config ({@code MAIN_CONFIG_NAME}) with the default database name. */
    public MongoDBConfig(String uri) {
        this(MongoDBKit.MAIN_CONFIG_NAME, uri);
    }

    /** Creates a named config with the default database name. */
    public MongoDBConfig(String name, String uri) {
        this(name, uri, MongoDBKit.DEFAULT_DB_NAME);
    }

    /**
     * Creates a named config.
     *
     * @param name         config name used as the registry key in MongoDBKit
     * @param uri          MongoDB connection string
     * @param databaseName database used by {@link #getDatabase()}
     */
    public MongoDBConfig(String name, String uri, String databaseName) {
        this.name = name;
        this.uri = uri;
        this.defaultDatabaseName = databaseName;
    }

    public String getName() {
        return name;
    }

    /**
     * Returns the named database, creating the shared client on first use.
     * Fix: client creation is synchronized so racing threads cannot create
     * several MongoClient instances for the shared static field.
     *
     * @param databaseName the database to open
     * @return the driver database handle (also cached for subsequent calls)
     */
    public MongoDatabase getDatabase(String databaseName) {
        synchronized (MongoDBConfig.class) {
            if (mongoClient == null) {
                mongoClient = new MongoClient(new MongoClientURI(uri));
            }
        }
        MongoDatabase cached = mongoDatabase;
        if (cached != null && cached.getName().equals(databaseName)) {
            return cached;
        }
        mongoDatabase = mongoClient.getDatabase(databaseName);
        return mongoDatabase;
    }

    /** Returns the cached database, or opens the default database on first call. */
    public MongoDatabase getDatabase() {
        if (mongoDatabase != null) {
            return mongoDatabase;
        }
        return getDatabase(defaultDatabaseName);
    }
}

+ 0 - 57
src/main/java/com/yihu/hos/common/mongo/MongoDBKit.java

@ -1,57 +0,0 @@
package com.yihu.hos.common.mongo;
import java.util.HashMap;
import java.util.Map;
/**
 * @created Airhead 2016/2/17.
 */
public class MongoDBKit {
    public static final String MAIN_CONFIG_NAME = "main";
    public static final String DEFAULT_DB_NAME = "test";

    /** The main (default) config: either explicitly named "main" or the first one registered. */
    static MongoDBConfig config = null;
    /** Registry of every config added, keyed by config name. */
    private static Map<String, MongoDBConfig> configNameToConfig = new HashMap<>();

    /** Bootstraps the MongoDB layer. */
    public static void start() {
        MongoDB.init();
    }

    /** Returns the main config (null until any config is added). */
    public static MongoDBConfig getConfig() {
        return config;
    }

    /** Looks up a config by name; null when unknown. */
    public static MongoDBConfig getConfig(String configName) {
        return configNameToConfig.get(configName);
    }

    /**
     * Add Config object
     *
     * @param config the Config contains Mongodb uri and MongoClientOptions etc.
     */
    public static void addConfig(MongoDBConfig config) {
        if (config == null) {
            throw new IllegalArgumentException("Config can not be null");
        }
        String name = config.getName();
        if (configNameToConfig.containsKey(name)) {
            throw new IllegalArgumentException("Config already exists: " + name);
        }
        configNameToConfig.put(name, config);
        // The main slot is claimed by a config explicitly named MAIN_CONFIG_NAME,
        // or by the very first config registered when no main exists yet.
        if (MAIN_CONFIG_NAME.equals(name) || MongoDBKit.config == null) {
            MongoDBKit.config = config;
        }
    }
}

+ 0 - 92
src/main/java/com/yihu/hos/common/mongo/MongoDBOperator.java

@ -1,92 +0,0 @@
package com.yihu.hos.common.mongo;
/**
 * Query Selectors
 * $eq	Matches values that are equal to a specified value.
 * $gt	Matches values that are greater than a specified value.
 * $gte	Matches values that are greater than or equal to a specified value.
 * $lt	Matches values that are less than a specified value.
 * $lte	Matches values that are less than or equal to a specified value.
 * $ne	Matches all values that are not equal to a specified value.
 * $in	Matches any of the values specified in an array.
 * $nin	Matches none of the values specified in an array.
 *
 * Logical
 * $or	Joins query clauses with a logical OR returns all documents that match the conditions of either clause.
 * $and	Joins query clauses with a logical AND returns all documents that match the conditions of both clauses.
 * $not	Inverts the effect of a query expression and returns documents that do not match the query expression.
 * $nor	Joins query clauses with a logical NOR returns all documents that fail to match both clauses.
 *
 * Element
 * $exists	Matches documents that have the specified field.
 * $type	Selects documents if a field is of the specified type.
 *
 * Evaluation
 * $mod	Performs a modulo operation on the value of a field and selects documents with a specified result.
 * $regex	Selects documents where values match a specified regular expression.
 * $text	Performs text search.
 * $where	Matches documents that satisfy a JavaScript expression.
 *
 * Geospatial
 * $geoWithin	Selects geometries within a bounding GeoJSON geometry. The 2dsphere and 2d indexes support $geoWithin.
 * $geoIntersects	Selects geometries that intersect with a GeoJSON geometry. The 2dsphere index supports $geoIntersects.
 * $near	Returns geospatial objects in proximity to a point. Requires a geospatial index. The 2dsphere and 2d indexes support $near.
 * $nearSphere	Returns geospatial objects in proximity to a point on a sphere. Requires a geospatial index. The 2dsphere and 2d indexes support $nearSphere.
 *
 * Array
 * $all	Matches arrays that contain all elements specified in the query.
 * $elemMatch	Selects documents if element in the array field matches all the specified $elemMatch conditions.
 * $size	Selects documents if the array field is a specified size.
 *
 * Bitwise
 * $bitsAllSet	Matches numeric or binary values in which a set of bit positions all have a value of 1.
 * $bitsAnySet	Matches numeric or binary values in which any bit from a set of bit positions has a value of 1.
 * $bitsAllClear	Matches numeric or binary values in which a set of bit positions all have a value of 0.
 * $bitsAnyClear	Matches numeric or binary values in which any bit from a set of bit positions has a value of 0.
 *
 * Comments
 * $comment	Adds a comment to a query predicate.
 *
 * Projection Operators
 * $	Projects the first element in an array that matches the query condition.
 * $elemMatch	Projects the first element in an array that matches the specified $elemMatch condition.
 * $meta	Projects the document’s score assigned during $text operation.
 * $slice	Limits the number of elements projected from an array. Supports skip and limit slices.
 *
 * Update Operators
 * $inc	Increments the value of the field by the specified amount.
 * $mul	Multiplies the value of the field by the specified amount.
 * $rename	Renames a field.
 * $setOnInsert	Sets the value of a field if an update results in an insert of a document. Has no effect on update operations that modify existing documents.
 * $set	Sets the value of a field in a document.
 * $unset	Removes the specified field from a document.
 * $min	Only updates the field if the specified value is less than the existing field value.
 * $max	Only updates the field if the specified value is greater than the existing field value.
 * $currentDate	Sets the value of a field to current date, either as a Date or a Timestamp.
 *
 * Array
 * $	Acts as a placeholder to update the first element that matches the query condition in an update.
 * $addToSet	Adds elements to an array only if they do not already exist in the set.
 * $pop	Removes the first or last item of an array.
 * $pullAll	Removes all matching values from an array.
 * $pull	Removes all array elements that match a specified query.
 * $pushAll	Deprecated. Adds several items to an array.
 * $push	Adds an item to an array.
 *
 * Modifiers
 * $each	Modifies the $push and $addToSet operators to append multiple items for array updates.
 * $slice	Modifies the $push operator to limit the size of updated arrays.
 * $sort	Modifies the $push operator to reorder documents stored in an array.
 * $position	Modifies the $push operator to specify the position in the array to add elements.
 *
 * Bitwise
 * $bit	Performs bitwise AND, OR, and XOR updates of integer values.
 *
 * Isolation
 * $isolated	Modifies the behavior of a write operation to increase the isolation of the operation.
 * @created Airhead 2016/2/17.
 */
public class MongoDBOperator {
    /** Reference-only holder (see the operator catalogue above) — not meant to be instantiated. */
    private MongoDBOperator() {
    }
}

+ 0 - 566
src/main/java/com/yihu/hos/common/mongo/MongoDBPro.java

@ -1,566 +0,0 @@
package com.yihu.hos.common.mongo;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.mongodb.MongoNamespace;
import com.mongodb.client.*;
import com.mongodb.client.model.*;
import com.mongodb.client.result.DeleteResult;
import com.mongodb.client.result.UpdateResult;
import org.apache.commons.lang3.StringUtils;
import org.bson.Document;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * MongoDBPro. Professional database CRUD and manager tool.
 *
 * @created Airhead 2016/2/17.
 */
public class MongoDBPro implements IMongoDBRunner, IMongoDBAdminer {
    /** Instance cache keyed by config name so each config is wrapped exactly once. */
    private static final Map<String, MongoDBPro> map = new HashMap<String, MongoDBPro>();
    private final MongoDBConfig config;

    /** Binds to the main config; MongoDBKit must have been initialized first. */
    public MongoDBPro() {
        if (MongoDBKit.config == null) {
            throw new RuntimeException("The main config is null, initialize MongoDBKit first");
        }
        this.config = MongoDBKit.config;
    }

    /** Binds to a named config registered in MongoDBKit. */
    public MongoDBPro(String configName) {
        this.config = MongoDBKit.getConfig(configName);
        if (this.config == null) {
            throw new IllegalArgumentException("Config not found by configName: " + configName);
        }
    }

    /** Returns the instance bound to the main config. */
    public static MongoDBPro use() {
        return use(MongoDBKit.config.name);
    }

    /** Returns (and lazily caches) the instance bound to the named config. */
    public static MongoDBPro use(String configName) {
        MongoDBPro result = map.get(configName);
        if (result == null) {
            result = new MongoDBPro(configName);
            map.put(configName, result);
        }
        return result;
    }

    /** Switches the underlying config to the given database name; returns this for chaining. */
    public MongoDBPro db(String databaseName) {
        config.getDatabase(databaseName);
        return this;
    }

    /* ---------------- option-parsing helpers ---------------- */

    /**
     * Parses an options JSON string into a tree. Returns null on malformed input so
     * callers keep the legacy best-effort behavior of falling back to default options.
     */
    private static JsonNode parseOptions(String options) {
        try {
            return new ObjectMapper().readValue(options, JsonNode.class);
        } catch (IOException e) {
            e.printStackTrace();
            return null;
        }
    }

    /**
     * Extracts a sub-document field (e.g. "projection", "sort") from parsed options;
     * returns an empty Document when the field is absent.
     */
    private static Document subDocument(JsonNode rootNode, String field) {
        String json = rootNode.path(field).toString();
        if (StringUtils.isEmpty(json)) {
            return new Document();
        }
        return Document.parse(json);
    }

    /** Builds UpdateOptions from a JSON string of the form {upsert:&lt;boolean&gt;}; null means defaults. */
    private static UpdateOptions parseUpdateOptions(String updateOptions) {
        UpdateOptions options = new UpdateOptions();
        if (updateOptions != null) {
            JsonNode rootNode = parseOptions(updateOptions);
            if (rootNode != null) {
                options.upsert(rootNode.path("upsert").asBoolean());
            }
        }
        return options;
    }

    @Override
    public long count(String collectionName) {
        return count(collectionName, null);
    }

    @Override
    public long count(String collectionName, String filter) {
        return count(collectionName, filter, null);
    }

    /**
     * Counts documents matching the filter.
     *
     * @param collectionName the collection to count
     * @param filter         the query filter (JSON), may be null
     * @param options        the options describing the count (JSON), may be null:
     *                       {
     *                       limit: &lt;integer&gt;,
     *                       skip: &lt;integer&gt;,
     *                       hint: &lt;hint&gt;
     *                       }
     * @return the number of matching documents
     */
    @Override
    public long count(String collectionName, String filter, String options) {
        MongoCollection<Document> collection = getCollection(collectionName);
        Document filterDocument = filter == null ? new Document() : Document.parse(filter);
        CountOptions countOptions = new CountOptions();
        if (options != null) {
            JsonNode rootNode = parseOptions(options);
            if (rootNode != null) {
                String hintString = rootNode.path("hint").toString();
                // Fix: only apply a hint when one was actually supplied; an empty
                // hint document is not a valid index hint.
                if (!StringUtils.isEmpty(hintString)) {
                    countOptions.hint(Document.parse(hintString));
                }
                countOptions.limit(rootNode.path("limit").asInt());
                countOptions.skip(rootNode.path("skip").asInt());
            }
        }
        return collection.count(filterDocument, countOptions);
    }

    @Override
    public List<String> find(String collectionName) {
        return find(collectionName, null);
    }

    @Override
    public List<String> find(String collectionName, String filter) {
        return find(collectionName, filter, null);
    }

    @Override
    public List<String> find(String collectionName, String filter, String projection) {
        return find(collectionName, filter, projection, null);
    }

    /**
     * Finds documents and returns each as a JSON string.
     *
     * @param collectionName the collection to query
     * @param filter         the query filter (JSON), may be null
     * @param projection     the projection document (JSON), may be null
     * @param options        currently ignored (reserved for sort/limit/skip support)
     * @return the matching documents serialized to JSON
     */
    @Override
    public List<String> find(String collectionName, String filter, String projection, String options) {
        MongoCollection<Document> collection = getCollection(collectionName);
        Document filterDocument = filter == null ? new Document() : Document.parse(filter);
        Document projectionDocument = projection == null ? new Document() : Document.parse(projection);
        FindIterable<Document> documents = collection.find(filterDocument).projection(projectionDocument);
        List<String> list = new ArrayList<>();
        try (MongoCursor<Document> cursor = documents.iterator()) {
            while (cursor.hasNext()) {
                list.add(cursor.next().toJson());
            }
        }
        return list;
    }
//    @Override
//    public List<String> aggregate(String collectionName, List<? extends String> pipeline) {
//        return null;
//    }
//    @Override
//    public List<String> mapReduce(String collectionName, String mapFunction, String reduceFunction) {
//        return null;
//    }

    @Override
    public void insertOne(String collectionName, String document) {
        getCollection(collectionName).insertOne(Document.parse(document));
    }

    @Override
    public void insertMany(String collectionName, List<String> documents) {
        insertMany(collectionName, documents, null);
    }

    /**
     * Inserts several documents.
     *
     * @param collectionName the target collection
     * @param documents      the documents to insert (JSON strings)
     * @param options        the options to apply to the operation (JSON), may be null:
     *                       {
     *                       ordered: &lt;boolean&gt;
     *                       }
     */
    @Override
    public void insertMany(String collectionName, List<String> documents, String options) {
        MongoCollection<Document> collection = getCollection(collectionName);
        List<Document> list = new ArrayList<>();
        for (String document : documents) {
            list.add(Document.parse(document));
        }
        InsertManyOptions insertManyOptions = new InsertManyOptions();
        if (options != null) {
            JsonNode rootNode = parseOptions(options);
            if (rootNode != null) {
                insertManyOptions.ordered(rootNode.path("ordered").asBoolean());
            }
        }
        collection.insertMany(list, insertManyOptions);
    }

    @Override
    public long deleteOne(String collectionName, String filter) {
        DeleteResult deleteResult = getCollection(collectionName).deleteOne(Document.parse(filter));
        return deleteResult.getDeletedCount();
    }

    @Override
    public long deleteMany(String collectionName, String filter) {
        DeleteResult deleteResult = getCollection(collectionName).deleteMany(Document.parse(filter));
        return deleteResult.getDeletedCount();
    }

    @Override
    public long replaceOne(String collectionName, String filter, String replacement) {
        return replaceOne(collectionName, filter, replacement, null);
    }

    /**
     * Replaces a single document.
     *
     * @param collectionName the target collection
     * @param filter         the query filter to apply to the replace operation
     * @param replacement    the replacement document (JSON)
     * @param updateOptions  the options to apply (JSON), may be null: { upsert: &lt;boolean&gt; }
     * @return the number of documents modified
     */
    @Override
    public long replaceOne(String collectionName, String filter, String replacement, String updateOptions) {
        MongoCollection<Document> collection = getCollection(collectionName);
        UpdateResult updateResult = collection.replaceOne(
                Document.parse(filter), Document.parse(replacement), parseUpdateOptions(updateOptions));
        return updateResult.getModifiedCount();
    }

    @Override
    public long updateOne(String collectionName, String filter, String update) {
        return updateOne(collectionName, filter, update, null);
    }

    /**
     * Updates a single document.
     *
     * @param collectionName the target collection
     * @param filter         a document describing the query filter, which may not be null
     * @param update         a document describing the update (update operators only)
     * @param updateOptions  the options to apply (JSON), may be null: { upsert: &lt;boolean&gt; }
     * @return the number of documents modified
     */
    @Override
    public long updateOne(String collectionName, String filter, String update, String updateOptions) {
        MongoCollection<Document> collection = getCollection(collectionName);
        UpdateResult updateResult = collection.updateOne(
                Document.parse(filter), Document.parse(update), parseUpdateOptions(updateOptions));
        return updateResult.getModifiedCount();
    }

    @Override
    public long updateMany(String collectionName, String filter, String update) {
        return updateMany(collectionName, filter, update, null);
    }

    /**
     * Updates all matching documents.
     *
     * @param collectionName the target collection
     * @param filter         a document describing the query filter, which may not be null
     * @param update         a document describing the update (update operators only)
     * @param updateOptions  the options to apply (JSON), may be null: { upsert: &lt;boolean&gt; }
     * @return the number of documents modified
     */
    @Override
    public long updateMany(String collectionName, String filter, String update, String updateOptions) {
        MongoCollection<Document> collection = getCollection(collectionName);
        UpdateResult updateResult = collection.updateMany(
                Document.parse(filter), Document.parse(update), parseUpdateOptions(updateOptions));
        return updateResult.getModifiedCount();
    }

    @Override
    public String findOneAndDelete(String collectionName, String filter) {
        return findOneAndDelete(collectionName, filter, null);
    }

    /**
     * Atomically finds one document and deletes it.
     *
     * @param collectionName the target collection
     * @param filter         the query filter to find the document with
     * @param options        the options to apply (JSON), may be null:
     *                       { projection: &lt;document&gt;, sort: &lt;document&gt; }
     * @return the deleted document as JSON, or "{}" when none matched
     */
    @Override
    public String findOneAndDelete(String collectionName, String filter, String options) {
        MongoCollection<Document> collection = getCollection(collectionName);
        Document filterDocument = Document.parse(filter);
        FindOneAndDeleteOptions findOneAndDeleteOptions = new FindOneAndDeleteOptions();
        if (options != null) {
            JsonNode rootNode = parseOptions(options);
            if (rootNode != null) {
                // Fix: the parsed projection/sort documents were previously discarded
                // and empty documents were applied instead.
                findOneAndDeleteOptions.projection(subDocument(rootNode, "projection"));
                findOneAndDeleteOptions.sort(subDocument(rootNode, "sort"));
            }
        }
        Document document = collection.findOneAndDelete(filterDocument, findOneAndDeleteOptions);
        return document == null ? "{}" : document.toJson();
    }

    @Override
    public String findOneAndReplace(String collectionName, String filter, String replacement) {
        return findOneAndReplace(collectionName, filter, replacement, null);
    }

    /**
     * Atomically finds one document and replaces it.
     *
     * @param collectionName the target collection
     * @param filter         the query filter to apply to the replace operation
     * @param replacement    the replacement document (JSON)
     * @param options        the options to apply (JSON), may be null:
     *                       { projection: &lt;document&gt;, sort: &lt;document&gt;, upsert: &lt;boolean&gt; }
     * @return the replaced document as JSON, or "{}" when none matched
     */
    @Override
    public String findOneAndReplace(String collectionName, String filter, String replacement, String options) {
        MongoCollection<Document> collection = getCollection(collectionName);
        Document filterDocument = Document.parse(filter);
        Document replacementDocument = Document.parse(replacement);
        FindOneAndReplaceOptions findOneAndReplaceOptions = new FindOneAndReplaceOptions();
        if (options != null) {
            JsonNode rootNode = parseOptions(options);
            if (rootNode != null) {
                // Fix: the parsed projection/sort documents were previously discarded
                // and empty documents were applied instead.
                findOneAndReplaceOptions.projection(subDocument(rootNode, "projection"));
                findOneAndReplaceOptions.sort(subDocument(rootNode, "sort"));
                findOneAndReplaceOptions.upsert(rootNode.path("upsert").asBoolean());
            }
        }
        Document document = collection.findOneAndReplace(filterDocument, replacementDocument, findOneAndReplaceOptions);
        return document == null ? "{}" : document.toJson();
    }

    @Override
    public String findOneAndUpdate(String collectionName, String filter, String update) {
        return findOneAndUpdate(collectionName, filter, update, null);
    }

    /**
     * Atomically finds one document and updates it.
     *
     * @param collectionName the target collection
     * @param filter         a document describing the query filter, which may not be null
     * @param update         a document describing the update (update operators only)
     * @param options        the options to apply (JSON), may be null:
     *                       { projection: &lt;document&gt;, sort: &lt;document&gt;, upsert: &lt;boolean&gt; }
     * @return the pre-update document as JSON, or "{}" when none matched
     */
    @Override
    public String findOneAndUpdate(String collectionName, String filter, String update, String options) {
        MongoCollection<Document> collection = getCollection(collectionName);
        Document filterDocument = Document.parse(filter);
        Document updateDocument = Document.parse(update);
        FindOneAndUpdateOptions findOneAndUpdateOptions = new FindOneAndUpdateOptions();
        if (options != null) {
            JsonNode rootNode = parseOptions(options);
            if (rootNode != null) {
                // Fix: previously used asText(), which yields "" for object nodes and
                // made Document.parse throw whenever projection/sort were omitted.
                findOneAndUpdateOptions.projection(subDocument(rootNode, "projection"));
                findOneAndUpdateOptions.sort(subDocument(rootNode, "sort"));
                findOneAndUpdateOptions.upsert(rootNode.path("upsert").asBoolean());
            }
        }
        Document document = collection.findOneAndUpdate(filterDocument, updateDocument, findOneAndUpdateOptions);
        // Fix: guard against NPE when no document matched, consistent with
        // findOneAndDelete/findOneAndReplace.
        return document == null ? "{}" : document.toJson();
    }

    @Override
    public void drop(String collectionName) {
        getCollection(collectionName).drop();
    }

    @Override
    public String createIndex(String collectionName, String keys) {
        MongoCollection<Document> collection = getCollection(collectionName);
        Document keysDocument = Document.parse(keys);
        return collection.createIndex(keysDocument);
    }

    @Override
    public String createIndex(String collectionName, String keys, String indexOptions) {
        MongoCollection<Document> collection = getCollection(collectionName);
        Document keysDocument = Document.parse(keys);
        IndexOptions options = new IndexOptions();
        // TODO: parse indexOptions (unique, name, expireAfterSeconds, ...) into options;
        // currently driver defaults are used regardless of the argument.
        return collection.createIndex(keysDocument, options);
    }

    @Override
    public List<String> listIndexes(String collectionName) {
        MongoCollection<Document> collection = getCollection(collectionName);
        ListIndexesIterable<Document> indexes = collection.listIndexes();
        List<String> list = new ArrayList<>();
        try (MongoCursor<Document> cursor = indexes.iterator()) {
            while (cursor.hasNext()) {
                list.add(cursor.next().toJson());
            }
        }
        return list;
    }

    @Override
    public void dropIndex(String collectionName, String indexName) {
        getCollection(collectionName).dropIndex(indexName);
    }

    @Override
    public void dropIndexes(String collectionName) {
        getCollection(collectionName).dropIndexes();
    }

    @Override
    public void renameCollection(String collectionName, String newCollectionName) {
        MongoCollection<Document> collection = getCollection(collectionName);
        MongoNamespace namespace = collection.getNamespace();
        collection.renameCollection(new MongoNamespace(namespace.getDatabaseName(), newCollectionName));
    }

    /**
     * Renames the collection with options.
     *
     * @param renameCollectionOptions JSON of the form { dropTarget: &lt;boolean&gt; }, may be null
     */
    @Override
    public void renameCollection(String collectionName, String newCollectionName, String renameCollectionOptions) {
        MongoCollection<Document> collection = getCollection(collectionName);
        MongoNamespace namespace = collection.getNamespace();
        RenameCollectionOptions options = new RenameCollectionOptions();
        if (renameCollectionOptions != null) {
            JsonNode rootNode = parseOptions(renameCollectionOptions);
            if (rootNode != null) {
                options.dropTarget(rootNode.path("dropTarget").asBoolean());
            }
        }
        collection.renameCollection(new MongoNamespace(namespace.getDatabaseName(), newCollectionName), options);
    }

    /** Returns the raw driver collection handle from the config's current database. */
    public MongoCollection<Document> getCollection(String collectionName) {
        MongoDatabase database = config.getDatabase();
        return database.getCollection(collectionName);
    }

    /** Lists the collection names of the config's current database. */
    public List<String> listCollectionNames() {
        MongoDatabase database = config.getDatabase();
        MongoIterable<String> listCollectionNames = database.listCollectionNames();
        List<String> list = new ArrayList<>();
        for (String collectionName : listCollectionNames) {
            list.add(collectionName);
        }
        return list;
    }
}

+ 0 - 27
src/main/java/com/yihu/hos/config/WebConfig.java

@ -1,27 +0,0 @@
package com.yihu.hos.config;
import org.springframework.context.annotation.Bean;
import org.springframework.web.servlet.view.UrlBasedViewResolver;
import org.springframework.web.servlet.view.tiles3.TilesConfigurer;
import org.springframework.web.servlet.view.tiles3.TilesView;
//@EnableWebMvc
//@Configuration
public class WebConfig {
    /** View resolver that renders logical view names through Apache Tiles. */
    @Bean
    public UrlBasedViewResolver tilesViewResolver() {
        UrlBasedViewResolver resolver = new UrlBasedViewResolver();
        resolver.setViewClass(TilesView.class);
        return resolver;
    }

    /** Loads the Tiles layout definition file used when resolving views. */
    @Bean
    public TilesConfigurer tilesConfigurer() {
        TilesConfigurer configurer = new TilesConfigurer();
        configurer.setDefinitions(new String[]{"file:src/main/webapp/WEB-INF/ehr/commons/layout/layout.xml"});
        return configurer;
    }
}

+ 6 - 30
src/main/java/com/yihu/hos/crawler/controller/CrawlerController.java

@ -1,13 +1,11 @@
package com.yihu.hos.crawler.controller;
import com.yihu.hos.crawler.model.patient.Patient;
import com.yihu.hos.crawler.service.CrawlerManager;
import com.yihu.hos.core.datatype.StringUtil;
import com.yihu.hos.crawler.service.CrawlerService;
import com.yihu.hos.standard.service.adapter.AdapterSchemeService;
import com.yihu.hos.web.framework.model.ActionResult;
import com.yihu.hos.web.framework.model.DetailModelResult;
import com.yihu.hos.web.framework.model.Result;
import com.yihu.hos.core.datatype.StringUtil;
import com.yihu.hos.standard.service.adapter.AdapterSchemeService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
@ -39,8 +37,8 @@ public class CrawlerController {
    private AdapterSchemeService adapterSchemeService;
    /**
    任务编排
    */
     * 任务编排
     */
    @RequestMapping("jobLayout")
    public String jobLayout(Model model) {
        model.addAttribute("contentPage", "/crawler/jobLayout");
@ -73,27 +71,6 @@ public class CrawlerController {
        return "pageView";
    }
    @RequestMapping(value = "patient", method = RequestMethod.POST)
    @ApiOperation(value = "采集病人健康档案", produces = "application/json", notes = "采集病人健康档案")
    @ResponseBody
    public Result crawler(
            @ApiParam(name = "patient", value = "病人索引信息", required = true)
            @RequestParam(value = "patient") String patientInfo) {
        CrawlerManager crawlerManager = new CrawlerManager();
        Patient patient = crawlerManager.parsePatient(patientInfo);
        if (patient != null) {
            Boolean result = crawlerManager.collectProcess(patient);
            if (result) {
                return Result.success("采集上传成功");
            } else {
                return Result.error("采集上传失败");
            }
        } else {
            return Result.error("参数转换病人实体失败");
        }
    }
    /**
     * 保存任务编排数据
     */
@ -225,7 +202,6 @@ public class CrawlerController {
    }
    @RequestMapping(value = "getSchemeList", method = RequestMethod.POST)
    @ApiOperation(value = "获取适配方案-方案版本下拉框", produces = "application/json", notes = "获取适配方案-方案版本下拉框")
    @ResponseBody
@ -239,7 +215,7 @@ public class CrawlerController {
     * 获取任务编排保存数据集
     */
    @RequestMapping(value = "savedJobData", method = RequestMethod.POST)
    @ApiOperation(value = "获取保存的数据集",produces = "application/json", notes = "保存的数据集")
    @ApiOperation(value = "获取保存的数据集", produces = "application/json", notes = "保存的数据集")
    @ResponseBody
    public DetailModelResult ListSavedJobData(
            @ApiParam(name = "version", value = "版本号", required = true)
@ -253,7 +229,7 @@ public class CrawlerController {
     * 获取任务编排保存适配方案-方案版本
     */
    @RequestMapping(value = "savedSchemeList", method = RequestMethod.POST)
    @ApiOperation(value = "获取保存的适配方案",produces = "application/json", notes = "保存的适配方案")
    @ApiOperation(value = "获取保存的适配方案", produces = "application/json", notes = "保存的适配方案")
    @ResponseBody
    public DetailModelResult SavedSchemeList() {
        return crawlerService.getSchemeSavedResult();

+ 0 - 159
src/main/java/com/yihu/hos/crawler/format/DataSetTransformer.java

@ -1,159 +0,0 @@
package com.yihu.hos.crawler.format;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.yihu.ehr.dbhelper.jdbc.DBHelper;
import com.yihu.hos.web.framework.constrant.SqlConstants;
import com.yihu.hos.core.datatype.StringUtil;
import com.yihu.hos.crawler.model.adapter.AdapterDataSet;
import com.yihu.hos.crawler.model.adapter.AdapterDict;
import com.yihu.hos.crawler.model.adapter.AdapterMetaData;
import com.yihu.hos.crawler.model.patient.Patient;
import com.yihu.hos.crawler.model.transform.DictDataType;
import com.yihu.hos.crawler.model.transform.TransformType;
import java.util.Iterator;
import java.util.Map;
/**
 * 目前只处理json格式
 * <p>
 * json 格式
 * {
 * "inner_version":"xxxxx",
 * "patient_id":"xxxx",
 * "event_no":"xxxx",
 * "code":"dataset_code",
 * "org_code":"xxxx"
 * "data":
 * [{"metadata_code1":"5","metadata_code2":"6"},
 * [{"metadata_code1":"1","metadata_code2":"2"}]}
 * <p>
 * Created by Air on 2015/6/4.
 */
public class DataSetTransformer implements IDataTransformer{
    private JsonNode jsonObject;
    private Patient patient;
    private static DBHelper db;
    public JsonNode getJsonObject() {
        return jsonObject;
    }
    public DataSetTransformer() {
        if (db == null) {
            db = new DBHelper();
        }
    }
    public boolean transfer(Map<String, AdapterDataSet> dataSetMap) {
        if (jsonObject != null) {
            String dataSetCode = jsonObject.get("code").asText();
            ArrayNode jsonArray = (ArrayNode) jsonObject.get("data");
            AdapterDataSet adapterDataSet = dataSetMap.get(dataSetCode);
            boolean transfer = transferJson(jsonArray, adapterDataSet);
            return transfer;
        }
        return false;
    }
    public String getData() {
        //确保文档有版本信息
        return jsonObject.asText();
    }
    public void setData(JsonNode data) {
        jsonObject = data;
        setPatient();
    }
    @Override
    public Patient getPatient() {
        return patient;
    }
    public TransformType getTransformType() {
        return TransformType.DATA_SET_JSON;
    }
    /**
     * json 格式
     * {
     * "inner_version":"xxxxx",
     * "patient_id":"xxxx",
     * "event_no":"xxxx",
     * "code":"dataset_code",
     * "data":
     * [{"metadata_code1":"5","metadata_code2":"6"},
     * [{"metadata_code1":"1","metadata_code2":"2"}]}
     *
     * @param jsonArray
     * @param adapterDataSet
     * @return
     */
    public boolean transferJson(ArrayNode jsonArray, AdapterDataSet adapterDataSet) {
        for (Object objectRow : jsonArray) {
            if (objectRow instanceof JsonNode) {
                transferJsonRow((ObjectNode) objectRow, adapterDataSet);
            }
        }
        return false;
    }
    public void transferJsonRow(ObjectNode jsonObject, AdapterDataSet adapterDataSet) {
        Iterator<Map.Entry<String, JsonNode>> fields = jsonObject.fields();
        while (fields.hasNext()) {
            Map.Entry<String, JsonNode> next = fields.next();
            String key = next.getKey();
            JsonNode jsonNode = next.getValue();
            String value = jsonNode.asText();
            String stdValue = transferElem(adapterDataSet, key, value);
            if (jsonNode instanceof ObjectNode) {
                ObjectNode objectNode = (ObjectNode) next;
                objectNode.put(key, stdValue);
            }
        }
    }
    /**
     * @param adapterDataSet 数据集编码
     * @param code        数据元编码
     * @param esbData        数据
     * @return String 标准值
     * @modify 2015.09.16 airhead 增加值与编码转换
     */
    public String transferElem(AdapterDataSet adapterDataSet, String code, String esbData) {
        Map<String, AdapterMetaData> adapterMetaDataMap = adapterDataSet.getAdapterMetaDataMap();
        AdapterMetaData adapterMetaData = adapterMetaDataMap.get(code);
        AdapterDict adapterDict = adapterMetaData.getAdapterDict();
        if (adapterDict == null) {
            return esbData;
        }
        String ehrData = null;
        DictDataType adapterDictDataType = adapterMetaData.getAdapterDictDataType();
        if (adapterDictDataType == DictDataType.VALUE) {
            ehrData = adapterDict.getAdapterValueToCodeMap().get(esbData);
        } else if (adapterDictDataType == DictDataType.CODE) {
            ehrData = adapterDict.getAdapterCodeToCodeMap().get(esbData);
        }
        if (StringUtil.isEmpty(ehrData)) {
            return SqlConstants.EMPTY;
        }
        return ehrData;
    }
    /**
     * 根据DataSet信息构造Patient
     */
    private void setPatient() {
        patient = new Patient();
        patient.setPatientId(jsonObject.get("patient_id").asText());
        patient.setEventNo(jsonObject.get("event_no").asText());
        patient.setOrgCode(jsonObject.get("org_code").asText());
    }
}

+ 0 - 54
src/main/java/com/yihu/hos/crawler/format/DocumentTransformer.java

@ -1,54 +0,0 @@
package com.yihu.hos.crawler.format;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.yihu.hos.crawler.model.adapter.AdapterDataSet;
import com.yihu.hos.crawler.model.patient.Patient;
import com.yihu.hos.crawler.model.transform.TransformType;
import java.io.IOException;
import java.util.Map;
/**
 * Created by Administrator on 2015/10/15.
 */
public class DocumentTransformer implements IDataTransformer  {
    private JsonNode jsonObject;
    protected AdapterScheme adapterScheme;
    protected Patient patient;
    public DocumentTransformer(AdapterScheme adapterScheme) {
        this.adapterScheme = adapterScheme;
    }
    @Override
    public Patient getPatient() {
        return patient;
    }
    /**
     * 非结构化的不需要转换
     *
     * @return
     */
    public boolean transfer(Map<String, AdapterDataSet> dataSetMap) {
        return true;
    }
    public JsonNode getJsonObject() {
        return jsonObject;
    }
    public String getData() {
        return jsonObject.asText();
    }
    public void setData(JsonNode data) {
        jsonObject = data;
    }
    public TransformType getTransformType() {
        return TransformType.DOCUMENT;
    }
}

+ 0 - 27
src/main/java/com/yihu/hos/crawler/format/IDataTransformer.java

@ -1,27 +0,0 @@
package com.yihu.hos.crawler.format;
import com.fasterxml.jackson.databind.JsonNode;
import com.yihu.hos.crawler.model.adapter.AdapterDataSet;
import com.yihu.hos.crawler.model.patient.Patient;
import com.yihu.hos.crawler.model.transform.TransformType;
import java.util.Map;
/**
 * @author Airhead
 * @version 1.0
 * @created 22-5月-2015 11:24:26
 */
public interface IDataTransformer {
    boolean transfer(Map<String, AdapterDataSet> dataSetMap);
    String getData();
    void setData(JsonNode data);
    Patient getPatient();
    TransformType getTransformType();
}

+ 0 - 235
src/main/java/com/yihu/hos/crawler/origin/FileSystemOrigin.java

@ -1,235 +0,0 @@
package com.yihu.hos.crawler.origin;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.yihu.hos.common.Services;
import com.yihu.hos.core.datatype.DateUtil;
import com.yihu.hos.core.datatype.StringUtil;
import com.yihu.hos.core.file.FtpFileUtil;
import com.yihu.hos.core.log.Logger;
import com.yihu.hos.core.log.LoggerFactory;
import com.yihu.hos.crawler.format.AdapterScheme;
import com.yihu.hos.crawler.model.adapter.AdapterDataSet;
import com.yihu.hos.crawler.model.patient.Patient;
import com.yihu.hos.crawler.model.transform.LogicValues;
import com.yihu.hos.crawler.service.EsbHttp;
import com.yihu.hos.system.model.SystemDatasource;
import com.yihu.hos.system.model.SystemOrganization;
import com.yihu.hos.system.service.OrganizationManager;
import com.yihu.hos.web.framework.util.springutil.SpringBeanUtil;
import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
/**
 * �ݻ�
 *
 * @author Airhead
 * @version 1.0
 * @created 22-5��-2015 11:24:24
 */
public class FileSystemOrigin implements IDataOrigin {
    private static Logger logger = LoggerFactory.getLogger(FileSystemOrigin.class);
    public static String dirHear = "/home/test/patient/";        //病人数据文件根目录
    public static String fileType = "/image/";                    //采集的文件类型文件夹
    protected AdapterScheme adapterScheme;
    public FileSystemOrigin(AdapterScheme adapterScheme) {
        this.adapterScheme=adapterScheme;
    }
    /**
     * ftp采集数据
     * 非结构化档案中,key_words格式暂定为:数据集-数据元,生成文件上传时再转成:数据集.数据源(主要因为mongodb的key不支持特殊符号"."
     * @param patient         病人ID
     * @param orgAgencyOrigin 数据源
     * @param adapterDataSet  适配数据集   @return
     */
    @Override
    public String fecthData(Patient patient, SystemDatasource orgAgencyOrigin, AdapterDataSet adapterDataSet) {
        try {
            String data = null;
            String innerVersion= EsbHttp.getRemoteVersion(patient.getOrgCode());
            List<String> datas = null;
            String agencyCode = patient.getOrgCode();
            String ftpConfig = orgAgencyOrigin.getConfig();
            FtpFileUtil ftp = genFtpUtil(ftpConfig);
            String filePath = "";//远程ftp文件路径
            ObjectNode jsonObject = null;
            boolean patientId = true;
            boolean eventNo = true;
            if (patient.getPatientId() != null && !"".equals(patient.getPatientId())) {
                if (patient.getEventNo() != null && !"".equals(patient.getEventNo())) {
                    //文件路径
                    filePath = dirHear + agencyCode + "/" + patient.getPatientId() + "/" + patient.getEventNo() +  fileType;
                } else {
                    eventNo = false;
                }
            } else {
                patientId = false;
            }
            if (!patientId || !eventNo) {
                throw new Exception("采集病人数据集必须要有病人ID,事件号,数据集名:" + adapterDataSet.getAdapterDataSetT().getStdDatasetCode());
            }
            datas = ftp.readFileData(filePath);
            if (datas != null && datas.size() > 0) {
                data = datas.get(0);
            }
            //TODO "data"内容实现,主要包括key_words和content,
            //json生成
            jsonObject.put("patient_id", patient.getPatientId());
            jsonObject.put("event_no", patient.getEventNo());
            jsonObject.put("org_code", agencyCode);
            jsonObject.put("inner_version", innerVersion);
            jsonObject.put("create_date", DateUtil.toString(new Date(), DateUtil.DEFAULT_YMDHMSDATE_FORMAT));
            jsonObject.put("event_time", patient.getReferenceTime());//DateUtil.toString(patient.getReferenceTime(), DateUtil.DEFAULT_YMDHMSDATE_FORMAT)
            if (StringUtil.isEmpty(patient.getReUploadFlg())) {
                jsonObject.put("reUploadFlg", LogicValues.LOGIC_FALSE);
            } else {
                jsonObject.put("reUploadFlg", patient.getReUploadFlg());
            }
            return jsonObject.toString();
        } catch (SQLException e) {
//            e.printStackTrace();
            logger.error("", e);
        } catch (Exception e) {
//            e.printStackTrace();
            logger.error("", e);
        }
        return null;
    }
    /**
     * 获取病人列表
     *
     * @param orgAgencyOrigin 数据源
     * @param adapterDataSet  适配数据集
     * @param condition       查询条件
     * @return 病人集合
     */
    @Override
    public List<Patient> getPatientList(SystemDatasource orgAgencyOrigin, AdapterDataSet adapterDataSet, Map<String, Object> condition) {
        ArrayList<Patient> patientList = new ArrayList<>();
        try {
            String ftpConfig = orgAgencyOrigin.getConfig();
            FtpFileUtil ftp = genFtpUtil(ftpConfig);
//			StdDataSet stdDataSet = adapterDataSet.getStdDataSet();
            OrganizationManager organizationManager= SpringBeanUtil.getService(Services.Organization);
            SystemOrganization orgAgency =organizationManager.getOrgById(orgAgencyOrigin.getOrgId());
            String agencyCode =orgAgency.getCode();
            List<Map<String, String>> patientMaps = ftp.getPatientList(dirHear, agencyCode);
            if (patientMaps != null && patientMaps.size() > 0) {
                for (Map<String, String> patientMap : patientMaps) {
                    Patient patient = new Patient();
                    String patientId = patientMap.get("patient_id");
                    String eventNo = patientMap.get("event_no");
                    if (orgAgency == null) {
                        logger.error("获取病人列表错误,无法获取机构代码.");
                        continue;
                    }
                    patient.setPatientId(patientId);
                    patient.setEventNo(eventNo);
                    patient.setReferenceTime(DateUtil.toString(new Date(), DateUtil.DEFAULT_YMDHMSDATE_FORMAT));//暂设置为当前时间
                    patient.setOrgCode(orgAgency.getCode());
                    patientList.add(patient);
                }
            }
        } catch (Exception e) {
//            e.printStackTrace();
            logger.error("", e);
        }
        return patientList;
    }
    /**
     * 清除ftp数据
     *
     * @param patient
     * @param orgAgencyOrigin
     * @param adapterDataSet  @return
     */
    @Override
    public boolean clearData(Patient patient, SystemDatasource orgAgencyOrigin, AdapterDataSet adapterDataSet) {
        try {
            boolean clear = false;
            String agencyCode = patient.getOrgCode();
            String ftpConfig = orgAgencyOrigin.getConfig();
            FtpFileUtil ftp = genFtpUtil(ftpConfig);
            String filePath = "";//远程ftp文件路径
            boolean patientId = true;
            boolean eventNo = true;
            if (patient.getPatientId() != null && !"".equals(patient.getPatientId())) {
                if (patient.getEventNo() != null && !"".equals(patient.getEventNo())) {
                    //文件路径
                    filePath = dirHear + agencyCode + "/" + patient.getPatientId() + "/" + patient.getEventNo()  + fileType;
                } else {
                    eventNo = false;
                }
            } else {
                patientId = false;
            }
            if (!patientId || !eventNo) {
                throw new Exception("清除病人数据集必须要有病人ID,事件号,数据集名:" + adapterDataSet.getAdapterDataSetT().getStdDatasetCode());
            }
            ftp.connect();
            clear = ftp.removeData(filePath);
            ftp.closeConnect();
            return clear;
        } catch (SQLException e) {
            e.printStackTrace();
            logger.error("", e);
        } catch (Exception e) {
            e.printStackTrace();
            logger.error("", e);
        }
        return false;
    }
    @Override
    public Date getServerDateTime(SystemDatasource orgAgencyOrigin) {
        return null;
    }
    public void finalize() throws Throwable {
    }
    public FtpFileUtil genFtpUtil(String ftpConfig) {
        ObjectMapper mapper = new ObjectMapper();
        FtpFileUtil ftpUtil = null;
        JsonNode rootNode = null;
        try {
            rootNode = mapper.readValue(ftpConfig, JsonNode.class);
            String username = rootNode.path("username").asText();
            String password = rootNode.path("password").asText();
            String host = rootNode.path("host").asText();
            int port = rootNode.path("port").asInt();
            ftpUtil = new FtpFileUtil(username, password, host, port);
        } catch (IOException e) {
            logger.error("获取Ftp服务器配置失败", e);
            e.printStackTrace();
        }
        return ftpUtil;
    }
}//end FileSystemOrigin

+ 0 - 47
src/main/java/com/yihu/hos/crawler/origin/IDataOrigin.java

@ -1,47 +0,0 @@
package com.yihu.hos.crawler.origin;
import com.yihu.hos.crawler.model.adapter.AdapterDataSet;
import com.yihu.hos.crawler.model.patient.Patient;
import com.yihu.hos.system.model.SystemDatasource;
import java.util.Date;
import java.util.List;
import java.util.Map;
/**
 * @author Airhead
 * @version 1.0
 * @created 22-5-2015 11:24:26
 */
public interface IDataOrigin {
    String fecthData(Patient patient, SystemDatasource orgAgencyOrigin, AdapterDataSet adapterDataSet);
    List<Patient> getPatientList(SystemDatasource orgAgencyOrigin, AdapterDataSet adapterDataSet, Map<String, Object> condition);
    boolean clearData(Patient patient, SystemDatasource orgAgencyOrigin, AdapterDataSet adapterDataSet);
    Date getServerDateTime(SystemDatasource orgAgencyOrigin);
    enum OriginType {
        /**
         * 数据库
         */
        DB,
        /**
         * RESTful Web Service
         */
        REST,
        /**
         * FileSystem
         */
        FS,
        /**
         * SOAP Web Service
         */
        SOAP
    }
}

+ 0 - 323
src/main/java/com/yihu/hos/crawler/service/CrawlerFlowManager.java

@ -1,323 +0,0 @@
package com.yihu.hos.crawler.service;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.yihu.hos.core.datatype.CollectionUtil;
import com.yihu.hos.core.datatype.StringUtil;
import com.yihu.hos.core.log.Logger;
import com.yihu.hos.core.log.LoggerFactory;
import com.yihu.hos.crawler.dao.CrawlerDatasetDao;
import com.yihu.hos.crawler.dao.CrawlerFlowDao;
import com.yihu.hos.crawler.dao.CrawlerFlowHeadDao;
import com.yihu.hos.crawler.model.adapter.AdapterDataSet;
import com.yihu.hos.crawler.model.config.SysConfig;
import com.yihu.hos.crawler.model.flow.CrawlerDataSetModel;
import com.yihu.hos.crawler.model.flow.CrawlerFlowHeadModel;
import com.yihu.hos.crawler.model.flow.CrawlerFlowModel;
import com.yihu.hos.crawler.model.patient.Patient;
import com.yihu.hos.standard.model.adapter.AdapterDatasetModel;
import com.yihu.hos.standard.model.adapter.AdapterDictEntryModel;
import com.yihu.hos.standard.model.adapter.AdapterSchemeVersionModel;
import com.yihu.hos.standard.service.adapter.AdapterDatasetService;
import com.yihu.hos.standard.service.adapter.AdapterSchemeVersionService;
import com.yihu.hos.standard.service.bo.AdapterVersion;
import com.yihu.hos.web.framework.model.DictItem;
import com.yihu.hos.web.framework.util.springutil.SpringBeanUtil;
import java.util.*;
public class CrawlerFlowManager {
    private static Logger logger = LoggerFactory.getLogger(CrawlerFlowManager.class);
    private static DataCollectDispatcher dispatch = DataCollectDispatcher.getInstance();
    private static SysConfig sysConfig = SysConfig.getInstance();
    private Map<String, AdapterDataSet> adapterDataSetMap;
    private List<CrawlerFlowHeadModel> crawlerFlowHeadModelList;
    private Map<String, List<CrawlerFlowModel>> crawlerFlowDatasetMap;
    private Map<String, List<CrawlerFlowModel>> crawlerFlowMetadataMap;
    private Boolean adapterFlg = false;
    private List<DictItem> datasetList;
    private String schemeVersion;
    public CrawlerFlowManager(List datasetList, String schemeVersion) {
        this.datasetList = datasetList;
        this.schemeVersion = schemeVersion;
    }
    public CrawlerFlowManager() {
    }
    public void finalize() throws Throwable {
    }
    public String dataCrawler(Map<String, Object> condition) {
        Integer count = 0;
        Integer totalCount = 0;
        String message;
        /**
         * 适配基本数据准备
         */
        if (!getDataForPrepare()) {
            message = "适配数据尚未准备";
            logger.error(message);
            return message;
        }
        List<Patient> patientList = dispatch.getPatientList(condition, adapterDataSetMap);
        if (!CollectionUtil.isEmpty(patientList)) {
            totalCount = patientList.size();
            for (Patient patient : patientList) {
                boolean result = collectProcess(patient);
                if (result) {
                    count++;
                }
            }
        }
        message = "本次采集病人共" + totalCount + "条,成功采集信息" + count + "条";
        return message;
    }
    public boolean collectProcess(Patient patient) {
        if (!getDataForPrepare()) {
            logger.error("适配数据尚未准备完毕");
            return false;
        }
        patient.setReUploadFlg(StringUtil.toString(false));
        logger.trace("采集->注册->打包上传,任务ID:,patient_id:" + patient.getPatientId() + ", event_no:" + patient.getEventNo());
        try {
            /**
             * 获取token
             */
            if (!dispatch.getToken()) {
                logger.error("token获取失败");
                return false;
            }
            /**
             * 获取远程版本
             */
//            if (!dispatch.getRemoteVersion(patient.getOrgCode())) {
//                logger.error("远程版本获取失败");
//                return false;
//            }
            /**
             * 获取版本
             */
            if (StringUtil.isEmpty(sysConfig.getVersionMap().get(patient.getOrgCode()))) {
                logger.error("版本获取失败");
                return false;
            }
            Map<String, JsonNode> dataMap = new HashMap<>();
            for (CrawlerFlowHeadModel crawlerFlowHeadModel : crawlerFlowHeadModelList) {
                /**
                 * 采集信息
                 */
                String datasetCode = crawlerFlowHeadModel.getDatasetCode();
                AdapterDataSet adapterDataSet = adapterDataSetMap.get(datasetCode);
                JsonNode data = dispatch.fecthData(patient, adapterDataSet);
                if (StringUtil.isEmpty(data)) {
                    continue;
                }
                dataMap.put(datasetCode, data);
                /**
                 * 根据采集流程递归查询
                 */
                getDataByCrawlerFlow(datasetCode, patient, dataMap);
                if (sysConfig.getRegisterDataSet().equals(adapterDataSet.getAdapterDataSetT().getStdDatasetCode())) {
                    if (!StringUtil.isEmpty(data.get("data")) && !StringUtil.isEmpty(data.get("data").get(0))) {
                        if (!StringUtil.isEmpty(data.get("data").get(0).get(SysConfig.getInstance().getRegisterIdCardNo()))) {
                            logger.info("注册病人");
                            dispatch.register(patient, data.toString());
                        }
                    }
                }
            }
            logger.info("采集病人成功,patient_id:" + patient.getPatientId() + ", event_no:" + patient.getEventNo());
            /**
             * 上传档案
             */
            try {
                if (!CollectionUtil.isEmpty(dataMap.keySet())) {
                    if (!dispatch.upload(dataMap, patient, adapterDataSetMap)) {
                        logger.error("上传档案失败");
                        return false;
                    }
                }
            } catch (Exception e) {
                logger.error("档案上传失败,patient_id:" + patient.getPatientId() + ", event_no:" + patient.getEventNo());
                return false;
            }
        } catch (Exception e) {
            logger.error("采集病人失败,patient_id:" + patient.getPatientId() + ", event_no:" + patient.getEventNo(), e);
            return false;
        }
        return true;
    }
    public Boolean getDataByCrawlerFlow(String preDatasetCode, Patient patient, Map<String, JsonNode> dataMap) {
        try {
            JsonNode preData = dataMap.get(preDatasetCode);
            Map<String, String> relationValueMap = new HashMap<>();
            List<CrawlerFlowModel> crawlerFlowDatasetList = crawlerFlowDatasetMap.get(preDatasetCode);
            for (CrawlerFlowModel crawlerFlowDataset : crawlerFlowDatasetList) {
                List<CrawlerFlowModel> crawlerFlowMetadataList = crawlerFlowMetadataMap.get(crawlerFlowDataset.getDatasetCode());
                for (CrawlerFlowModel crawlerFlowMetadata : crawlerFlowMetadataList) {
                    String metadataCode = crawlerFlowMetadata.getMetadataCode();
                    metadataCode = StringUtil.substring(metadataCode, metadataCode.indexOf("-") + 1, metadataCode.length());
                    String inputMetadataCode = crawlerFlowMetadata.getInputMetadataCode();
                    inputMetadataCode = StringUtil.substring(inputMetadataCode, inputMetadataCode.indexOf("-") + 1, inputMetadataCode.length());
                    Iterator<JsonNode> array = preData.get("data").iterator();
                    while (array.hasNext()) {
                        JsonNode dataNode = array.next();
                        relationValueMap.put(metadataCode, dataNode.get(inputMetadataCode).asText());
                    }
                }
                String datasetCode = crawlerFlowDataset.getDatasetCode();
                AdapterDataSet adapterDataSet = adapterDataSetMap.get(datasetCode);
                String data = dispatch.fecthData(patient, adapterDataSet, relationValueMap);
                if (StringUtil.isEmpty(data)) {
                    continue;
                } else {
                    ObjectMapper objectMapper = new ObjectMapper();
                    JsonNode jsonObject = objectMapper.readTree(data);
                    dataMap.put(datasetCode, jsonObject);
                }
                getDataByCrawlerFlow(datasetCode, patient, dataMap);
            }
            return true;
        } catch (Exception e) {
            return false;
        }
    }
    public Boolean getDataForPrepare() {
        if (adapterFlg) {
            return true;
        }
        logger.info("适配基本相关数据准备");
        try {
            adapterDataSetMap = new HashMap<>();
            AdapterVersion adapterVersion;
            List<Integer> datasetIdList = new ArrayList<>();
            /**
             * 推模式接口调用,默认只使用最新版本的适配
             */
            AdapterSchemeVersionService adapterSchemeVersionService = SpringBeanUtil.getService(AdapterSchemeVersionService.BEAN_ID);
            AdapterSchemeVersionModel adapterSchemeVersionModel;
            if (datasetList.isEmpty()) {
                adapterSchemeVersionModel = adapterSchemeVersionService.getEhrAdapterVersionLasted();
                if (adapterSchemeVersionModel == null) {
                    logger.error("获取最新ehr适配版本错误");
                    return false;
                } else {
                    this.schemeVersion = adapterSchemeVersionModel.getVersion();
                    adapterVersion = new AdapterVersion(schemeVersion);
                }
                /**
                 * 获取该版本下数据集
                 */
                CrawlerDatasetDao crawlerDatasetDao = SpringBeanUtil.getService(CrawlerDatasetDao.BEAN_ID);
                List<CrawlerDataSetModel> crawlerDataSetModelList = crawlerDatasetDao.getCrawlerDatasetList(adapterSchemeVersionModel.getId());
                if (CollectionUtil.isEmpty(crawlerDataSetModelList)) {
                    return false;
                }
                for (CrawlerDataSetModel crawlerDataSetModel : crawlerDataSetModelList) {
                    datasetIdList.add(crawlerDataSetModel.getDatasetId());
                }
            } else {
                /**
                 * 拉模式接口调用,由任务配置决定适配版本
                 */
                adapterSchemeVersionModel = adapterSchemeVersionService.getByVersion(schemeVersion);
                adapterVersion = new AdapterVersion(schemeVersion);
                for (DictItem dictItem : datasetList) {
                    datasetIdList.add(Integer.valueOf(dictItem.getCode()));
                }
            }
            AdapterDatasetService adapterDatasetService = SpringBeanUtil.getService(AdapterDatasetService.BEAN_ID);
            /**
             * 字典项初始化
             */
            List<AdapterDictEntryModel> adapterDictEntryModelList = adapterDatasetService.getList(AdapterDictEntryModel.class, adapterVersion.getDictEntryTableName(), null, null, null, null);
            Map<Integer, List<AdapterDictEntryModel>> adapterDictEntryModelMap = new HashMap<>();
            for (AdapterDictEntryModel adapterDictEntryModel : adapterDictEntryModelList) {
                List<AdapterDictEntryModel> entryModelList = adapterDictEntryModelMap.get(adapterDictEntryModel.getStdDictId());
                if (CollectionUtil.isEmpty(entryModelList)) {
                    entryModelList = new ArrayList<>();
                }
                entryModelList.add(adapterDictEntryModel);
                adapterDictEntryModelMap.put(adapterDictEntryModel.getStdDictId(), entryModelList);
            }
            /**
             * 数据集初始化
             */
            List<AdapterDatasetModel> adapterDataSetModelList = adapterDatasetService.getListByAdapterDatasetIdList(adapterVersion, datasetIdList);
            for (AdapterDatasetModel adapterDatasetModel : adapterDataSetModelList) {
                adapterDataSetMap.put(adapterDatasetModel.getStdDatasetCode(), new AdapterDataSet(adapterDatasetModel, adapterVersion, adapterDictEntryModelMap));
            }
            /**
             * 获取采集流程表头
             */
            CrawlerFlowHeadDao crawlerFlowHeadDao = SpringBeanUtil.getService(CrawlerFlowHeadDao.BEAN_ID);
            CrawlerFlowDao crawlerFlowDao = SpringBeanUtil.getService(CrawlerFlowDao.BEAN_ID);
            crawlerFlowHeadModelList = crawlerFlowHeadDao.getCrawlerFlowHeadList(adapterSchemeVersionModel.getId());
            List<CrawlerFlowModel> crawlerFlowModelList = crawlerFlowDao.getCrawlerFlowList(adapterSchemeVersionModel.getId());
            crawlerFlowDatasetMap = new HashMap<>();
            crawlerFlowMetadataMap = new HashMap<>();
            /**
             * 获取关联表
             */
            for (CrawlerFlowModel crawlerFlowModel : crawlerFlowModelList) {
                List<CrawlerFlowModel> datasetList = new ArrayList<>();
                List<CrawlerFlowModel> metadataList = new ArrayList<>();
                String inputDatasetCode = crawlerFlowModel.getInputDatasetCode();
                String datasetCode = crawlerFlowModel.getDatasetCode();
                if (StringUtil.isEmpty(inputDatasetCode)) {
                    continue;
                }
                if (crawlerFlowDatasetMap.containsKey(inputDatasetCode)) {
                    datasetList = crawlerFlowDatasetMap.get(inputDatasetCode);
                }
                datasetList.add(crawlerFlowModel);
                crawlerFlowDatasetMap.put(inputDatasetCode, datasetList);
                if (crawlerFlowMetadataMap.containsKey(datasetCode)) {
                    metadataList = crawlerFlowMetadataMap.get(datasetCode);
                }
                metadataList.add(crawlerFlowModel);
                crawlerFlowMetadataMap.put(datasetCode, metadataList);
            }
//            SysConfig.getInstance().setVersionMap(new HashMap<>());
            adapterFlg = true;
            return true;
        } catch (Exception e) {
            adapterFlg = false;
            return false;
        }
    }
    /**
     * Parses a patient index record from its JSON representation.
     *
     * @param patientInfo JSON string describing the patient index
     * @return the parsed {@link Patient}, or {@code null} when the JSON cannot be read
     */
    public Patient parsePatient(String patientInfo) {
        ObjectMapper jsonMapper = new ObjectMapper();
        try {
            return jsonMapper.readValue(patientInfo, Patient.class);
        } catch (Exception e) {
            logger.error("patient参数错误:" + patientInfo, e);
            return null;
        }
    }
    /**
     * Overrides the "adapter data prepared" flag; setting {@code false} forces
     * the next prepare call to rebuild the adapter data.
     */
    public void setAdapterFlg(Boolean adapterFlg) {
        this.adapterFlg = adapterFlg;
    }
}

+ 0 - 243
src/main/java/com/yihu/hos/crawler/service/CrawlerManager.java

@ -1,243 +0,0 @@
package com.yihu.hos.crawler.service;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.yihu.hos.core.datatype.CollectionUtil;
import com.yihu.hos.core.datatype.StringUtil;
import com.yihu.hos.core.log.Logger;
import com.yihu.hos.core.log.LoggerFactory;
import com.yihu.hos.crawler.model.adapter.AdapterDataSet;
import com.yihu.hos.crawler.model.config.SysConfig;
import com.yihu.hos.crawler.model.patient.Patient;
import com.yihu.hos.standard.model.adapter.AdapterDatasetModel;
import com.yihu.hos.standard.model.adapter.AdapterDictEntryModel;
import com.yihu.hos.standard.model.adapter.AdapterSchemeVersionModel;
import com.yihu.hos.standard.service.adapter.AdapterDatasetService;
import com.yihu.hos.standard.service.adapter.AdapterSchemeVersionService;
import com.yihu.hos.standard.service.bo.AdapterVersion;
import com.yihu.hos.web.framework.model.DictItem;
import com.yihu.hos.web.framework.util.springutil.SpringBeanUtil;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class CrawlerManager {
    private static Logger logger = LoggerFactory.getLogger(CrawlerManager.class);
    private static DataCollectDispatcher dispatch = DataCollectDispatcher.getInstance();
    private static SysConfig sysConfig=SysConfig.getInstance();
    private static Map<String, Map<String, AdapterDataSet>> adapterDataSetVersionMap = new HashMap<>();
    private Map<String, AdapterDataSet> adapterDataSetMap;
    private Boolean adapterFlg = false;
    private String schemeVersion;
    private List<DictItem> datasetList;
    public CrawlerManager(List datasetList, String schemeVersion) {
        this.datasetList = datasetList;
        this.schemeVersion = schemeVersion;
    }
    public CrawlerManager() {
    }
    public void finalize() throws Throwable {
    }
    public String dataCrawler(Map<String, Object> condition) {
        Integer count = 0;
        Integer totalCount = 0;
        String message;
        /**
         * 适配基本数据准备
         */
        if (!getDataForPrepare()) {
            message = "适配数据尚未准备";
            logger.error(message);
            return message;
        }
        List<Patient> patientList = dispatch.getPatientList(condition, adapterDataSetMap);
        if (!CollectionUtil.isEmpty(patientList)) {
            totalCount = patientList.size();
            for (Patient patient : patientList) {
                Boolean result = collectProcess(patient);
                if (result) {
                    count++;
                }
            }
        }
        message = "本次采集病人共" + totalCount + "条,成功采集信息"+ count + "条";
        return message;
    }
    //单个病人采集上传
    public Boolean collectProcess(Patient patient) {
        if (!getDataForPrepare()) {
            logger.error("适配数据尚未准备");
            return false;
        }
        patient.setReUploadFlg(StringUtil.toString(false));
        logger.trace("采集->注册->打包上传,patient_id:" + patient.getPatientId() + ", event_no:" + patient.getEventNo());
        try {
            //getToken
            if (!dispatch.getToken()) {
                logger.error("token获取失败");
                return false;
            }
            //getRemoteVersion
//            if (!dispatch.getRemoteVersion(patient.getOrgCode())) {
//                logger.error("远程版本获取失败");
//                return false;
//            }
            if(StringUtil.isEmpty(sysConfig.getVersionMap().get(patient.getOrgCode()))){
                logger.error("版本获取失败");
                return false;
            }
            Map<String, AdapterDataSet> dataSetMap = new HashMap<>();
            Map<String, JsonNode> dataMap = new HashMap<>();
            for (String key : adapterDataSetMap.keySet()) {
                /**
                 * 获取数据
                 */
                AdapterDataSet adapterDataSet = adapterDataSetMap.get(key);
                JsonNode jsonObject = dispatch.fecthData(patient, adapterDataSet);
                if (StringUtil.isEmpty(jsonObject)) {
                    continue;
                }
                dataSetMap.put(adapterDataSet.getAdapterDataSetT().getStdDatasetCode(), adapterDataSet);
                dataMap.put(key, jsonObject);
                /**
                 * 注册病人
                 */
                if (SysConfig.getInstance().getRegisterDataSet().equals(adapterDataSet.getAdapterDataSetT().getStdDatasetCode())) {
                    if (!StringUtil.isEmpty(jsonObject.get("data")) && !StringUtil.isEmpty(jsonObject.get("data").get(0))) {
                        if (!StringUtil.isEmpty(jsonObject.get("data").get(0).get(SysConfig.getInstance().getRegisterIdCardNo()))) {
                            logger.info("注册病人");
                            dispatch.register(patient, jsonObject.toString());
                        }
                    }
                }
            }
            logger.info("采集病人成功,patient_id:" + patient.getPatientId() + ", event_no:" + patient.getEventNo());
            //上传档案
            logger.info("上传病人档案");
            try {
                if (!CollectionUtil.isEmpty(dataMap.keySet())) {
                    if (!dispatch.upload(dataMap, patient, dataSetMap)) {
                        logger.error("上传档案失败");
                        return false;
                    }
                }
            } catch (Exception e) {
                logger.error("档案上传失败,patient_id:" + patient.getPatientId() + ", event_no:" + patient.getEventNo());
                return false;
            }
        } catch (Exception e) {
            logger.error("采集病人失败,patient_id:" + patient.getPatientId() + ", event_no:" + patient.getEventNo(), e);
            return false;
        }
        return true;
    }
    public Boolean getDataForPrepare() {
        if (adapterFlg) {
            return true;
        }
        logger.info("适配基本相关数据准备");
        try {
            adapterDataSetMap = new HashMap<>();
            AdapterVersion adapterVersion;
            AdapterDatasetService adapterDatasetService = SpringBeanUtil.getService(AdapterDatasetService.BEAN_ID);
            List<AdapterDatasetModel> adapterDataSetModelList = new ArrayList<>();
            if (!CollectionUtil.isEmpty(datasetList)) {
                /**
                 * 拉模式接口调用,由任务配置决定适配版本
                 */
                adapterVersion = new AdapterVersion(schemeVersion);
                List<Integer> datasetIdList = new ArrayList<>();
                for (DictItem dictItem : datasetList) {
                    datasetIdList.add(Integer.parseInt(dictItem.getCode()));
                }
                adapterDataSetModelList = adapterDatasetService.getAdapterDatasetByAdapterIdList(adapterVersion, datasetIdList);
            } else {
                /**
                 * 推模式接口调用,默认只使用最新版本的适配
                 */
                AdapterSchemeVersionService adapterSchemeVersionService = SpringBeanUtil.getService(AdapterSchemeVersionService.BEAN_ID);
                AdapterSchemeVersionModel adapterSchemeVersionModel = adapterSchemeVersionService.getEhrAdapterVersionLasted();
                if (adapterSchemeVersionModel == null) {
                    logger.error("获取最新ehr适配版本错误");
                    return false;
                } else {
                    this.schemeVersion = adapterSchemeVersionModel.getVersion();
                    adapterVersion = new AdapterVersion(schemeVersion);
                }
                if (adapterDataSetVersionMap.get(schemeVersion) != null) {
                    adapterDataSetMap = adapterDataSetVersionMap.get(schemeVersion);
                    adapterFlg = true;
                    return true;
                }
                Map<String, String> condition = new HashMap<>();
                condition.put("column", "adapter_dataset_code");
                ObjectMapper mapper = new ObjectMapper();
                String conditionStr = mapper.writeValueAsString(condition);
                adapterDataSetModelList = adapterDatasetService.getAdapterDatasetNotNullList(adapterVersion, conditionStr);
            }
            /**
             * 字典项初始化
             */
            List<AdapterDictEntryModel> adapterDictEntryModelList = adapterDatasetService.getList(AdapterDictEntryModel.class, adapterVersion.getDictEntryTableName(), null, null, null, null);
            Map<Integer, List<AdapterDictEntryModel>> adapterDictEntryModelMap = new HashMap<>();
            for (AdapterDictEntryModel adapterDictEntryModel : adapterDictEntryModelList) {
                List<AdapterDictEntryModel> entryModelList = adapterDictEntryModelMap.get(adapterDictEntryModel.getStdDictId());
                if (CollectionUtil.isEmpty(entryModelList)) {
                    entryModelList = new ArrayList<>();
                }
                entryModelList.add(adapterDictEntryModel);
                adapterDictEntryModelMap.put(adapterDictEntryModel.getStdDictId(), entryModelList);
            }
            /**
             * 数据集初始化
             */
            for (AdapterDatasetModel adapterDatasetModel : adapterDataSetModelList) {
                adapterDataSetMap.put(adapterDatasetModel.getStdDatasetCode(), new AdapterDataSet(adapterDatasetModel, adapterVersion, adapterDictEntryModelMap));
            }
            adapterDataSetVersionMap.put(schemeVersion, adapterDataSetMap);
            adapterFlg = true;
            return true;
        } catch (Exception e) {
            return false;
        }
    }
    /**
     * 解析病人索引信息
     *
     * @param patientInfo 病人索引信息
     * @return
     */
    public Patient parsePatient(String patientInfo) {
        try {
            ObjectMapper mapper = new ObjectMapper();
            Patient patient = mapper.readValue(patientInfo, Patient.class);
            return patient;
        } catch (Exception e) {
            logger.error("patient参数错误:" + patientInfo, e);
            return null;
        }
    }
    public void setSchemeVersion(String schemeVersion) {
        this.schemeVersion = schemeVersion;
    }
    public void setDatasetList(List<DictItem> datasetList) {
        this.datasetList = datasetList;
    }
}

+ 5 - 5
src/main/java/com/yihu/hos/crawler/service/CrawlerService.java

@ -185,8 +185,8 @@ public class CrawlerService {
        for (JsonNode obj : jsonList) {
            if (obj.has("schemeId") && obj.has("versionId")) {
                String schemeId = obj.get("schemeId").toString();
                String versionId = obj.get("versionId").toString();
                String schemeId = obj.get("schemeId").asText();
                String versionId = obj.get("versionId").asText();
                AdapterSchemeVersionModel versionModel = (AdapterSchemeVersionModel) adapterSchemeVersionService.get(Integer.valueOf(versionId));
                if (versionModel != null) {
                    AdapterVersion adapterVersion = new AdapterVersion(versionModel.getVersion());
@ -196,7 +196,7 @@ public class CrawlerService {
                    //根据id字符串获取编排数据集
                    if (obj.has("dataSets")) {
                        List<Integer> newDatasetIdList = new ArrayList<>();
                        String dataSetStr = obj.get("dataSets").toString();
                        String dataSetStr = obj.get("dataSets").asText();
                        if (StringUtils.isNotBlank(dataSetStr)) {
                            String[] IdList = dataSetStr.split(",");
                            for (String aIdList : IdList) {
@ -220,8 +220,8 @@ public class CrawlerService {
                    }
                    //如果保存传入编排映射关系,进行保存操作
                    if (obj.has("relation") && !Objects.equals(obj.get("relation").toString(), "")) {
                        saveDataSetRelation(versionId, obj.get("relation").toString());
                    if (obj.has("relation") && !Objects.equals(obj.get("relation").asText(), "")) {
                        saveDataSetRelation(versionId, obj.get("relation").asText());
                    }
                }
            }

+ 0 - 381
src/main/java/com/yihu/hos/crawler/service/DataCollectDispatcher.java

@ -1,381 +0,0 @@
package com.yihu.hos.crawler.service;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.yihu.hos.core.datatype.DateUtil;
import com.yihu.hos.core.datatype.StringUtil;
import com.yihu.hos.core.file.FileUtil;
import com.yihu.hos.core.log.Logger;
import com.yihu.hos.core.log.LoggerFactory;
import com.yihu.hos.crawler.format.DataSetTransformer;
import com.yihu.hos.crawler.model.adapter.AdapterDataSet;
import com.yihu.hos.crawler.model.adapter.AdapterMetaData;
import com.yihu.hos.crawler.model.config.SysConfig;
import com.yihu.hos.crawler.model.patient.Patient;
import com.yihu.hos.crawler.model.patient.PatientIdentity;
import com.yihu.hos.crawler.model.transform.EhrCondition;
import com.yihu.hos.crawler.model.transform.LogicValues;
import com.yihu.hos.standard.model.adapter.AdapterMetadataModel;
import com.yihu.hos.web.framework.constrant.SqlConstants;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.*;
/**
 * Singleton dispatcher that talks to the resource-service gateway (via
 * {@code EsbHttp}): obtains tokens, queries patient lists, fetches and
 * adapts data-set rows, registers patients and uploads archive files.
 */
public class DataCollectDispatcher {
    private static DataCollectDispatcher ourInstance = new DataCollectDispatcher();
    private static Logger logger = LoggerFactory.getLogger(DataCollectDispatcher.class);
    // Access token obtained by getToken(); passed to register/upload calls.
    private String token;
    private DataCollectDispatcher() {
    }
    public static DataCollectDispatcher getInstance() {
        return ourInstance;
    }
    public void finalize() throws Throwable {
    }
    /**
     * Fetches and caches a gateway access token.
     *
     * @return true when a non-empty token was obtained
     */
    public Boolean getToken() {
        try {
            token = EsbHttp.getToken();
            if (StringUtil.isEmpty(token)) {
                return false;
            }
            return true;
        } catch (Exception e) {
            logger.error("本次任务执行失败,获取token失败!");
            return false;
        }
    }
    /**
     * Ensures the remote standard version for the given organization is cached
     * in {@code SysConfig}'s version map, fetching it once when absent.
     *
     * @param orgCode organization code whose version is looked up
     * @return true when the version is available in the cache
     */
    public Boolean getRemoteVersion(String orgCode) {
        try {
            if (StringUtil.isEmpty(SysConfig.getInstance().getVersionMap().get(orgCode))) {
                String stdVersion = EsbHttp.getRemoteVersion(orgCode);
                if (StringUtil.isEmpty(stdVersion)) {
                    return false;
                }
                SysConfig.getInstance().getVersionMap().put(orgCode, stdVersion);
            }
            return true;
        } catch (Exception e) {
            // NOTE(review): message mentions the token but this method fetches the
            // remote version — looks like a copy-paste from getToken(); confirm.
            logger.error("本次任务执行失败,获取token失败!");
            return false;
        }
    }
    /**
     * Builds the list of patients to collect by querying, within the given
     * time window, every data set that has a PatientIdentity configured.
     *
     * @param condition expects "beginDate" and "endDate" java.util.Date entries
     * @param adapterDataSetMap adapter data sets keyed by standard data-set code
     * @return the patients found (possibly empty); {@code null} when a gateway
     *         response was empty
     */
    public List<Patient> getPatientList(Map<String, Object> condition, Map<String, AdapterDataSet> adapterDataSetMap) {
        ArrayList<Patient> patientList = new ArrayList<>();
        SimpleDateFormat df = new SimpleDateFormat(DateUtil.DEFAULT_YMDHMSDATE_FORMAT);
        for (String key : adapterDataSetMap.keySet()) {
            // Only data sets with a configured patient identity are queried.
            PatientIdentity patientIdentity = SysConfig.getInstance().getPatientIdentity(key);
            if (patientIdentity == null) {
                continue;
            }
            try {
                // Validate that the data set exposes the fields needed to list patients.
                Map<String, String> propertyMap = getItemList(adapterDataSetMap.get(key));
                if (propertyMap == null) {
                    return patientList;
                }
                // Build the time-window query parameters.
                Date beginDate = (Date) condition.get("beginDate");
                String beginTime = df.format(beginDate);
                Date endDate = (Date) condition.get("endDate");
                String endTime = df.format(endDate);
                List<EhrCondition> queryParams = new ArrayList<>();
                queryParams.add(new EhrCondition(" > ", patientIdentity.getRefTimeCode(), beginTime));
                queryParams.add(new EhrCondition(" < ", patientIdentity.getRefTimeCode(), endTime));
                // REST call to the gateway.
                String rootStr = EsbHttp.getPatientList(adapterDataSetMap.get(key), queryParams);
                if (StringUtil.isEmpty(rootStr)) {
                    return null;
                }
                ObjectMapper mapper = new ObjectMapper();
                JsonNode resultNode = mapper.readValue(rootStr, JsonNode.class);
                JsonNode patientNode = resultNode.path("detailModelList");
                boolean isArr = patientNode.isArray();
                if (isArr) {
                    // One Patient per returned row, using the mapped column names.
                    Iterator<JsonNode> array = patientNode.iterator();
                    while (array.hasNext()) {
                        JsonNode node = array.next();
                        Patient patient = new Patient();
                        String patientId = node.path(propertyMap.get(SqlConstants.PATIENT_ID)).asText();
                        String eventNo = node.path(propertyMap.get(SqlConstants.EVENT_NO)).asText();
                        String refTime = node.path(propertyMap.get(SqlConstants.EVENT_TIME)).asText();
                        String orgCode = node.path(SqlConstants.ORG_CODE.toUpperCase()).asText();
                        patient.setPatientId(patientId);
                        patient.setEventNo(eventNo);
                        patient.setReferenceTime(refTime);
                        patient.setOrgCode(orgCode);
                        patientList.add(patient);
                    }
                }
            } catch (Exception e) {
                logger.error("采集病人失败", e);
            }
        }
        return patientList;
    }
    /**
     * Resolves the standard column names (upper-cased) for patient id,
     * event number and reference time of a data set.
     *
     * @param adapterDataSet data set whose identity columns are resolved
     * @return mapping of SqlConstants keys to column names, or {@code null}
     *         when a required column is missing
     */
    public Map<String, String> getItemList(AdapterDataSet adapterDataSet) throws Exception {
        Map<String, String> propertyMap = new HashMap<>();
        PatientIdentity patientIdentity = SysConfig.getInstance().getPatientIdentity(adapterDataSet.getAdapterDataSetT().getStdDatasetCode());
        if (adapterDataSet.isHavePatientID()) {
            AdapterMetaData adapterMetaData = adapterDataSet.getAdapterMetaDataMap().get(PatientIdentity.getPatientIDCode());
            propertyMap.put(SqlConstants.PATIENT_ID, adapterMetaData.getAdapterMetadataModel().getStdMetadataCode().toUpperCase());
        } else {
            logger.error("", new Exception("采集病人列表数据集必须有patient_id."));
            return null;
        }
        if (adapterDataSet.isHaveEventNo()) {
            AdapterMetaData adapterMetaData = adapterDataSet.getAdapterMetaDataMap().get(patientIdentity.getEventNoCode());
            propertyMap.put(SqlConstants.EVENT_NO, adapterMetaData.getAdapterMetadataModel().getStdMetadataCode().toUpperCase());
        } else {
            logger.error("采集病人列表数据集必须有event_no.");
            return null;
        }
        AdapterMetaData adapterRefMetaData = adapterDataSet.getAdapterMetaDataMap().get(patientIdentity.getRefTimeCode());
        if (adapterRefMetaData == null) {
            logger.error("采集病人列表数据集必须有采集时间.");
            return null;
        }
        propertyMap.put(SqlConstants.EVENT_TIME, adapterRefMetaData.getAdapterMetadataModel().getStdMetadataCode().toUpperCase());
        return propertyMap;
    }
    /**
     * Fetches one data set for a patient using explicit query conditions and
     * wraps the adapted rows in an envelope JSON string (data/code/patient_id/
     * event_no/org_code/inner_version/create_date/event_time/reUploadFlg).
     *
     * @return the envelope JSON, or {@code SqlConstants.EMPTY} when no rows
     *         were returned or an error occurred
     */
    public String fecthData(Patient patient, AdapterDataSet adapterDataSet, List<EhrCondition> queryParams) {
        try {
            ObjectMapper mapper = new ObjectMapper();
            ObjectNode paramsNode = mapper.createObjectNode();
            paramsNode.put("tableCode", adapterDataSet.getAdapterDataSetT().getStdDatasetCode());
            paramsNode.put("condition", mapper.writeValueAsString(queryParams));
            Map<String, String> formParams = new HashMap<>();
//            formParams.add(new BasicNameValuePair("secret", secret));
            formParams.put("api", "collectionData");
            formParams.put("param", mapper.writeValueAsString(paramsNode));
            // Call the resource-service gateway.
            String rootStr = EsbHttp.getFecthData(formParams);
            JsonNode resultNode = mapper.readValue(rootStr, JsonNode.class);
            JsonNode result = resultNode.path("detailModelList");
            JsonNode data = matchAdapterData(result, adapterDataSet);
            ObjectNode jsonObject = mapper.createObjectNode();
            if (data != null && data.size() > 0) {
                jsonObject.set("data", data);
                jsonObject.put("code", adapterDataSet.getAdapterDataSetT().getStdDatasetCode());
                jsonObject.put("patient_id", patient.getPatientId());
                jsonObject.put("event_no", patient.getEventNo());
                String agencyCode = patient.getOrgCode();
                jsonObject.put("org_code", agencyCode);
                jsonObject.put("inner_version", SysConfig.getInstance().getVersionMap().get(patient.getOrgCode()));
                jsonObject.put("create_date", DateUtil.toString(new Date(), DateUtil.DEFAULT_YMDHMSDATE_FORMAT));
                jsonObject.put("event_time", patient.getReferenceTime());//DateUtil.toString(patient.getReferenceTime(), DateUtil.DEFAULT_YMDHMSDATE_FORMAT)
                if (StringUtil.isEmpty(patient.getReUploadFlg())) {
                    jsonObject.put("reUploadFlg", LogicValues.LOGIC_FALSE);
                } else {
                    jsonObject.put("reUploadFlg", patient.getReUploadFlg());
                }
                return jsonObject.toString();
            } else {
                return SqlConstants.EMPTY;
            }
        } catch (Exception e) {
            // NOTE(review): errors are silently mapped to EMPTY here — the caller
            // cannot distinguish "no data" from "fetch failed".
            return SqlConstants.EMPTY;
        }
    }
    /**
     * Convenience overload: builds equality conditions from the given
     * column/value map and delegates to the List-based fecthData.
     */
    public String fecthData(Patient patient, AdapterDataSet adapterDataSet, Map<String, String> relationValueMap) {
        List<EhrCondition> queryParams = new ArrayList<>();
        for (String key : relationValueMap.keySet()) {
            queryParams.add(new EhrCondition(" = ", key, relationValueMap.get(key)));
        }
        return fecthData(patient, adapterDataSet, queryParams);
    }
    /**
     * Fetches one data set for a patient, building the query from the data
     * set's patient-id / event-no columns, and returns the envelope as a
     * JsonNode (same envelope fields as the String overload).
     *
     * @param patient        patient whose identifiers drive the query
     * @param adapterDataSet data set to fetch
     * @return the envelope node, or {@code null} when no rows / no usable
     *         patient identifier / an error occurred
     */
    public JsonNode fecthData(Patient patient, AdapterDataSet adapterDataSet) {
        try {
            ObjectMapper mapper = new ObjectMapper();
            List<EhrCondition> queryParams = new ArrayList<>();
            boolean patientId = true;
            if (adapterDataSet.isHavePatientID()) {
                AdapterMetaData adapterMetaData = adapterDataSet.getAdapterMetaDataMap().get(PatientIdentity.getPatientIDCode());
                queryParams.add(new EhrCondition(" = ", adapterMetaData.getAdapterMetadataModel().getStdMetadataCode(), patient.getPatientId()));
            } else {
                patientId = false;
            }
            boolean eventNo = true;
            if (adapterDataSet.isHaveEventNo()) {
                AdapterMetaData adapterMetaData = adapterDataSet.getAdapterMetaDataMap().get(adapterDataSet.getEventNoCode());
                queryParams.add(new EhrCondition(" = ", adapterMetaData.getAdapterMetadataModel().getStdMetadataCode(), patient.getEventNo()));
            } else {
                eventNo = false;
            }
            // At least one of patient_id / event_no must exist to scope the query.
            if (!patientId && !eventNo) {
                logger.error("采集病人数据集至少需要一项病人标识.数据集名:" + adapterDataSet.getAdapterDataSetT().getStdDatasetCode());
                return null;
            }
            ObjectNode paramsNode = mapper.createObjectNode();
            paramsNode.put("tableCode", adapterDataSet.getAdapterDataSetT().getStdDatasetCode());
            paramsNode.put("condition", mapper.writeValueAsString(queryParams));
            Map<String, String> formParams = new HashMap<>();
//            formParams.add(new BasicNameValuePair("secret", secret));
            formParams.put("api", "collectionData");
            formParams.put("param", mapper.writeValueAsString(paramsNode));
            // Call the resource-service gateway.
            String rootStr = EsbHttp.getFecthData(formParams);
            JsonNode resultNode = mapper.readValue(rootStr, JsonNode.class);
            JsonNode result = resultNode.path("detailModelList");
            JsonNode data = matchAdapterData(result, adapterDataSet);
            ObjectNode jsonObject = mapper.createObjectNode();
            if (data != null && data.size() > 0) {
                jsonObject.set("data", data);
                jsonObject.put("code", adapterDataSet.getAdapterDataSetT().getStdDatasetCode());
                jsonObject.put("patient_id", patient.getPatientId());
                jsonObject.put("event_no", patient.getEventNo());
                String agencyCode = patient.getOrgCode();
                jsonObject.put("org_code", agencyCode);
                jsonObject.put("inner_version", SysConfig.getInstance().getVersionMap().get(patient.getOrgCode()));
                jsonObject.put("create_date", DateUtil.toString(new Date(), DateUtil.DEFAULT_YMDHMSDATE_FORMAT));
                jsonObject.put("event_time", patient.getReferenceTime());//DateUtil.toString(patient.getReferenceTime(), DateUtil.DEFAULT_YMDHMSDATE_FORMAT)
                if (StringUtil.isEmpty(patient.getReUploadFlg())) {
                    jsonObject.put("reUploadFlg", LogicValues.LOGIC_FALSE);
                } else {
                    jsonObject.put("reUploadFlg", patient.getReUploadFlg());
                }
                return jsonObject;
            } else {
                return null;
            }
        } catch (Exception e) {
            logger.error("", e);
        }
        return null;
    }
    /**
     * Re-keys each returned row from standard metadata codes to the
     * organization's adapter metadata codes; rows keep only mapped columns.
     *
     * @param data           array of rows keyed by standard metadata code
     * @param adapterDataSet data set supplying the metadata mapping
     * @return a new array of rows keyed by adapter metadata code
     */
    public JsonNode matchAdapterData(JsonNode data, AdapterDataSet adapterDataSet) {
        ObjectMapper mapper = new ObjectMapper();
        ArrayNode result = mapper.createArrayNode();
        Iterator<JsonNode> array = data.iterator();
        while (array.hasNext()) {
            JsonNode dataNode = array.next();
            ObjectNode jsonNode = mapper.createObjectNode();
            for (AdapterMetaData adapterMetaData : adapterDataSet.getAdapterMetaDataList()) {
                AdapterMetadataModel adapterMetadataModel = adapterMetaData.getAdapterMetadataModel();
                String orgMetaDataCode = adapterMetadataModel.getAdapterMetadataCode();
                String stdMetaDataCode = adapterMetadataModel.getStdMetadataCode();
                if (!StringUtil.isEmpty(orgMetaDataCode)) {
                    jsonNode.put(orgMetaDataCode, dataNode.path(stdMetaDataCode).asText());
                }
            }
            result.add(jsonNode);
        }
        return result;
    }
    /** Registers the patient with the gateway using the cached token. */
    public Boolean register(Patient patient, String data) {
        return EsbHttp.register(patient, data, token);
    }
    /**
     * Writes each data set to disk twice (raw "origin" and transformed
     * "standard" JSON files) and then uploads the patient archive.
     *
     * @return true when every file was written and the upload succeeded
     */
    public Boolean upload(Map<String, JsonNode> dataMap, Patient patient, Map<String, AdapterDataSet> dataSetMap) {
        Boolean result = true;
        try {
            DataSetTransformer dataTransformer = new DataSetTransformer();
            for (String key : dataMap.keySet()) {
                dataTransformer.setData(dataMap.get(key));
                if (!toFile(dataTransformer, patient, "origin")) {
                    logger.info("存储原始文件失败:patient_id=" + patient.getPatientId()
                            + "event_no=" + patient.getEventNo());
                    result = false;
                    break;
                }
                dataTransformer.transfer(dataSetMap);
                if (!toFile(dataTransformer, patient, "standard")) {
                    logger.info("存储标准文件失败:patient_id=" + patient.getPatientId()
                            + "event_no=" + patient.getEventNo());
                    result = false;
                    break;
                }
            }
            PatientCDAUpload patientCDAUpload = new PatientCDAUpload();
            if (!patientCDAUpload.upload(patient, token)) {
                result = false;
            }
        } catch (Exception e) {
            result = false;
        }
        return result;
    }
    /**
     * Writes the transformer's current JSON to the patient's CDA index path
     * under the given file name.
     *
     * @return true when the file was written successfully
     */
    public boolean toFile(DataSetTransformer dataTransformer, Patient patient, String fileName) {
        JsonNode jsonObject = dataTransformer.getJsonObject();
        PatientCDAIndex patientCDAIndex = new PatientCDAIndex(patient);
        String filePath = patientCDAIndex.createDataIndex(fileName, PatientCDAIndex.FileType.JSON);
        boolean writeFile = false;
        try {
            writeFile = FileUtil.writeFile(filePath, jsonObject.toString(), "UTF-8");
        } catch (IOException e) {
            logger.info("存储临时文件失败.");
            logger.error("", e);
        }
        return writeFile;
    }
    /**
     * Parses a token response; returns the "result" map when the response
     * code equals {@code SqlConstants.OK}, otherwise {@code null}.
     *
     * @param responToken raw JSON token response
     * @return the token payload map, or {@code null} on failure
     */
    public Map<String, Object> parseToken(String responToken) {
        ObjectMapper mapper = new ObjectMapper();
        Map<String, Object> tokenMap = null;
        try {
            Map<String, Object> map = mapper.readValue(responToken, Map.class);
            String code = (String) map.get("code");
            if (SqlConstants.OK.equals(code)) {
                tokenMap = (Map<String, Object>) map.get("result");
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
        return tokenMap;
    }
}//end DataCollectDispatcher

+ 33 - 8
src/main/java/com/yihu/hos/crawler/service/EsbHttp.java

@ -15,24 +15,49 @@ import com.yihu.hos.crawler.model.patient.Patient;
import com.yihu.hos.crawler.model.transform.EhrCondition;
import com.yihu.hos.web.framework.constrant.SqlConstants;
import org.json.JSONObject;
import org.springframework.core.io.ClassPathResource;
import org.springframework.core.io.Resource;
import org.springframework.core.io.support.EncodedResource;
import org.springframework.core.io.support.PropertiesLoaderUtils;
import sun.misc.BASE64Encoder;
import java.io.File;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.*;
/**
 * Created by hzp on 2016/3/10.
 */
public class EsbHttp {
    public static String defaultHttpUrl;
    public static String clientId;
    public static String clientKey;
    public static String httpGateway;
    public static String defaultHttpUser;
    public static String defaultHttpPassword;
    public static String sslKeyStore;
    public static String sslPassword;
    private static Logger logger = LoggerFactory.getLogger(EsbHttp.class);
    public static String defaultHttpUrl = "";
    public static String clientId = "";
    public static String clientKey = "";
    public static String httpGateway = "";
    static {
        //默认配置
        try {
            Resource resource = new ClassPathResource("config/http.properties");
            EncodedResource encRes = new EncodedResource(resource, "UTF-8");
            Properties props = PropertiesLoaderUtils.loadProperties(encRes);
            defaultHttpUrl = props.getProperty("httpUrl");
            defaultHttpUser = props.getProperty("httpUser");
            defaultHttpPassword = props.getProperty("httpPassword");
            clientId = props.getProperty("clientId");
            clientKey = props.getProperty("clientKey");
            sslKeyStore = props.getProperty("sslKeystore");
            sslPassword = props.getProperty("sslPassword");
        } catch (Exception e) {
            System.out.print(e.getMessage());
        }
    }
    /***************************** 用户接口 *********************************************/
    /**

+ 0 - 148
src/main/java/com/yihu/hos/crawler/service/OldPatientCDAUpload.java

@ -1,148 +0,0 @@
package com.yihu.hos.crawler.service;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.yihu.hos.core.compress.Zipper;
import com.yihu.hos.core.encrypt.MD5;
import com.yihu.hos.core.encrypt.RSA;
import com.yihu.hos.core.file.FileUtil;
import com.yihu.hos.core.http.HTTPResponse;
import com.yihu.hos.core.http.HttpClientKit;
import com.yihu.hos.core.log.Logger;
import com.yihu.hos.core.log.LoggerFactory;
import com.yihu.hos.crawler.model.config.SysConfig;
import com.yihu.hos.crawler.model.patient.Patient;
import java.io.File;
import java.security.Key;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
/**
 * Patient archive upload (legacy version).
 * Zips the patient's generated data directory, encrypts the zip password with
 * the organisation's RSA public key, and POSTs the package to the remote
 * "/packages" endpoint.
 *
 * @author Air
 * @version 1.0
 * @created 2015.07.06 15:58
 */
public class OldPatientCDAUpload {
    public static String uploadMethod;
    private static Logger logger = LoggerFactory.getLogger(OldPatientCDAUpload.class);
    /**
     * Zips and uploads one patient's archive, removing the temp directory afterwards.
     *
     * @param patient patient whose archive is uploaded
     * @param token   session token forwarded to the remote service
     * @return true only if the package was built and uploaded successfully
     * @modify 2015.09.15 airhead revised directory deletion
     * @modify 2015.09.19 airhead fixed missing-document handling and error messages
     */
    public boolean upload(Patient patient, String token) {
        ZipFile zipFile = zip(patient);
        try {
            if (zipFile == null || zipFile.file == null) {
                logger.info("压缩病人档案失败,病人文档未生成,patient_id:" + patient.getPatientId() + ",event_no:" + patient.getEventNo());
                return false;
            }
            boolean result = upload(patient, zipFile, token);
            if (!result) {
                logger.info("上传病人档案失败,patient_id:" + patient.getPatientId() + ",event_no:" + patient.getEventNo());
                FileUtil.deleteDirectory(new File(zipFile.directory));
                return false;
            }
            result = FileUtil.deleteDirectory(new File(zipFile.directory));
            if (!result) {
                logger.info("删除临时文件失败,patient_id:" + patient.getPatientId() + ",event_no:" + patient.getEventNo());
            }
        } catch (Exception e) {
            // BUG FIX: the original swallowed the exception here and fell through
            // to "return true", reporting success even though the upload failed.
            logger.info("上传病人档案异常,patient_id:" + patient.getPatientId() + ",event_no:" + patient.getEventNo(), e);
            if (zipFile != null && zipFile.directory != null) {
                FileUtil.deleteDirectory(new File(zipFile.directory));
            }
            return false;
        }
        return true;
    }
    /**
     * Builds the encrypted zip package from the patient's data directory.
     *
     * @param patient patient whose data directory is zipped
     * @return zip descriptor, or null when no public key is configured or zipping fails
     */
    public ZipFile zip(Patient patient) {
        try {
            PatientCDAIndex patientCDAIndex = new PatientCDAIndex(patient);
            String dataDirectory = patientCDAIndex.getDataDirectory();
            String filePath = patientCDAIndex.createIndex(PatientCDAIndex.IndexType.ZIP, PatientCDAIndex.FileType.ZIP);
            UUID uuidPwd = UUID.randomUUID();
            String pwd = uuidPwd.toString();
            // zip password is encrypted with the org's RSA public key before upload
            Key key = RSA.genPublicKey(SysConfig.getInstance().getPublicKeyMap().get(patient.getOrgCode()));
            if (key == null) {
                logger.info("压缩文件错误,无公钥信息.");
                FileUtil.deleteDirectory(new File(patientCDAIndex.getDirectory()));
                return null;
            }
            ZipFile zipFile = new ZipFile();
            zipFile.encryptPwd = RSA.encrypt(pwd, key);
            Zipper zipper = new Zipper();
            zipFile.file = zipper.zipFile(new File(dataDirectory), filePath, pwd);
            zipFile.dataDirectory = dataDirectory;
            zipFile.directory = patientCDAIndex.getDirectory();
            return zipFile;
        } catch (Exception e) {
            // FIX: removed e.printStackTrace(); the logger call below already
            // records the exception with its stack trace.
            logger.info("从data目录生成zip数据时,压缩文件异常", e);
        }
        return null;
    }
    // POSTs the zip plus its MD5 and encrypted password to the "/packages" endpoint.
    private boolean upload(Patient patient, ZipFile zipFile, String token) {
        try {
            String uploadMethod = EsbHttp.defaultHttpUrl + "/packages";
            String fileMd5 = MD5.getMd5ByFile(zipFile.file);
            Map<String, String> formParams = new HashMap<>();
            formParams.put("md5", fileMd5);
            formParams.put("package_crypto", zipFile.encryptPwd);
            formParams.put("org_code", patient.getOrgCode());
            formParams.put("token", token);
            Map<String, String> header = new HashMap<>();
            header.put("Authorization", "Basic " + EsbHttp.clientKey);
            HTTPResponse response = HttpClientKit.postFile(uploadMethod, zipFile.file.getAbsolutePath(), formParams, header);
            if (response == null) {
                logger.info("上传病人档案请求失败,patient_id:" + patient.getPatientId() + ",event_no:" + patient.getEventNo());
                return false;
            }
            if (response.getStatusCode() != 200) {
                logger.info("上传病人档案请求失败,错误代码:" + response.getStatusCode() + ",patient_id:" + patient.getPatientId() + ",event_no:" + patient.getEventNo());
                return false;
            }
            // the service reports success with {"code": "0"}
            ObjectMapper mapper = new ObjectMapper();
            JsonNode rootNode = mapper.readValue(response.getBody(), JsonNode.class);
            JsonNode codeNode = rootNode.get("code");
            String result = codeNode.asText();
            if (!result.equals("0")) {
                logger.info("上传病人档案失败,错误代码:" + result + ",patient_id:" + patient.getPatientId() + ",event_no:" + patient.getEventNo());
                return false;
            } else {
                logger.info("上传病人档案成功,patient_id:" + patient.getPatientId() + ",event_no:" + patient.getEventNo());
                return true;
            }
        } catch (Exception e) {
            // FIX: removed e.printStackTrace(); record the exception via the logger.
            logger.info("上传病人档案异常,patient_id:" + patient.getPatientId() + ",event_no:" + patient.getEventNo(), e);
            return false;
        }
    }
    // Simple value holder for the produced zip package.
    private class ZipFile {
        public File file;
        public String encryptPwd;
        public String directory;
        public String dataDirectory;
    }
}

+ 0 - 83
src/main/java/com/yihu/hos/crawler/service/PatientCDAIndex.java

@ -1,83 +0,0 @@
package com.yihu.hos.crawler.service;
import com.yihu.hos.crawler.model.config.SysConfig;
import com.yihu.hos.crawler.model.patient.Patient;
import java.util.UUID;
/**
 * Patient file index: builds file-system paths for a patient's archive.
 * Paths are only computed here; their existence is NOT guaranteed.
 *
 * @author Air
 * @version 1.0
 * @created 2015.07.01 18:06
 */
public class PatientCDAIndex {
    private Patient patient;
    public PatientCDAIndex(Patient patient) {
        this.patient = patient;
    }
    /** Root directory: {tempFile}/{orgCode}/{orgCode}-{patientId}-{eventNo}. */
    public String getDirectory() {
        String dir = SysConfig.getInstance().getTempFile();
        return dir + "/" + patient.getOrgCode() + "/" + patient.getOrgCode() + "-" + patient.getPatientId() + "-" + patient.getEventNo();
    }
    /**
     * Patient data directory: orgCode-patientId-eventNo/data
     *
     * @return path of the "data" sub-directory under the root directory
     */
    public String getDataDirectory() {
        return getDirectory() + "/" + IndexType.DATA;
    }
    /** Builds {root}/{indexType}/{random-uuid}{fileType}. */
    public String createIndex(String indexType, String fileType) {
        UUID uuid = UUID.randomUUID();
        String index = uuid.toString();
        String dir = getDirectory() + "/" + indexType;
        return dir + "/" + index + fileType;
    }
    /** Builds {root}/data/{indexType}/dataset_index{fileType} (fixed file name). */
    public String createDataSetIndex(String indexType, String fileType) {
        // FIX: removed an unused UUID.randomUUID() call; this path uses the
        // fixed name "dataset_index", so the random uuid was dead code.
        String index = "dataset_index";
        String dir = getDirectory() + "/" + IndexType.DATA + "/" + indexType;
        return dir + "/" + index + fileType;
    }
    /**
     * Final patient archive locations under data/:
     * data/cda, data/origin, data/standard
     *
     * @param indexType sub-directory under data (see {@link IndexType})
     * @param fileType  file extension (see {@link FileType})
     * @return generated file path
     */
    public String createDataIndex(String indexType, String fileType) {
        return createIndex(IndexType.DATA + "/" + indexType, fileType);
    }
    /** Known file extensions. */
    public class FileType {
        public final static String XML = ".xml";
        public final static String JSON = ".json";
        public final static String ZIP = ".zip";
    }
    /** Sub-directory names inside a patient's archive. */
    public class IndexType {
        public final static String DATA = "data";   // patient archive data directory
        public final static String CDA = "cda";     // patient CDA documents
        public final static String STANDARD = "standard";   // standardized archive
        public final static String ORIGIN = "origin";   // original archive
        public final static String ZIP = "zip";         // zip package directory
        public final static String DOCUMENT = "document";
    }
}

+ 0 - 113
src/main/java/com/yihu/hos/crawler/service/PatientCDAUpload.java

@ -1,113 +0,0 @@
package com.yihu.hos.crawler.service;
import com.yihu.hos.core.compress.Zipper;
import com.yihu.hos.core.encrypt.RSA;
import com.yihu.hos.core.file.FileUtil;
import com.yihu.hos.core.log.Logger;
import com.yihu.hos.core.log.LoggerFactory;
import com.yihu.hos.crawler.model.config.SysConfig;
import com.yihu.hos.crawler.model.patient.Patient;
import java.io.File;
import java.security.Key;
import java.util.UUID;
/**
 * Patient archive upload.
 * Zips the patient's data directory, encrypts the zip password with the
 * organisation's RSA public key (looked up in the local cache, fetched via
 * {@link EsbHttp#getPublicKey} on a miss) and delegates the HTTP upload to
 * {@link EsbHttp#upload}.
 *
 * @author Air
 * @version 1.0
 * @created 2015.07.06 15:58
 */
public class PatientCDAUpload {
    private static Logger logger = LoggerFactory.getLogger(PatientCDAUpload.class);
    public static String uploadMethod;
    /**
     * Zips and uploads one patient's archive, removing the temp directory afterwards.
     *
     * @param patient patient whose archive is uploaded
     * @param token   session token forwarded to the remote service
     * @return true only if the package was built, uploaded and cleaned up
     * @modify 2015.09.15 airhead revised directory deletion
     * @modify 2015.09.19 airhead fixed missing-document handling and error messages
     */
    public Boolean upload(Patient patient, String token) {
        ZipFile zipFile = zip(patient);
        try {
            if (zipFile == null || zipFile.file == null) {
                logger.info("压缩病人档案失败,病人文档未生成,patient_id:" + patient.getPatientId() + ",event_no:" + patient.getEventNo());
                return false;
            }
            boolean result = upload(patient, zipFile, token);
            if (!result) {
                logger.info("上传病人档案失败,patient_id:" + patient.getPatientId() + ",event_no:" + patient.getEventNo());
                return result;
            }
            logger.trace(zipFile.directory);
            result = FileUtil.deleteDirectory(new File(zipFile.directory));
            if (!result) {
                logger.info("删除临时文件失败,patient_id:" + patient.getPatientId() + ",event_no:" + patient.getEventNo());
                return result;
            }
        } catch (Exception e) {
            // FIX: log the failure instead of silently swallowing the exception;
            // also guard against zipFile being unusable before cleaning up.
            logger.error("上传病人档案异常,patient_id:" + patient.getPatientId() + ",event_no:" + patient.getEventNo() + "," + e);
            if (zipFile != null && zipFile.directory != null) {
                FileUtil.deleteDirectory(new File(zipFile.directory));
            }
            return false;
        }
        return true;
    }
    /**
     * Builds the encrypted zip package from the patient's data directory.
     *
     * @param patient patient whose data directory is zipped
     * @return zip descriptor, or null when the public key is missing or zipping fails
     */
    public ZipFile zip(Patient patient) {
        try {
            PatientCDAIndex patientCDAIndex = new PatientCDAIndex(patient);
            String dataDirectory = patientCDAIndex.getDataDirectory();
            String filePath = patientCDAIndex.createIndex(PatientCDAIndex.IndexType.ZIP, PatientCDAIndex.FileType.ZIP);
            UUID uuidPwd = UUID.randomUUID();
            String pwd = uuidPwd.toString();
            // cache-then-fetch of the org's RSA public key
            String publicKey = SysConfig.getInstance().getPublicKeyMap().get(patient.getOrgCode());
            if (publicKey == null || publicKey.length() == 0) {
                publicKey = EsbHttp.getPublicKey(patient.getOrgCode());
                SysConfig.getInstance().getPublicKeyMap().put(patient.getOrgCode(), publicKey);
            }
            Key key = RSA.genPublicKey(publicKey);
            if (key == null) {
                logger.info("压缩文件错误,获取公钥错误.");
                return null;
            }
            ZipFile zipFile = new ZipFile();
            zipFile.encryptPwd = RSA.encrypt(pwd, key);
            Zipper zipper = new Zipper();
            zipFile.file = zipper.zipFileForAll(new File(dataDirectory), filePath, pwd);
            zipFile.dataDirectory = dataDirectory;
            zipFile.directory = patientCDAIndex.getDirectory();
            return zipFile;
        } catch (Exception e) {
            logger.error("从data目录生成zip数据时,压缩文件异常");
            // BUG FIX: e.getCause() can be null, which made this catch block
            // itself throw a NullPointerException; log the exception directly.
            logger.error(String.valueOf(e));
        }
        return null;
    }
    // Delegates the actual HTTP upload to the shared EsbHttp helper.
    private boolean upload(Patient patient, ZipFile zipFile, String token) {
        return EsbHttp.upload(patient, zipFile.file, zipFile.encryptPwd, token);
    }
    // Simple value holder for the produced zip package.
    private class ZipFile {
        public File file;
        public String encryptPwd;
        public String directory;
        public String dataDirectory;
    }
}

+ 0 - 37
src/main/java/com/yihu/hos/crawler/storage/DataSetStorage.java

@ -1,37 +0,0 @@
package com.yihu.hos.crawler.storage;
import com.yihu.hos.crawler.format.IDataTransformer;
import com.yihu.hos.crawler.model.patient.Patient;
import com.yihu.hos.crawler.format.AdapterScheme;
/**
 * Mongo-backed storage for structured data sets, keyed by the data set "code".
 * Created by Administrator on 2015/10/15.
 */
public class DataSetStorage extends MongodbStorage {
    /** Field name used to identify a data-set document. */
    public static final String KEY_CODE = "code";

    public DataSetStorage(AdapterScheme adapterScheme, String dbName) {
        super(adapterScheme, dbName);
    }

    /** Intentionally a no-op in this subclass; always reports success. */
    @Override
    public boolean save(IDataTransformer dataTransformer) {
        return true;
    }

    /**
     * Intentionally a no-op in this subclass; always reports success.
     *
     * @param patient patient whose archive would be written to the data directory
     */
    @Override
    public boolean toFile(Patient patient) {
        return true;
    }

    /** Lookup key field for this storage ("code"). */
    @Override
    public String getKey() {
        return KEY_CODE;
    }
}

+ 0 - 231
src/main/java/com/yihu/hos/crawler/storage/DocumentStorage.java

@ -1,231 +0,0 @@
package com.yihu.hos.crawler.storage;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.mongodb.client.FindIterable;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoCursor;
import com.mongodb.client.model.UpdateOptions;
import com.yihu.hos.common.mongo.MongoDB;
import com.yihu.hos.core.datatype.DateUtil;
import com.yihu.hos.core.encode.Base64;
import com.yihu.hos.core.file.FileUtil;
import com.yihu.hos.crawler.format.AdapterScheme;
import com.yihu.hos.crawler.format.DocumentTransformer;
import com.yihu.hos.crawler.format.IDataTransformer;
import com.yihu.hos.crawler.model.patient.Patient;
import com.yihu.hos.crawler.service.EsbHttp;
import com.yihu.hos.crawler.service.PatientCDAIndex;
import org.bson.Document;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Iterator;
import static com.mongodb.client.model.Filters.and;
import static com.mongodb.client.model.Filters.eq;
import static com.mongodb.client.model.Projections.excludeId;
//import com.yihu.common.util.log.DebugLogger;
/**
 * Mongo-backed storage for unstructured documents, keyed by the "catalog" field.
 * Created by Administrator on 2015/10/15.
 */
public class DocumentStorage extends MongodbStorage {
        public static final String KEY_CODE = "catalog";
    public DocumentStorage(AdapterScheme adapterScheme, String dbName) {
        super(adapterScheme, dbName);
    }
    /**
     * Upserts one transformed document into the collection named after its
     * catalog code, then stamps create_at, resource.url and resource.expiry_date.
     * Returns false when the payload is null or on any Mongo error (errors are
     * swallowed; the commented-out DebugLogger call was the original logging).
     */
    @Override
    public boolean save(IDataTransformer dataTransformer) {
        DocumentTransformer documentTransformer = (DocumentTransformer) dataTransformer;
        JsonNode jsonObject = documentTransformer.getJsonObject();
        if (jsonObject != null) {
            String dataSetCode = jsonObject.get(getKey()).asText();
            // NOTE(review): documentId duplicates dataSetCode and is never used.
            String documentId = jsonObject.path(getKey()).asText();
            String patientId = jsonObject.get(PATIENT_ID).asText();
            String eventNo = jsonObject.get(EVENT_NO).asText();
            try {
                MongoCollection<Document> collection = MongoDB.db(dbName).getCollection(dataSetCode);
                createIndex(collection);    // create indexes
                Document filter = new Document();
                filter.append(PATIENT_ID, patientId);
                filter.append(EVENT_NO, eventNo);
                // delete-then-replace: clears any previous documents for this
                // patient/event before upserting the new one
                collection.deleteMany(filter);
                UpdateOptions updateOptions = new UpdateOptions();
                updateOptions.upsert(true);
                collection.replaceOne(filter, Document.parse(jsonObject.toString()), updateOptions);
                String url = createUrl(dataSetCode, patientId, eventNo);
                Date expiryDate = DateUtil.setDateTime(new Date(), getExpireDays().intValue());
                SimpleDateFormat sdf = new SimpleDateFormat(DateUtil.DEFAULT_YMDHMSDATE_FORMAT);
                String date = sdf.format(expiryDate);
                Document updateDoc = new Document(CREATE_AT, new Date());
                updateDoc.put("resource.url", url);
                updateDoc.put("resource.expiry_date", date);
                collection.updateMany(filter, new Document("$set", updateDoc));
            } catch (Exception e) {
                //DebugLogger.fatal("保存病人档案信息至MongoDB异常:", e);
                return false;
            }
            return true;
        }
        return false;
    }
    /**
     * Generates the patient's archive into the data directory: collects every
     * document for this patient/event across all collections, writes embedded
     * file contents out as real files, rewrites key_words, and finally writes
     * a meta.json summary. Returns false if any step failed (best-effort).
     *
     * @param patient patient whose archive is generated
     */
    @Override
    public boolean toFile(Patient patient) {
        boolean result = true;
        PatientCDAIndex patientCDAIndex = new PatientCDAIndex(patient);
        ObjectMapper mapper = new ObjectMapper();
        ArrayNode arrayNode=mapper.createArrayNode();
        ObjectNode resultNode=mapper.createObjectNode();
        try {
            for (String name : MongoDB.db(dbName).listCollectionNames()) {
                MongoCollection<Document> collection = MongoDB.db(dbName).getCollection(name);
                FindIterable<Document> documents = collection.find(and(eq("patient_id", patient.getPatientId()), eq(EVENT_NO, patient.getEventNo()))).projection(excludeId());
                try (MongoCursor<Document> cursor = documents.iterator()) {
                    while (cursor.hasNext()) {
                        try {
                            String document = cursor.next().toJson();
                            ObjectNode rootNode = mapper.readValue(document, ObjectNode.class);
                            JsonNode jsonNode = rootNode.get("data");
                            boolean array = jsonNode.isArray();
                            if (!array) {
                                continue;
                            }
                            // NOTE(review): each iteration REPLACES arrayNode, so
                            // only the last matching document's data survives.
                            arrayNode=genunStructureData(jsonNode,patientCDAIndex);
                        } catch (IOException e) {
                            e.printStackTrace();
                            //DebugLogger.fatal("存储临时文件失败.");
                            result = false;
                        }
                    }
                } catch (Exception e) {
                    //DebugLogger.fatal("", e);
                    result = false;
                }
            }
            String innerVersion =  EsbHttp.getRemoteVersion(patient.getOrgCode());
            // normalize key_words keys ("-" -> ".") in every collected entry
            for (int i = 0; i != arrayNode.size(); ++i) {
                JsonNode keyWordsNode = arrayNode.get(i).path("key_words");
                ObjectNode newNode=mapper.createObjectNode();
                JsonNode jsonNode= transformKeyWords(keyWordsNode, newNode);
                ((ObjectNode) arrayNode.get(i)).set("key_words", jsonNode);
            }
            resultNode.set("data", arrayNode);
            resultNode.put("patient_id", patient.getPatientId());
            resultNode.put("event_no",patient.getEventNo());
            resultNode.put("org_code",patient.getOrgCode());
            resultNode.put("event_time",patient.getReferenceTime());
            resultNode.put("inner_version",innerVersion);
            String indexPath = patientCDAIndex.getDataDirectory()+"/"+"meta.json";
            boolean writeFile = FileUtil.writeFile(indexPath, mapper.writeValueAsString(resultNode), "UTF-8");
        } catch (Exception e) {
            //DebugLogger.fatal("", e);
            result = false;
        }
        return result;
    }
    /** Not supported for document storage; always returns null. */
    @Override
    public String getDataSet(Patient patient, String dataSetCode) {
        return null;
    }
    /** Lookup key field for this storage ("catalog"). */
    @Override
    public String getKey(){
        return KEY_CODE;
    }
    /**
     * Generates the unstructured part of meta.json: for each document entry,
     * decodes every base64 "file_content" blob to a file under the data
     * directory and replaces the blob with the generated file names.
     *
     * @param jsonNode        "data" array of document entries
     * @param patientCDAIndex path builder for the patient's archive
     * @return the rewritten entries
     * @throws IOException on file-write failure
     */
    public ArrayNode genunStructureData(JsonNode jsonNode,PatientCDAIndex patientCDAIndex) throws IOException {
        ObjectMapper mapper=new ObjectMapper();
        ArrayNode arrayNode=mapper.createArrayNode();
        for (int i = 0; i != jsonNode.size(); ++i) {
            JsonNode documentNode = jsonNode.get(i);
            JsonNode contentNode=documentNode.path("content");
            if (contentNode.isArray()){
                for (int j = 0; j< contentNode.size(); j++) {
                    JsonNode fileArr = contentNode.get(j);
//                    String mimeType = fileArr.path("mime_type").asText();// file type
                    String names = fileArr.path("name").asText();
                    String fileType=names.substring(names.lastIndexOf("."));// file extension
                    JsonNode file=fileArr.path("file_content");// file content (base64 per key)
                    Iterator<String> fileNames = file.fieldNames();
                    StringBuilder stringBuilder=new StringBuilder();
                    while (fileNames.hasNext()){
                        String key=fileNames.next();
                        String content =file.path(key).asText();
                        String filePath = patientCDAIndex.createDataIndex(dbName, fileType);
                        String fileName = filePath.substring(filePath.lastIndexOf("/")+1);
                        byte[]  fileContent = Base64.decode(content);
                        boolean writeFile = FileUtil.writeFile(filePath, fileContent, "UTF-8");
                        if (!writeFile) {
                            //DebugLogger.fatal("存储临时文件失败.");
                        } else {
                            // collect generated names; trailing comma is kept as-is
                            stringBuilder.append(fileName).append(",");
                        }
                    }
                    if (file.isObject()) {
                        ((ObjectNode) fileArr).put("name", stringBuilder.toString());
                        ((ObjectNode) fileArr).remove("file_content");
                    }
                }
            }
            arrayNode.add(documentNode);
        }
        return arrayNode;
    }
    /**
     * Rewrites key_words field names, replacing every "-" in a key with ".".
     *
     * @param keyWordsNode  source key_words object
     * @param newObjectNode target object the rewritten pairs are put into
     * @return newObjectNode, populated
     */
    public ObjectNode transformKeyWords(JsonNode keyWordsNode, ObjectNode newObjectNode){
        Iterator<String> iterator = keyWordsNode.fieldNames();
        while (iterator.hasNext()){
            String key=iterator.next();
            String value =keyWordsNode.path(key).asText();
            String newKey=key.replaceAll("-",".");
            newObjectNode.put(newKey,value);
        }
        return newObjectNode;
    }
}

+ 0 - 36
src/main/java/com/yihu/hos/crawler/storage/IDataStorage.java

@ -1,36 +0,0 @@
package com.yihu.hos.crawler.storage;
import com.yihu.hos.crawler.format.IDataTransformer;
import com.yihu.hos.crawler.model.patient.Patient;
import java.util.Map;
/**
 * Storage interface; implementations should only be concerned with persistence.
 * Known issues to revise (from the original author):
 * 1. no read/query counterpart for save
 * 2. toFile goes beyond a storage responsibility
 *
 * @author Airhead
 * @version 1.0
 * @created 22-5-2015 11:24:26
 */
public interface IDataStorage {
    /** Persists one transformed record; true on success. */
    boolean save(IDataTransformer dataTransformer);
    /** Writes the patient's archive out to the data directory; true on success. */
    boolean toFile(Patient patient);
    /** Returns the matching data-set record (JSON in known impls), or null. */
    String getDataSet(Patient patient, String dataSetCode);
    /** Returns archive data matching the filter params, or null when absent. */
    String getArchive(String dataSetCode, Map<String, Object> params);
    /** Whether an archive for this org/patient/event is already stored. */
    Boolean isStored(String orgCode, String patientID, String eventNo);
    /** Supported backing stores. */
    enum StorageType {
        MYSQL_DB,
        MONGODB,
        FILE_SYSTEM
    }
}

+ 0 - 390
src/main/java/com/yihu/hos/crawler/storage/MongodbStorage.java

@ -1,390 +0,0 @@
package com.yihu.hos.crawler.storage;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.mongodb.BasicDBObject;
import com.mongodb.client.FindIterable;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoCursor;
import com.mongodb.client.model.IndexOptions;
import com.mongodb.client.model.UpdateOptions;
//import com.yihu.common.util.log.DebugLogger;
import com.yihu.hos.common.mongo.MongoDB;
import com.yihu.hos.crawler.format.DataSetTransformer;
import com.yihu.hos.crawler.format.IDataTransformer;
import com.yihu.hos.crawler.model.config.SysConfig;
import com.yihu.hos.crawler.model.patient.Patient;
import com.yihu.hos.crawler.model.patient.PatientIdentity;
import com.yihu.hos.crawler.model.patient.PatientIndex;
import com.yihu.hos.crawler.format.AdapterScheme;
import com.yihu.hos.crawler.service.PatientCDAIndex;
import com.yihu.hos.web.framework.util.file.ConfigureUtil;
import com.yihu.hos.core.file.FileUtil;
import com.yihu.hos.core.datatype.DateUtil;
import com.yihu.hos.core.datatype.NumberUtil;
import org.bson.Document;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.concurrent.TimeUnit;
import static com.mongodb.client.model.Filters.and;
import static com.mongodb.client.model.Filters.eq;
import static com.mongodb.client.model.Projections.excludeId;
/**
 * Archive data storage backed exclusively by Mongo.
 * At this stage only two data kinds exist:
 * 1. structured: data sets
 * 2. unstructured: documents (pictures, Word, PDF, video etc.)
 *
 * @author Air
 * @version 1.0
 * @created 2015.07.06 10:38
 */
public class MongodbStorage implements IDataStorage {
    public static final String KEY = "code";
    public static final String PATIENT_ID = "patient_id";
    public static final String EVENT_NO = "event_no";
    public static final String CREATE_AT = "create_at";
    public static final String CREATE_TIME = "create_time";
    public static final String ORG_CODE = "org_code";
    // TTL (expiry) index name; NOTE(review): "ceate" looks like a typo but may
    // match an existing index name in Mongo — confirm before fixing.
    public static final String TTL_INDEX = "ceate_at_1";
    // old TTL index name, obsolete; kept only for dropping the old index
    public static final String TTL_INDEX_EXPIRED = "create_time_1";
    public static final String INNER_VERSION = "inner_version";
    public static final String EVENT_TIME = "event_time";
    protected String dbName;                // Mongo database name used by all operations
    protected AdapterScheme adapterScheme;  // adapter scheme (usage not visible in this chunk)
    /**
     * @param adapterScheme adapter scheme kept for subclasses
     * @param dbName        Mongo database to read/write
     */
    public MongodbStorage(AdapterScheme adapterScheme, String dbName) {
        this.adapterScheme = adapterScheme;
        this.dbName = dbName;
    }
    /**
     * Upserts one transformed data set into the collection named after its
     * "code" field, then stamps create_at, resource.url and resource.expiry_date.
     * Returns false when the payload is null or on any Mongo error (errors are
     * swallowed; the commented-out DebugLogger call was the original logging).
     */
    @Override
    public boolean save(IDataTransformer dataTransformer) {
        DataSetTransformer dataSetTransformer = (DataSetTransformer) dataTransformer;
        ObjectNode jsonObject = (ObjectNode) dataSetTransformer.getJsonObject();
        if (jsonObject != null) {
            String dataSetCode = jsonObject.get(getKey()).asText();
            String patientId = jsonObject.get(PATIENT_ID).asText();
            String eventNo = jsonObject.get(EVENT_NO).asText();
            try {
                MongoCollection<Document> collection = MongoDB.db(dbName).getCollection(dataSetCode);
                createIndex(collection);    // create indexes
                Document filter = new Document();
                filter.append(PATIENT_ID, patientId);
                filter.append(EVENT_NO, eventNo);
                // delete-then-replace: clears previous documents for this
                // patient/event before upserting the new one
                collection.deleteMany(filter);
                UpdateOptions updateOptions = new UpdateOptions();
                updateOptions.upsert(true);
                collection.replaceOne(filter, Document.parse(jsonObject.toString()), updateOptions);
                String url = createUrl(dataSetCode, patientId, eventNo);
                Date expiryDate = DateUtil.setDateTime(new Date(), getExpireDays().intValue());
                SimpleDateFormat sdf = new SimpleDateFormat(DateUtil.DEFAULT_YMDHMSDATE_FORMAT);
                String date = sdf.format(expiryDate);
                Document updateDoc = new Document(CREATE_AT, new Date());
                updateDoc.put("resource.url", url);
                updateDoc.put("resource.expiry_date", date);
                collection.updateMany(filter, new Document("$set", updateDoc));
            } catch (Exception e) {
                //DebugLogger.fatal("保存病人档案信息至MongoDB异常:", e);
                return false;
            }
            return true;
        }
        return false;
    }
    /**
     * Generates the patient's archive into the data directory: writes every
     * matching document of every collection to its own JSON file and collects
     * summary fields (visit_type, expiry_date, ...). Returns false if any file
     * write failed; Mongo errors are swallowed (best-effort style).
     *
     * @param patient patient whose archive is generated
     */
    @Override
    public boolean toFile(Patient patient) {
        boolean result = true;
        PatientCDAIndex patientCDAIndex = new PatientCDAIndex(patient);
        Document datasetDoc = new Document();
        Document resultDoc = new Document();
        try {
            // generate files; in lightweight mode the data directory must be cleared
            for (String name : MongoDB.db(dbName).listCollectionNames()) {
                MongoCollection<Document> collection = MongoDB.db(dbName).getCollection(name);
                FindIterable<Document> documents = collection.find(and(eq(PATIENT_ID, patient.getPatientId()), eq(EVENT_NO, patient.getEventNo()))).projection(excludeId());
                try (MongoCursor<Document> cursor = documents.iterator()) {
                    while (cursor.hasNext()) {
                        String filePath = patientCDAIndex.createDataIndex(dbName, PatientCDAIndex.FileType.JSON);
                        try {
                            Document doc = cursor.next();
                            // HDSC01_02 / HDSC02_09 carry the event header fields
                            if ("HDSC01_02".equals(name) || "HDSC02_09".equals(name)) {
                                resultDoc.put(PATIENT_ID, doc.get(PATIENT_ID));
                                resultDoc.put(EVENT_NO, doc.get(EVENT_NO));
                                resultDoc.put(ORG_CODE, doc.get(ORG_CODE));
                                resultDoc.put(INNER_VERSION, doc.get(INNER_VERSION));
                                resultDoc.put(EVENT_TIME, doc.get(EVENT_TIME));
                                if ("HDSC01_02".equals(name)) {
                                    resultDoc.put("visit_type", "1");
                                } else {
                                    resultDoc.put("visit_type", "2");// temporary convention; later to be fetched from the dictionary
                                }
                            }
                            Map<String, String> resource = (Map<String, String>) doc.get("resource");
                            datasetDoc.put(name, "");
                            resultDoc.put("expiry_date", resource.get("expiry_date"));
                            boolean writeFile = FileUtil.writeFile(filePath, doc.toJson(), "UTF-8");
                            if (!writeFile) {
                                //DebugLogger.fatal("存储临时文件失败:" + cursor.next().toJson());
                                result = false;
                            }
                        } catch (IOException e) {
                            //DebugLogger.fatal("存储临时文件失败.", e);
                            result = false;
                        }
                    }
                } catch (Exception e) {
                    //DebugLogger.fatal("", e);
                }
            }
            // summary/index generation — currently disabled
//            Document indexData = genPatientIndexData(patient);
//            if (indexData != null) {
//                resultDoc.put("dataset", datasetDoc);
//                resultDoc.put("sumary", indexData);
//                String indexPath = patientCDAIndex.createDataSetIndex("index", PatientCDAIndex.FileType.JSON);
//                boolean writeFile = FileUtil.writeFile(indexPath, resultDoc.toJson(), "UTF-8");
//                if (!writeFile) {
//                    //DebugLogger.fatal("存储索引临时文件失败:" + resultDoc.toJson());
//                    result = false;
//                }
//            }
        } catch (Exception e) {
            //DebugLogger.fatal("", e);
            result = false;
        }
        return result;
    }
    /**
     * Fetches the single data-set document matching the patient's id, event
     * number and organization code, and returns it as a JSON string.
     * Returns null when no document matches or when the lookup fails.
     */
    @Override
    public String getDataSet(Patient patient, String dataSetCode) {
        try {
            MongoCollection<Document> col = MongoDB.db(dbName).getCollection(dataSetCode);
            Document match = col
                    .find(and(eq(PATIENT_ID, patient.getPatientId()),
                              eq(EVENT_NO, patient.getEventNo()),
                              eq(ORG_CODE, patient.getOrgCode())))
                    .projection(excludeId())
                    .first();
            return match == null ? null : match.toJson();
        } catch (Exception e) {
            //DebugLogger.fatal("", e);
        }
        return null;
    }
    /**
     * Looks up archive documents in the given data-set collection using the
     * supplied equality conditions (combined implicitly as AND) and returns
     * the JSON of the LAST matching document in cursor order.
     *
     * Fix: removed the unused local {@code boolean result} (dead code).
     *
     * @param dataSetCode data-set code, used as the Mongo collection name
     * @param params      field name -> expected value pairs for the query
     * @return JSON string of the last matching document, or null when nothing
     *         matches or an error occurs
     */
    @Override
    public String getArchive(String dataSetCode, Map<String, Object> params) {
        String data = null;
        try {
            MongoCollection<Document> collection = MongoDB.db(dbName).getCollection(dataSetCode);
            BasicDBObject query = new BasicDBObject();
            for (Map.Entry<String, Object> entry : params.entrySet()) {
                query.put(entry.getKey(), entry.getValue());
            }
            try (MongoCursor<Document> cursor = collection.find(query).iterator()) {
                while (cursor.hasNext()) {
                    // NOTE(review): each iteration overwrites data, so only the
                    // last matching document survives — confirm this is intended.
                    data = cursor.next().toJson();
                }
            } catch (Exception e) {
                //DebugLogger.fatal("", e);
            }
        } catch (Exception e) {
            //DebugLogger.fatal("", e);
        }
        return data;
    }
    /**
     * Checks whether any configured patient-identity data set already holds a
     * document for the given organization / patient / event combination.
     * Returns false on any lookup error (best effort, matching file style).
     */
    @Override
    public Boolean isStored(String orgCode, String patientID, String eventNo) {
        HashMap<String, PatientIdentity> identityMap = SysConfig.getInstance().getPatientIdentityHashMap();
        try {
            for (String datasetCode : identityMap.keySet()) {
                Document filter = new Document()
                        .append(ORG_CODE, orgCode)
                        .append(PATIENT_ID, patientID)
                        .append(EVENT_NO, eventNo);
                Document hit = MongoDB.db(dbName).getCollection(datasetCode).find(filter).first();
                if (hit != null) {
                    return true;
                }
            }
        } catch (Exception e) {
            //DebugLogger.fatal("", e);
        }
        return false;
    }
    /**
     * Ensures the collection carries the TTL index on CREATE_AT (expiring
     * after getExpireDays() days) and a compound (PATIENT_ID, EVENT_NO) index.
     * A legacy TTL index named TTL_INDEX_EXPIRED is dropped when encountered;
     * when the current TTL index is already present, nothing is (re)created.
     */
    protected void createIndex(MongoCollection<Document> collection) {
        for (Document existing : collection.listIndexes()) {
            if (existing.get("name").equals(TTL_INDEX_EXPIRED)) {
                collection.dropIndex(TTL_INDEX_EXPIRED);  // drop the legacy TTL index
            } else if (existing.get("name").equals(TTL_INDEX)) {
                return;  // up-to-date TTL index already exists
            }
        }
        IndexOptions ttlOptions = new IndexOptions()
                .expireAfter(getExpireDays(), TimeUnit.DAYS)
                .name(TTL_INDEX);
        collection.createIndex(new Document(CREATE_AT, 1), ttlOptions);
        collection.createIndex(new Document().append(PATIENT_ID, 1).append(EVENT_NO, 1));
    }
    /**
     * Builds the archive request URL as base + dataSetCode + "/" + patientId
     * + "/" + eventNo, where the base is read from archive.properties
     * (key: hos.archives.request.url).
     *
     * @param dataSetCode data-set code segment
     * @param patientId   patient id segment
     * @param eventNo     event number segment
     * @return the assembled request URL
     */
    protected String createUrl(String dataSetCode, String patientId, String eventNo) {
        String base = ConfigureUtil.getProValue("archive.properties", "hos.archives.request.url");
        StringBuilder url = new StringBuilder(base);
        url.append(dataSetCode).append("/").append(patientId).append("/").append(eventNo);
        return url.toString();
    }
    /** @return the document field name used as the data-set key (the KEY constant). */
    protected String getKey() {
        return KEY;
    }
    /**
     * Reads the archive TTL in days from archive.properties
     * (key: hos.archives.expiry.days); falls back to 30 days when the value
     * is missing or not parseable as a long.
     */
    protected Long getExpireDays() {
        String configured = ConfigureUtil.getProValue("archive.properties","hos.archives.expiry.days");
        Long parsed = NumberUtil.toLong(configured);
        if (parsed != null) {
            return parsed;
        }
        return 30L;
    }
    /**
     * Builds the patient summary (index) document from the data sets
     * configured in sys.config.
     *
     * For every configured data set, the matching documents' "data" entries
     * are projected onto summary nodes; when a data set yields entries, its
     * associated diagnosis data set is summarized as well. "HDSC02_09"
     * additionally carries the discharge (leave) time.
     *
     * Fixes over the previous version:
     * - cursor leak: cursors opened in earlier loop iterations were never
     *   closed (only the last pair was closed in the finally block); each
     *   cursor is now closed via try-with-resources.
     * - removed the dead {@code result == null} check (result is never null).
     *
     * @param patient patient whose summary is generated
     * @return the summary Document (possibly empty), or null when an
     *         exception occurs during generation
     */
    protected Document genPatientIndexData(Patient patient) {
        Map<String, PatientIndex> patientIndexMap = SysConfig.getInstance().getPatientIndexMap();
        Document result = new Document();
        try {
            for (Map.Entry<String, PatientIndex> entry : patientIndexMap.entrySet()) {
                String dataSetCode = entry.getKey();
                PatientIndex patientIndex = entry.getValue();
                List<Document> events = new ArrayList<>();
                MongoCollection<Document> collection = MongoDB.db(dbName).getCollection(dataSetCode);
                FindIterable<Document> documents = collection.find(and(eq(KEY, dataSetCode), eq(PATIENT_ID, patient.getPatientId()), eq(EVENT_NO, patient.getEventNo()))).projection(excludeId());
                try (MongoCursor<Document> cursor = documents.iterator()) {
                    if (!cursor.hasNext()) {
                        continue;  // no data for this data set: skip it and its diagnosis set
                    }
                    while (cursor.hasNext()) {
                        Document document = cursor.next();
                        List<Document> list = document.get("data", List.class);
                        for (Document doc : list) {
                            Document node = new Document();
                            node.put(patientIndex.getPatientId(), patient.getPatientId());
                            node.put(patientIndex.getEventNoCode(), patient.getEventNo());
                            node.put(patientIndex.getRefTimeCode(), getStringField(doc, patientIndex.getRefTimeCode()));
                            node.put("orgCode", patient.getOrgCode());
                            node.put(patientIndex.getOfficeCode(), getStringField(doc, patientIndex.getOfficeCode()));
                            node.put(patientIndex.getOfficeName(), getStringField(doc, patientIndex.getOfficeName()));
                            if ("HDSC02_09".equals(dataSetCode)) {
                                // the inpatient data set additionally carries the discharge time
                                node.put(patientIndex.getLeaveTime(), getStringField(doc, patientIndex.getLeaveTime()));
                            }
                            events.add(node);
                        }
                    }
                }
                if (events.isEmpty()) {
                    continue;  // nothing collected: do not query the diagnosis set (matches old behavior)
                }
                result.put(dataSetCode, events);
                String diagDataSet = patientIndex.getDiagDataSet();
                MongoCollection<Document> diagCollection = MongoDB.db(dbName).getCollection(diagDataSet);
                FindIterable<Document> diags = diagCollection.find(and(eq(KEY, diagDataSet), eq(PATIENT_ID, patient.getPatientId()), eq(EVENT_NO, patient.getEventNo()))).projection(excludeId());
                List<Document> diagNodes = new ArrayList<>();
                try (MongoCursor<Document> diagCursor = diags.iterator()) {
                    while (diagCursor.hasNext()) {
                        Document document = diagCursor.next();
                        List<Document> list = document.get("data", List.class);
                        for (Document doc : list) {
                            Document node = new Document();
                            node.put(patientIndex.getDiagType(), getStringField(doc, patientIndex.getDiagType()));
                            node.put(patientIndex.getDiagCode(), getStringField(doc, patientIndex.getDiagCode()));
                            node.put(patientIndex.getDiagName(), getStringField(doc, patientIndex.getDiagName()));
                            diagNodes.add(node);
                        }
                    }
                }
                if (!diagNodes.isEmpty()) {
                    result.put(diagDataSet, diagNodes);
                }
            }
            return result;
        } catch (Exception e) {
            //DebugLogger.fatal("", e);
        }
        return null;
    }

    /** Null-safe read of a String field: returns null when absent, else casts to String. */
    private static String getStringField(Document doc, String key) {
        Object value = doc.get(key);
        return value == null ? null : (String) value;
    }
}

+ 1 - 1
src/main/java/com/yihu/hos/common/CheckLoginFilter.java

@ -1,4 +1,4 @@
package com.yihu.hos.common;
package com.yihu.hos.filter;
import javax.servlet.*;

+ 1 - 1
src/main/java/com/yihu/hos/common/SessionOutTimeFilter.java

@ -1,4 +1,4 @@
package com.yihu.hos.common;
package com.yihu.hos.filter;
import org.springframework.stereotype.Component;
import org.springframework.web.filter.OncePerRequestFilter;

+ 0 - 55
src/main/java/com/yihu/hos/services/ArchiveUploadJob.java

@ -1,55 +0,0 @@
package com.yihu.hos.services;
import com.yihu.hos.common.Services;
import com.yihu.hos.core.datatype.DateUtil;
import com.yihu.hos.core.log.Logger;
import com.yihu.hos.core.log.LoggerFactory;
import com.yihu.hos.crawler.service.CrawlerManager;
import com.yihu.hos.datacollect.model.RsJobConfig;
import com.yihu.hos.datacollect.service.intf.IDatacollectManager;
import com.yihu.hos.web.framework.model.DataGridResult;
import com.yihu.hos.web.framework.util.springutil.SpringBeanUtil;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
/**
 * Scheduled job that runs one archive collect-and-upload pass.
 *
 * Created by hzp on 2016/5/11.
 */
public class ArchiveUploadJob implements IBaseJob {
    private static Logger logger = LoggerFactory.getLogger(ArchiveUploadJob.class);
    /**
     * Executes the job identified by jobId: loads its config, computes the
     * [begin, end] collection window, runs the crawler over the job's data
     * sets and advances the window on the job config for the next run.
     *
     * @param jobId id of the RsJobConfig to execute
     * @throws Exception propagated from config lookup / crawling / update
     */
    @Override
    public void execute(String jobId) throws Exception{
        String random = UUID.randomUUID().toString();
        logger.info("档案采集上传开始,流水号:" + random + ",jobId:"+jobId);
        IDatacollectManager datacollect = SpringBeanUtil.getService(Services.Datacollect);
        RsJobConfig job = datacollect.getJobById(jobId);
        Map<String, Object> condition = new HashMap<>();
        Date begin = job.getRepeatStartTime();
        Date end = job.getRepeatEndTime();
        if (!job.getJobType().equals("0")) {
            // adjust the end time: now minus the configured delay offset
            end = DateUtil.addDate(-job.getDelayTime(), DateUtil.getSysDateTime());
            if ((end.getTime() - begin.getTime()) <= 0) {
                return; // skip collection when the adjusted end is not after the start
            }
        }
        condition.put("beginDate", begin);
        condition.put("endDate", end);
        DataGridResult result = datacollect.getJobDatasetByJobId(jobId);
        CrawlerManager crawlerManager = new CrawlerManager(result.getDetailModelList(), job.getSchemeVersion());
        String message = crawlerManager.dataCrawler(condition);
        if (!job.getJobType().equals("0")) {
            // slide the window forward so the next run starts where this one ended
            job.setRepeatStartTime(end);
            job.setRepeatEndTime(DateUtil.formatYMDToYMDHMS(DateUtil.getSysDateTime().toString()));
        }
        datacollect.updateJob(job);
        logger.info("档案采集上传结束,流水号:" + random + ",jobId:" + jobId + ",message:" + message);
        return;
    }
}

+ 0 - 56
src/main/java/com/yihu/hos/services/CrawlerFlowUploadJob.java

@ -1,56 +0,0 @@
package com.yihu.hos.services;
import com.yihu.hos.common.Services;
import com.yihu.hos.core.datatype.DateUtil;
import com.yihu.hos.core.log.Logger;
import com.yihu.hos.core.log.LoggerFactory;
import com.yihu.hos.crawler.service.CrawlerFlowManager;
import com.yihu.hos.datacollect.model.RsJobConfig;
import com.yihu.hos.datacollect.service.intf.IDatacollectManager;
import com.yihu.hos.web.framework.model.DataGridResult;
import com.yihu.hos.web.framework.util.springutil.SpringBeanUtil;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
/**
 * Scheduled job that runs one flow-orchestrated archive collect-and-upload pass.
 *
 * Created by chenyingjie on 16/7/14.
 */
public class CrawlerFlowUploadJob implements IBaseJob {
    private static Logger logger = LoggerFactory.getLogger(CrawlerFlowUploadJob.class);
    /**
     * Executes the orchestrated job identified by jobId: loads its config,
     * computes the [begin, end] collection window, runs the flow crawler over
     * the job's data sets and advances the window for the next run.
     *
     * @param jobId id of the RsJobConfig to execute
     * @throws Exception propagated from config lookup / crawling / update
     */
    @Override
    public void execute(String jobId) throws Exception {
        String random = UUID.randomUUID().toString();
        logger.info("任务编排——档案采集上传开始,流水号:" + random + ",jobId:"+jobId);
        IDatacollectManager datacollect = SpringBeanUtil.getService(Services.Datacollect);
        RsJobConfig job = datacollect.getJobById(jobId);
        Map<String, Object> condition = new HashMap<>();
        Date begin = job.getRepeatStartTime();
        Date end = job.getRepeatEndTime();
        if (!job.getJobType().equals("0")) {
            // adjust the end time: now minus the configured delay offset
            end = DateUtil.addDate(-job.getDelayTime(), DateUtil.getSysDateTime());
            if ((end.getTime() - begin.getTime()) <= 0) {
                return; // skip collection when the adjusted end is not after the start
            }
        }
        condition.put("beginDate", begin);
        condition.put("endDate", end);
        DataGridResult result = datacollect.getJobDatasetByJobId(jobId);
        CrawlerFlowManager crawlerFlowManager = new CrawlerFlowManager(result.getDetailModelList(), job.getSchemeVersion());
        String message = crawlerFlowManager.dataCrawler(condition);
        if (!job.getJobType().equals("0")) {
            // slide the window forward so the next run starts where this one ended
            job.setRepeatStartTime(end);
            job.setRepeatEndTime(DateUtil.formatYMDToYMDHMS(DateUtil.getSysDateTime().toString()));
        }
        datacollect.updateJob(job);
        logger.info("任务编排——档案采集上传结束,流水号:" + random + ",jobId:" + jobId + ",message:" + message);
        return;
    }
}

+ 7 - 7
src/main/webapp/WEB-INF/ehr/jsp/common/indexJs.jsp

@ -63,6 +63,12 @@
            //菜单列表
            var menu = [
                //标准管理
                //标准规范中心
                {id: 2, text: '标准规范中心', icon: '${staticRoot}/images/index/menu3_icon.png'},
                {id: 21, pid: 2, text: '平台标准', url: '${contextRoot}/integration/initial/standard'},
                {id: 22, pid: 2, text: '应用标准', url: '${contextRoot}/integration/initial/application'},
                {id: 23, pid: 2, text: '适配方案', url: '${contextRoot}/adapterPlan/initial'},
                //服务管理中心
                {id: 6, text: '服务管理中心', icon: '${staticRoot}/images/index/menu5_icon.png'},
                {id: 61, pid: 6, text: '服务管理', url: '${contextRoot}/app/initial'},
@ -77,14 +83,8 @@
                {id: 5, text: '运行监控中心', icon: '${staticRoot}/images/index/menu5_icon.png'},
                {id: 51, pid: 5, text: '系统监控', url: '${contextRoot}/monitor/server/initial'},
                {id: 52, pid: 5, text: '服务监控', url: '${contextRoot}/monitor/service/initial'},
                //标准管理
                //标准规范中心
                {id: 2, text: '标准规范中心', icon: '${staticRoot}/images/index/menu3_icon.png'},
                {id: 21, pid: 2, text: '平台标准', url: '${contextRoot}/integration/initial/standard'},
                {id: 22, pid: 2, text: '应用标准', url: '${contextRoot}/integration/initial/application'},
                {id: 23, pid: 2, text: '适配方案', url: '${contextRoot}/adapterPlan/initial'},
                //资源管理
                {id: 3, text: '资源管理中心', icon: '${staticRoot}/images/index/menu4_icon.png'},
                {id: 3, text: '资源服务中心', icon: '${staticRoot}/images/index/menu4_icon.png'},
                {id: 31, pid: 3, text: '资源注册', url: '${contextRoot}/resource/resource/initial'},
                {id: 32, pid: 3, text: '资源浏览', url: '${contextRoot}/resource/resourcePage'},
                {id: 34, pid: 3, text: '资源分类', url: '${contextRoot}/resource/rsCategory/initial'},