
first commit

wangweiqun committed 6 years ago · commit 795b63550f

+ 8 - 5
src/main/java/com/yihu/ehr/analyze/controller/dataQuality/DataQualityHomeEndpoint.java

@@ -149,14 +149,17 @@ public class DataQualityHomeEndpoint extends EnvelopRestEndPoint {
     @RequestMapping(value = "bulkUpDateOrgArea", method = RequestMethod.GET)
-    @ApiOperation(value = "Bulk update the area codes associated with organizations")
-    public void bulkUploadOrgArea(
+    @ApiOperation(value = "Bulk update the area codes associated with organizations (updates the org_area field via the org_code field)")
+    public String bulkUploadOrgArea(
             @ApiParam(name = "index", value = "index")
-            @RequestParam(value = "index", required = false) String index,
+            @RequestParam(value = "index", required = true) String index,
             @ApiParam(name = "type", value = "type")
-            @RequestParam(value = "type", required = false) String type
+            @RequestParam(value = "type", required = true) String type,
+            @ApiParam(name = "filters", value = "ES query conditions separated by ';' (example: a=1;b=test)")
+            @RequestParam(value = "filters", required = false) String filters
     ) throws Exception {
-        dataQualityHomeService.bulkUpdateOrgArea(index,type);
+        long result = dataQualityHomeService.bulkUpdateOrgArea(index, type, filters);
+        return "Total records updated: " + result;
     }
 }
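
With this change the endpoint requires index and type, accepts an optional filters string of the form a=1;b=test, and returns the number of records updated. Below is a minimal sketch of how such a semicolon-delimited filter string could be split into field/value pairs; it assumes plain field=value conditions and is an illustration only, not the parsing actually performed inside elasticSearchUtil.

    import java.util.LinkedHashMap;
    import java.util.Map;

    public class FilterStringDemo {
        // Hypothetical parser for a filter string such as "a=1;b=test".
        // Assumption: each condition is a plain field=value pair separated by ";".
        static Map<String, String> parseFilters(String filters) {
            Map<String, String> conditions = new LinkedHashMap<>();
            if (filters == null || filters.isEmpty()) {
                return conditions;
            }
            for (String part : filters.split(";")) {
                String[] kv = part.split("=", 2);
                if (kv.length == 2) {
                    conditions.put(kv[0].trim(), kv[1].trim());
                }
            }
            return conditions;
        }

        public static void main(String[] args) {
            // Prints {a=1, b=test}
            System.out.println(parseFilters("a=1;b=test"));
        }
    }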

+ 5 - 5
src/main/java/com/yihu/ehr/analyze/service/dataQuality/DataCompleteService.java

@@ -49,7 +49,7 @@ public class DataCompleteService extends DataQualityBaseService {
         // Organization data
         List<Map<String, Object>> groupList = dataCorrectService.getOrgDataMap(dataLevel, "create_date", startDate, end, null);
         // Volume of data received by the platform
-        Map<String, Object> platformDataGroup = getPlatformDataGroup(dataLevel, "receive_date", startDate, end, null);
+        Map<String, Object> platformDataGroup = getPlatformDataGroup(dataLevel, "receive_date", startDate, endDate, null);
         // Calculate
         for (Map<String, Object> map : groupList) {
             resMap = new HashMap<String, Object>();
@@ -97,7 +97,7 @@ public class DataCompleteService extends DataQualityBaseService {
         // Organization data
         List<Map<String, Object>> groupList = dataCorrectService.getOrgDataMap(dataLevel, "create_date", startDate, end, areaCode);
         // Volume of data received by the platform
-        Map<String, Object> platformDataGroup = getPlatformDataGroup(dataLevel, "receive_date", startDate, end, areaCode);
+        Map<String, Object> platformDataGroup = getPlatformDataGroup(dataLevel, "receive_date", startDate, endDate, areaCode);
         // Calculate
         for (Map<String, Object> map : groupList) {
             resMap = new HashMap<String, Object>();
@@ -165,17 +165,17 @@ public class DataCompleteService extends DataQualityBaseService {
             // Add a flag marking this as organization data
             resMap.put("type", "org_code");
             fields.add("org_code");
-            sql1 = "SELECT count(DISTINCT event_no) as count ,org_code FROM json_archives/info where pack_type=1 and analyze_status=3 and org_area='" + orgArea + "' and " +
+            sql1 = "SELECT count(DISTINCT event_no) as count ,org_code FROM json_archives/info where pack_type=1  and org_area='" + orgArea + "' and " +
                     dateField + ">='" + start + " 00:00:00' and " + dateField + "<='" + end + " 23:59:59' group by org_code";
         } else if (StringUtils.isEmpty(orgArea) && (dataLevel == 0 && StringUtils.isEmpty(orgArea))) {
             resMap.put("type", "org_area");
             fields.add("org_area");
-            sql1 = "SELECT count(DISTINCT event_no) as count ,org_area FROM json_archives/info  where pack_type=1 and analyze_status=3 and " +
+            sql1 = "SELECT count(DISTINCT event_no) as count ,org_area FROM json_archives/info  where pack_type=1  and " +
                     dateField + ">='" + start + " 00:00:00' and " + dateField + "<='" + end + " 23:59:59' group by org_area";
         } else if (StringUtils.isEmpty(orgArea) && (dataLevel == 1 && StringUtils.isEmpty(orgArea))) {
             resMap.put("type", "org_code");
             fields.add("org_code");
-            sql1 = "SELECT count(DISTINCT event_no) as count ,org_code FROM json_archives/info  where pack_type=1 and analyze_status=3 and " +
+            sql1 = "SELECT count(DISTINCT event_no) as count ,org_code FROM json_archives/info  where pack_type=1  and " +
                     dateField + ">='" + start + " 00:00:00' and " + dateField + "<='" + end + " 23:59:59' group by org_code";
         }
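
For reference, here is a small runnable snippet that expands the updated organization-level query with assumed sample values; dateField, start, end and orgArea below are illustrative placeholders, and the concatenation mirrors the new line above with the analyze_status=3 restriction removed.

    public class CompleteSqlDemo {
        public static void main(String[] args) {
            // Assumed sample values, purely for illustration.
            String dateField = "create_date";
            String start = "2019-01-01";
            String end = "2019-01-31";
            String orgArea = "350200";
            // Same concatenation as the updated branch (analyze_status=3 removed).
            String sql1 = "SELECT count(DISTINCT event_no) as count ,org_code FROM json_archives/info where pack_type=1  and org_area='" + orgArea + "' and " +
                    dateField + ">='" + start + " 00:00:00' and " + dateField + "<='" + end + " 23:59:59' group by org_code";
            // Prints the fully expanded query the service would send.
            System.out.println(sql1);
        }
    }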

+ 2 - 2
src/main/java/com/yihu/ehr/analyze/service/dataQuality/DataCorrectService.java

@@ -253,7 +253,7 @@ public class DataCorrectService extends DataQualityBaseService {
         // Organization data
         List<Map<String,Object>> groupList = getOrgDataMap(dataLevel,"create_date",startDate,end,null);
         // Volume of error data received by the platform
-        Map<String, Object> platformErrorGroup = getErrorPlatformData(dataLevel,"receive_date",startDate, end,null);
+        Map<String, Object> platformErrorGroup = getErrorPlatformData(dataLevel,"receive_date",startDate, endDate,null);
         // Calculate
         for (Map<String,Object> map:groupList){
             resMap = new HashMap<String,Object>();
@@ -303,7 +303,7 @@ public class DataCorrectService extends DataQualityBaseService {
         // Organization data
         List<Map<String,Object>> groupList = getOrgDataMap(dataLevel,"create_date",startDate,end,areaCode);
         // Volume of data received by the platform
-        Map<String, Object> platformDataGroup = getErrorPlatformData(dataLevel,"receive_date",startDate, end,areaCode);
+        Map<String, Object> platformDataGroup = getErrorPlatformData(dataLevel,"receive_date",startDate, endDate,areaCode);
         // Calculate
         for (Map<String,Object> map:groupList){
             resMap = new HashMap<String,Object>();

+ 2 - 2
src/main/java/com/yihu/ehr/analyze/service/dataQuality/DataInTimeService.java

@@ -153,7 +153,7 @@ public class DataInTimeService extends DataQualityBaseService {
         // Organization data
         List<Map<String, Object>> groupList = dataCorrectService.getOrgDataMap(dataLevel,"create_date", startDate, end, null);
         // Volume of data received by the platform
-        Map<String, Object> platformDataGroup = getInTimeMap(dataLevel,"receive_date", startDate, end, null);
+        Map<String, Object> platformDataGroup = getInTimeMap(dataLevel,"receive_date", startDate, endDate, null);
         // Calculate
         for (Map<String, Object> map : groupList) {
             resMap = new HashMap<String, Object>();
@@ -201,7 +201,7 @@ public class DataInTimeService extends DataQualityBaseService {
         // Organization data
         List<Map<String, Object>> groupList = dataCorrectService.getOrgDataMap(dataLevel,"create_date", startDate, end, areaCode);
         // Volume of data received by the platform
-        Map<String, Object> platformDataGroup = getInTimeMap(dataLevel,"receive_date", startDate, end, areaCode);
+        Map<String, Object> platformDataGroup = getInTimeMap(dataLevel,"receive_date", startDate, endDate, areaCode);
         // Calculate
         for (Map<String, Object> map : groupList) {
             resMap = new HashMap<String, Object>();

+ 27 - 13
src/main/java/com/yihu/ehr/analyze/service/dataQuality/DataQualityHomeService.java

@@ -16,6 +16,7 @@ import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Value;
 import org.springframework.context.ApplicationContext;
+import org.springframework.data.domain.Page;
 import org.springframework.stereotype.Service;
 import javax.annotation.PostConstruct;
@@ -369,19 +370,32 @@ public class DataQualityHomeService extends BaseJpaService {
      *  Bulk update the area code org_area in ES
      *  (updates org_area via the organization code org_code)
      */
-    public void bulkUpdateOrgArea(String index,String type){
-        List<Map<String, Object>> result = elasticSearchUtil.list(index, type, "");
-        List<Map<String, Object>> updateSourceList = new ArrayList<>();
-        result.forEach(item -> {
-            Map<String, Object> updateSource = new HashMap<>();
-            updateSource.put("_id", item.get("_id"));
-            String orgCode = (String) item.get("org_code");
-            String orgArea = redisClient.get("organizations:" + orgCode + ":area");
-            updateSource.put("org_area", orgArea);
-            updateSourceList.add(updateSource);
-        });
-        elasticSearchUtil.bulkUpdate(index, type, updateSourceList);
+    public long bulkUpdateOrgArea(String index,String type,String filters){
+        long page = 0;
+        long count = elasticSearchUtil.count(index, type, filters);
+        if (count >10000) {
+            page = count/10000 + 1;
+        }else {
+            page = 1;
+        }
+        for (int i = 1;i<=page;i++) {
+            Page<Map<String, Object>> result = elasticSearchUtil.page(index, type, filters, i, 10000);
+            List<Map<String, Object>> updateSourceList = new ArrayList<>();
+            result.forEach(item -> {
+                Map<String, Object> updateSource = new HashMap<>();
+                updateSource.put("_id", item.get("_id"));
+                String orgCode = (String) item.get("org_code");
+                String orgArea = redisClient.get("organizations:" + orgCode + ":area");
+                updateSource.put("org_area", orgArea);
+                updateSourceList.add(updateSource);
+            });
+            if (!updateSourceList.isEmpty()) {
+                elasticSearchUtil.bulkUpdate(index, type, updateSourceList);
+            }
+        }
+        return count;
     }
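
The reworked method counts the matching documents, pages through them 10,000 at a time, bulk-updates each page, and returns the total count. Note that count/10000 + 1 yields one trailing empty page when count is an exact multiple of 10,000, which is harmless since an empty page produces no updates. A small sketch of the equivalent ceiling-division arithmetic follows; PAGE_SIZE and the helper are illustrative and not part of the committed code.

    public class PageCountDemo {
        private static final int PAGE_SIZE = 10000;

        // Ceiling division: smallest number of pages covering `count` documents.
        static long pagesFor(long count) {
            return Math.max(1, (count + PAGE_SIZE - 1) / PAGE_SIZE);
        }

        public static void main(String[] args) {
            System.out.println(pagesFor(25000)); // 3  (committed code: 25000/10000 + 1 = 3)
            System.out.println(pagesFor(20000)); // 2  (committed code: 20000/10000 + 1 = 3, last page empty)
            System.out.println(pagesFor(9000));  // 1
        }
    }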