
package com.yihu.ehr.pack.task;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.yihu.ehr.elasticsearch.ElasticSearchUtil;
import com.yihu.ehr.model.packs.EsSimplePackage;
import com.yihu.ehr.profile.queue.RedisCollection;
import org.apache.commons.lang3.time.DateUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;

import java.io.Serializable;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.*;
/**
 * Fault-tolerance task for archive package quality control (QC):
 * 1. When the QC queue is empty, add packages whose status is "pending QC" to the QC queue.
 * 2. Re-queue packages whose QC status is "failed" and whose failure count is less than three.
 * 3. Re-queue packages whose QC status is "in progress" but whose QC start time is older than the retry threshold.
 * Created by progr1mmer on 2017/12/18.
 */
@Component
public class PackAnalyzeTask {

    private static final DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd");
    private static final String INDEX = "json_archives";
    private static final String TYPE = "info";

    @Autowired
    private ObjectMapper objectMapper;
    @Autowired
    private ElasticSearchUtil elasticSearchUtil;
    @Autowired
    private RedisTemplate<String, Serializable> redisTemplate;
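
    // NOTE: the analyze_status values are not documented in the original source; the mapping below is
    // inferred from the class comment and the queries in delayPushTask():
    //   0 = pending QC, 1 = QC in progress, 2 = QC failed;
    // analyze_fail_count counts how many QC attempts have failed for a package.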
    @Scheduled(fixedDelay = 30000)
    public void delayPushTask() throws Exception {
        List<String> esSimplePackageList = new ArrayList<>(200);
        // 1. When the QC queue is empty, add packages whose status is "pending QC" to the QC queue.
        if (redisTemplate.opsForSet().size(RedisCollection.AnalyzeQueueVice) <= 0) {
            // Imaging packages (pack_type = 3) in "pending QC" state are excluded here. Otherwise a package that the
            // main message queue has just consumed, but whose status has not yet been updated, could be added to the
            // secondary QC queue and then to the secondary parsing queue for a second parse - by which time the main
            // parsing queue may already have persisted the imaging package and deleted its FTP files.
            Page<Map<String, Object>> result = elasticSearchUtil.page(INDEX, TYPE, "analyze_status=0;pack_type<>3", "+receive_date", 1, 1000);
            for (Map<String, Object> pack : result) {
                String packStr = objectMapper.writeValueAsString(pack);
                EsSimplePackage esSimplePackage = objectMapper.readValue(packStr, EsSimplePackage.class);
                redisTemplate.opsForSet().add(RedisCollection.AnalyzeQueueVice, objectMapper.writeValueAsString(esSimplePackage));
            }
        }
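
        // Steps 2 and 3 below only collect the packages to retry: the status resets are applied to ES in a
        // single bulkUpdate call and the packages are pushed back onto the secondary QC queue at the end of the method.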
        // 2. Re-queue packages whose QC status is "failed" and whose failure count is less than three.
        Page<Map<String, Object>> result = elasticSearchUtil.page(INDEX, TYPE, "analyze_status=2;analyze_fail_count<3", "+receive_date", 1, 100);
        List<Map<String, Object>> updateSourceList = new ArrayList<>();
        for (Map<String, Object> pack : result) {
            Map<String, Object> updateSource = new HashMap<>();
            updateSource.put("_id", pack.get("_id"));
            updateSource.put("analyze_status", 0);
            updateSourceList.add(updateSource);
            String packStr = objectMapper.writeValueAsString(pack);
            EsSimplePackage esSimplePackage = objectMapper.readValue(packStr, EsSimplePackage.class);
            esSimplePackageList.add(objectMapper.writeValueAsString(esSimplePackage));
        }
        // 3. Re-queue packages whose QC status is "in progress" but whose QC start time has passed the retry
        //    threshold (here: QC started before midnight of the previous day).
        Date past = DateUtils.addDays(new Date(), -1);
        String pastStr = dateFormat.format(past) + " 00:00:00";
        result = elasticSearchUtil.page(INDEX, TYPE, "analyze_status=1;analyze_date<" + pastStr, "+receive_date", 1, 100);
        for (Map<String, Object> pack : result) {
            Map<String, Object> updateSource = new HashMap<>();
            updateSource.put("_id", pack.get("_id"));
            updateSource.put("analyze_status", 0);
            updateSourceList.add(updateSource);
            String packStr = objectMapper.writeValueAsString(pack);
            EsSimplePackage esSimplePackage = objectMapper.readValue(packStr, EsSimplePackage.class);
            esSimplePackageList.add(objectMapper.writeValueAsString(esSimplePackage));
        }
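        // Reset the collected packages to "pending QC" in ES, then push them back onto the secondary QC queue.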
        elasticSearchUtil.bulkUpdate(INDEX, TYPE, updateSourceList);
        esSimplePackageList.forEach(item -> redisTemplate.opsForSet().add(RedisCollection.AnalyzeQueueVice, item));
    }
}