package com.yihu.hos.service;

import com.yihu.hos.common.Services;
import com.yihu.hos.crawler.service.CrawlerFlowManager;
import com.yihu.hos.datacollect.model.RsJobConfig;
import com.yihu.hos.datacollect.service.intf.IDatacollectManager;
import com.yihu.hos.web.framework.model.DataGridResult;
import com.yihu.hos.core.datatype.DateUtil;
import com.yihu.hos.web.framework.util.springutil.SpringBeanUtil;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;

/**
 * Scheduled-job entry point for orchestrated crawler/archive upload
 * ("任务编排" flow).
 *
 * Created by chenyingjie on 16/7/14.
 */
public class CrawlerFlowUploadJob implements IBaseJob {

    private static final Logger logger = LogManager.getLogger(CrawlerFlowUploadJob.class);

    /**
     * Runs one crawl-and-upload pass for the job identified by {@code jobId}:
     * loads the job config, computes the [begin, end] collection window,
     * crawls the job's datasets for that window, then advances the window on
     * the job config and persists it.
     *
     * @param jobId id of the {@link RsJobConfig} to execute
     * @throws Exception propagated from the data-collect service or crawler
     */
    @Override
    public void execute(String jobId) throws Exception {
        // Correlation id so the start/end log lines of one run can be matched.
        String traceId = UUID.randomUUID().toString();
        logger.info("任务编排——档案采集上传开始,流水号:{},jobId:{}", traceId, jobId);

        IDatacollectManager datacollect = SpringBeanUtil.getService(Services.Datacollect);
        RsJobConfig job = datacollect.getJobById(jobId);

        Date begin = job.getRepeatStartTime();
        Date end = job.getRepeatEndTime();
        // jobType "0" means a fixed one-shot window; anything else is a
        // repeating job whose window slides forward each run.
        boolean repeating = !job.getJobType().equals("0");
        if (repeating) {
            // Clamp the end of the window to (now - delay offset) so data that
            // is still settling is not collected yet.
            end = DateUtil.addDate(-job.getDelayTime(), DateUtil.getSysDateTime());
            if (end.getTime() - begin.getTime() <= 0) {
                // Window is empty or inverted — nothing to collect this run.
                return;
            }
        }

        Map<String, Object> condition = new HashMap<>();
        condition.put("beginDate", begin);
        condition.put("endDate", end);

        DataGridResult result = datacollect.getJobDatasetByJobId(jobId);
        CrawlerFlowManager crawlerFlowManager =
                new CrawlerFlowManager(result.getDetailModelList(), job.getSchemeVersion());
        String message = crawlerFlowManager.dataCrawler(condition);

        if (repeating) {
            // Slide the window: next run starts where this one ended.
            job.setRepeatStartTime(end);
            // NOTE(review): formatting getSysDateTime().toString() through
            // formatYMDToYMDHMS looks fragile (depends on Date.toString's
            // locale format) — confirm DateUtil's expected input format.
            job.setRepeatEndTime(DateUtil.formatYMDToYMDHMS(DateUtil.getSysDateTime().toString()));
        }
        datacollect.updateJob(job);

        logger.info("任务编排——档案采集上传结束,流水号:{},jobId:{},message:{}", traceId, jobId, message);
    }
}