package com.yihu.hos.service;

import com.yihu.hos.common.Services;
import com.yihu.hos.core.datatype.DateUtil;
import com.yihu.hos.core.log.Logger;
import com.yihu.hos.core.log.LoggerFactory;
import com.yihu.hos.crawler.service.CrawlerManager;
import com.yihu.hos.datacollect.model.RsJobConfig;
import com.yihu.hos.datacollect.service.intf.IDatacollectManager;
import com.yihu.hos.web.framework.model.DataGridResult;
import com.yihu.hos.web.framework.util.springutil.SpringBeanUtil;

import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;

/**
 * Scheduled job that crawls and uploads archive data for a configured
 * collection job ({@link RsJobConfig}).
 *
 * <p>For repeating jobs (jobType != "0") the crawl window is
 * [repeatStartTime, now - delayTime]; the window is then advanced so the
 * next run resumes where this one ended. One-shot jobs (jobType == "0")
 * use the window stored on the job config as-is.
 *
 * <p>Created by hzp on 2016/5/11.
 */
public class ArchiveUploadJob implements IBaseJob {

    private static final Logger logger = LoggerFactory.getLogger(ArchiveUploadJob.class);

    /**
     * Executes one crawl-and-upload cycle for the given job.
     *
     * @param jobId id of the {@link RsJobConfig} to run
     * @throws Exception propagated from the data-collect service or crawler
     */
    @Override
    public void execute(String jobId) throws Exception {
        // Correlation id so start/end log lines of one run can be matched.
        String random = UUID.randomUUID().toString();
        logger.info("档案采集上传开始,流水号:" + random + ",jobId:" + jobId);

        IDatacollectManager datacollect = SpringBeanUtil.getService(Services.Datacollect);
        RsJobConfig job = datacollect.getJobById(jobId);

        Date begin = job.getRepeatStartTime();
        Date end = job.getRepeatEndTime();

        // jobType "0" marks a one-shot job; anything else repeats.
        // Constant-first equals avoids an NPE when jobType is null.
        boolean repeating = !"0".equals(job.getJobType());
        if (repeating) {
            // Clamp the window's end to (now - delayTime) so we never crawl
            // data that is still inside the configured settling delay.
            end = DateUtil.addDate(-job.getDelayTime(), DateUtil.getSysDateTime());
            if (end.getTime() <= begin.getTime()) {
                // Empty or inverted window: nothing to fetch this run.
                return;
            }
        }

        Map<String, Object> condition = new HashMap<>();
        condition.put("beginDate", begin);
        condition.put("endDate", end);

        DataGridResult result = datacollect.getJobDatasetByJobId(jobId);
        CrawlerManager crawlerManager =
                new CrawlerManager(result.getDetailModelList(), job.getSchemeVersion());
        String message = crawlerManager.dataCrawler(condition);

        if (repeating) {
            // Advance the window so the next run starts where this one ended.
            job.setRepeatStartTime(end);
            // NOTE(review): formatting getSysDateTime().toString() through
            // formatYMDToYMDHMS looks fragile (Date.toString() is not Y-M-D);
            // kept as-is to preserve behavior — confirm against DateUtil.
            job.setRepeatEndTime(DateUtil.formatYMDToYMDHMS(DateUtil.getSysDateTime().toString()));
        }
        datacollect.updateJob(job);

        logger.info("档案采集上传结束,流水号:" + random + ",jobId:" + jobId + ",message:" + message);
    }
}