/**
 * @see org.quartz.JobListener#jobWasExecuted(JobExecutionContext, JobExecutionException)
 */
public void jobWasExecuted(JobExecutionContext context, JobExecutionException jobException) {
    Trigger trigger = context.getTrigger();
    Object[] args = null;
    if (jobException != null) {
        if (!getLog().isWarnEnabled()) {
            return;
        }
        String errMsg = jobException.getMessage();
        args = new Object[] {
                context.getJobDetail().getKey().getName(),
                context.getJobDetail().getKey().getGroup(),
                new java.util.Date(),
                trigger.getKey().getName(),
                trigger.getKey().getGroup(),
                trigger.getPreviousFireTime(),
                trigger.getNextFireTime(),
                Integer.valueOf(context.getRefireCount()),
                errMsg };
        getLog().warn(MessageFormat.format(getJobFailedMessage(), args), jobException);
    } else {
        if (!getLog().isInfoEnabled()) {
            return;
        }
        String result = String.valueOf(context.getResult());
        args = new Object[] {
                context.getJobDetail().getKey().getName(),
                context.getJobDetail().getKey().getGroup(),
                new java.util.Date(),
                trigger.getKey().getName(),
                trigger.getKey().getGroup(),
                trigger.getPreviousFireTime(),
                trigger.getNextFireTime(),
                Integer.valueOf(context.getRefireCount()),
                result };
        getLog().info(MessageFormat.format(getJobSuccessMessage(), args));
    }
}
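/*
 * A minimal sketch of registering a history listener like the one above with
 * a Quartz 2.x scheduler. "MyJobHistoryListener" is a hypothetical name
 * standing in for the surrounding listener class; the ListenerManager and
 * EverythingMatcher calls are standard Quartz API.
 */
Scheduler scheduler = StdSchedulerFactory.getDefaultScheduler();
// Attach the listener so jobWasExecuted() fires for every job.
scheduler.getListenerManager().addJobListener(new MyJobHistoryListener(), EverythingMatcher.allJobs());
scheduler.start();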
@SuppressWarnings("unchecked") @Override public void execute(JobExecutionContext job) throws JobExecutionException { TriggerScheduleServiceCenterToProviderServiceCenterMessage msg = new TriggerScheduleServiceCenterToProviderServiceCenterMessage(); String jobName = job.getJobDetail().getKey().getName(); JobDataMap jobDataMap = job.getJobDetail().getJobDataMap(); ConcurrentHashMap<Integer, ServiceXServerSession> rpcServers = (ConcurrentHashMap<Integer, ServiceXServerSession>) jobDataMap .get(RPCSERVERS); ConcurrentHashMap<String, ConcurrentHashSet<Integer>> schedules = (ConcurrentHashMap<String, ConcurrentHashSet<Integer>>) jobDataMap .get(SCHEDULES); ConcurrentHashSet<Integer> providerList = schedules.get(jobName); if (providerList == null) { log.error("Job:" + jobName + "找不到Provider"); return; } msg.setJobName(jobName); // 查看是否是最有一次执行,并且移除此job if (!job.getTrigger().mayFireAgain()) { msg.setEnd(true); schedules.remove(jobName); log.info("任务生命终结,执行删除:" + jobName); } // 选举式触发 ArrayList<Integer> arrayList = new ArrayList<>(providerList); int providerId = arrayList.get(RandomUtil.randomInt(0, arrayList.size() - 1)); ServiceXServerSession serviceXServerSession = rpcServers.get(providerId); if (serviceXServerSession != null) { serviceXServerSession.getSession().writeAndFlush(msg); log.info(jobName + "触发!分配的ProviderId为:" + providerId + ",下次触发时间:" + TimeUtil.date2Str(job.getTrigger().getNextFireTime().getTime())); } }
@Override
public void execute(final JobExecutionContext context) throws JobExecutionException {
    ShardingContexts shardingContexts = jobFacade.getShardingContexts();
    int jobEventSamplingCount = shardingContexts.getJobEventSamplingCount();
    int currentJobEventSamplingCount = shardingContexts.getCurrentJobEventSamplingCount();
    if (jobEventSamplingCount > 0 && ++currentJobEventSamplingCount < jobEventSamplingCount) {
        shardingContexts.setCurrentJobEventSamplingCount(currentJobEventSamplingCount);
        jobFacade.getShardingContexts().setAllowSendJobEvent(false);
        JobExecutorFactory.getJobExecutor(elasticJob, jobFacade).execute();
    } else {
        jobFacade.getShardingContexts().setAllowSendJobEvent(true);
        executorDriver.sendStatusUpdate(Protos.TaskStatus.newBuilder()
                .setTaskId(taskId).setState(Protos.TaskState.TASK_RUNNING).setMessage("BEGIN").build());
        JobExecutorFactory.getJobExecutor(elasticJob, jobFacade).execute();
        executorDriver.sendStatusUpdate(Protos.TaskStatus.newBuilder()
                .setTaskId(taskId).setState(Protos.TaskState.TASK_RUNNING).setMessage("COMPLETE").build());
        shardingContexts.setCurrentJobEventSamplingCount(0);
    }
}
public void jobWasExecuted(JobExecutionContext context, JobExecutionException jobException) {
    Key sj = (Key) chainLinks.get(context.getJobDetail().getKey());
    if (sj == null) {
        return;
    }
    getLog().info("Job '" + context.getJobDetail().getFullName() + "' will now chain to Job '" + sj + "'");
    try {
        if (context.getJobDetail().isVolatile() || context.getTrigger().isVolatile()) {
            context.getScheduler().triggerJobWithVolatileTrigger(sj.getName(), sj.getGroup());
        } else {
            context.getScheduler().triggerJob(sj.getName(), sj.getGroup());
        }
    } catch (SchedulerException se) {
        getLog().error("Error encountered during chaining to Job '" + sj + "'", se);
    }
}
private void throwAndFinish(JobDataMap jobDataMap, JobExecutionContext context, Throwable t)
        throws JobExecutionException {
    context.put(Constants.JOB_EXCEPTION, t);
    // Reset the retry parameter, just in case.
    jobDataMap.putAsString(NUMBER_OF_RETRIES_PARAM, 0);
    JobExecutionException e = new JobExecutionException(
            "This trigger has thrown a terminal exception. Retries exceeded or job not retryable", t);
    throw e;
}
private void triggerRefire(JobDataMap jobDataMap, int numberOfRetries, JobExecutionContext context, Throwable t)
        throws JobExecutionException {
    final long sleepTimeBetweenRetries = jobDataMap.containsKey(SLEEP_TIME_BETWEEN_RETRIES_PARAM)
            ? jobDataMap.getLongValue(SLEEP_TIME_BETWEEN_RETRIES_PARAM)
            : DEFAULT_SLEEP_TIME_BETWEEN_RETRIES;
    try {
        Thread.sleep(sleepTimeBetweenRetries);
    } catch (InterruptedException e) {
        // Restore the interrupt status instead of swallowing it silently.
        Thread.currentThread().interrupt();
    }
    context.put(Constants.JOB_EXCEPTION, t);
    jobDataMap.putAsString(NUMBER_OF_RETRIES_PARAM, --numberOfRetries);
    // Set the refire flag so Quartz re-executes the job immediately.
    throw new JobExecutionException(t, true);
}
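/*
 * Hypothetical sketch of the execute() method that would drive the two retry
 * helpers above: run the payload, and on failure either refire (decrementing
 * the retry counter) or fail terminally. NUMBER_OF_RETRIES_PARAM is taken
 * from the helpers; DEFAULT_NUMBER_OF_RETRIES and doWork() are assumptions
 * about the surrounding class, not confirmed API.
 */
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
    JobDataMap jobDataMap = context.getJobDetail().getJobDataMap();
    int remainingRetries = jobDataMap.containsKey(NUMBER_OF_RETRIES_PARAM)
            ? jobDataMap.getIntValue(NUMBER_OF_RETRIES_PARAM)
            : DEFAULT_NUMBER_OF_RETRIES;
    try {
        doWork(context); // the job's actual payload (hypothetical)
    } catch (Throwable t) {
        if (remainingRetries > 0) {
            triggerRefire(jobDataMap, remainingRetries, context, t); // throws with refire flag set
        } else {
            throwAndFinish(jobDataMap, context, t); // throws a terminal JobExecutionException
        }
    }
}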
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
    ApiFactoryService apiFactoryService = (ApiFactoryService) context.get(ApiFactoryService.class.getName());
    try {
        EntityManager em = HibernateUtil.getTransactionalEntityManager();
        List<DistributedAppliance> das = HibernateUtil.getTransactionControl().required(() -> {
            OSCEntityManager<DistributedAppliance> emgr = new OSCEntityManager<DistributedAppliance>(
                    DistributedAppliance.class, em, StaticRegistry.transactionalBroadcastUtil());
            return emgr.listAll();
        });
        for (DistributedAppliance da : das) {
            for (VirtualSystem vs : da.getVirtualSystems()) {
                ApplianceManagerConnector apmc = vs.getDistributedAppliance().getApplianceManagerConnector();
                ManagerDeviceMemberApi agentApi = apiFactoryService.createManagerDeviceMemberApi(apmc, vs);
                if (apiFactoryService.providesDeviceStatus(vs)) {
                    List<ManagerDeviceMemberStatusElement> agentElems = agentApi.getFullStatus(
                            vs.getDistributedApplianceInstances().stream()
                                    .map(DistributedApplianceInstanceElementImpl::new)
                                    .collect(Collectors.toList()));
                    for (DistributedApplianceInstance dai : vs.getDistributedApplianceInstances()) {
                        getAgentFullStatus(dai, agentElems);
                    }
                }
            }
        }
    } catch (Exception ex) {
        log.error("Failed to sync DAs", ex);
    }
}
/**
 * @param context {@link JobExecutionContext}
 * @param jobException {@link JobExecutionException}
 */
@Trace(metricName = "JobListener{save}", async = true, dispatcher = true)
private void save(JobExecutionContext context, JobExecutionException jobException) throws IOException {
    JobResult jobResult = (JobResult) context.getResult();
    ExecutionStatusEntity.ExecutionStatusEntityBuilder builder = ExecutionStatusEntity.builder();
    builder.created(new Date());
    builder.nextFireTime(context.getNextFireTime());
    builder.fireTime(context.getFireTime());
    builder.jobRunTime(context.getJobRunTime());
    builder.prevFireTime(context.getPreviousFireTime());
    builder.scheduledFireTime(context.getScheduledFireTime());
    if (jobResult != null && jobResult.getTmpFile() != null) {
        builder.output(FileUtils.readFileToString(jobResult.getTmpFile().toFile()));
        builder.exitCode(jobResult.getExitValue());
        if (jobResult.getExitValue() != 0)
            builder.errors(true);
        if (jobResult.getTmpFile().toFile().delete())
            logger.debug(String.format("%s file deleted", jobResult.getTmpFile().toAbsolutePath()));
    } else {
        builder.output("No output");
        builder.exitCode(-1);
        builder.errors(true);
    }
    if (jobException != null) {
        builder.errors(true);
        builder.errorMessage(jobException.getMessage());
    }
    builder.job(getJob());
    executionStatusRepository.save(builder.build());
}
public void execute(JobExecutionContext context) throws JobExecutionException {
    JobDataMap jobData = context.getJobDetail().getJobDataMap();
    // Extract the node cleanup worker to use.
    Object nodeCleanupWorkerObj = jobData.get("nodeCleanupWorker");
    if (nodeCleanupWorkerObj == null || !(nodeCleanupWorkerObj instanceof NodeCleanupWorker)) {
        throw new AlfrescoRuntimeException(
                "NodeCleanupJob data must contain valid 'nodeCleanupWorker' reference");
    }
    NodeCleanupWorker nodeCleanupWorker = (NodeCleanupWorker) nodeCleanupWorkerObj;
    List<String> cleanupLog = nodeCleanupWorker.doClean();
    // Done
    if (logger.isDebugEnabled()) {
        logger.debug("Node cleanup log:");
        for (String log : cleanupLog) {
            logger.debug(log);
        }
    }
}
@Override
protected synchronized final void executeInternal(final JobExecutionContext jobContext)
        throws JobExecutionException {
    if (locker == null) {
        JobLockService jobLockServiceBean = (JobLockService) jobContext.getJobDetail()
                .getJobDataMap().get("jobLockService");
        if (jobLockServiceBean == null)
            throw new JobExecutionException("Missing setting for bean jobLockService");
        String name = (String) jobContext.getJobDetail().getJobDataMap().get("name");
        String jobName = name == null ? this.getClass().getSimpleName() : name;
        locker = new ScheduledJobLockExecuter(jobLockServiceBean, jobName, this);
    }
    locker.execute(jobContext);
}
@Override
protected void executeInternal(JobExecutionContext jobExecutionContext) throws JobExecutionException {
    JobKey key = jobExecutionContext.getJobDetail().getKey();
    System.out.println("Cron Job started with key: " + key.getName() + ", Group: " + key.getGroup()
            + ", Thread Name: " + Thread.currentThread().getName() + ", Time now: " + new Date());
    System.out.println("======================================");
    List<Map<String, Object>> list = jobService.getAllJobs();
    System.out.println("Accessing annotation example: " + list);
    System.out.println("Job list: " + list);
    System.out.println("======================================");
    // Retrieve stored key-value pairs from the merged JobDataMap.
    JobDataMap dataMap = jobExecutionContext.getMergedJobDataMap();
    String myValue = dataMap.getString("myKey");
    System.out.println("Value: " + myValue);
    System.out.println("Thread: " + Thread.currentThread().getName() + " stopped.");
}
/**
 * Called when the job is executed by Quartz. This method delegates to the
 * <tt>validateSessions()</tt> method on the associated session manager.
 *
 * @param context the Quartz job execution context for this execution.
 */
public void execute(JobExecutionContext context) throws JobExecutionException {
    JobDataMap jobDataMap = context.getMergedJobDataMap();
    ValidatingSessionManager sessionManager =
            (ValidatingSessionManager) jobDataMap.get(SESSION_MANAGER_KEY);
    if (log.isDebugEnabled()) {
        log.debug("Executing session validation Quartz job...");
    }
    sessionManager.validateSessions();
    if (log.isDebugEnabled()) {
        log.debug("Session validation Quartz job complete.");
    }
}
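/*
 * Sketch of the scheduling side implied by the job above: the
 * ValidatingSessionManager is placed in the JobDataMap under
 * SESSION_MANAGER_KEY before the job is scheduled. The job class name,
 * identities and interval are illustrative assumptions; the builder calls
 * are standard Quartz 2.x API.
 */
JobDetail job = JobBuilder.newJob(SessionValidationJob.class)
        .withIdentity("sessionValidation")
        .build();
job.getJobDataMap().put(SESSION_MANAGER_KEY, sessionManager);

Trigger trigger = TriggerBuilder.newTrigger()
        .withIdentity("sessionValidationTrigger")
        .withSchedule(SimpleScheduleBuilder.simpleSchedule()
                .withIntervalInMinutes(60) // validate hourly
                .repeatForever())
        .build();
scheduler.scheduleJob(job, trigger);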
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
    HistorianEntry entry = getInitialHistorianEntry(context);
    try {
        job.execute(context);
        entry.setOutput(context.getResult());
        entry.setRunStatus(ExecutionStatus.COMPLETED_SUCCESSFULLY);
    } catch (Throwable t) {
        entry.setException(t);
        entry.setRunStatus(ExecutionStatus.COMPLETED_WITH_EXCEPTION);
        throw t;
    } finally {
        entry.setEndTime(Instant.now(clock));
        historianDAO.writeEntry(entry);
    }
}
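/*
 * Sketch of how a decorator like the one above is typically introduced in
 * Quartz: a custom JobFactory wraps every job it instantiates. The
 * HistorianJob constructor and HistorianDAO collaborator are hypothetical
 * counterparts of the class above; SimpleJobFactory is standard Quartz API.
 */
public class HistorianJobFactory extends SimpleJobFactory {
    private final HistorianDAO historianDAO; // assumed persistence collaborator
    private final Clock clock;

    public HistorianJobFactory(HistorianDAO historianDAO, Clock clock) {
        this.historianDAO = historianDAO;
        this.clock = clock;
    }

    @Override
    public Job newJob(TriggerFiredBundle bundle, Scheduler scheduler) throws SchedulerException {
        // Wrap the real job so every execution is recorded by the historian.
        return new HistorianJob(super.newJob(bundle, scheduler), historianDAO, clock);
    }
}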
/**
 * @see org.springframework.scheduling.quartz.QuartzJobBean#executeInternal(org.quartz.JobExecutionContext)
 */
@Override
protected void executeInternal(JobExecutionContext context) throws JobExecutionException {
    IMonitoringService monitoringService = getMonitoringService(context);
    // Get the gate id from the scheduler's job data.
    Map properties = context.getJobDetail().getJobDataMap();
    Long gateId = (Long) properties.get("gateId");
    if (log.isDebugEnabled()) {
        log.debug("Closing gate......[" + gateId.longValue() + "]");
    }
    monitoringService.closeGate(gateId);
    if (log.isDebugEnabled()) {
        log.debug("Gate......[" + gateId.longValue() + "] Closed");
    }
}
public void execute(JobExecutionContext context) throws JobExecutionException {
    long start = System.currentTimeMillis();
    JobDataMap jobDataMap = context.getJobDetail().getJobDataMap();
    String taskType = jobDataMap.getString("taskType");
    String targetObject = jobDataMap.getString("targetObject");
    String targetMethod = jobDataMap.getString("targetMethod");
    logger.info("Scheduled task starting: [{}.{}]", targetObject, targetMethod);
    try {
        ApplicationContext applicationContext = (ApplicationContext) context.getScheduler().getContext()
                .get("applicationContext");
        if (TaskType.local.equals(taskType)) {
            Object refer = applicationContext.getBean(targetObject);
            refer.getClass().getDeclaredMethod(targetMethod).invoke(refer);
        } else if (TaskType.dubbo.equals(taskType)) {
            String system = jobDataMap.getString("targetSystem");
            BaseProvider provider = (BaseProvider) DubboUtil.refer(applicationContext, system);
            provider.execute(new Parameter(targetObject, targetMethod));
        }
        double time = (System.currentTimeMillis() - start) / 1000.0;
        logger.info("Scheduled task [{}.{}] took {}s", targetObject, targetMethod, time);
    } catch (Exception e) {
        throw new JobExecutionException(e);
    }
}
@Override
public void executeJob(JobExecutionContext context) throws JobExecutionException {
    JobDataMap jobData = context.getJobDetail().getJobDataMap();
    // Extract the job executer to use.
    Object executerObj = jobData.get("jobExecuter");
    if (executerObj == null || !(executerObj instanceof DeleteExpiredContentScheduledJobExecuter)) {
        throw new AlfrescoRuntimeException(
                "ScheduledJob data must contain valid 'Executer' reference");
    }
    final DeleteExpiredContentScheduledJobExecuter jobExecuter =
            (DeleteExpiredContentScheduledJobExecuter) executerObj;
    AuthenticationUtil.runAs(new AuthenticationUtil.RunAsWork<Object>() {
        public Object doWork() throws Exception {
            jobExecuter.execute();
            return null;
        }
    }, AuthenticationUtil.getSystemUserName());
}
public void jobWasExecuted(final JobExecutionContext context, JobExecutionException exp) {
    JobDetail jobDetail = context.getJobDetail();
    if (jobDetail instanceof JobDetailImpl) {
        JobDetailImpl job = (JobDetailImpl) jobDetail;
        SchedulerLog log = new SchedulerLog();
        log.setJobName(job.getName());
        log.setGroupName(job.getGroup());
        log.setTriggerName(context.getTrigger().getKey().getName());
        log.setFireTime(context.getFireTime());
        log.setScheduledFireTime(context.getScheduledFireTime());
        log.setRefireCount(context.getRefireCount());
        log.setPreviousFireTime(context.getPreviousFireTime());
        log.setNextFireTime(context.getNextFireTime());
        log.setCreateTime(new Date());
        logMapper.insert(log);
    }
}
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
    JobDataMap dataMap = context.getJobDetail().getJobDataMap();
    TimerConfig config = (TimerConfig) dataMap.get("CONFIG");
    getLogger().info(config.getDesc() + ":" + config.getService() + " start");
    CustomJob customJob = config.getCustomJob();
    try {
        if (null == customJob) {
            final String service = config.getService();
            // Object retObj = ServiceActuator.execute(config.getRealService(), new XCO());
            Object retObj = ServiceActuator.execute(service, new XCO());
            XCO result = TangYuanUtil.retObjToXco(retObj);
            getLogger().info(config.getDesc() + ":" + service + " end. code[" + result.getCode()
                    + "], message[" + result.getMessage() + "]");
        } else {
            customJob.execute(config);
        }
    } catch (Throwable e) {
        getLogger().error("ERROR:" + config.getService(), e);
    }
}
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
    final String branch = context.getMergedJobDataMap().getString(KEY_BRANCH);
    final String path = context.getMergedJobDataMap().getString(KEY_NODE_PATH);
    final JobService jobService = (JobService) context.getMergedJobDataMap().get("jobService");
    final NodeService nodeService = (NodeService) context.getMergedJobDataMap().get("nodeService");
    final UserService userService = (UserService) context.getMergedJobDataMap().get("userService");
    LOGGER.debug("branch %s with node path %s", branch, path);
    try {
        Node flow = nodeService.find(path).root();
        User owner = userService.findByEmail(flow.getCreatedBy());
        Map<String, String> envs = EnvUtil.build(GitEnvs.FLOW_GIT_BRANCH.name(), branch);
        jobService.createFromFlowYml(path, JobCategory.SCHEDULER, envs, owner);
    } catch (Throwable e) {
        // Preserve the original cause rather than only its message.
        throw new JobExecutionException(e.getMessage(), e);
    }
}
/**
 * This implementation applies the passed-in job data map as bean property
 * values, and delegates to {@code executeInternal} afterwards.
 * @see #executeInternal
 */
@Override
public final void execute(JobExecutionContext context) throws JobExecutionException {
    try {
        // Reflectively adapting to differences between Quartz 1.x and Quartz 2.0...
        Scheduler scheduler = (Scheduler) ReflectionUtils.invokeMethod(getSchedulerMethod, context);
        Map<?, ?> mergedJobDataMap = (Map<?, ?>) ReflectionUtils.invokeMethod(getMergedJobDataMapMethod, context);
        BeanWrapper bw = PropertyAccessorFactory.forBeanPropertyAccess(this);
        MutablePropertyValues pvs = new MutablePropertyValues();
        pvs.addPropertyValues(scheduler.getContext());
        pvs.addPropertyValues(mergedJobDataMap);
        bw.setPropertyValues(pvs, true);
    } catch (SchedulerException ex) {
        throw new JobExecutionException(ex);
    }
    executeInternal(context);
}
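/*
 * Sketch of a QuartzJobBean subclass that relies on the property injection
 * performed above: every matching key in the scheduler context or merged
 * JobDataMap is applied as a bean property before executeInternal() runs.
 * ReportService and the property names are hypothetical.
 */
public class ReportJob extends QuartzJobBean {

    private ReportService reportService; // e.g. exposed via the scheduler context
    private String reportName;           // e.g. supplied in the trigger's JobDataMap

    public void setReportService(ReportService reportService) {
        this.reportService = reportService;
    }

    public void setReportName(String reportName) {
        this.reportName = reportName;
    }

    @Override
    protected void executeInternal(JobExecutionContext context) throws JobExecutionException {
        // Both fields have been populated by execute() before this point.
        reportService.generate(reportName);
    }
}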
@Override protected void executeInternal(JobExecutionContext context) throws JobExecutionException { log.debug("CreateSiteMapJob start."); boolean createSiteMapFlag = YiDuConstants.yiduConf.getBoolean(YiDuConfig.CREATE_SITEMAP, false); if (createSiteMapFlag) { String uri = YiDuConstants.yiduConf.getString(YiDuConfig.URI); String sitemapUri = uri + (StringUtils.endsWith(uri, "/") ? "" : "/") + SITEMAP_DIR + "/"; try { String currentPath = CreateSiteMapJob.class.getClassLoader().getResource("").getPath(); File f = new File(currentPath).getParentFile().getParentFile(); currentPath = f.getAbsolutePath(); log.debug(currentPath); String sitemapDir = currentPath + "/" + SITEMAP_DIR + "/"; log.debug("sitemap dir: " + sitemapDir); if (!new File(sitemapDir).exists()) { new File(sitemapDir).mkdirs(); } if (SiteMapType.XML.getName().equalsIgnoreCase( YiDuConstants.yiduConf.getString(YiDuConfig.SITEMAP_TYPE))) { createXmlSiteMap(sitemapDir, sitemapUri, false); uri = YiDuConstants.yiduConf.getString(YiDuConfig.MOBILE_URI); sitemapUri = uri + (StringUtils.endsWith(uri, "/") ? "" : "/") + SITEMAP_DIR + "/"; createXmlSiteMap(sitemapDir, sitemapUri, true); } else { String responseBody = Utils.getContentFromUri(uri + SiteMapAction.URL); if (StringUtils.isNotBlank(responseBody)) { File destFile = new File(sitemapDir + "/index.html"); FileUtils.writeFile(destFile, responseBody, false); } } log.debug("CreateSiteMapJob normally end."); } catch (Exception e) { log.error(e.getMessage(), e); } } }
@Override
public void execute(final JobExecutionContext jobExecutionContext) throws JobExecutionException {
    SpringBeanAutowiringSupport.processInjectionBasedOnCurrentContext(this);
    try {
        logger.info("Beginning ticket cleanup...");
        final Collection<Ticket> ticketsToRemove = Collections2.filter(this.getTickets(), new Predicate<Ticket>() {
            @Override
            public boolean apply(@Nullable final Ticket ticket) {
                if (ticket.isExpired()) {
                    if (ticket instanceof TicketGrantingTicket) {
                        logger.debug("Cleaning up expired ticket-granting ticket [{}]", ticket.getId());
                        logoutManager.performLogout((TicketGrantingTicket) ticket);
                        deleteTicket(ticket.getId());
                    } else if (ticket instanceof ServiceTicket) {
                        logger.debug("Cleaning up expired service ticket [{}]", ticket.getId());
                        deleteTicket(ticket.getId());
                    } else {
                        logger.warn("Unknown ticket type [{}] found to clean", ticket.getClass().getSimpleName());
                    }
                    return true;
                }
                return false;
            }
        });
        logger.info("{} expired tickets found and removed.", ticketsToRemove.size());
    } catch (final Exception e) {
        logger.error(e.getMessage(), e);
    }
}
@Override protected void executeInternal(JobExecutionContext context) throws JobExecutionException { logger.info("任务执行了..."); /* * JobExecutionContext 将会合并JobDetail与Trigger的JobDataMap,如果其中属性名相同,后者将覆盖前者。 * 可以使用JobExecutionContext.getMergedJobDataMap()方法来获取合并后的JobDataMap */ ScheduleJobEntity scheduleJob = (ScheduleJobEntity) context.getMergedJobDataMap().get("scheduleJob"); System.out.println("任务名称 = [" + scheduleJob.getJobName() + "]"); }
@Override
public void execute(JobExecutionContext arg0) throws JobExecutionException {
    logger.info("Daily 5 o'clock scheduled task starting");
    long now = System.currentTimeMillis();
    SystemParameters.update("dailyResetTimestamp", now);
    Collection<Player> onlines = PlayerManager.getInstance().getOnlinePlayers().values();
    for (Player player : onlines) {
        int distributeKey = player.distributeKey();
        // Wrap the event as a timer task and hand it back to the business thread.
        TaskHandlerContext.INSTANCE.acceptTask(new DailyResetTask(distributeKey, player));
    }
}
public void execute(JobExecutionContext context) throws JobExecutionException {
    long start = System.currentTimeMillis();
    JobDataMap jobDataMap = context.getJobDetail().getJobDataMap();
    String taskType = jobDataMap.getString("taskType");
    String targetObject = jobDataMap.getString("targetObject");
    String targetMethod = jobDataMap.getString("targetMethod");
    String key = targetMethod + "." + targetObject;
    try {
        logger.info("Scheduled task [{}.{}] starting", targetObject, targetMethod);
        if (CacheUtil.getCache().lock(key)) {
            try {
                ApplicationContext applicationContext = (ApplicationContext) context.getScheduler().getContext()
                        .get("applicationContext");
                if (TaskType.local.equals(taskType)) {
                    Object refer = applicationContext.getBean(targetObject);
                    refer.getClass().getDeclaredMethod(targetMethod).invoke(refer);
                } else if (TaskType.dubbo.equals(taskType)) {
                    BaseProvider provider = (BaseProvider) applicationContext
                            .getBean(jobDataMap.getString("targetSystem"));
                    provider.execute(new Parameter(targetObject, targetMethod));
                }
                Double time = (System.currentTimeMillis() - start) / 1000.0;
                logger.info("Scheduled task [{}.{}] took {}s", targetObject, targetMethod, time.toString());
            } finally {
                unLock(key);
            }
        }
    } catch (Exception e) {
        throw new JobExecutionException(e);
    }
}
public void jobWasExecuted(final JobExecutionContext context, JobExecutionException exp) {
    Timestamp end = new Timestamp(System.currentTimeMillis());
    final JobDataMap jobDataMap = context.getJobDetail().getJobDataMap();
    String targetObject = jobDataMap.getString("targetObject");
    String targetMethod = jobDataMap.getString("targetMethod");
    if (logger.isInfoEnabled()) {
        logger.info("Scheduled task finished: {}.{}", targetObject, targetMethod);
    }
    // Update the execution status of the task.
    final TaskFireLog log = (TaskFireLog) jobDataMap.get(JOB_LOG);
    if (log != null) {
        log.setEndTime(end);
        if (exp != null) {
            logger.error("Scheduled task failed: [" + targetObject + "." + targetMethod + "]", exp);
            String contactEmail = jobDataMap.getString("contactEmail");
            if (StringUtils.isNotBlank(contactEmail)) {
                String topic = String.format("Exception in scheduled task [%s.%s]", targetObject, targetMethod);
                sendEmail(new Email(contactEmail, topic, exp.getMessage()));
            }
            log.setStatus(JOBSTATE.ERROR_STATS);
            log.setFireInfo(exp.getMessage());
        } else {
            if (log.getStatus().equals(JOBSTATE.INIT_STATS)) {
                log.setStatus(JOBSTATE.SUCCESS_STATS);
            }
        }
    }
    executorService.submit(new Runnable() {
        public void run() {
            if (log != null) {
                try {
                    schedulerService.updateLog(log);
                } catch (Exception e) {
                    logger.error("Update TaskRunLog cause error. The log object is: " + JSON.toJSONString(log), e);
                }
            }
        }
    });
}
@Override protected void executeInternal(JobExecutionContext context) throws JobExecutionException { logger.debug("CleanStatisticsDataJob start."); try { articleService.cleanStatistics(); } catch (Exception e) { logger.error(e.getMessage(), e); } logger.debug("CleanStatisticsDataJob normally end."); }
@Override public void execute(JobExecutionContext context) throws JobExecutionException { try { log.info("开始自动扫描插件服务"); PluginExecute.run(); } catch (Exception e) { log.error("agent运行异常",e); } }
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
    long timestamp = System.currentTimeMillis() / 1000;
    JobDataMap jobDataMap = context.getJobDetail().getJobDataMap();
    String pluginName = jobDataMap.getString("pluginName");
    try {
        SNMPV3Plugin plugin = (SNMPV3Plugin) jobDataMap.get("pluginObject");
        List<SNMPV3UserInfo> jobUsers = (List<SNMPV3UserInfo>) jobDataMap.get("userInfoList");
        MetricsCommon metricsValue = new SNMPV3MetricsValue(plugin, jobUsers, timestamp);
        // Collecting SNMP monitoring data is slow, so run it asynchronously.
        ExecuteThreadUtil.execute(new JobThread(metricsValue, "snmp v3 job thread"));
    } catch (Exception e) {
        log.error("Plugin {} runtime exception", pluginName, e);
    }
}
@Override
protected void executeInternal(JobExecutionContext arg0) throws JobExecutionException {
    boolean checkBorrowInfo = true;
    try {
        checkBorrowInfo = borrowService.checkBorrowInfo();
    } catch (Throwable e) {
        // TODO: handle exception
        e.printStackTrace();
    }
    if (!checkBorrowInfo) {
        System.err.println("The scheduled check of the borrow table for overdue records failed!");
    }
}
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
    long timestamp = System.currentTimeMillis() / 1000;
    JobDataMap jobDataMap = context.getJobDetail().getJobDataMap();
    String pluginName = jobDataMap.getString("pluginName");
    try {
        JDBCPlugin jdbcPlugin = (JDBCPlugin) jobDataMap.get("pluginObject");
        MetricsCommon jdbcMetricsValue = new JDBCMetricsValue(jdbcPlugin, timestamp);
        ReportMetrics.push(jdbcMetricsValue.getReportObjects());
    } catch (Exception e) {
        log.error("Plugin {} runtime exception", pluginName, e);
    }
}
@Override
public void execute(JobExecutionContext jobExecutionContext) throws JobExecutionException {
    try {
        JobDataMap jobDataMap = jobExecutionContext.getMergedJobDataMap();
        message = (Message) jobDataMap.get(JobConstants.JOB_PARAM_MESSAGE);
        MessageProcessor.processMessage(message);
    } catch (Exception e) {
        throw new JobExecutionException(e);
    }
}
@Override
public void execute(JobExecutionContext jobExecutionContext) throws JobExecutionException {
    String key = jobExecutionContext.getJobDetail().getKey().getName();
    JobVo jobVo = (JobVo) jobExecutionContext.getJobDetail().getJobDataMap().get(key);
    try {
        ExecuteService executeService =
                (ExecuteService) jobExecutionContext.getJobDetail().getJobDataMap().get("jobBean");
        boolean success = executeService.executeJob(jobVo);
        // Placeholders now match the supplied arguments.
        this.loggerInfo("[opencron] job:{}, execute:{}", jobVo, success ? "successful" : "failed");
    } catch (Exception e) {
        logger.error(e.getLocalizedMessage(), e);
    }
}
public void execute(JobExecutionContext context) throws JobExecutionException {
    // System.out.println(context.getTrigger().getCalendarName() + "triggered.time is :" + (new Date()));
    try {
        // Use HH for 24-hour formatting (hh is the 12-hour clock).
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        System.out.println(sdf.format(new Date()) + " -- start crawl and push");
        Controller.execute();
        PushUpdateMessageRealtime.PushUpdateRealTime();
        System.out.println(sdf.format(new Date()) + " -- finish crawl and push");
    } catch (Exception e) {
        e.printStackTrace();
    }
}
private <T> T getRequiredQuartzJobParameter(JobExecutionContext context, String dataKey, Class<T> requiredClass)
        throws JobExecutionException {
    @SuppressWarnings("unchecked")
    final T result = (T) context.getJobDetail().getJobDataMap().get(dataKey);
    if (result == null) {
        if (log.isErrorEnabled()) {
            log.error("PULL: Did not retrieve required service for quartz job: " + dataKey);
        }
        throw new JobExecutionException("Missing job data: " + dataKey);
    }
    return result;
}
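/*
 * Hypothetical caller of the helper above: fetch a required collaborator
 * from the JobDataMap and fail the execution cleanly if it is missing.
 * MessageService, pullPendingMessages() and the "messageService" key are
 * illustrative assumptions, not confirmed API.
 */
public void execute(JobExecutionContext context) throws JobExecutionException {
    MessageService messageService =
            getRequiredQuartzJobParameter(context, "messageService", MessageService.class);
    messageService.pullPendingMessages();
}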
public void execute(JobExecutionContext context) throws JobExecutionException {
    DetectionJobDetail jobDetail = (DetectionJobDetail) context.getJobDetail();
    Session session = jobDetail.getSessionFactory().openSession();
    try {
        String currentInstanceName = jobDetail.getCurrentInstanceName();
        Operation operation = detection(session, jobDetail.getJobInstanceNames(), currentInstanceName);
        if (operation.equals(Operation.reset)) {
            SchedulerService service = jobDetail.getSchedulerService();
            service.resetScheduer();
            Heartbeat beat = new Heartbeat();
            Calendar c = Calendar.getInstance();
            c.setTime(new Date());
            c.add(Calendar.SECOND, 1);
            beat.setDate(c.getTime());
            beat.setId(UUID.randomUUID().toString());
            beat.setInstanceName(currentInstanceName);
            session.save(beat);
            initHeartJob(currentInstanceName, service.getScheduler());
        }
    } catch (Exception e) {
        throw new JobExecutionException(e);
    } finally {
        session.flush();
        session.close();
    }
}
@SuppressWarnings("unchecked") public void execute(JobExecutionContext ctx) throws JobExecutionException { ProjectLogger.log("Fetching All unpublished course status.", LoggerEnum.INFO.name()); List<String> courseListWithStatusAsDraft = getAllUnPublishedCourseStatusId(); if (null != courseListWithStatusAsDraft && !courseListWithStatusAsDraft.isEmpty()) { ProjectLogger.log("Fetching All course details from ekstep.", LoggerEnum.INFO.name()); List<String> ekStepResult = getAllPublishedCourseListFromEKStep(courseListWithStatusAsDraft); if (null != ekStepResult && !ekStepResult.isEmpty()) { ProjectLogger.log("update course status table.", LoggerEnum.INFO.name()); updateCourseStatusTable(ekStepResult); for (String courseId : ekStepResult) { try { Map<String, Object> map = new HashMap<>(); map.put(JsonKey.COURSE_ID, courseId); map.put(JsonKey.STATUS, ProjectUtil.ProgressStatus.NOT_STARTED.getValue()); ProjectLogger.log("Fetching participants list from Db", LoggerEnum.INFO.name()); Response response = cassandraOperation.getRecordsByProperty(courseBatchDBInfo.getKeySpace(), courseBatchDBInfo.getTableName(), JsonKey.COURSE_ID, courseId); List<Map<String, Object>> batchList = (List<Map<String, Object>>) response.get(JsonKey.RESPONSE); ProjectLogger.log("Add participants to user course table", LoggerEnum.INFO.name()); addUserToUserCourseTable(batchList); } catch (Exception ex) { ProjectLogger.log(ex.getMessage(), ex); } } } } }
public void execute(JobExecutionContext executionContext) throws JobExecutionException {
    final UserRegistrySynchronizer userRegistrySynchronizer = (UserRegistrySynchronizer) executionContext
            .getJobDetail().getJobDataMap().get("userRegistrySynchronizer");
    final String synchronizeChangesOnly =
            (String) executionContext.getJobDetail().getJobDataMap().get("synchronizeChangesOnly");
    AuthenticationUtil.runAs(new RunAsWork<Object>() {
        public Object doWork() throws Exception {
            userRegistrySynchronizer.synchronize(
                    synchronizeChangesOnly == null || !Boolean.parseBoolean(synchronizeChangesOnly), true);
            return null;
        }
    }, AuthenticationUtil.getSystemUserName());
}
@Override public void execute(JobExecutionContext jobExecutionContext) throws JobExecutionException { logger.debug("Reporting metrics"); long timestamp = System.currentTimeMillis(); try { for (KairosMetricReporter reporter : m_reporters) { List<DataPointSet> dpList = reporter.getMetrics(timestamp); for (DataPointSet dataPointSet : dpList) { for (DataPoint dataPoint : dataPointSet.getDataPoints()) { m_datastore.putDataPoint(dataPointSet.getName(), dataPointSet.getTags(), dataPoint); } } } Runtime runtime = Runtime.getRuntime(); ImmutableSortedMap<String, String> tags = Tags.create() .put("host", m_hostname).build(); m_datastore.putDataPoint("kairosdb.jvm.free_memory", tags, m_dataPointFactory.createDataPoint(timestamp, runtime.freeMemory())); m_datastore.putDataPoint("kairosdb.jvm.total_memory", tags, m_dataPointFactory.createDataPoint(timestamp, runtime.totalMemory())); m_datastore.putDataPoint("kairosdb.jvm.max_memory", tags, m_dataPointFactory.createDataPoint(timestamp, runtime.maxMemory())); m_datastore.putDataPoint("kairosdb.jvm.thread_count", tags, m_dataPointFactory.createDataPoint(timestamp, getThreadCount())); } catch (Throwable e) { // prevent the thread from dying logger.error("Reporter service error", e); } }
/**
 * Calls the post lookup to do its work.
 */
public void execute(JobExecutionContext context) throws JobExecutionException {
    JobDataMap jobData = context.getJobDetail().getJobDataMap();
    // Extract the post lookup to use.
    Object postLookupObj = jobData.get("postLookup");
    if (postLookupObj == null || !(postLookupObj instanceof PostLookup)) {
        throw new AlfrescoRuntimeException(
                "FeedCleanupJob data must contain valid 'postLookup' reference");
    }
    PostLookup postLookup = (PostLookup) postLookupObj;
    postLookup.execute();
}
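/*
 * Sketch of the wiring these worker-in-JobDataMap jobs assume (the node
 * cleanup, expired-content and post lookup jobs above all follow the same
 * pattern): the collaborator is registered under the expected key when the
 * JobDetail is built. The job class, identities and trigger variable are
 * illustrative assumptions; the builder calls are standard Quartz 2.x API.
 */
JobDetail jobDetail = JobBuilder.newJob(FeedCleanupJob.class)
        .withIdentity("feedCleanup", "activities")
        .build();
jobDetail.getJobDataMap().put("postLookup", postLookup); // the PostLookup collaborator
scheduler.scheduleJob(jobDetail, trigger);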