@Override
public List<ScheduleJobEntity> getRunningJobList() {
    List<ScheduleJobEntity> jobList = null;
    Scheduler scheduler = schedulerFactoryBean.getScheduler();
    try {
        List<JobExecutionContext> executingJobList = scheduler.getCurrentlyExecutingJobs();
        jobList = new ArrayList<>(executingJobList.size());
        for (JobExecutionContext executingJob : executingJobList) {
            ScheduleJobEntity scheduleJob = new ScheduleJobEntity();
            JobDetail jobDetail = executingJob.getJobDetail();
            JobKey jobKey = jobDetail.getKey();
            Trigger trigger = executingJob.getTrigger();
            this.wrapScheduleJob(scheduleJob, scheduler, jobKey, trigger);
            jobList.add(scheduleJob);
        }
    } catch (Exception e) {
        logger.error("Failed to get the list of running scheduled jobs", e);
        throw new ServiceException("Failed to get the list of running scheduled jobs", e);
    }
    return jobList;
}
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
    System.out.println("AsuraJob... " + new Date());
    boolean flag = true;
    if (flag) {
        return;
    }
    run(flag);
}
@Override
public void execute(final JobExecutionContext context) throws JobExecutionException {
    ShardingContexts shardingContexts = jobFacade.getShardingContexts();
    int jobEventSamplingCount = shardingContexts.getJobEventSamplingCount();
    int currentJobEventSamplingCount = shardingContexts.getCurrentJobEventSamplingCount();
    if (jobEventSamplingCount > 0 && ++currentJobEventSamplingCount < jobEventSamplingCount) {
        shardingContexts.setCurrentJobEventSamplingCount(currentJobEventSamplingCount);
        jobFacade.getShardingContexts().setAllowSendJobEvent(false);
        JobExecutorFactory.getJobExecutor(elasticJob, jobFacade).execute();
    } else {
        jobFacade.getShardingContexts().setAllowSendJobEvent(true);
        executorDriver.sendStatusUpdate(Protos.TaskStatus.newBuilder()
                .setTaskId(taskId).setState(Protos.TaskState.TASK_RUNNING).setMessage("BEGIN").build());
        JobExecutorFactory.getJobExecutor(elasticJob, jobFacade).execute();
        executorDriver.sendStatusUpdate(Protos.TaskStatus.newBuilder()
                .setTaskId(taskId).setState(Protos.TaskState.TASK_RUNNING).setMessage("COMPLETE").build());
        shardingContexts.setCurrentJobEventSamplingCount(0);
    }
}
private boolean notifyJobListenersComplete(JobExecutionContext jec, JobExecutionException jobExEx) {
    try {
        qs.notifyJobListenersWasExecuted(jec, jobExEx);
    } catch (SchedulerException se) {
        qs.notifySchedulerListenersError(
                "Unable to notify JobListener(s) of Job that was executed: "
                        + "(error will be ignored). trigger= " + jec.getTrigger().getFullName()
                        + " job= " + jec.getJobDetail().getFullName(), se);
        return false;
    }
    return true;
}
/**
 * Called when the job is executed by Quartz. This method delegates to the
 * <tt>validateSessions()</tt> method on the associated session manager.
 *
 * @param context the Quartz job execution context for this execution.
 */
public void execute(JobExecutionContext context) throws JobExecutionException {
    JobDataMap jobDataMap = context.getMergedJobDataMap();
    ValidatingSessionManager sessionManager =
            (ValidatingSessionManager) jobDataMap.get(SESSION_MANAGER_KEY);
    if (log.isDebugEnabled()) {
        log.debug("Executing session validation Quartz job...");
    }
    sessionManager.validateSessions();
    if (log.isDebugEnabled()) {
        log.debug("Session validation Quartz job complete.");
    }
}
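A minimal sketch of how the session-validation job above might be wired into Quartz. The job class name, identities, and the 15-minute interval are assumptions; the SESSION_MANAGER_KEY constant mirrors the snippet. Object values cannot be passed through usingJobData(), so the manager goes directly onto the JobDataMap.

import org.quartz.*;

public class SessionValidationWiring {
    public static void schedule(Scheduler scheduler, Object sessionManager,
                                String sessionManagerKey) throws SchedulerException {
        // QuartzSessionValidationJob stands in for the job class shown above.
        JobDetail job = JobBuilder.newJob(QuartzSessionValidationJob.class)
                .withIdentity("sessionValidation", "shiro")
                .build();
        // Object values can't go through usingJobData(), so put them on the map directly.
        job.getJobDataMap().put(sessionManagerKey, sessionManager);

        Trigger trigger = TriggerBuilder.newTrigger()
                .withIdentity("sessionValidationTrigger", "shiro")
                .withSchedule(SimpleScheduleBuilder.simpleSchedule()
                        .withIntervalInMinutes(15)
                        .repeatForever())
                .startNow()
                .build();
        scheduler.scheduleJob(job, trigger);
    }
}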
/**
 * @see org.springframework.scheduling.quartz.QuartzJobBean#executeInternal(org.quartz.JobExecutionContext)
 */
@Override
protected void executeInternal(JobExecutionContext context) throws JobExecutionException {
    IMonitoringService monitoringService = getMonitoringService(context);
    // getting gate id from the scheduler's job data
    Map properties = context.getJobDetail().getJobDataMap();
    Long gateId = (Long) properties.get("gateId");
    if (log.isDebugEnabled()) {
        log.debug("Closing gate......[" + gateId.longValue() + "]");
    }
    monitoringService.closeGate(gateId);
    if (log.isDebugEnabled()) {
        log.debug("Gate......[" + gateId.longValue() + "] closed");
    }
}
private void triggerRefire(JobDataMap jobDataMap, int numberOfRetries, JobExecutionContext context,
        Throwable t) throws JobExecutionException {
    final long sleepTimeBetweenRetries = jobDataMap.containsKey(SLEEP_TIME_BETWEEN_RETRIES_PARAM)
            ? jobDataMap.getLongValue(SLEEP_TIME_BETWEEN_RETRIES_PARAM)
            : DEFAULT_SLEEP_TIME_BETWEEN_RETRIES;
    try {
        Thread.sleep(sleepTimeBetweenRetries);
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt(); // restore the interrupt flag instead of swallowing it
    }
    context.put(Constants.JOB_EXCEPTION, t);
    jobDataMap.putAsString(NUMBER_OF_RETRIES_PARAM, --numberOfRetries);
    // set the refire flag so Quartz immediately fires this trigger again
    throw new JobExecutionException(t, true);
}
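A hedged sketch of how an execute() method in the same class might drive the retry helper above. doWork() is a hypothetical unit of work; NUMBER_OF_RETRIES_PARAM is the key the helper decrements.

@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
    JobDataMap jobDataMap = context.getJobDetail().getJobDataMap();
    int retriesLeft = jobDataMap.containsKey(NUMBER_OF_RETRIES_PARAM)
            ? jobDataMap.getIntValue(NUMBER_OF_RETRIES_PARAM) : 0;
    try {
        doWork(); // hypothetical unit of work
    } catch (Exception e) {
        if (retriesLeft > 0) {
            // sleeps, decrements the counter, and rethrows with the refire flag set
            triggerRefire(jobDataMap, retriesLeft, context, e);
        }
        throw new JobExecutionException(e);
    }
}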
@SuppressWarnings("unchecked") @Override public void execute(JobExecutionContext job) throws JobExecutionException { TriggerScheduleServiceCenterToProviderServiceCenterMessage msg = new TriggerScheduleServiceCenterToProviderServiceCenterMessage(); String jobName = job.getJobDetail().getKey().getName(); JobDataMap jobDataMap = job.getJobDetail().getJobDataMap(); ConcurrentHashMap<Integer, ServiceXServerSession> rpcServers = (ConcurrentHashMap<Integer, ServiceXServerSession>) jobDataMap .get(RPCSERVERS); ConcurrentHashMap<String, ConcurrentHashSet<Integer>> schedules = (ConcurrentHashMap<String, ConcurrentHashSet<Integer>>) jobDataMap .get(SCHEDULES); ConcurrentHashSet<Integer> providerList = schedules.get(jobName); if (providerList == null) { log.error("Job:" + jobName + "找不到Provider"); return; } msg.setJobName(jobName); // 查看是否是最有一次执行,并且移除此job if (!job.getTrigger().mayFireAgain()) { msg.setEnd(true); schedules.remove(jobName); log.info("任务生命终结,执行删除:" + jobName); } // 选举式触发 ArrayList<Integer> arrayList = new ArrayList<>(providerList); int providerId = arrayList.get(RandomUtil.randomInt(0, arrayList.size() - 1)); ServiceXServerSession serviceXServerSession = rpcServers.get(providerId); if (serviceXServerSession != null) { serviceXServerSession.getSession().writeAndFlush(msg); log.info(jobName + "触发!分配的ProviderId为:" + providerId + ",下次触发时间:" + TimeUtil.date2Str(job.getTrigger().getNextFireTime().getTime())); } }
public void triggerFired(Trigger trigger, JobExecutionContext context) {
    if (!getLog().isInfoEnabled()) {
        return;
    }
    Object[] args = {
        trigger.getName(), trigger.getGroup(),
        trigger.getPreviousFireTime(), trigger.getNextFireTime(),
        new java.util.Date(),
        context.getJobDetail().getName(), context.getJobDetail().getGroup(),
        Integer.valueOf(context.getRefireCount())
    };
    getLog().info(MessageFormat.format(getTriggerFiredMessage(), args));
}
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
    JobDOExample jobDOExample = new JobDOExample();
    jobDOExample.setLimit(querySupportPageSize());
    JobDOExample.Criteria criteria = jobDOExample.createCriteria();
    // select jobs that are not yet DONE
    criteria.andOwnSignEqualTo(CoreModule.getInstance().getOwnSign())
            .andStatusNotEqualTo(JobConstant.JOB_STATUS_DONE);
    try {
        handle(jobDOExample);
    } catch (Exception e) {
        logger.error("Exception while cleaning up processes and nodes!", e);
    }
}
public void execute(JobExecutionContext context) throws JobExecutionException {
    long start = System.currentTimeMillis();
    JobDataMap jobDataMap = context.getJobDetail().getJobDataMap();
    String taskType = jobDataMap.getString("taskType");
    String targetObject = jobDataMap.getString("targetObject");
    String targetMethod = jobDataMap.getString("targetMethod");
    logger.info("Scheduled task starting: [{}.{}]", targetObject, targetMethod);
    try {
        ApplicationContext applicationContext =
                (ApplicationContext) context.getScheduler().getContext().get("applicationContext");
        if (TaskType.local.equals(taskType)) {
            Object object = applicationContext.getBean(targetObject);
            MethodAccess methodAccess = MethodAccess.get(object.getClass());
            methodAccess.invoke(object, targetMethod);
        } else if (TaskType.dubbo.equals(taskType)) {
            // Object object = DubboUtil.refer(applicationContext, targetObject);
            // MethodAccess methodAccess = MethodAccess.get(object.getClass());
            // methodAccess.invoke(object, targetMethod);
        }
        double time = (System.currentTimeMillis() - start) / 1000.0;
        logger.info("Scheduled task [{}.{}] took {}s", targetObject, targetMethod, time);
    } catch (Exception e) {
        throw new JobExecutionException(e);
    }
}
public void notifyJobListenersWasExecuted(JobExecutionContext jec, JobExecutionException je)
        throws SchedulerException {
    // build a list of all job listeners that are to be notified...
    List jobListeners = buildJobListenerList(jec.getJobDetail().getJobListenerNames());
    // notify all job listeners
    java.util.Iterator itr = jobListeners.iterator();
    while (itr.hasNext()) {
        JobListener jl = (JobListener) itr.next();
        try {
            jl.jobWasExecuted(jec, je);
        } catch (Exception e) {
            SchedulerException se = new SchedulerException(
                    "JobListener '" + jl.getName() + "' threw exception: " + e.getMessage(), e);
            se.setErrorCode(SchedulerException.ERR_JOB_LISTENER);
            throw se;
        }
    }
}
/**
 * Calls the patch to do its work.
 */
public void execute(JobExecutionContext context) throws JobExecutionException {
    JobDataMap jobData = context.getJobDetail().getJobDataMap();
    // extract the shared-folder patch to run
    Object sharedFolderPatchObj = jobData.get("sharedFolderPatch");
    if (!(sharedFolderPatchObj instanceof SharedFolderPatch)) {
        throw new AlfrescoRuntimeException(
                "'sharedFolderPatch' data must contain valid 'SharedFolderPatch' reference");
    }
    // Job lock here - should probably move into the patch service at some time.
    SharedFolderPatch sharedFolderPatch = (SharedFolderPatch) sharedFolderPatchObj;
    sharedFolderPatch.executeAsync();
}
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
    try {
        EventSchedulerJobParameters parameters =
                (EventSchedulerJobParameters) context.getJobDetail().getJobDataMap().get(KEY_PARAMETERS);
        // Add the event (or its clone if necessary) to the queue immediately.
        parameters.getScheduler().scheduleNow(
                parameters.isSingle() ? parameters.getEvent() : parameters.getEvent().clone());
    } catch (Throwable e) {
        // Throw only JobExecutionException
        if (e instanceof JobExecutionException) {
            throw (JobExecutionException) e;
        }
        throw new JobExecutionException(e);
    }
}
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
    ApiFactoryService apiFactoryService =
            (ApiFactoryService) context.get(ApiFactoryService.class.getName());
    try {
        EntityManager em = HibernateUtil.getTransactionalEntityManager();
        List<DistributedAppliance> das = HibernateUtil.getTransactionControl().required(() -> {
            OSCEntityManager<DistributedAppliance> emgr = new OSCEntityManager<>(
                    DistributedAppliance.class, em, StaticRegistry.transactionalBroadcastUtil());
            return emgr.listAll();
        });
        for (DistributedAppliance da : das) {
            for (VirtualSystem vs : da.getVirtualSystems()) {
                ApplianceManagerConnector apmc = vs.getDistributedAppliance().getApplianceManagerConnector();
                ManagerDeviceMemberApi agentApi = apiFactoryService.createManagerDeviceMemberApi(apmc, vs);
                if (apiFactoryService.providesDeviceStatus(vs)) {
                    List<ManagerDeviceMemberStatusElement> agentElems = agentApi.getFullStatus(
                            vs.getDistributedApplianceInstances().stream()
                                    .map(DistributedApplianceInstanceElementImpl::new)
                                    .collect(Collectors.toList()));
                    for (DistributedApplianceInstance dai : vs.getDistributedApplianceInstances()) {
                        getAgentFullStatus(dai, agentElems);
                    }
                }
            }
        }
    } catch (Exception ex) {
        log.error("Failed to sync DAs", ex);
    }
}
public void execute(JobExecutionContext context) throws JobExecutionException {
    long start = System.currentTimeMillis();
    JobDataMap jobDataMap = context.getJobDetail().getJobDataMap();
    String taskType = jobDataMap.getString("taskType");
    String targetObject = jobDataMap.getString("targetObject");
    String targetMethod = jobDataMap.getString("targetMethod");
    try {
        ApplicationContext applicationContext =
                (ApplicationContext) context.getScheduler().getContext().get("applicationContext");
        if (TaskType.local.equals(taskType)) {
            Object refer = applicationContext.getBean(targetObject);
            refer.getClass().getDeclaredMethod(targetMethod).invoke(refer);
        } else if (TaskType.dubbo.equals(taskType)) {
            String system = "org.ibase4j.provider.I" + jobDataMap.getString("targetSystem");
            BaseProvider provider = (BaseProvider) DubboUtil.refer(applicationContext, system);
            provider.execute(new Parameter(targetObject, targetMethod));
        }
        double time = (System.currentTimeMillis() - start) / 1000.0;
        logger.info("Scheduled task [{}.{}] took {}s", targetObject, targetMethod, time);
    } catch (Exception e) {
        throw new JobExecutionException(e);
    }
}
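Both dispatcher jobs above pull the Spring ApplicationContext out of the scheduler context. A minimal sketch of the wiring that makes scheduler.getContext().get("applicationContext") resolve, assuming Spring's SchedulerFactoryBean is used (the configuration class and bean names are illustrative):

import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.quartz.SchedulerFactoryBean;

@Configuration
public class QuartzConfig {
    @Bean
    public SchedulerFactoryBean schedulerFactoryBean(ApplicationContext applicationContext) {
        SchedulerFactoryBean factory = new SchedulerFactoryBean();
        factory.setApplicationContext(applicationContext);
        // Exposes the ApplicationContext in the scheduler context under this key,
        // so jobs can call scheduler.getContext().get("applicationContext").
        factory.setApplicationContextSchedulerContextKey("applicationContext");
        return factory;
    }
}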
public void execute(JobExecutionContext context) throws JobExecutionException {
    DetectionJobDetail jobDetail = (DetectionJobDetail) context.getJobDetail();
    Session session = jobDetail.getSessionFactory().openSession();
    try {
        String currentInstanceName = jobDetail.getCurrentInstanceName();
        Operation operation = detection(session, jobDetail.getJobInstanceNames(), currentInstanceName);
        if (operation.equals(Operation.reset)) {
            ISchedulerService service = jobDetail.getSchedulerService();
            System.out.println("Current instance scheduler starting...");
            service.resetScheduer();
            System.out.println("Start successful...");
        }
    } catch (Exception e) {
        throw new JobExecutionException(e);
    } finally {
        session.flush();
        session.close();
    }
}
public void execute(JobExecutionContext context) throws JobExecutionException {
    JobDataMap jobData = context.getJobDetail().getJobDataMap();
    // extract the node cleanup worker to use
    Object nodeCleanupWorkerObj = jobData.get("nodeCleanupWorker");
    if (!(nodeCleanupWorkerObj instanceof NodeCleanupWorker)) {
        throw new AlfrescoRuntimeException(
                "NodeCleanupJob data must contain valid 'nodeCleanupWorker' reference");
    }
    NodeCleanupWorker nodeCleanupWorker = (NodeCleanupWorker) nodeCleanupWorkerObj;
    List<String> cleanupLog = nodeCleanupWorker.doClean();
    // Done
    if (logger.isDebugEnabled()) {
        logger.debug("Node cleanup log:");
        for (String log : cleanupLog) {
            logger.debug(log);
        }
    }
}
@Override
protected void executeInternal(JobExecutionContext context) throws JobExecutionException {
    IMonitoringService monitoringService = getMonitoringService(context);
    // getting lesson and user ids from the scheduler's job data
    Map properties = context.getJobDetail().getJobDataMap();
    long lessonId = ((Long) properties.get(MonitoringConstants.KEY_LESSON_ID)).longValue();
    Integer userId = (Integer) properties.get(MonitoringConstants.KEY_USER_ID);
    if (log.isDebugEnabled()) {
        log.debug("Lesson [" + lessonId + "] is starting...");
    }
    monitoringService.startLesson(lessonId, userId);
    if (log.isDebugEnabled()) {
        log.debug("Lesson [" + lessonId + "] started");
    }
}
/**
 * @throws JobExecutionException
 * @see org.springframework.scheduling.quartz.QuartzJobBean#executeInternal(org.quartz.JobExecutionContext)
 */
@Override
protected void executeInternal(JobExecutionContext context) throws JobExecutionException {
    IMonitoringService monitoringService = getMonitoringService(context);
    // getting gate id from the scheduler's job data
    Map properties = context.getJobDetail().getJobDataMap();
    Long gateId = (Long) properties.get("gateId");
    if (log.isDebugEnabled()) {
        log.debug("Opening gate......[" + gateId.longValue() + "]");
    }
    monitoringService.openGate(gateId);
    if (log.isDebugEnabled()) {
        log.debug("Gate......[" + gateId.longValue() + "] opened");
    }
}
@Override
protected void executeInternal(JobExecutionContext jobExecutionContext) throws JobExecutionException {
    JobKey key = jobExecutionContext.getJobDetail().getKey();
    System.out.println("Cron Job started with key: " + key.getName() + ", Group: " + key.getGroup()
            + ", Thread Name: " + Thread.currentThread().getName() + ", Time now: " + new Date());
    System.out.println("======================================");
    List<Map<String, Object>> list = jobService.getAllJobs();
    System.out.println("Accessing annotation example, job list: " + list);
    System.out.println("======================================");
    // retrieve stored key-value pairs
    JobDataMap dataMap = jobExecutionContext.getMergedJobDataMap();
    String myValue = dataMap.getString("myKey");
    System.out.println("Value: " + myValue);
    System.out.println("Thread: " + Thread.currentThread().getName() + " stopped.");
}
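A hedged sketch of how the "myKey" value read above might be supplied when the job is scheduled; SampleCronJob, the identity names, and the cron expression are illustrative assumptions.

import org.quartz.CronScheduleBuilder;
import org.quartz.JobBuilder;
import org.quartz.JobDetail;
import org.quartz.Scheduler;
import org.quartz.SchedulerException;
import org.quartz.Trigger;
import org.quartz.TriggerBuilder;

public class CronJobWiring {
    public static void schedule(Scheduler scheduler) throws SchedulerException {
        JobDetail job = JobBuilder.newJob(SampleCronJob.class) // hypothetical: the job class above
                .withIdentity("cronJob", "group1")
                .usingJobData("myKey", "myValue") // ends up in the merged JobDataMap read above
                .build();
        Trigger trigger = TriggerBuilder.newTrigger()
                .withIdentity("cronTrigger", "group1")
                .withSchedule(CronScheduleBuilder.cronSchedule("0 0/5 * * * ?")) // every 5 minutes
                .build();
        scheduler.scheduleJob(job, trigger);
    }
}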
public void triggerComplete(Trigger trigger, JobExecutionContext context,
        Trigger.CompletedExecutionInstruction triggerInstructionCode) {
    if (context.getJobRunTime() > 1000) {
        log.info("[{}] Trigger '{}' completed job in {}{}",
                context.getJobDetail().getKey(),
                trigger.getKey().getName(),
                humanize(Duration.ofMillis(context.getJobRunTime()), false, true),
                (context.getResult() != null ? " with result: " + context.getResult() : ""));
    }
}
/**
 * @param context {@link JobExecutionContext}
 * @param jobException {@link JobExecutionException}
 */
@Trace(metricName = "JobListener{save}", async = true, dispatcher = true)
private void save(JobExecutionContext context, JobExecutionException jobException) throws IOException {
    JobResult jobResult = (JobResult) context.getResult();
    ExecutionStatusEntity.ExecutionStatusEntityBuilder builder = ExecutionStatusEntity.builder();
    builder.created(new Date());
    builder.nextFireTime(context.getNextFireTime());
    builder.fireTime(context.getFireTime());
    builder.jobRunTime(context.getJobRunTime());
    builder.prevFireTime(context.getPreviousFireTime());
    builder.scheduledFireTime(context.getScheduledFireTime());
    if (jobResult != null && jobResult.getTmpFile() != null) {
        builder.output(FileUtils.readFileToString(jobResult.getTmpFile().toFile()));
        builder.exitCode(jobResult.getExitValue());
        if (jobResult.getExitValue() != 0) {
            builder.errors(true);
        }
        if (jobResult.getTmpFile().toFile().delete()) {
            logger.debug(String.format("%s file deleted", jobResult.getTmpFile().toAbsolutePath()));
        }
    } else {
        builder.output("No output");
        builder.exitCode(-1);
        builder.errors(true);
    }
    if (jobException != null) {
        builder.errors(true);
        builder.errorMessage(jobException.getMessage());
    }
    builder.job(getJob());
    executionStatusRepository.save(builder.build());
}
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
    HistorianEntry entry = getInitialHistorianEntry(context);
    try {
        job.execute(context);
        entry.setOutput(context.getResult());
        entry.setRunStatus(ExecutionStatus.COMPLETED_SUCCESSFULLY);
    } catch (Throwable t) {
        entry.setException(t);
        entry.setRunStatus(ExecutionStatus.COMPLETED_WITH_EXCEPTION);
        throw t;
    } finally {
        entry.setEndTime(Instant.now(clock));
        historianDAO.writeEntry(entry);
    }
}
public void jobWasExecuted(final JobExecutionContext context, JobExecutionException exp) {
    JobDetail jobDetail = context.getJobDetail();
    if (jobDetail instanceof JobDetailImpl) {
        JobDetailImpl job = (JobDetailImpl) jobDetail;
        SchedulerLog log = new SchedulerLog();
        log.setJobName(job.getName());
        log.setGroupName(job.getGroup());
        log.setTriggerName(context.getTrigger().getKey().getName());
        log.setFireTime(context.getFireTime());
        log.setScheduledFireTime(context.getScheduledFireTime());
        log.setRefireCount(context.getRefireCount());
        log.setPreviousFireTime(context.getPreviousFireTime());
        log.setNextFireTime(context.getNextFireTime());
        log.setCreateTime(new Date());
        logMapper.insert(log);
    }
}
public void jobToBeExecuted(final JobExecutionContext context) {
    final JobDataMap jobDataMap = context.getJobDetail().getJobDataMap();
    String targetObject = jobDataMap.getString("targetObject");
    String targetMethod = jobDataMap.getString("targetMethod");
    if (logger.isInfoEnabled()) {
        logger.info("Scheduled task starting: {}.{}", targetObject, targetMethod);
    }
    // persist the fire log
    TaskFireLog log = new TaskFireLog();
    log.setStartTime(context.getFireTime());
    log.setGroupName(targetObject);
    log.setTaskName(targetMethod);
    log.setStatus(JOBSTATE.INIT_STATS);
    log.setServerHost(NativeUtil.getHostName());
    log.setServerDuid(NativeUtil.getDUID());
    schedulerService.updateLog(log);
    jobDataMap.put(JOB_LOG, log);
}
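The listener callbacks above (jobWasExecuted, jobToBeExecuted) only fire once the listener is registered with the scheduler. A minimal sketch using the Quartz 2.x ListenerManager API; the listener instance and wiring class are assumptions.

import org.quartz.JobListener;
import org.quartz.Scheduler;
import org.quartz.SchedulerException;
import org.quartz.impl.matchers.GroupMatcher;

public class ListenerWiring {
    public static void register(Scheduler scheduler, JobListener jobLogListener)
            throws SchedulerException {
        // Attach the listener to every job group; a KeyMatcher can scope it to one job instead.
        scheduler.getListenerManager().addJobListener(jobLogListener, GroupMatcher.anyJobGroup());
    }
}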
public void execute(JobExecutionContext context) throws JobExecutionException {
    JobDataMap map = context.getJobDetail().getJobDataMap();
    if (map.containsKey("overdueDays")) {
        int overdueDays = map.getInt("overdueDays");
        Date createDate = (Date) map.get("createDate");
        if (defaultCalculateOverdueDays(overdueDays, createDate)) {
            if (map.getBoolean("overdueMethodSendMessage")) {
                executeRemind(map);
            } else {
                String overdueCustomBeanProcessor = map.getString("overdueCustomBeanProcessor");
                ITaskOverdueProcessor processor = ContextHolder.getBean(overdueCustomBeanProcessor);
                processor.process(map.getString("taskId"));
                String reminderJobId = map.getString("reminderJobId");
                CancelReminderJobBean cancelReminderJobBean = ContextHolder.getBean(CancelReminderJobBean.BEAN_ID);
                cancelReminderJobBean.cancelReminderJob(reminderJobId);
            }
        }
    } else {
        executeRemind(map);
    }
}
@Override
public void executeJob(JobExecutionContext context) throws JobExecutionException {
    JobDataMap jobData = context.getJobDetail().getJobDataMap();
    // extract the job executer to use
    Object executerObj = jobData.get("jobExecuter");
    if (!(executerObj instanceof DeleteExpiredContentScheduledJobExecuter)) {
        throw new AlfrescoRuntimeException(
                "ScheduledJob data must contain valid 'Executer' reference");
    }
    final DeleteExpiredContentScheduledJobExecuter jobExecuter =
            (DeleteExpiredContentScheduledJobExecuter) executerObj;
    AuthenticationUtil.runAs(new AuthenticationUtil.RunAsWork<Object>() {
        public Object doWork() throws Exception {
            jobExecuter.execute();
            return null;
        }
    }, AuthenticationUtil.getSystemUserName());
}
private void runCommand(JobExecutionContext context) throws IOException, InterruptedException {
    ProcessBuilder pb = new ProcessBuilder();
    setEnvironments(pb);
    Path output = setOutputs(pb);
    pb.command(getCommand(getJob().getCommand()));
    try {
        Process process = pb.start();
        int exitCode = process.waitFor();
        context.setResult(JobResult.builder().exitValue(exitCode).tmpFile(output).build());
    } catch (Exception e) {
        output.toFile().delete();
        throw e;
    }
}
@Override
protected void executeInternal(JobExecutionContext context) throws JobExecutionException {
    if (!configurationManager.getDumpDatabaseCronConfiguration().isActive()) {
        return;
    }
    long start = System.currentTimeMillis();
    LOGGER.info("SCHEDULER : Dumping database.");
    if (!DHuS.isStarted()) {
        LOGGER.warn("SCHEDULER : Not run while system is not fully initialized.");
        return;
    }
    systemService.dumpDatabase();
    LOGGER.info("SCHEDULER : Database dump done - " + (System.currentTimeMillis() - start) + "ms");
}
protected void cleanupAuditData(final String auditApplicationName, final JobExecutionContext context) {
    final AuditService auditService = JobUtilities.getJobDataValue(context, "auditService", AuditService.class);
    final String cutOffPeriodStr = JobUtilities.getJobDataValue(context, "cutOffPeriod", String.class);
    final String timezoneStr = JobUtilities.getJobDataValue(context, "timezone", String.class, false);

    final Period cutOffPeriod = Period.parse(cutOffPeriodStr);
    final ZoneId zone = ZoneId.of(timezoneStr != null ? timezoneStr : "Z");
    final ZonedDateTime now = LocalDateTime.now(ZoneId.of("Z")).atZone(zone);
    final ZonedDateTime cutOffDate = now.minus(cutOffPeriod);
    final long epochSecond = cutOffDate.toEpochSecond();

    LOGGER.debug("Clearing all audit entries of application {} until {}", auditApplicationName, cutOffDate);
    auditService.clearAudit(auditApplicationName, null, Long.valueOf(epochSecond));
}
/**
 * Interrupt the identified InterruptableJob executing in this Scheduler instance.
 *
 * <p>
 * This method is not cluster aware. That is, it will only interrupt
 * instances of the identified InterruptableJob currently executing in this
 * Scheduler instance, not across the entire cluster.
 * </p>
 *
 * @see org.quartz.core.RemotableQuartzScheduler#interrupt(JobKey)
 */
public boolean interrupt(String fireInstanceId) throws UnableToInterruptJobException {
    List<JobExecutionContext> jobs = getCurrentlyExecutingJobs();
    Job job = null;
    for (JobExecutionContext jec : jobs) {
        if (jec.getFireInstanceId().equals(fireInstanceId)) {
            job = jec.getJobInstance();
            if (job instanceof InterruptableJob) {
                ((InterruptableJob) job).interrupt();
                return true;
            } else {
                throw new UnableToInterruptJobException(
                        "Job " + jec.getJobDetail().getKey()
                        + " can not be interrupted, since it does not implement "
                        + InterruptableJob.class.getName());
            }
        }
    }
    return false;
}
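A hedged sketch of the kind of job the interrupt(fireInstanceId) method above can actually stop: an InterruptableJob that checks a volatile flag between chunks of work. The class name and processChunk() helper are illustrative.

import org.quartz.InterruptableJob;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
import org.quartz.UnableToInterruptJobException;

public class StoppableJob implements InterruptableJob {
    private volatile boolean interrupted = false;

    @Override
    public void execute(JobExecutionContext context) throws JobExecutionException {
        // Check the flag between units of work so an interrupt takes effect promptly.
        for (int i = 0; i < 1000 && !interrupted; i++) {
            processChunk(i);
        }
    }

    @Override
    public void interrupt() throws UnableToInterruptJobException {
        interrupted = true; // set from the scheduler thread by interrupt(...) above
    }

    private void processChunk(int i) { /* hypothetical unit of work */ }
}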
@Override
public void execute(final JobExecutionContext context) throws JobExecutionException {
    if (LocalContext.isVerboseEnabled()) {
        log.info(this.getClass().getSimpleName() + " fired ... next fire time: "
                + context.getNextFireTime());
    }
    RequestMonitor.resetCounter();
}
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
    JobDOExample jobDOExample = new JobDOExample();
    jobDOExample.setLimit(querySupportPageSize());
    jobDOExample.createCriteria()
            .andStatusEqualTo(JobConstant.JOB_STATUS_DONE)
            .andOwnSignEqualTo(CoreModule.getInstance().getOwnSign());
    try {
        handle(jobDOExample);
    } catch (Exception e) {
        logger.error("Exception while cleaning up completed jobs!", e);
    }
}
@Override public void execute(JobExecutionContext context) throws JobExecutionException { log.info("Job triggered to send emails"); JobDataMap map = context.getMergedJobDataMap(); sendEmail(map); log.info("Job completed"); }
private HistorianEntry getInitialHistorianEntry(JobExecutionContext context) {
    String contextKey = context.get(CONTEXT_KEY).toString();
    TriggerKey triggerKey = context.getTrigger().getKey();
    NameAndGroup current = new NameAndGroup(triggerKey.getName(), triggerKey.getGroup());
    List<NameAndGroup> previousTriggers = afterDAO.getPreviousTriggersByKey(current);
    List<String> prevTriggersFireKeys = historianDAO.readEntriesByContext(contextKey).stream()
            .filter(e -> previousTriggers.contains(e.getTriggerKey()))
            .map(HistorianEntry::getFireKey)
            .collect(Collectors.toList());
    return new HistorianEntry(schedulerName, schedulerInstanceId, contextKey,
            context.getFireInstanceId(), current, prevTriggersFireKeys, Instant.now(clock), null,
            context.getTrigger().getJobDataMap(), null, ExecutionStatus.FIRED, null);
}
public void jobExecutionVetoed(JobExecutionContext context) {
    if (!shouldDispatch(context)) {
        return;
    }
    Iterator itr = listeners.iterator();
    while (itr.hasNext()) {
        JobListener jl = (JobListener) itr.next();
        jl.jobExecutionVetoed(context);
    }
}
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
    long timestamp = System.currentTimeMillis() / 1000;
    JobDataMap jobDataMap = context.getJobDetail().getJobDataMap();
    String pluginName = jobDataMap.getString("pluginName");
    try {
        SNMPV3Plugin plugin = (SNMPV3Plugin) jobDataMap.get("pluginObject");
        List<SNMPV3UserInfo> jobUsers = (List<SNMPV3UserInfo>) jobDataMap.get("userInfoList");
        MetricsCommon metricsValue = new SNMPV3MetricsValue(plugin, jobUsers, timestamp);
        // Collecting SNMP monitoring data can take a while, so run it asynchronously
        ExecuteThreadUtil.execute(new JobThread(metricsValue, "snmp v3 job thread"));
    } catch (Exception e) {
        log.error("Plugin {} failed to run", pluginName, e);
    }
}
@Override
public void execute(JobExecutionContext jobExecutionContext) throws JobExecutionException {
    try {
        JobDataMap jobDataMap = jobExecutionContext.getMergedJobDataMap();
        message = (Message) jobDataMap.get(JobConstants.JOB_PARAM_MESSAGE);
        MessageProcessor.processMessage(message);
    } catch (Exception e) {
        throw new JobExecutionException(e);
    }
}
@Override
public void execute(JobExecutionContext jobExecutionContext) throws JobExecutionException {
    String key = jobExecutionContext.getJobDetail().getKey().getName();
    JobVo jobVo = (JobVo) jobExecutionContext.getJobDetail().getJobDataMap().get(key);
    try {
        ExecuteService executeService =
                (ExecuteService) jobExecutionContext.getJobDetail().getJobDataMap().get("jobBean");
        boolean success = executeService.executeJob(jobVo);
        this.loggerInfo("[opencron] job:{}, execute:{}", jobVo, success ? "successful" : "failed");
    } catch (Exception e) {
        logger.error(e.getLocalizedMessage(), e);
    }
}