/**
 * Called when the job is executed by quartz. This method delegates to the
 * <tt>validateSessions()</tt> method on the associated session manager.
 *
 * @param context
 *            the Quartz job execution context for this execution.
 */
public void execute(JobExecutionContext context) throws JobExecutionException {
    JobDataMap jobDataMap = context.getMergedJobDataMap();
    ValidatingSessionManager sessionManager =
            (ValidatingSessionManager) jobDataMap.get(SESSION_MANAGER_KEY);

    if (log.isDebugEnabled()) {
        log.debug("Executing session validation Quartz job...");
    }

    sessionManager.validateSessions();

    if (log.isDebugEnabled()) {
        log.debug("Session validation Quartz job complete.");
    }
}
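For context, a minimal wiring sketch for a job like this, assuming SESSION_MANAGER_KEY is the map key the job reads back and that a scheduler instance is at hand; the class name, group names, and interval are illustrative, not taken from the snippet above:

// Hypothetical wiring; identity names and the interval are assumptions.
JobDetail detail = JobBuilder.newJob(SessionValidationJob.class)
        .withIdentity("sessionValidation", "shiro")
        .build();
// The job above reads the manager back out of the merged JobDataMap.
detail.getJobDataMap().put(SESSION_MANAGER_KEY, sessionManager);

Trigger trigger = TriggerBuilder.newTrigger()
        .withIdentity("sessionValidationTrigger", "shiro")
        .withSchedule(SimpleScheduleBuilder.simpleSchedule()
                .withIntervalInMilliseconds(60 * 60 * 1000L) // hourly
                .repeatForever())
        .build();
scheduler.scheduleJob(detail, trigger);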
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
    final int numberOfRetries;
    JobDataMap jobDataMap = context.getMergedJobDataMap();
    numberOfRetries = jobDataMap.containsKey(NUMBER_OF_RETRIES_PARAM)
            ? jobDataMap.getIntValue(NUMBER_OF_RETRIES_PARAM)
            : DEFAULT_NUMBER_OF_RETRIES;

    // check if the running job class has the "NonRetryable" annotation
    boolean isRetryable = jobDataMap.containsKey(Constants.JOB_DEFINITION)
            ? ((JobDefinition) jobDataMap.get(Constants.JOB_DEFINITION)).isRetryable()
            : true;

    try {
        job.execute(context);
        // reset retry param, just in case
        jobDataMap.putAsString(NUMBER_OF_RETRIES_PARAM, 0);
    } catch (Exception e) {
        // do not retry if job is not retryable or no more retries left
        if (!isRetryable || numberOfRetries <= 0) {
            throwAndFinish(jobDataMap, context, e);
        }
        triggerRefire(jobDataMap, numberOfRetries, context, e);
    } catch (Throwable t) {
        // do not retry on Throwable
        throwAndFinish(jobDataMap, context, t);
    }
}
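The triggerRefire helper is not included in this snippet. A plausible sketch, assuming only the retry-count convention visible above, would decrement the stored count and ask Quartz to refire the job immediately:

// Hypothetical helper; the real implementation is not shown in the snippet above.
private void triggerRefire(JobDataMap jobDataMap, int numberOfRetries,
                           JobExecutionContext context, Exception cause)
        throws JobExecutionException {
    // One attempt consumed; persist the remaining count for the next run.
    jobDataMap.putAsString(NUMBER_OF_RETRIES_PARAM, numberOfRetries - 1);
    JobExecutionException jee = new JobExecutionException(cause);
    jee.setRefireImmediately(true); // ask Quartz to execute the job again right away
    throw jee;
}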
public JobDetail addMethodInovkeJob(String schedulerName, String jobName, String jobGroup,
        String jobClass, Object[] constructorArguments, String jobClassMethodName,
        Object[] jobClassMethodArgs, String description) throws SchedulerException {
    Assert.notNull(jobClass, "jobClass can not be null");
    Assert.notEmpty(schedulerName, "schedulerName can not be empty");
    Assert.notEmpty(jobName, "jobName can not be empty");
    Assert.notEmpty(jobGroup, "jobGroup can not be empty");
    Assert.notEmpty(jobClassMethodName, "jobClassMethodName can not be empty");

    JobDataMap jobDataMap = new JobDataMap();
    jobDataMap.put("jobClass", jobClass);
    jobDataMap.put("constructorArguments", constructorArguments);
    jobDataMap.put("jobClassMethodName", jobClassMethodName);
    jobDataMap.put("jobClassMethodArgs", jobClassMethodArgs);

    JobDetail jobDetail = JobBuilder.newJob(MethodInvokeJob.class)
            .withIdentity(jobName, jobGroup)
            .withDescription(description)
            .setJobData(jobDataMap)
            .storeDurably()
            .build();
    addJob(schedulerName, jobDetail);
    return jobDetail;
}
public JobDetail addStatefulMethodJob(String schedulerName, String jobName, String jobGroup,
        String jobClass, Object[] constructorArguments, String jobClassMethodName,
        Object[] jobClassMethodArgs, String description) throws SchedulerException {
    Assert.notNull(jobClass, "jobClass can not be null");
    Assert.notEmpty(schedulerName, "schedulerName can not be empty");
    Assert.notEmpty(jobName, "jobName can not be empty");
    Assert.notEmpty(jobGroup, "jobGroup can not be empty");
    Assert.notEmpty(jobClassMethodName, "jobClassMethodName can not be empty");

    JobDataMap jobDataMap = new JobDataMap();
    jobDataMap.put("jobClass", jobClass);
    jobDataMap.put("constructorArguments", constructorArguments);
    jobDataMap.put("jobClassMethodName", jobClassMethodName);
    jobDataMap.put("jobClassMethodArgs", jobClassMethodArgs);

    JobDetail jobDetail = JobBuilder.newJob(StatefulMethodInvokeJob.class)
            .withIdentity(jobName, jobGroup)
            .withDescription(description)
            .setJobData(jobDataMap)
            .storeDurably()
            .build();
    addJob(schedulerName, jobDetail);
    return jobDetail;
}
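A hedged call-site sketch for the two methods above; the service variable and every argument value here are assumptions for illustration:

// Hypothetical call site; all concrete values are illustrative.
JobDetail detail = quartzService.addMethodInovkeJob(
        "defaultScheduler",              // schedulerName
        "dailyReport", "reports",        // jobName, jobGroup
        "com.example.ReportGenerator",   // fully qualified jobClass
        new Object[0],                   // constructorArguments
        "generate",                      // jobClassMethodName
        new Object[] { "pdf" },          // jobClassMethodArgs
        "Generates the daily report");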
@SuppressWarnings("unchecked") @Override public void execute(JobExecutionContext job) throws JobExecutionException { TriggerScheduleServiceCenterToProviderServiceCenterMessage msg = new TriggerScheduleServiceCenterToProviderServiceCenterMessage(); String jobName = job.getJobDetail().getKey().getName(); JobDataMap jobDataMap = job.getJobDetail().getJobDataMap(); ConcurrentHashMap<Integer, ServiceXServerSession> rpcServers = (ConcurrentHashMap<Integer, ServiceXServerSession>) jobDataMap .get(RPCSERVERS); ConcurrentHashMap<String, ConcurrentHashSet<Integer>> schedules = (ConcurrentHashMap<String, ConcurrentHashSet<Integer>>) jobDataMap .get(SCHEDULES); ConcurrentHashSet<Integer> providerList = schedules.get(jobName); if (providerList == null) { log.error("Job:" + jobName + "找不到Provider"); return; } msg.setJobName(jobName); // 查看是否是最有一次执行,并且移除此job if (!job.getTrigger().mayFireAgain()) { msg.setEnd(true); schedules.remove(jobName); log.info("任务生命终结,执行删除:" + jobName); } // 选举式触发 ArrayList<Integer> arrayList = new ArrayList<>(providerList); int providerId = arrayList.get(RandomUtil.randomInt(0, arrayList.size() - 1)); ServiceXServerSession serviceXServerSession = rpcServers.get(providerId); if (serviceXServerSession != null) { serviceXServerSession.getSession().writeAndFlush(msg); log.info(jobName + "触发!分配的ProviderId为:" + providerId + ",下次触发时间:" + TimeUtil.date2Str(job.getTrigger().getNextFireTime().getTime())); } }
/**
 * Add a scheduled task (cron version).
 *
 * @param name
 *            task name
 * @param task
 *            the job class to execute
 * @param cronExpression
 *            cron expression
 * @throws SchedulerException
 */
public Trigger addSchedule(String name, Class<? extends Job> task, String cronExpression,
        JobDataMap param) throws SchedulerException {
    Scheduler sched = SF.getScheduler();
    JobBuilder builder = JobBuilder.newJob(task);
    builder.withIdentity(name, Scheduler.DEFAULT_GROUP);
    if (param != null) {
        builder.usingJobData(param);
    }
    Trigger trigger = TriggerBuilder.newTrigger()
            .withIdentity(name, Scheduler.DEFAULT_GROUP)
            .withSchedule(CronScheduleBuilder.cronSchedule(cronExpression))
            .build();
    sched.scheduleJob(builder.build(), trigger);
    if (!sched.isShutdown()) {
        sched.start();
    }
    return trigger;
}
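A usage sketch for the cron overload, assuming a helper instance and a job class of your own; all names and the expression are illustrative:

// Hypothetical usage; MyJob and the cron expression are assumptions.
JobDataMap params = new JobDataMap();
params.put("reportDate", "2024-01-01");
Trigger t = scheduleHelper.addSchedule("dailyReport", MyJob.class,
        "0 0 2 * * ?", params); // every day at 02:00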
/**
 * <p>
 * Trigger the identified <code>{@link org.quartz.Job}</code> (execute it
 * now) - with a volatile trigger.
 * </p>
 */
public void triggerJobWithVolatileTrigger(SchedulingContext ctxt, String jobName,
        String groupName, JobDataMap data) throws SchedulerException {
    validateState();

    if (groupName == null) {
        groupName = Scheduler.DEFAULT_GROUP;
    }

    Trigger trig = new org.quartz.SimpleTrigger(newTriggerId(),
            Scheduler.DEFAULT_MANUAL_TRIGGERS, jobName, groupName, new Date(), null, 0, 0);
    trig.setVolatility(true);
    trig.computeFirstFireTime(null);
    if (data != null) {
        trig.setJobDataMap(data);
    }

    boolean collision = true;
    while (collision) {
        try {
            resources.getJobStore().storeTrigger(ctxt, trig, false);
            collision = false;
        } catch (ObjectAlreadyExistsException oaee) {
            trig.setName(newTriggerId());
        }
    }

    notifySchedulerThread(trig.getNextFireTime().getTime());
    notifySchedulerListenersSchduled(trig);
}
private void processUserMessage(String messageContent, SlackUser messageSender,
        SlackSession session, SlackChannel channel) {
    // TODO change it to be processed asynchronously
    // Parse the message content
    String parsedMessage = parseMessage(messageContent);
    if (parsedMessage != null) {
        logger.info("=> Received message from amigo bot:" + parsedMessage);
        try {
            // Some unique job name
            String jobName = "SLACK-MESG-JOB-" + UUID.randomUUID().toString();
            String groupName = JOB_GRP_SLACKBOT;
            JobDataMap params = new JobDataMap();
            params.put(JOB_PARAM_MESSAGE, parsedMessage);
            params.put(JOB_PARAM_MSG_SENDER, messageSender);
            params.put(JOB_PARAM_SLACK_SESSION, session);
            params.put(JOB_PARAM_SLACK_CHANNEL, channel);
            params.put(JOB_PARAM_BOT_TOK, System.getenv("SLACK_BOT_TOKEN"));
            logger.info("Processing message async with params: " + params);
            JobManager.getInstance().scheduleJob(SlackMessageProcessorJob.class,
                    jobName, groupName, params);
        } catch (Exception e) {
            logger.log(Level.SEVERE, "Error in processing message", e);
        }
    }
}
private void processMessageAsync(String value) {
    if (value != null && !value.trim().isEmpty()) {
        try {
            // Some unique job name
            String jobName = "CP-MESG-JOB-" + UUID.randomUUID().toString();
            String groupName = JOB_GRP_CP;
            JobDataMap params = new JobDataMap();
            params.put(JOB_PARAM_MESSAGE, value);
            params.put(JOB_PARAM_DBCLIENT, dbClient);
            JobManager.getInstance().scheduleJob(MessageProcessorJob.class,
                    jobName, groupName, params);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
/**
 * Calls the patch to do its work
 */
public void execute(JobExecutionContext context) throws JobExecutionException {
    JobDataMap jobData = context.getJobDetail().getJobDataMap();
    // extract the shared folder patch to use
    Object sharedFolderPatchObj = jobData.get("sharedFolderPatch");
    if (!(sharedFolderPatchObj instanceof SharedFolderPatch)) {
        throw new AlfrescoRuntimeException(
                "'sharedFolderPatch' data must contain valid 'SharedFolderPatch' reference");
    }
    // Job Lock Here - should probably move into the patch service at some time.
    SharedFolderPatch sharedFolderPatch = (SharedFolderPatch) sharedFolderPatchObj;
    sharedFolderPatch.executeAsync();
}
@Override
public HistorianEntry mapRow(ResultSet resultSet, int i) throws SQLException {
    return new HistorianEntry(
            resultSet.getString("SCHED_NAME"),
            resultSet.getString("SCHED_INSTANCE_ID"),
            resultSet.getString("CONTEXT_KEY"),
            resultSet.getString("FIRE_KEY"),
            new NameAndGroup(resultSet.getString("TRIGGER_NAME"),
                    resultSet.getString("TRIGGER_GROUP")),
            resultSet.getString("PREV_TRIGGERS_FIRE_KEYS") == null
                    ? null
                    : Arrays.asList(resultSet.getString("PREV_TRIGGERS_FIRE_KEYS").split(",")),
            resultSet.getTimestamp("START_TIME").toInstant(),
            resultSet.getTimestamp("END_TIME") == null
                    ? null
                    : resultSet.getTimestamp("END_TIME").toInstant(),
            readByteValue(resultSet.getBytes("INPUT"), JobDataMap.class),
            readByteValue(resultSet.getBytes("OUTPUT"), Object.class),
            ExecutionStatus.valueOf(resultSet.getString("RUN_STATUS")),
            readByteValue(resultSet.getBytes("EXCEPTION"), String.class)
    );
}
/**
 * <p>
 * Create a JobExecutionContext with the given context data.
 * </p>
 */
public JobExecutionContextImpl(Scheduler scheduler, TriggerFiredBundle firedBundle, Job job) {
    this.scheduler = scheduler;
    this.trigger = firedBundle.getTrigger();
    this.calendar = firedBundle.getCalendar();
    this.jobDetail = firedBundle.getJobDetail();
    this.job = job;
    this.recovering = firedBundle.isRecovering();
    this.fireTime = firedBundle.getFireTime();
    this.scheduledFireTime = firedBundle.getScheduledFireTime();
    this.prevFireTime = firedBundle.getPrevFireTime();
    this.nextFireTime = firedBundle.getNextFireTime();

    // Merge job-level data first, then trigger-level data: on key
    // collisions the trigger's entries override the job's.
    this.jobDataMap = new JobDataMap();
    this.jobDataMap.putAll(jobDetail.getJobDataMap());
    this.jobDataMap.putAll(trigger.getJobDataMap());
}
private void throwAndFinish(JobDataMap jobDataMap, JobExecutionContext context, Throwable t)
        throws JobExecutionException {
    context.put(Constants.JOB_EXCEPTION, t);
    // reset retry param, just in case
    jobDataMap.putAsString(NUMBER_OF_RETRIES_PARAM, 0);
    throw new JobExecutionException("This trigger has thrown a terminal exception. "
            + "Retries exceeded or job not retryable", t);
}
public void execute(JobExecutionContext context) throws JobExecutionException {
    long start = System.currentTimeMillis();
    JobDataMap jobDataMap = context.getJobDetail().getJobDataMap();
    String taskType = jobDataMap.getString("taskType");
    String targetObject = jobDataMap.getString("targetObject");
    String targetMethod = jobDataMap.getString("targetMethod");
    try {
        ApplicationContext applicationContext =
                (ApplicationContext) context.getScheduler().getContext().get("applicationContext");
        if (TaskType.local.equals(taskType)) {
            Object refer = applicationContext.getBean(targetObject);
            refer.getClass().getDeclaredMethod(targetMethod).invoke(refer);
        } else if (TaskType.dubbo.equals(taskType)) {
            String system = "org.ibase4j.provider.I" + jobDataMap.getString("targetSystem");
            BaseProvider provider = (BaseProvider) DubboUtil.refer(applicationContext, system);
            provider.execute(new Parameter(targetObject, targetMethod));
        }
        double time = (System.currentTimeMillis() - start) / 1000.0;
        logger.info("Scheduled task [{}.{}] took {}s", targetObject, targetMethod, time);
    } catch (Exception e) {
        throw new JobExecutionException(e);
    }
}
@Override
protected void executeInternal(JobExecutionContext jobExecutionContext) throws JobExecutionException {
    JobKey key = jobExecutionContext.getJobDetail().getKey();
    System.out.println("Cron Job started with key :" + key.getName()
            + ", Group :" + key.getGroup()
            + " , Thread Name :" + Thread.currentThread().getName()
            + " ,Time now :" + new Date());

    System.out.println("======================================");
    System.out.println("Accessing annotation example: " + jobService.getAllJobs());
    List<Map<String, Object>> list = jobService.getAllJobs();
    System.out.println("Job list :" + list);
    System.out.println("======================================");

    // *********** For retrieving stored key-value pairs ***********
    JobDataMap dataMap = jobExecutionContext.getMergedJobDataMap();
    String myValue = dataMap.getString("myKey");
    System.out.println("Value:" + myValue);

    System.out.println("Thread: " + Thread.currentThread().getName() + " stopped.");
}
/**
 * Create a Quartz Job.
 *
 * @param jobClass  Class whose executeInternal() method needs to be called.
 * @param isDurable If true, the job is persisted even after completion; otherwise it is not.
 * @param context   Spring application context.
 * @param jobName   Job name.
 * @param jobGroup  Job group.
 *
 * @return JobDetail object
 */
protected static JobDetail createJob(Class<? extends QuartzJobBean> jobClass, boolean isDurable,
        ApplicationContext context, String jobName, String jobGroup) {
    JobDetailFactoryBean factoryBean = new JobDetailFactoryBean();
    factoryBean.setJobClass(jobClass);
    factoryBean.setDurability(isDurable);
    factoryBean.setApplicationContext(context);
    factoryBean.setName(jobName);
    factoryBean.setGroup(jobGroup);

    // set job data map
    JobDataMap jobDataMap = new JobDataMap();
    jobDataMap.put("myKey", "myValue");
    factoryBean.setJobDataMap(jobDataMap);

    factoryBean.afterPropertiesSet();
    return factoryBean.getObject();
}
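A hedged sketch of pairing the returned JobDetail with a trigger; the scheduler variable, job class, and 30-second cadence are assumptions:

// Hypothetical usage; MyQuartzJob and the cadence are illustrative.
JobDetail jobDetail = createJob(MyQuartzJob.class, true, applicationContext,
        "myJob", "myGroup");
Trigger trigger = TriggerBuilder.newTrigger()
        .withIdentity("myTrigger", "myGroup")
        .withSchedule(SimpleScheduleBuilder.repeatSecondlyForever(30))
        .build();
scheduler.scheduleJob(jobDetail, trigger);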
public void execute(JobExecutionContext context) throws JobExecutionException {
    long start = System.currentTimeMillis();
    JobDataMap jobDataMap = context.getJobDetail().getJobDataMap();
    String taskType = jobDataMap.getString("taskType");
    String targetObject = jobDataMap.getString("targetObject");
    String targetMethod = jobDataMap.getString("targetMethod");
    logger.info("Scheduled task starting: [{}.{}]", targetObject, targetMethod);
    try {
        ApplicationContext applicationContext =
                (ApplicationContext) context.getScheduler().getContext().get("applicationContext");
        if (TaskType.local.equals(taskType)) {
            Object object = applicationContext.getBean(targetObject);
            MethodAccess methodAccess = MethodAccess.get(object.getClass());
            methodAccess.invoke(object, targetMethod);
        } else if (TaskType.dubbo.equals(taskType)) {
            // Object object = DubboUtil.refer(applicationContext, targetObject);
            // MethodAccess methodAccess = MethodAccess.get(object.getClass());
            // methodAccess.invoke(object, targetMethod);
        }
        double time = (System.currentTimeMillis() - start) / 1000.0;
        logger.info("Scheduled task [{}.{}] took {}s", targetObject, targetMethod, time);
    } catch (Exception e) {
        throw new JobExecutionException(e);
    }
}
public void jobToBeExecuted(final JobExecutionContext context) {
    final JobDataMap jobDataMap = context.getJobDetail().getJobDataMap();
    String targetObject = jobDataMap.getString("targetObject");
    String targetMethod = jobDataMap.getString("targetMethod");
    if (logger.isInfoEnabled()) {
        logger.info("Scheduled task starting: {}.{}", targetObject, targetMethod);
    }
    // Save the fire log
    TaskFireLog log = new TaskFireLog();
    log.setStartTime(context.getFireTime());
    log.setGroupName(targetObject);
    log.setTaskName(targetMethod);
    log.setStatus(JOBSTATE.INIT_STATS);
    log.setServerHost(NativeUtil.getHostName());
    log.setServerDuid(NativeUtil.getDUID());
    schedulerService.updateLog(log);
    jobDataMap.put(JOB_LOG, log);
}
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
    JobDataMap dataMap = context.getMergedJobDataMap();
    String keys = dataMap.getString("keys");
    if (keys == null) {
        throw new JobExecutionException("Data map key 'keys' is missing");
    }
    Set<Long> checked = new LinkedHashSet<>();
    for (String key : keys.split(";")) {
        Long id = gameQueryService.getSteamId64(key).exceptionally(t -> {
            log.warn("Could not get profile ID from key {}", key, t);
            return null;
        }).join();
        if (id != null) {
            report(id, getPlayerProfile(id));
            checked.add(id);
        }
    }
    // clean up removed ids
    metricRegistry.removeMatching((name, metric) ->
            name.startsWith("steam.profile") && !checked.contains(extractId(name)));
}
public void updateJob(String group, String name, JobDescriptor descriptor) {
    try {
        JobDetail oldJobDetail = scheduler.getJobDetail(jobKey(name, group));
        if (Objects.nonNull(oldJobDetail)) {
            JobDataMap jobDataMap = oldJobDetail.getJobDataMap();
            jobDataMap.put("subject", descriptor.getSubject());
            jobDataMap.put("messageBody", descriptor.getMessageBody());
            jobDataMap.put("to", descriptor.getTo());
            jobDataMap.put("cc", descriptor.getCc());
            jobDataMap.put("bcc", descriptor.getBcc());
            JobBuilder jb = oldJobDetail.getJobBuilder();
            JobDetail newJobDetail = jb.usingJobData(jobDataMap).storeDurably().build();
            scheduler.addJob(newJobDetail, true);
            log.info("Updated job with key - {}", newJobDetail.getKey());
            return;
        }
        log.warn("Could not find job with key - {}.{} to update", group, name);
    } catch (SchedulerException e) {
        log.error("Could not find job with key - {}.{} to update due to error - {}",
                group, name, e.getLocalizedMessage());
    }
}
@Override
public void execute(JobExecutionContext jobExecutionContext) throws JobExecutionException {
    JobDetail jobDetail = jobExecutionContext.getJobDetail();
    _logger.info("Cron triggered for {0}", jobDetail.getDescription());
    JobDataMap dataMap = jobDetail.getJobDataMap();
    try {
        // Create a cache entry to avoid scheduling the same job twice
        MemcacheService cache = AppFactory.get().getMemcacheService();
        String jobKey = "SJOB_" + DigestUtils.md5Hex(dataMap.getString("url"));
        if (cache.get(jobKey) == null) {
            HttpUtil.connectMulti(HttpUtil.GET,
                    ConfigUtil.get("task.url") + dataMap.getString("url"),
                    null, null, null, null);
            cache.put(jobKey, true, 1800);
        } else {
            _logger.warn("Job with url {0} is already scheduled. Doing nothing!!",
                    dataMap.getString("url"));
        }
    } catch (Exception e) {
        _logger.warn("Scheduled job failed for url {0} with exception {1}",
                dataMap.getString("url"), e.getMessage(), e);
    }
}
/**
 * <p>
 * Trigger the identified <code>{@link org.quartz.Job}</code> (execute it
 * now) - with a non-volatile trigger.
 * </p>
 */
@SuppressWarnings("deprecation")
public void triggerJob(JobKey jobKey, JobDataMap data) throws SchedulerException {
    validateState();

    OperableTrigger trig = (OperableTrigger) newTrigger()
            .withIdentity(newTriggerId(), Scheduler.DEFAULT_GROUP)
            .forJob(jobKey)
            .build();
    trig.computeFirstFireTime(null);
    if (data != null) {
        trig.setJobDataMap(data);
    }

    boolean collision = true;
    while (collision) {
        try {
            resources.getJobStore().storeTrigger(trig, false);
            collision = false;
        } catch (ObjectAlreadyExistsException oaee) {
            trig.setKey(new TriggerKey(newTriggerId(), Scheduler.DEFAULT_GROUP));
        }
    }

    notifySchedulerThread(trig.getNextFireTime().getTime());
    notifySchedulerListenersSchduled(trig);
}
@Override
public Object clone() {
    AbstractTrigger<?> copy;
    try {
        copy = (AbstractTrigger<?>) super.clone();

        // Shallow copy the jobDataMap. Note that this means that if a user
        // modifies a value object in this map from the cloned Trigger
        // they will also be modifying this Trigger.
        if (jobDataMap != null) {
            copy.jobDataMap = (JobDataMap) jobDataMap.clone();
        }
    } catch (CloneNotSupportedException ex) {
        throw new IncompatibleClassChangeError("Not Cloneable.");
    }
    return copy;
}
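To make the caveat in that comment concrete, a small sketch using one of Quartz's concrete AbstractTrigger subclasses; the mutable value stored in the map is illustrative:

import org.quartz.JobDataMap;
import org.quartz.impl.triggers.SimpleTriggerImpl;

SimpleTriggerImpl trigger = new SimpleTriggerImpl();
JobDataMap map = new JobDataMap();
map.put("buffer", new StringBuilder("a")); // a mutable value object
trigger.setJobDataMap(map);

SimpleTriggerImpl copy = (SimpleTriggerImpl) trigger.clone();
((StringBuilder) copy.getJobDataMap().get("buffer")).append("b");
// Both triggers now see "ab": the map itself was cloned, its values were not.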
public void execute(JobExecutionContext context) throws JobExecutionException {
    long start = System.currentTimeMillis();
    JobDataMap jobDataMap = context.getJobDetail().getJobDataMap();
    String targetObject = jobDataMap.getString("targetObject");
    String targetMethod = jobDataMap.getString("targetMethod");
    logger.info("Scheduled task starting: [{}.{}]", targetObject, targetMethod);
    try {
        ApplicationContext applicationContext =
                (ApplicationContext) context.getScheduler().getContext().get("applicationContext");
        Object refer = applicationContext.getBean(targetObject);
        refer.getClass().getDeclaredMethod(targetMethod).invoke(refer);
        double time = (System.currentTimeMillis() - start) / 1000.0;
        logger.info("Scheduled task [{}.{}] took {}s", targetObject, targetMethod, time);
    } catch (Exception e) {
        throw new JobExecutionException(e);
    }
}
public void execute(JobExecutionContext context) throws JobExecutionException {
    long start = System.currentTimeMillis();
    JobDataMap jobDataMap = context.getJobDetail().getJobDataMap();
    String taskType = jobDataMap.getString("taskType");
    String targetObject = jobDataMap.getString("targetObject");
    String targetMethod = jobDataMap.getString("targetMethod");
    String key = targetMethod + "." + targetObject;
    try {
        logger.info("Scheduled task [{}.{}] starting", targetObject, targetMethod);
        if (CacheUtil.getCache().lock(key)) {
            try {
                ApplicationContext applicationContext =
                        (ApplicationContext) context.getScheduler().getContext().get("applicationContext");
                if (TaskType.local.equals(taskType)) {
                    Object refer = applicationContext.getBean(targetObject);
                    refer.getClass().getDeclaredMethod(targetMethod).invoke(refer);
                } else if (TaskType.dubbo.equals(taskType)) {
                    BaseProvider provider = (BaseProvider) applicationContext
                            .getBean(jobDataMap.getString("targetSystem"));
                    provider.execute(new Parameter(targetObject, targetMethod));
                }
                double time = (System.currentTimeMillis() - start) / 1000.0;
                logger.info("Scheduled task [{}.{}] took {}s", targetObject, targetMethod, time);
            } finally {
                unLock(key);
            }
        }
    } catch (Exception e) {
        throw new JobExecutionException(e);
    }
}
protected String getRequiredParm(JobDataMap data, String property, String constantName) {
    String value = getOptionalParm(data, property);
    if (value == null) {
        throw new IllegalArgumentException(constantName + " not specified.");
    }
    return value;
}
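The getOptionalParm helper it calls is not part of this collection. A plausible sketch, treating blank values as absent (that blank-handling policy is an assumption):

// Hypothetical counterpart; the original implementation is not shown here.
protected String getOptionalParm(JobDataMap data, String property) {
    String value = data.getString(property);
    if (value != null && value.trim().isEmpty()) {
        return null; // treat blank values the same as missing ones
    }
    return value;
}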
public void jobWasExecuted(final JobExecutionContext context, JobExecutionException exp) {
    Timestamp end = new Timestamp(System.currentTimeMillis());
    final JobDataMap jobDataMap = context.getJobDetail().getJobDataMap();
    String targetObject = jobDataMap.getString("targetObject");
    String targetMethod = jobDataMap.getString("targetMethod");
    if (logger.isInfoEnabled()) {
        logger.info("Scheduled task finished: {}.{}", targetObject, targetMethod);
    }
    // Update the task's execution status
    final TaskFireLog log = (TaskFireLog) jobDataMap.get(JOB_LOG);
    if (log != null) {
        log.setEndTime(end);
        if (exp != null) {
            logger.error("Scheduled task failed: [" + targetObject + "." + targetMethod + "]", exp);
            String contactEmail = jobDataMap.getString("contactEmail");
            if (StringUtils.isNotBlank(contactEmail)) {
                String topic = String.format("Exception in schedule [%s.%s]",
                        targetObject, targetMethod);
                sendEmail(new Email(contactEmail, topic, exp.getMessage()));
            }
            log.setStatus(JOBSTATE.ERROR_STATS);
            log.setFireInfo(exp.getMessage());
        } else {
            if (log.getStatus().equals(JOBSTATE.INIT_STATS)) {
                log.setStatus(JOBSTATE.SUCCESS_STATS);
            }
        }
    }
    executorService.submit(new Runnable() {
        public void run() {
            if (log != null) {
                try {
                    schedulerService.updateLog(log);
                } catch (Exception e) {
                    logger.error("Update TaskRunLog cause error. The log object is : "
                            + JSON.toJSONString(log), e);
                }
            }
        }
    });
}
public JobDetail addMethodInovkeJob(String schedulerName, String jobName, String jobGroup,
        String description, MethodInvoker methodInvoker) throws SchedulerException {
    Assert.notNull(methodInvoker, "methodInvoker can not be null");
    Assert.notEmpty(schedulerName, "schedulerName can not be empty");
    Assert.notEmpty(jobName, "jobName can not be empty");
    Assert.notEmpty(jobGroup, "jobGroup can not be empty");

    JobDataMap jobDataMap = new JobDataMap();
    jobDataMap.put("methodInvoker", methodInvoker);

    JobDetail jobDetail = JobBuilder.newJob(MethodInvokeJob.class)
            .withIdentity(jobName, jobGroup)
            .withDescription(description)
            .setJobData(jobDataMap)
            .storeDurably()
            .build();
    addJob(schedulerName, jobDetail);
    return jobDetail;
}
public JobDetail updateMethodInovkeJob(String schedulerName, String jobName, String jobGroup,
        String description, MethodInvoker methodInvoker) throws SchedulerException {
    Assert.notNull(methodInvoker, "methodInvoker can not be null");
    Assert.notEmpty(schedulerName, "schedulerName can not be empty");
    Assert.notEmpty(jobName, "jobName can not be empty");
    Assert.notEmpty(jobGroup, "jobGroup can not be empty");

    JobDataMap jobDataMap = new JobDataMap();
    jobDataMap.put("methodInvoker", methodInvoker);

    JobDetail jobDetail = JobBuilder.newJob(MethodInvokeJob.class)
            .withIdentity(jobName, jobGroup)
            .withDescription(description)
            .setJobData(jobDataMap)
            .storeDurably()
            .build();
    updateJob(schedulerName, jobDetail);
    return jobDetail;
}
@Override
public Trigger convert(After trigger) {
    OnDemandScheduleBuilder onDemandScheduleBuilder = OnDemandScheduleBuilder.onDemandSchedule();
    JobDataMap dataMap = new JobDataMap(trigger.getTriggerData());
    dataMap.put(Constants.CRONYX_TYPE, After.class);
    return newTrigger()
            .withIdentity(trigger.getTriggerKey().getName(), trigger.getTriggerKey().getGroup())
            .forJob(trigger.getJobKey().getName(), trigger.getJobKey().getGroup())
            .withDescription(trigger.getDescription())
            .withSchedule(onDemandScheduleBuilder)
            .usingJobData(dataMap)
            .build();
}
public JobDetail addStatefulMethodJob(String schedulerName, String jobName, String jobGroup,
        String description, MethodInvoker methodInvoker) throws SchedulerException {
    Assert.notNull(methodInvoker, "methodInvoker can not be null");

    JobDataMap jobDataMap = new JobDataMap();
    jobDataMap.put("methodInvoker", methodInvoker);

    JobDetail jobDetail = JobBuilder.newJob(StatefulMethodInvokeJob.class)
            .withIdentity(jobName, jobGroup)
            .withDescription(description)
            .setJobData(jobDataMap)
            .storeDurably()
            .build();
    addJob(schedulerName, jobDetail);
    return jobDetail;
}
public static JobDataMap newJobDataMap(TabularData tabularData) {
    JobDataMap jobDataMap = new JobDataMap();
    if (tabularData != null) {
        for (final Iterator<?> pos = tabularData.values().iterator(); pos.hasNext();) {
            CompositeData cData = (CompositeData) pos.next();
            jobDataMap.put((String) cData.get("key"), (String) cData.get("value"));
        }
    }
    return jobDataMap;
}
/**
 * Add a job whose class extends org.quartz.Job.
 *
 * @param schedulerName
 * @param jobName
 * @param jobGroup
 * @param jobClass
 * @param jobDataMap
 * @param description
 * @return
 */
private JSONResult addQuartzJob(String schedulerName, String jobName, String jobGroup,
        String jobClass, Map<String, Object> jobDataMap, String description) {
    try {
        Assert.notEmpty(schedulerName, "schedulerName can not be empty");
        Assert.notEmpty(jobName, "jobName can not be empty");
        Assert.notEmpty(jobGroup, "jobGroup can not be empty");
        if (!QuartzWebManager.checkClass(jobClass)) {
            throw new IllegalArgumentException("jobClass [" + jobClass + "] not found");
        }
        Class<?> beanClass = QuartzWebManager.getClass(jobClass);
        if (org.quartz.Job.class.isAssignableFrom(beanClass)) {
            // Start adding the job
            JobDataMap jobMap = new JobDataMap();
            if (jobDataMap != null) {
                for (Map.Entry<String, Object> entry : jobDataMap.entrySet()) {
                    if (!StringUtils.isEmpty(entry.getKey())) {
                        jobMap.put(entry.getKey(), entry.getValue());
                    }
                }
            }
            @SuppressWarnings("unchecked")
            JobDetail jobDetail = JobBuilder.newJob((Class<? extends Job>) beanClass)
                    .withIdentity(jobName, jobGroup)
                    .withDescription(description)
                    .setJobData(jobMap)
                    .storeDurably()
                    .build();
            if (!QuartzWebManager.checkJobExist(schedulerName, jobName, jobGroup)) {
                QuartzWebManager.addJob(schedulerName, jobDetail);
            } else {
                QuartzWebManager.updateJob(schedulerName, jobDetail);
            }
            return JSONResult.build(JSONResult.RESULT_CODE_SUCCESS, "ok");
        } else {
            throw new UnsupportedClassException(jobClass + " must extend org.quartz.Job");
        }
    } catch (Exception e) {
        e.printStackTrace();
        return JSONResult.build(JSONResult.RESULT_CODE_ERROR, e.getMessage());
    }
}
/**
 * Add a scheduled task (simple-interval version).
 *
 * @param name
 *            task name
 * @param task
 *            the job class to execute
 * @param intervalInHours
 *            interval in hours
 * @param intervalInMinutes
 *            interval in minutes
 * @param intervalInSeconds
 *            interval in seconds
 * @param intervalInMillis
 *            interval in milliseconds
 * @param repeatCount
 *            repeat count; a negative value repeats forever
 * @throws SchedulerException
 */
public Trigger addSchedule(String name, Class<? extends Job> task, int intervalInHours,
        int intervalInMinutes, int intervalInSeconds, int intervalInMillis, int repeatCount,
        JobDataMap param) throws SchedulerException {
    Scheduler sched = SF.getScheduler();
    JobBuilder builder = JobBuilder.newJob(task);
    builder.withIdentity(name, Scheduler.DEFAULT_GROUP);
    if (param != null) {
        builder.usingJobData(param);
    }
    // Note: each withIntervalIn* call overwrites the builder's single interval,
    // so only one of these arguments should be non-zero.
    SimpleScheduleBuilder simpleSchedule = SimpleScheduleBuilder.simpleSchedule();
    if (intervalInHours > 0) {
        simpleSchedule.withIntervalInHours(intervalInHours);
    }
    if (intervalInMinutes > 0) {
        simpleSchedule.withIntervalInMinutes(intervalInMinutes);
    }
    if (intervalInSeconds > 0) {
        simpleSchedule.withIntervalInSeconds(intervalInSeconds);
    }
    if (intervalInMillis > 0) {
        simpleSchedule.withIntervalInMilliseconds(intervalInMillis);
    }
    if (repeatCount >= 0) {
        simpleSchedule.withRepeatCount(repeatCount);
    } else {
        simpleSchedule.repeatForever();
    }
    Trigger trigger = TriggerBuilder.newTrigger()
            .withIdentity(name, Scheduler.DEFAULT_GROUP)
            .withSchedule(simpleSchedule)
            .build();
    sched.scheduleJob(builder.build(), trigger);
    if (!sched.isShutdown()) {
        sched.start();
    }
    return trigger;
}
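A usage sketch for this interval overload; the helper variable and job class are assumptions:

// Hypothetical usage: fire HeartbeatJob every 30 seconds, forever.
Trigger t = scheduleHelper.addSchedule("heartbeat", HeartbeatJob.class,
        0, 0, 30, 0,  // hours, minutes, seconds, millis
        -1,           // repeatCount < 0 -> repeat forever
        null);        // no extra JobDataMap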
/**
 * @param jobDataMap
 * @return TabularData
 */
public static TabularData toTabularData(JobDataMap jobDataMap) {
    TabularData tData = new TabularDataSupport(TABULAR_TYPE);
    ArrayList<CompositeData> list = new ArrayList<CompositeData>();
    Iterator<String> iter = jobDataMap.keySet().iterator();
    while (iter.hasNext()) {
        String key = iter.next();
        list.add(toCompositeData(key, String.valueOf(jobDataMap.get(key))));
    }
    tData.putAll(list.toArray(new CompositeData[list.size()]));
    return tData;
}
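Together with the newJobDataMap(TabularData) converter earlier in this collection, this forms a round trip; note that values come back as strings, because this method stringifies them on the way out:

// Round-trip sketch: JobDataMap -> TabularData -> JobDataMap.
JobDataMap original = new JobDataMap();
original.put("timeout", 30);                 // stored as an Integer
TabularData rows = toTabularData(original);  // stringified on the way out
JobDataMap restored = newJobDataMap(rows);
// restored.get("timeout") is now the String "30", not the Integer 30.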
static JobDetailImpl createJobDetail(RollupTask task, KairosDatastore dataStore, String hostName) {
    JobDetailImpl jobDetail = new JobDetailImpl();
    jobDetail.setJobClass(RollUpJob.class);
    jobDetail.setKey(getJobKey(task));

    JobDataMap map = new JobDataMap();
    map.put("task", task);
    map.put("datastore", dataStore);
    map.put("hostName", hostName);
    jobDetail.setJobDataMap(map);
    return jobDetail;
}
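Since the map here carries live objects (the task and datastore) rather than serializable values, a JobDetail like this only suits an in-memory job store. A hedged sketch of scheduling it; the scheduler variable and cron cadence are assumptions:

// Hypothetical scheduling of the JobDetail built above.
JobDetail detail = createJobDetail(task, dataStore, hostName);
Trigger trigger = TriggerBuilder.newTrigger()
        .withIdentity(detail.getKey().getName(), detail.getKey().getGroup())
        .withSchedule(CronScheduleBuilder.cronSchedule("0 0 * * * ?")) // hourly
        .build();
scheduler.scheduleJob(detail, trigger);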
@Override
public void execute(JobExecutionContext jobExecutionContext) throws JobExecutionException {
    try {
        JobDataMap jobDataMap = jobExecutionContext.getMergedJobDataMap();
        message = (Message) jobDataMap.get(JobConstants.JOB_PARAM_MESSAGE);
        MessageProcessor.processMessage(message);
    } catch (Exception e) {
        throw new JobExecutionException(e);
    }
}
/**
 * Calls the authentication service to invalidate tickets
 */
public void execute(JobExecutionContext context) throws JobExecutionException {
    JobDataMap jobData = context.getJobDetail().getJobDataMap();
    // extract the authentication service to use
    Object abstractAuthenticationServiceRef = jobData.get("abstractAuthenticationService");
    if (!(abstractAuthenticationServiceRef instanceof AbstractAuthenticationService)) {
        throw new AlfrescoRuntimeException(
                "Job data must contain valid 'abstractAuthenticationService' reference");
    }
    AbstractAuthenticationService abstractAuthenticationService =
            (AbstractAuthenticationService) abstractAuthenticationServiceRef;
    abstractAuthenticationService.invalidateTickets(true);
}
/**
 * <p>
 * Calls the equivalent method on the 'proxied' <code>QuartzScheduler</code>,
 * passing the <code>SchedulingContext</code> associated with this
 * instance.
 * </p>
 */
public void triggerJobWithVolatileTrigger(String jobName, String groupName, JobDataMap data)
        throws SchedulerException {
    invoke(
        "triggerJobWithVolatileTrigger",
        new Object[] { schedulingContext, jobName, groupName, data },
        new String[] { SchedulingContext.class.getName(), String.class.getName(),
                String.class.getName(), JobDataMap.class.getName() });
}
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
    JobDataMap jobData = context.getJobDetail().getJobDataMap();
    // extract the object to use
    Object asyncPatchObj = jobData.get(JOB_NAME);
    if (!(asyncPatchObj instanceof AsynchronousPatch)) {
        throw new AlfrescoRuntimeException(JOB_NAME
                + " data must contain valid 'AsynchronousPatch' reference");
    }
    // Job Lock
    AsynchronousPatch patch = (AsynchronousPatch) asyncPatchObj;
    patch.executeAsynchronously();
}