/**
 * Persist the trigger in the Quartz scheduler.
 */
private VmSchedule persistTrigger(final VmSchedule schedule) throws SchedulerException {
    // The trigger identifier for the common VM Job follows the naming convention:
    // schedule.id-subscription.id
    final String id = VmJob.format(schedule);
    final JobDetailImpl object = (JobDetailImpl) vmJobDetailFactoryBean.getObject();
    object.getJobDataMap().put("vmServicePlugin", this);
    final Trigger trigger = TriggerBuilder.newTrigger().withIdentity(id, SCHEDULE_TRIGGER_GROUP)
            .withSchedule(CronScheduleBuilder.cronSchedule(schedule.getCron()).inTimeZone(DateUtils.getApplicationTimeZone()))
            .forJob(object).usingJobData("subscription", schedule.getSubscription().getId())
            .usingJobData("operation", schedule.getOperation().name()).usingJobData("schedule", schedule.getId()).build();

    // Add this trigger
    vmSchedulerFactoryBean.getObject().scheduleJob(trigger);
    return schedule;
}
/**
 * Produce the <code>JobDetail</code> instance defined by this
 * <code>JobBuilder</code>.
 *
 * @return the defined JobDetail.
 */
public JobDetail build() {
    JobDetailImpl job = new JobDetailImpl();

    job.setJobClass(jobClass);
    job.setDescription(description);
    if (key == null) {
        key = new JobKey(Key.createUniqueName(null), null);
    }
    job.setKey(key);
    job.setDurability(durability);
    job.setRequestsRecovery(shouldRecover);

    if (!jobDataMap.isEmpty()) {
        job.setJobDataMap(jobDataMap);
    }

    return job;
}
/**
 * @param cData the composite data describing the job
 * @return JobDetail built from the composite data
 * @throws ClassNotFoundException if the job class cannot be loaded
 */
public static JobDetail newJobDetail(CompositeData cData) throws ClassNotFoundException {
    JobDetailImpl jobDetail = new JobDetailImpl();

    int i = 0;
    jobDetail.setName((String) cData.get(ITEM_NAMES[i++]));
    jobDetail.setGroup((String) cData.get(ITEM_NAMES[i++]));
    jobDetail.setDescription((String) cData.get(ITEM_NAMES[i++]));
    Class<?> jobClass = Class.forName((String) cData.get(ITEM_NAMES[i++]));
    @SuppressWarnings("unchecked")
    Class<? extends Job> jobClassTyped = (Class<? extends Job>) jobClass;
    jobDetail.setJobClass(jobClassTyped);
    jobDetail.setJobDataMap(JobDataMapSupport.newJobDataMap((TabularData) cData.get(ITEM_NAMES[i++])));
    jobDetail.setDurability((Boolean) cData.get(ITEM_NAMES[i++]));
    jobDetail.setRequestsRecovery((Boolean) cData.get(ITEM_NAMES[i++]));

    return jobDetail;
}
public void jobWasExecuted(final JobExecutionContext context, JobExecutionException exp) {
    JobDetail jobDetail = context.getJobDetail();
    if (jobDetail instanceof JobDetailImpl) {
        JobDetailImpl job = (JobDetailImpl) jobDetail;
        SchedulerLog log = new SchedulerLog();
        log.setJobName(job.getName());
        log.setGroupName(job.getGroup());
        log.setTriggerName(context.getTrigger().getKey().getName());
        log.setFireTime(context.getFireTime());
        log.setScheduledFireTime(context.getScheduledFireTime());
        log.setRefireCount(context.getRefireCount());
        log.setPreviousFireTime(context.getPreviousFireTime());
        log.setNextFireTime(context.getNextFireTime());
        log.setCreateTime(new Date());
        logMapper.insert(log);
    }
}
public Collection<RunJob> getRunJobs() {
    List<RunJob> result = new ArrayList<RunJob>();
    Session session = this.getSessionFactory().openSession();
    try {
        Collection<ReminderJob> runningJobs = this.loadReminderJobs(session);
        for (ReminderJob job : runningJobs) {
            JobDetailImpl jobDetail = new JobDetailImpl();
            JobDefinition jobDef = buildJobDefinition(job, session);
            jobDetail.setJobClass(TaskReminderJob.class);
            RunJob runJob = new RunJob();
            runJob.setJobDefinition(jobDef);
            runJob.setJobDetail(jobDetail);
            result.add(runJob);
        }
    } finally {
        session.flush();
        session.close();
    }
    return result;
}
@Override @SuppressWarnings("unchecked") public void afterPropertiesSet() throws ClassNotFoundException, NoSuchMethodException { prepare(); // Use specific name if given, else fall back to bean name. String name = (this.name != null ? this.name : this.beanName); // Consider the concurrent flag to choose between stateful and stateless job. Class<?> jobClass = (this.concurrent ? MethodInvokingJob.class : StatefulMethodInvokingJob.class); // Build JobDetail instance. JobDetailImpl jdi = new JobDetailImpl(); jdi.setName(name); jdi.setGroup(this.group); jdi.setJobClass((Class) jobClass); jdi.setDurability(true); jdi.getJobDataMap().put("methodInvoker", this); this.jobDetail = jdi; postProcessJobDetail(this.jobDetail); }
private QuartzJobDetail makeJobDetail(CascadingClassLoadHelper cascadingClassLoadHelper, ResultSet rs)
        throws SQLException, ClassNotFoundException, IOException {
    JobDetailImpl jobDetail = new JobDetailImpl();
    String groupName = rs.getString(Constants.COL_JOB_GROUP);
    String jobName = rs.getString(Constants.COL_JOB_NAME);
    jobDetail.setName(jobName);
    jobDetail.setGroup(groupName);
    jobDetail.setDescription(rs.getString(Constants.COL_DESCRIPTION));
    jobDetail.setJobClass(
            cascadingClassLoadHelper.loadClass(rs.getString(Constants.COL_JOB_CLASS), Job.class)
    );
    jobDetail.setDurability(rs.getBoolean(Constants.COL_IS_DURABLE));
    jobDetail.setRequestsRecovery(rs.getBoolean(Constants.COL_REQUESTS_RECOVERY));

    Map<?, ?> map = (Map<?, ?>) getObjectFromBlob(rs, COL_JOB_DATAMAP);
    if (map != null) {
        jobDetail.setJobDataMap(new JobDataMap(map));
    }

    JobId jobId = new QuartzJobId(groupName, jobName, engine.getClusterName());
    QuartzJobDetail quartzJobDetail = new QuartzJobDetail(engine, jobId, jobDetail,
            rs.getTimestamp(WinderJDBCDelegate.COL_JOB_CREATED));
    return quartzJobDetail;
}
/**
 * Retrieve a job from redis
 * @param jobKey the job key detailing the identity of the job to be retrieved
 * @param jedis a thread-safe Redis connection
 * @return the {@link org.quartz.JobDetail} of the desired job, or null if the desired job does not exist
 * @throws JobPersistenceException if the job could not be retrieved
 * @throws ClassNotFoundException if the job class could not be found
 */
public JobDetail retrieveJob(JobKey jobKey, T jedis) throws JobPersistenceException, ClassNotFoundException {
    final String jobHashKey = redisSchema.jobHashKey(jobKey);
    final String jobDataMapHashKey = redisSchema.jobDataMapHashKey(jobKey);

    final Map<String, String> jobDetailMap = jedis.hgetAll(jobHashKey);
    if (jobDetailMap == null || jobDetailMap.size() == 0) {
        // desired job does not exist
        return null;
    }
    JobDetailImpl jobDetail = mapper.convertValue(jobDetailMap, JobDetailImpl.class);
    jobDetail.setKey(jobKey);

    final Map<String, String> jobData = jedis.hgetAll(jobDataMapHashKey);
    if (jobData != null && !jobData.isEmpty()) {
        JobDataMap jobDataMap = new JobDataMap();
        jobDataMap.putAll(jobData);
        jobDetail.setJobDataMap(jobDataMap);
    }

    return jobDetail;
}
@Test
public void serializeJobDetail() throws Exception {
    JobDetail testJob = JobBuilder.newJob(TestJob.class)
            .withIdentity("testJob", "testGroup")
            .usingJobData("timeout", 42)
            .withDescription("I am describing a job!")
            .build();

    String json = mapper.writeValueAsString(testJob);
    Map<String, Object> jsonMap = mapper.readValue(json, new TypeReference<HashMap<String, Object>>() { });

    assertThat(jsonMap, hasKey("name"));
    assertEquals(testJob.getKey().getName(), jsonMap.get("name"));
    assertThat(jsonMap, hasKey("group"));
    assertEquals(testJob.getKey().getGroup(), jsonMap.get("group"));
    assertThat(jsonMap, hasKey("jobClass"));
    assertEquals(testJob.getJobClass().getName(), jsonMap.get("jobClass"));

    JobDetailImpl jobDetail = mapper.readValue(json, JobDetailImpl.class);
    assertEquals(testJob.getKey().getName(), jobDetail.getKey().getName());
    assertEquals(testJob.getKey().getGroup(), jobDetail.getKey().getGroup());
    assertEquals(testJob.getJobClass(), jobDetail.getJobClass());
}
protected void scheduleMessage(PersistedMessageBO message) throws SchedulerException {
    LOG.debug("Scheduling execution of a delayed asynchronous message.");
    Scheduler scheduler = KSBServiceLocator.getScheduler();
    JobDataMap jobData = new JobDataMap();
    jobData.put(MessageServiceExecutorJob.MESSAGE_KEY, message);
    JobDetailImpl jobDetail = new JobDetailImpl("Delayed_Asynchronous_Call-" + Math.random(),
            "Delayed_Asynchronous_Call", MessageServiceExecutorJob.class);
    jobDetail.setJobDataMap(jobData);
    scheduler.getListenerManager().addJobListener(new MessageServiceExecutorJobListener());
    SimpleTriggerImpl trigger = new SimpleTriggerImpl("Delayed_Asynchronous_Call_Trigger-" + Math.random(),
            "Delayed_Asynchronous_Call", message.getQueueDate());
    trigger.setJobDataMap(jobData); // 1.6 bug required or derby will choke
    scheduler.scheduleJob(jobDetail, trigger);
}
public void scheduleExecution(Throwable throwable, PersistedMessageBO message, String description) throws Exception {
    KSBServiceLocator.getMessageQueueService().delete(message);
    PersistedMessageBO messageCopy = message.copy();
    Scheduler scheduler = KSBServiceLocator.getScheduler();
    JobDataMap jobData = new JobDataMap();
    jobData.put(MessageServiceExecutorJob.MESSAGE_KEY, messageCopy);
    JobDetailImpl jobDetail = new JobDetailImpl("Exception_Message_Job " + Math.random(),
            "Exception Messaging", MessageServiceExecutorJob.class);
    jobDetail.setJobDataMap(jobData);
    if (!StringUtils.isBlank(description)) {
        jobDetail.setDescription(description);
    }
    scheduler.getListenerManager().addJobListener(new MessageServiceExecutorJobListener());
    SimpleTriggerImpl trigger = new SimpleTriggerImpl("Exception_Message_Trigger " + Math.random(),
            "Exception Messaging", messageCopy.getQueueDate());
    trigger.setJobDataMap(jobData); // 1.6 bug required or derby will choke
    scheduler.scheduleJob(jobDetail, trigger);
}
@Test
public void testSchedulingJob() throws Exception {
    Scheduler scheduler = KSBServiceLocator.getScheduler();
    JobDataMap datMap = new JobDataMap();
    datMap.put("yo", "yo");
    JobDetailImpl jobDetail = new JobDetailImpl("myJob", null, TestJob.class);
    jobDetail.setJobDataMap(datMap);

    TriggerBuilder<Trigger> triggerBuilder = TriggerBuilder.newTrigger();
    triggerBuilder.startAt(new Date());
    triggerBuilder.withIdentity("i'm a trigger puller");
    triggerBuilder.usingJobData(datMap);
    triggerBuilder.withSchedule(SimpleScheduleBuilder.simpleSchedule().withRepeatCount(1).withIntervalInMilliseconds(1L));
    Trigger trigger = triggerBuilder.build();

    scheduler.scheduleJob(jobDetail, trigger);

    synchronized (TestJob.LOCK) {
        TestJob.LOCK.wait(30 * 1000);
    }

    assertTrue("job never fired", TestJob.EXECUTED);
}
@BeforeClass
public void setUp() throws SchedulerException, InterruptedException {
    Config config = new Config();
    config.setProperty("hazelcast.logging.type", "slf4j");
    hazelcastInstance = Hazelcast.newHazelcastInstance(config);

    this.fSignaler = new SampleSignaler();

    ClassLoadHelper loadHelper = new CascadingClassLoadHelper();
    loadHelper.initialize();

    this.jobStore = createJobStore("AbstractJobStoreTest");
    this.jobStore.initialize(loadHelper, this.fSignaler);
    this.jobStore.schedulerStarted();

    this.fJobDetail = new JobDetailImpl("job1", "jobGroup1", NoOpJob.class);
    this.fJobDetail.setDurability(true);
    this.jobStore.storeJob(this.fJobDetail, false);
}
/**
 * <p>
 * Select the job to which the trigger is associated, with the option to load the actual job class or not.
 * In the removal case we do not need to load the class, which in many cases no longer exists.
 * </p>
 *
 * @param conn
 *          the DB Connection
 * @return the <code>{@link org.quartz.JobDetail}</code> object
 *         associated with the given trigger
 * @throws SQLException
 * @throws ClassNotFoundException
 */
public JobDetail selectJobForTrigger(Connection conn, ClassLoadHelper loadHelper,
        TriggerKey triggerKey, boolean loadJobClass) throws ClassNotFoundException, SQLException {
    PreparedStatement ps = null;
    ResultSet rs = null;
    try {
        ps = conn.prepareStatement(rtp(SELECT_JOB_FOR_TRIGGER));
        ps.setString(1, triggerKey.getName());
        ps.setString(2, triggerKey.getGroup());
        rs = ps.executeQuery();

        if (rs.next()) {
            JobDetailImpl job = new JobDetailImpl();
            job.setName(rs.getString(1));
            job.setGroup(rs.getString(2));
            job.setDurability(getBoolean(rs, 3));
            if (loadJobClass) {
                job.setJobClass(loadHelper.loadClass(rs.getString(4), Job.class));
            }
            job.setRequestsRecovery(getBoolean(rs, 5));
            return job;
        } else {
            if (logger.isDebugEnabled()) {
                logger.debug("No job for trigger '" + triggerKey + "'.");
            }
            return null;
        }
    } finally {
        closeResultSet(rs);
        closeStatement(ps);
    }
}
/**
 * @param attrMap the attributes that define the job
 * @return JobDetail
 */
public static JobDetail newJobDetail(Map<String, Object> attrMap) throws ClassNotFoundException {
    JobDetailImpl jobDetail = new JobDetailImpl();

    int i = 0;
    jobDetail.setName((String) attrMap.get(ITEM_NAMES[i++]));
    jobDetail.setGroup((String) attrMap.get(ITEM_NAMES[i++]));
    jobDetail.setDescription((String) attrMap.get(ITEM_NAMES[i++]));
    Class<?> jobClass = Class.forName((String) attrMap.get(ITEM_NAMES[i++]));
    @SuppressWarnings("unchecked")
    Class<? extends Job> jobClassTyped = (Class<? extends Job>) jobClass;
    jobDetail.setJobClass(jobClassTyped);
    if (attrMap.containsKey(ITEM_NAMES[i])) {
        @SuppressWarnings("unchecked")
        Map<String, Object> map = (Map<String, Object>) attrMap.get(ITEM_NAMES[i]);
        jobDetail.setJobDataMap(JobDataMapSupport.newJobDataMap(map));
    }
    i++;
    if (attrMap.containsKey(ITEM_NAMES[i])) {
        jobDetail.setDurability((Boolean) attrMap.get(ITEM_NAMES[i]));
    }
    i++;
    if (attrMap.containsKey(ITEM_NAMES[i])) {
        jobDetail.setRequestsRecovery((Boolean) attrMap.get(ITEM_NAMES[i]));
    }
    i++;

    return jobDetail;
}
private void scheduleNewTask(RollupTask task) {
    try {
        logger.info("Scheduling rollup " + task.getName());
        Trigger trigger = createTrigger(task);
        JobDetailImpl jobDetail = createJobDetail(task, dataStore, hostName);
        scheduler.schedule(jobDetail, trigger);
        logger.info("Roll-up task " + jobDetail.getFullName() + " scheduled. Next execution time "
                + trigger.getNextFireTime());
    } catch (KairosDBException e) {
        logger.error("Failed to schedule new roll up task job " + task, e);
    }
}
static JobDetailImpl createJobDetail(RollupTask task, KairosDatastore dataStore, String hostName) {
    JobDetailImpl jobDetail = new JobDetailImpl();
    jobDetail.setJobClass(RollUpJob.class);
    jobDetail.setKey(getJobKey(task));

    JobDataMap map = new JobDataMap();
    map.put("task", task);
    map.put("datastore", dataStore);
    map.put("hostName", hostName);
    jobDetail.setJobDataMap(map);
    return jobDetail;
}
private void startDaemonJob() throws Exception {
    if (disableScheduler) {
        log.info("The scheduler is disabled for the current uflo application...");
        return;
    }
    String currentInstanceName = System.getProperty("uflo.instanceName");
    if (StringUtils.isBlank(instanceNames)) {
        if (StringUtils.isNotBlank(currentInstanceName)) {
            log.info("Uflo job cluster names are empty, but the system property \"uflo.instanceName\" is \""
                    + currentInstanceName + "\", so the Uflo job run mode is still single...");
        } else {
            log.info("Uflo job run mode is single...");
        }
        schedulerService.resetScheduer();
        return;
    } else {
        if (StringUtils.isBlank(currentInstanceName)) {
            String msg = "The current uflo application configured cluster names \"" + instanceNames
                    + "\" but did not configure the system property \"uflo.instanceName\".";
            log.info(msg);
            throw new RuntimeException(msg);
        }
    }
    log.info("Uflo job run mode is cluster...");
    initDetectionScheduler();
    JobDetailImpl jobDetail = initJobDetail(currentInstanceName);
    Trigger trigger = initTrigger();
    HeartbeatDetectionJob detectionJob = new HeartbeatDetectionJob();
    jobDetail.setJobClass(detectionJob.getClass());
    scheduler.scheduleJob(jobDetail, trigger);
    scheduler.start();
    log.info("Uflo cluster daemon scheduler is started...");
}
private JobDetailImpl initJobDetail(String currentInstanceName) {
    String[] clusterJobInstanceNames = instanceNames.split(",");
    SessionFactory sessionFactory = EnvironmentUtils.getEnvironment().getSessionFactory();
    JobDetailImpl jobDetail = new DetectionJobDetail(sessionFactory, currentInstanceName,
            clusterJobInstanceNames, schedulerService);
    jobDetail.setKey(new JobKey("UfloDaemonJobDetail"));
    jobDetail.setName("UfloDaemonDetectionJobDetail");
    return jobDetail;
}
@Override @SuppressWarnings("unchecked") public void afterPropertiesSet() { if (this.name == null) { this.name = this.beanName; } if (this.group == null) { this.group = Scheduler.DEFAULT_GROUP; } if (this.applicationContextJobDataKey != null) { if (this.applicationContext == null) { throw new IllegalStateException( "JobDetailBean needs to be set up in an ApplicationContext " + "to be able to handle an 'applicationContextJobDataKey'"); } getJobDataMap().put(this.applicationContextJobDataKey, this.applicationContext); } JobDetailImpl jdi = new JobDetailImpl(); jdi.setName(this.name); jdi.setGroup(this.group); jdi.setJobClass((Class) this.jobClass); jdi.setJobDataMap(this.jobDataMap); jdi.setDurability(this.durability); jdi.setRequestsRecovery(this.requestsRecovery); jdi.setDescription(this.description); this.jobDetail = jdi; }
@Test
public void schedulerWithTaskExecutor() throws Exception {
    Assume.group(TestGroup.PERFORMANCE);

    CountingTaskExecutor taskExecutor = new CountingTaskExecutor();
    DummyJob.count = 0;

    JobDetailImpl jobDetail = new JobDetailImpl();
    jobDetail.setDurability(true);
    jobDetail.setJobClass(DummyJob.class);
    jobDetail.setName("myJob");

    SimpleTriggerFactoryBean trigger = new SimpleTriggerFactoryBean();
    trigger.setName("myTrigger");
    trigger.setJobDetail(jobDetail);
    trigger.setStartDelay(1);
    trigger.setRepeatInterval(500);
    trigger.setRepeatCount(1);
    trigger.afterPropertiesSet();

    SchedulerFactoryBean bean = new SchedulerFactoryBean();
    bean.setTaskExecutor(taskExecutor);
    bean.setTriggers(trigger.getObject());
    bean.setJobDetails(jobDetail);
    bean.afterPropertiesSet();
    bean.start();

    Thread.sleep(500);
    assertTrue("DummyJob should have been executed at least once.", DummyJob.count > 0);
    assertEquals(DummyJob.count, taskExecutor.count);

    bean.destroy();
}
@Test
public void schedulerWithQuartzJobBean() throws Exception {
    Assume.group(TestGroup.PERFORMANCE);

    DummyJobBean.param = 0;
    DummyJobBean.count = 0;

    JobDetailImpl jobDetail = new JobDetailImpl();
    jobDetail.setDurability(true);
    jobDetail.setJobClass(DummyJobBean.class);
    jobDetail.setName("myJob");
    jobDetail.getJobDataMap().put("param", "10");

    SimpleTriggerFactoryBean trigger = new SimpleTriggerFactoryBean();
    trigger.setName("myTrigger");
    trigger.setJobDetail(jobDetail);
    trigger.setStartDelay(1);
    trigger.setRepeatInterval(500);
    trigger.setRepeatCount(1);
    trigger.afterPropertiesSet();

    SchedulerFactoryBean bean = new SchedulerFactoryBean();
    bean.setTriggers(trigger.getObject());
    bean.setJobDetails(jobDetail);
    bean.afterPropertiesSet();
    bean.start();

    Thread.sleep(500);
    assertEquals(10, DummyJobBean.param);
    assertTrue(DummyJobBean.count > 0);

    bean.destroy();
}
@Test
public void schedulerWithSpringBeanJobFactory() throws Exception {
    Assume.group(TestGroup.PERFORMANCE);

    DummyJob.param = 0;
    DummyJob.count = 0;

    JobDetailImpl jobDetail = new JobDetailImpl();
    jobDetail.setDurability(true);
    jobDetail.setJobClass(DummyJob.class);
    jobDetail.setName("myJob");
    jobDetail.getJobDataMap().put("param", "10");
    jobDetail.getJobDataMap().put("ignoredParam", "10");

    SimpleTriggerFactoryBean trigger = new SimpleTriggerFactoryBean();
    trigger.setName("myTrigger");
    trigger.setJobDetail(jobDetail);
    trigger.setStartDelay(1);
    trigger.setRepeatInterval(500);
    trigger.setRepeatCount(1);
    trigger.afterPropertiesSet();

    SchedulerFactoryBean bean = new SchedulerFactoryBean();
    bean.setJobFactory(new SpringBeanJobFactory());
    bean.setTriggers(trigger.getObject());
    bean.setJobDetails(jobDetail);
    bean.afterPropertiesSet();
    bean.start();

    Thread.sleep(500);
    assertEquals(10, DummyJob.param);
    assertTrue("DummyJob should have been executed at least once.", DummyJob.count > 0);

    bean.destroy();
}
@Test
public void schedulerWithSpringBeanJobFactoryAndParamMismatchNotIgnored() throws Exception {
    Assume.group(TestGroup.PERFORMANCE);

    DummyJob.param = 0;
    DummyJob.count = 0;

    JobDetailImpl jobDetail = new JobDetailImpl();
    jobDetail.setDurability(true);
    jobDetail.setJobClass(DummyJob.class);
    jobDetail.setName("myJob");
    jobDetail.getJobDataMap().put("para", "10");
    jobDetail.getJobDataMap().put("ignoredParam", "10");

    SimpleTriggerFactoryBean trigger = new SimpleTriggerFactoryBean();
    trigger.setName("myTrigger");
    trigger.setJobDetail(jobDetail);
    trigger.setStartDelay(1);
    trigger.setRepeatInterval(500);
    trigger.setRepeatCount(1);
    trigger.afterPropertiesSet();

    SchedulerFactoryBean bean = new SchedulerFactoryBean();
    SpringBeanJobFactory jobFactory = new SpringBeanJobFactory();
    jobFactory.setIgnoredUnknownProperties("ignoredParam");
    bean.setJobFactory(jobFactory);
    bean.setTriggers(trigger.getObject());
    bean.setJobDetails(jobDetail);
    bean.afterPropertiesSet();

    Thread.sleep(500);
    assertEquals(0, DummyJob.param);
    assertTrue(DummyJob.count == 0);

    bean.destroy();
}
@Test
public void schedulerWithSpringBeanJobFactoryAndQuartzJobBean() throws Exception {
    Assume.group(TestGroup.PERFORMANCE);

    DummyJobBean.param = 0;
    DummyJobBean.count = 0;

    JobDetailImpl jobDetail = new JobDetailImpl();
    jobDetail.setDurability(true);
    jobDetail.setJobClass(DummyJobBean.class);
    jobDetail.setName("myJob");
    jobDetail.getJobDataMap().put("param", "10");

    SimpleTriggerFactoryBean trigger = new SimpleTriggerFactoryBean();
    trigger.setName("myTrigger");
    trigger.setJobDetail(jobDetail);
    trigger.setStartDelay(1);
    trigger.setRepeatInterval(500);
    trigger.setRepeatCount(1);
    trigger.afterPropertiesSet();

    SchedulerFactoryBean bean = new SchedulerFactoryBean();
    bean.setJobFactory(new SpringBeanJobFactory());
    bean.setTriggers(trigger.getObject());
    bean.setJobDetails(jobDetail);
    bean.afterPropertiesSet();
    bean.start();

    Thread.sleep(500);
    assertEquals(10, DummyJobBean.param);
    assertTrue(DummyJobBean.count > 0);

    bean.destroy();
}
private static JobDetail jobDetailMyJobOne() {
    JobDetailImpl jobDetail = new JobDetailImpl();
    jobDetail.setKey(new JobKey("jobone", "mygroup"));
    jobDetail.setJobClass(MyJobOne.class);
    // remain stored in the job store even if no triggers point to it anymore
    jobDetail.setDurability(true);
    return jobDetail;
}
private static JobDetail jobDetailMyJobTwo() {
    JobDetailImpl jobDetail = new JobDetailImpl();
    jobDetail.setKey(new JobKey("jobtwo", "mygroup"));
    jobDetail.setJobClass(MyJobTwo.class);
    jobDetail.setDurability(true);

    JobDataMap map = new JobDataMap();
    map.put("name", "HaHa");
    map.put(MyJobTwo.COUNT, 1);
    jobDetail.setJobDataMap(map);
    return jobDetail;
}
@Override @SuppressWarnings("unchecked") public void afterPropertiesSet() throws ClassNotFoundException, NoSuchMethodException { prepare(); // Use specific name if given, else fall back to bean name. String name = (this.name != null ? this.name : this.beanName); // Consider the concurrent flag to choose between stateful and stateless // job. Class<?> jobClass = (this.concurrent ? MethodInvokingJob.class : StatefulMethodInvokingJob.class); // Build JobDetail instance. JobDetailImpl jdi = new JobDetailImpl(); jdi.setName(name); jdi.setGroup(this.group); jdi.setJobClass((Class) jobClass); jdi.setDurability(true); jdi.getJobDataMap().put("methodInvoker", this); //jdi.getJobDataMap().put("targetJob", getTargetObject()); this.jobDetail = jdi; postProcessJobDetail(this.jobDetail); TaskTriggerDefinition def = taskTriggerDefinitionCache.loadByUnique(getJobName()); QuartzJob targetJob = (QuartzJob) getTargetObject(); jobExecuteCounterHolder.put(targetJob.getClass(), new JobExecuteCounter(def.getMaxFireTimes(), 0)); //targetJob.setMaxFireTimes(def.getMaxFireTimes()); //获取每个Job的最大执行次数 }
/**
 * Get the JobDetail for the given job parameters.
 * @param jobParamer the parameters describing the job
 * @return the JobDetail
 */
private JobDetail getJobDetail(JobParamer jobParamer) {
    // job name, job group, job execution class
    JobDetailImpl jobDetail = new JobDetailImpl();
    jobDetail.setGroup(jobParamer.getGroupName());
    jobDetail.setName(jobParamer.getId());
    jobDetail.setJobClass(CommonJob.class);

    JobDataMap jobDataMap = new JobDataMap();
    jobDataMap.put("jobParamer", jobParamer);
    jobDetail.setJobDataMap(jobDataMap);
    return jobDetail;
}
@Test
public void testStoreTriggerReplacesTrigger() throws Exception {
    String jobName = "StoreTriggerReplacesTrigger";
    String jobGroup = "StoreTriggerReplacesTriggerGroup";
    JobDetailImpl detail = new JobDetailImpl(jobName, jobGroup, MyJob.class);
    jobStore.storeJob(detail, false);

    String trName = "StoreTriggerReplacesTrigger";
    String trGroup = "StoreTriggerReplacesTriggerGroup";
    OperableTrigger tr = new SimpleTriggerImpl(trName, trGroup, new Date());
    tr.setJobKey(new JobKey(jobName, jobGroup));
    tr.setCalendarName(null);

    jobStore.storeTrigger(tr, false);
    assertEquals(tr, jobStore.retrieveTrigger(tr.getKey()));

    try {
        jobStore.storeTrigger(tr, false);
        fail("an attempt to store duplicate trigger succeeded");
    } catch (ObjectAlreadyExistsException oaee) {
        // expected
    }

    tr.setCalendarName("QQ");
    jobStore.storeTrigger(tr, true); // fails here
    assertEquals(tr, jobStore.retrieveTrigger(tr.getKey()));
    assertEquals("StoreJob doesn't replace triggers", "QQ", jobStore.retrieveTrigger(tr.getKey()).getCalendarName());
}
@Override
public void scheduleBatchEmailReminders() throws Exception {
    String emailBatchGroup = "Email Batch";
    String dailyCron = ConfigContext.getCurrentContextConfig()
            .getProperty(KewApiConstants.DAILY_EMAIL_CRON_EXPRESSION);
    if (!StringUtils.isBlank(dailyCron)) {
        LOG.info("Scheduling Daily Email batch with cron expression: " + dailyCron);
        CronTriggerImpl dailyTrigger = new CronTriggerImpl(DAILY_TRIGGER_NAME, emailBatchGroup, dailyCron);
        JobDetailImpl dailyJobDetail = new JobDetailImpl(DAILY_JOB_NAME, emailBatchGroup, DailyEmailJob.class);
        dailyTrigger.setJobName(dailyJobDetail.getName());
        dailyTrigger.setJobGroup(dailyJobDetail.getGroup());
        addJobToScheduler(dailyJobDetail);
        addTriggerToScheduler(dailyTrigger);
    } else {
        LOG.warn("No " + KewApiConstants.DAILY_EMAIL_CRON_EXPRESSION
                + " parameter was configured. Daily Email batch was not scheduled!");
    }

    String weeklyCron = ConfigContext.getCurrentContextConfig()
            .getProperty(KewApiConstants.WEEKLY_EMAIL_CRON_EXPRESSION);
    if (!StringUtils.isBlank(weeklyCron)) {
        LOG.info("Scheduling Weekly Email batch with cron expression: " + weeklyCron);
        CronTriggerImpl weeklyTrigger = new CronTriggerImpl(WEEKLY_TRIGGER_NAME, emailBatchGroup, weeklyCron);
        JobDetailImpl weeklyJobDetail = new JobDetailImpl(WEEKLY_JOB_NAME, emailBatchGroup, WeeklyEmailJob.class);
        weeklyTrigger.setJobName(weeklyJobDetail.getName());
        weeklyTrigger.setJobGroup(weeklyJobDetail.getGroup());
        addJobToScheduler(weeklyJobDetail);
        addTriggerToScheduler(weeklyTrigger);
    } else {
        LOG.warn("No " + KewApiConstants.WEEKLY_EMAIL_CRON_EXPRESSION
                + " parameter was configured. Weekly Email batch was not scheduled!");
    }
}
@Override
public void scheduleBatchEmailReminders() throws Exception {
    sendDailyReminderCalled = false;
    sendWeeklyReminderCalled = false;
    LOG.info("Scheduling Batch Email Reminders.");
    String emailBatchGroup = "Email Batch";
    String dailyCron = ConfigContext.getCurrentContextConfig()
            .getProperty(KewApiConstants.DAILY_EMAIL_CRON_EXPRESSION);
    if (!StringUtils.isBlank(dailyCron)) {
        LOG.info("Scheduling Daily Email batch with cron expression: " + dailyCron);
        CronTriggerImpl dailyTrigger = new CronTriggerImpl(DAILY_TRIGGER_NAME, emailBatchGroup, dailyCron);
        JobDetailImpl dailyJobDetail = new JobDetailImpl(DAILY_JOB_NAME, emailBatchGroup, DailyEmailJob.class);
        dailyTrigger.setJobName(dailyJobDetail.getName());
        dailyTrigger.setJobGroup(dailyJobDetail.getGroup());
        sendDailyReminderCalled = true;
    } else {
        LOG.warn("No " + KewApiConstants.DAILY_EMAIL_CRON_EXPRESSION
                + " parameter was configured. Daily Email batch was not scheduled!");
    }

    String weeklyCron = ConfigContext.getCurrentContextConfig()
            .getProperty(KewApiConstants.WEEKLY_EMAIL_CRON_EXPRESSION);
    if (!StringUtils.isBlank(weeklyCron)) {
        LOG.info("Scheduling Weekly Email batch with cron expression: " + weeklyCron);
        CronTriggerImpl weeklyTrigger = new CronTriggerImpl(WEEKLY_TRIGGER_NAME, emailBatchGroup, weeklyCron);
        JobDetailImpl weeklyJobDetail = new JobDetailImpl(WEEKLY_JOB_NAME, emailBatchGroup, WeeklyEmailJob.class);
        weeklyTrigger.setJobName(weeklyJobDetail.getName());
        weeklyTrigger.setJobGroup(weeklyJobDetail.getGroup());
        sendWeeklyReminderCalled = true;
    } else {
        LOG.warn("No " + KewApiConstants.WEEKLY_EMAIL_CRON_EXPRESSION
                + " parameter was configured. Weekly Email batch was not scheduled!");
    }
}
protected void registerJobListener() throws SchedulerException {
    KSBServiceLocator.getScheduler().getListenerManager().addJobListener(new JobListenerSupport() {
        @Override
        public void jobWasExecuted(JobExecutionContext context, JobExecutionException jobException) {
            log.info("Job was executed: " + context);
            if (MessageProcessingJob.NAME.equals(((JobDetailImpl) context.getJobDetail()).getName())) {
                signal.countDown();
            }
        }

        public String getName() {
            return System.currentTimeMillis() + RandomStringUtils.randomAlphanumeric(10);
        }
    });
}
@Test
public void testStoreTriggerReplacesTrigger() throws Exception {
    String jobName = "StoreTriggerReplacesTrigger";
    String jobGroup = "StoreTriggerReplacesTriggerGroup";
    JobDetailImpl detail = new JobDetailImpl(jobName, jobGroup, NoOpJob.class);
    jobStore.storeJob(detail, false);

    String trName = "StoreTriggerReplacesTrigger";
    String trGroup = "StoreTriggerReplacesTriggerGroup";
    OperableTrigger tr = new SimpleTriggerImpl(trName, trGroup, new Date());
    tr.setJobKey(new JobKey(jobName, jobGroup));
    tr.setCalendarName(null);

    jobStore.storeTrigger(tr, false);
    assertEquals(tr, jobStore.retrieveTrigger(tr.getKey()));

    try {
        jobStore.storeTrigger(tr, false);
        fail("an attempt to store duplicate trigger succeeded");
    } catch (ObjectAlreadyExistsException oaee) {
        // expected
    }

    tr.setCalendarName("QQ");
    jobStore.storeTrigger(tr, true); // fails here
    assertEquals(tr, jobStore.retrieveTrigger(tr.getKey()));
    assertEquals("QQ", jobStore.retrieveTrigger(tr.getKey()).getCalendarName(), "StoreJob doesn't replace triggers");
}
public CouchDbJobDetail(JobDetail newJob) {
    this();
    if (newJob instanceof JobDetailImpl) {
        super.setName(((JobDetailImpl) newJob).getName());
        super.setGroup(((JobDetailImpl) newJob).getGroup());
    } else if (newJob instanceof CouchDbJobDetail) {
        super.setName(((CouchDbJobDetail) newJob).getName());
        super.setGroup(((CouchDbJobDetail) newJob).getGroup());
    }
    super.setDescription(newJob.getDescription());
    super.setJobClass(newJob.getJobClass());
    super.setDurability(newJob.isDurable());
    super.setRequestsRecovery(newJob.requestsRecovery());
    super.setJobDataMap((JobDataMap) newJob.getJobDataMap().clone());
}
public JobDetail toQuartzJobDetail() throws ClassNotFoundException {
    JobDetailImpl jd = new JobDetailImpl();
    jd.setDescription(description);
    jd.setDurability("1".equals(is_durable));
    jd.setName(job_name);
    jd.setGroup("".equals(job_group) ? null : job_group);
    Class cls = Class.forName(job_class_name);
    jd.setJobClass(cls);
    jd.setRequestsRecovery("1".equals(requests_recovery));
    jd.setJobDataMap(dataMap);
    return jd;
}
@Test
public void testIsInterruptable() throws Exception {
    JobDetailImpl job = new JobDetailImpl();
    job.setJobClass(DummyJob.class);
    Assert.assertTrue(utilsTool.isInterruptible(job));
}
public JobDetailImpl getJobDetail() { return jobDetail; }
public void setJobDetail(JobDetailImpl jobDetail) { this.jobDetail = jobDetail; }