/**
 * Creates the Quartz trigger for this task, attaches the shared trigger
 * listener, stores it, and returns the stored instance.
 */
@Override
public SimpleTrigger initializeQuartzTrigger() {
  final SimpleTrigger quartzTrigger = new SimpleTrigger();
  // Every trigger produced here participates in the common listener.
  quartzTrigger.addTriggerListener(SchedulerInitializer.COMMON_TRIGGER_LISTENER_NAME);
  setTrigger(quartzTrigger);
  // Return via the accessor so callers see exactly what was stored.
  return getTrigger();
}
/**
 * Schedules a one-shot analysis job that fires roughly one second from now.
 *
 * @param domainId domain owning the job
 * @param projectId project identifier (also part of the job name)
 * @param userId user requesting the run
 * @param instanceId instance identifier (also part of the job name)
 * @param timeout job timeout, passed through to the job data map
 * @throws SchedulerException if the job cannot be scheduled
 */
public void startJob(
    String domainId, String projectId, String userId, String instanceId, long timeout)
    throws SchedulerException {
  long startTime = System.currentTimeMillis() + 1000L;
  // One-shot trigger: repeatCount 0, repeatInterval 0.
  SimpleTrigger trigger =
      new SimpleTrigger("AnalysisJobTrigger", null, new Date(startTime), null, 0, 0L);
  long endTime = startTime + 3000;
  // BUG FIX: the end time was previously set to the *start* time, making the
  // trigger expire the instant it became eligible to fire; use the computed
  // endTime (start + 3s) as clearly intended by the unused local.
  trigger.setEndTime(new Date(endTime));
  JobDetail job = new JobDetail(projectId + instanceId, "group1", FirstJob.class);
  // Everything the job needs at execution time travels in the data map.
  job.getJobDataMap().put(JobParamName.DOMAIN_ID, domainId);
  job.getJobDataMap().put(JobParamName.PROJECT_ID, projectId);
  job.getJobDataMap().put(JobParamName.USER_ID, userId);
  job.getJobDataMap().put(JobParamName.INSTANCE_ID, instanceId);
  job.getJobDataMap().put(JobParamName.TIMEOUT, timeout);
  sch.scheduleJob(job, trigger);
}
/**
 * Builds this task's simple trigger from its configured parameters.
 * The interval parameter is expressed in hours and converted to milliseconds.
 *
 * @throws PluggableTaskException if a parameter cannot be read
 */
public SimpleTrigger getTrigger() throws PluggableTaskException {
  // Convert the configured interval (hours) into milliseconds.
  final long intervalMillis =
      getParameter(PARAM_INTERVAL.getName(), DEFAULT_INTERVAL) * 3600 * 1000;
  final SimpleTrigger taskTrigger =
      new SimpleTrigger(
          getTaskName(),
          Scheduler.DEFAULT_GROUP,
          getParameter(PARAM_START_TIME.getName(), DEFAULT_START_TIME),
          getParameter(PARAM_END_TIME.getName(), DEFAULT_END_TIME),
          getParameter(PARAM_REPEAT.getName(), DEFAULT_REPEAT),
          intervalMillis);
  // On misfire, skip to the next scheduled time while keeping the repeat count.
  taskTrigger.setMisfireInstruction(
      SimpleTrigger.MISFIRE_INSTRUCTION_RESCHEDULE_NEXT_WITH_EXISTING_COUNT);
  return taskTrigger;
}
/**
 * Builds a simple trigger for this job bean, or returns {@code null} when the
 * configured repeat settings are inconsistent (non-positive interval with a
 * non-zero repeat count).
 */
@Override
public Trigger getTrigger() throws Exception {
  // A non-positive interval only makes sense for a one-shot job (repeatCount == 0).
  if (repeatInterval <= 0 && repeatCount != 0) {
    logger.error(
        "Job "
            + getBeanName()
            + " - repeatInterval/repeatIntervalMinutes cannot be 0 (or -ve) unless repeatCount is also 0");
    return null;
  }
  final SimpleTrigger beanTrigger = new SimpleTrigger(getBeanName(), Scheduler.DEFAULT_GROUP);
  // First fire happens startDelay milliseconds from now.
  beanTrigger.setStartTime(new Date(System.currentTimeMillis() + this.startDelay));
  beanTrigger.setRepeatCount(repeatCount);
  beanTrigger.setRepeatInterval(repeatInterval);
  return beanTrigger;
}
public void init() throws Exception { String name = triggerConfig.getName(); String s = triggerConfig.getParameter("dateFormat"); SimpleDateFormat dateFormat = s == null ? new SimpleDateFormat() : new SimpleDateFormat(s); log.debug("Date format: " + dateFormat.toPattern()); s = triggerConfig.getParameter("delay"); long delay = s == null ? 0 : Long.parseLong(s); log.debug("Delay: " + delay); s = triggerConfig.getParameter("startTime"); Date startTime = s == null ? new Date(System.currentTimeMillis() + delay * 1000) : dateFormat.parse(s); log.debug("Start time: " + startTime); s = triggerConfig.getParameter("endTime"); Date endTime = s == null ? null : dateFormat.parse(s); log.debug("End time: " + endTime); s = triggerConfig.getParameter("count"); Integer count = s == null ? null : Integer.parseInt(s); log.debug("Count: " + count); s = triggerConfig.getParameter("interval"); long interval = s == null ? 0 : Long.parseLong(s); log.debug("Interval: " + interval); Partition partition = triggerContext.getPartition(); org.quartz.SimpleTrigger simpleTrigger; if (count == null) { // schedule one execution simpleTrigger = new org.quartz.SimpleTrigger(name, partition.getName(), startTime); } else { // schedule multiple executions simpleTrigger = new org.quartz.SimpleTrigger( name, partition.getName(), startTime, endTime, count, interval * 1000); simpleTrigger.setMisfireInstruction( org.quartz.SimpleTrigger.MISFIRE_INSTRUCTION_RESCHEDULE_NOW_WITH_EXISTING_REPEAT_COUNT); } quartzTrigger = simpleTrigger; }
public SimpleTrigger generateImportTrigger( FormProcessor fp, UserAccountBean userAccount, StudyBean study, Date startDateTime, String locale) { String jobName = fp.getString(JOB_NAME); String email = fp.getString(EMAIL); String jobDesc = fp.getString(JOB_DESC); String directory = fp.getString(DIRECTORY); // what kinds of periods do we have? hourly, daily, weekly? long interval = 0; int hours = fp.getInt("hours"); int minutes = fp.getInt("minutes"); if (hours > 0) { long hoursInt = hours * 3600000; interval = interval + hoursInt; } if (minutes > 0) { long minutesInt = minutes * 60000; interval = interval + minutesInt; } SimpleTrigger trigger = new SimpleTrigger(jobName, IMPORT_TRIGGER, 64000, interval); trigger.setDescription(jobDesc); // set just the start date trigger.setStartTime(startDateTime); trigger.setName(jobName); // + datasetId); trigger.setGroup(IMPORT_TRIGGER); // + datasetId); trigger.setMisfireInstruction( SimpleTrigger.MISFIRE_INSTRUCTION_RESCHEDULE_NEXT_WITH_EXISTING_COUNT); // set job data map JobDataMap jobDataMap = new JobDataMap(); jobDataMap.put(EMAIL, email); jobDataMap.put(USER_ID, userAccount.getId()); jobDataMap.put(STUDY_NAME, study.getName()); jobDataMap.put(STUDY_OID, study.getOid()); jobDataMap.put(DIRECTORY, directory); jobDataMap.put(ExampleSpringJob.LOCALE, locale); jobDataMap.put("hours", hours); jobDataMap.put("minutes", minutes); trigger.setJobDataMap(jobDataMap); trigger.setVolatility(false); return trigger; }
/**
 * Cancels a scheduled Quartz job. Extract-group jobs are interrupted and
 * unscheduled; XSLT-group jobs are deleted and re-scheduled with a fresh
 * trigger carrying the old trigger's data and repeat settings, so the
 * remaining repetitions are preserved. Finally forwards to the given page.
 *
 * @param theJobName name of the job to cancel
 * @param theJobGroupName group of the job to cancel
 * @param triggerName name of the trigger driving the job
 * @param triggerGroupName group of that trigger
 * @param redirection page (under /pages/) to forward to afterwards
 * @throws SchedulerException if any scheduler operation fails
 */
@RequestMapping("/cancelScheduledJob")
public String cancelScheduledJob(
    HttpServletRequest request,
    HttpServletResponse response,
    @RequestParam("theJobName") String theJobName,
    @RequestParam("theJobGroupName") String theJobGroupName,
    @RequestParam("theTriggerName") String triggerName,
    @RequestParam("theTriggerGroupName") String triggerGroupName,
    @RequestParam("redirection") String redirection,
    ModelMap model)
    throws SchedulerException {
  // NOTE(review): the fetched JobDetail is discarded — presumably only a
  // validation/lookup side effect is wanted here; confirm.
  scheduler.getJobDetail(theJobName, theJobGroupName);
  logger.debug("About to pause the job-->" + theJobName + "Job Group Name -->" + theJobGroupName);
  SimpleTrigger oldTrigger = (SimpleTrigger) scheduler.getTrigger(triggerName, triggerGroupName);
  if (oldTrigger != null) {
    // New start time: one repeat interval after the old trigger's start.
    Date startTime = new Date(oldTrigger.getStartTime().getTime() + oldTrigger.getRepeatInterval());
    if (triggerGroupName.equals(ExtractController.TRIGGER_GROUP_NAME)) {
      // Extract jobs may be mid-run; interrupt them before pausing.
      interruptQuartzJob(scheduler, theJobName, theJobGroupName);
    }
    scheduler.pauseJob(theJobName, theJobGroupName);
    // Rebuild a replacement trigger carrying over the old trigger's settings.
    SimpleTrigger newTrigger = new SimpleTrigger(triggerName, triggerGroupName);
    newTrigger.setJobName(theJobName);
    newTrigger.setJobGroup(theJobGroupName);
    newTrigger.setJobDataMap(oldTrigger.getJobDataMap());
    newTrigger.setVolatility(false);
    newTrigger.setRepeatCount(oldTrigger.getRepeatCount());
    newTrigger.setRepeatInterval(oldTrigger.getRepeatInterval());
    // Misfires continue with whatever repetitions remain.
    newTrigger.setMisfireInstruction(
        SimpleTrigger.MISFIRE_INSTRUCTION_RESCHEDULE_NEXT_WITH_REMAINING_COUNT);
    newTrigger.setStartTime(startTime);
    // NOTE(review): repeat interval is set a second time here — redundant but harmless.
    newTrigger.setRepeatInterval(oldTrigger.getRepeatInterval());
    // Extract-data jobs are unscheduled here and are not re-scheduled below.
    scheduler.unscheduleJob(triggerName, triggerGroupName);
    ArrayList<String> pageMessages = new ArrayList<String>();
    if (triggerGroupName.equals(ExtractController.TRIGGER_GROUP_NAME)) {
      scheduler.rescheduleJob(triggerName, triggerGroupName, newTrigger);
      pageMessages.add("The Job " + theJobName + " has been cancelled");
    } else if (triggerGroupName.equals(XsltTriggerService.TRIGGER_GROUP_NAME)) {
      // XSLT jobs: replace the stored job definition, then schedule it with
      // the rebuilt trigger.
      JobDetailBean jobDetailBean = new JobDetailBean();
      jobDetailBean.setGroup(XsltTriggerService.TRIGGER_GROUP_NAME);
      jobDetailBean.setName(newTrigger.getName());
      jobDetailBean.setJobClass(org.akaza.openclinica.job.XsltStatefulJob.class);
      jobDetailBean.setJobDataMap(newTrigger.getJobDataMap());
      jobDetailBean.setDurability(true); // need durability?
      jobDetailBean.setVolatility(false);
      scheduler.deleteJob(theJobName, theJobGroupName);
      scheduler.scheduleJob(jobDetailBean, newTrigger);
      pageMessages.add("The Job " + theJobName + " has been rescheduled");
    }
    request.setAttribute("pageMessages", pageMessages);
    logger.debug("jobDetails>" + scheduler.getJobDetail(theJobName, theJobGroupName));
  }
  sdvUtil.forwardRequestFromController(request, response, "/pages/" + redirection);
  return null;
}
public void run() throws Exception { Log log = LogFactory.getLog(JobExceptionExample.class); log.info("------- Initializing ----------------------"); // First we must get a reference to a scheduler SchedulerFactory sf = new StdSchedulerFactory(); Scheduler sched = sf.getScheduler(); log.info("------- Initialization Complete ------------"); log.info("------- Scheduling Jobs -------------------"); // jobs can be scheduled before start() has been called // get a "nice round" time a few seconds in the future... long ts = TriggerUtils.getNextGivenSecondDate(null, 15).getTime(); // badJob1 will run every three seconds // this job will throw an exception and refire // immediately JobDetail job = new JobDetail("badJob1", "group1", BadJob1.class); SimpleTrigger trigger = new SimpleTrigger( "trigger1", "group1", new Date(ts), null, SimpleTrigger.REPEAT_INDEFINITELY, 3000L); Date ft = sched.scheduleJob(job, trigger); log.info( job.getFullName() + " will run at: " + ft + " and repeat: " + trigger.getRepeatCount() + " times, every " + trigger.getRepeatInterval() / 1000 + " seconds"); // badJob2 will run every three seconds // this job will throw an exception and never // refire job = new JobDetail("badJob2", "group1", BadJob2.class); trigger = new SimpleTrigger( "trigger2", "group1", new Date(ts), null, SimpleTrigger.REPEAT_INDEFINITELY, 3000L); ft = sched.scheduleJob(job, trigger); log.info( job.getFullName() + " will run at: " + ft + " and repeat: " + trigger.getRepeatCount() + " times, every " + trigger.getRepeatInterval() / 1000 + " seconds"); log.info("------- Starting Scheduler ----------------"); // jobs don't start firing until start() has been called... 
sched.start(); log.info("------- Started Scheduler -----------------"); try { // sleep for 60 seconds Thread.sleep(60L * 1000L); } catch (Exception e) { } log.info("------- Shutting Down ---------------------"); sched.shutdown(true); log.info("------- Shutdown Complete -----------------"); SchedulerMetaData metaData = sched.getMetaData(); log.info("Executed " + metaData.numJobsExecuted() + " jobs."); }
/**
 * Quartz 2.x demo: schedules jobs 1-8 with a variety of SimpleTrigger
 * schedules (one-shot, fixed repeat, indefinite), fires job8 manually,
 * reschedules job7, then shuts down and reports execution stats.
 *
 * @throws Exception if the scheduler cannot be created or a job scheduled
 */
public void run() throws Exception {
  Logger log = LoggerFactory.getLogger(SimpleTriggerExample.class);

  log.info("------- Initializing -------------------");

  // First we must get a reference to a scheduler
  SchedulerFactory sf = new StdSchedulerFactory();
  Scheduler sched = sf.getScheduler();

  log.info("------- Initialization Complete --------");
  log.info("------- Scheduling Jobs ----------------");

  // jobs can be scheduled before sched.start() has been called

  // get a "nice round" time a few seconds in the future...
  Date startTime = DateBuilder.nextGivenSecondDate(null, 15);

  // job1 will only fire once at date/time "ts"
  JobDetail job = newJob(SimpleJob.class).withIdentity("job1", "group1").build();
  SimpleTrigger trigger =
      (SimpleTrigger) newTrigger().withIdentity("trigger1", "group1").startAt(startTime).build();

  // schedule it to run!
  Date ft = sched.scheduleJob(job, trigger);
  log.info(
      job.getKey()
          + " will run at: "
          + ft
          + " and repeat: "
          + trigger.getRepeatCount()
          + " times, every "
          + trigger.getRepeatInterval() / 1000
          + " seconds");

  // job2 will only fire once at date/time "ts"
  job = newJob(SimpleJob.class).withIdentity("job2", "group1").build();
  trigger =
      (SimpleTrigger) newTrigger().withIdentity("trigger2", "group1").startAt(startTime).build();
  ft = sched.scheduleJob(job, trigger);
  log.info(
      job.getKey()
          + " will run at: "
          + ft
          + " and repeat: "
          + trigger.getRepeatCount()
          + " times, every "
          + trigger.getRepeatInterval() / 1000
          + " seconds");

  // job3 will run 11 times (run once and repeat 10 more times),
  // repeating every 10 seconds
  job = newJob(SimpleJob.class).withIdentity("job3", "group1").build();
  trigger =
      newTrigger()
          .withIdentity("trigger3", "group1")
          .startAt(startTime)
          .withSchedule(simpleSchedule().withIntervalInSeconds(10).withRepeatCount(10))
          .build();
  ft = sched.scheduleJob(job, trigger);
  log.info(
      job.getKey()
          + " will run at: "
          + ft
          + " and repeat: "
          + trigger.getRepeatCount()
          + " times, every "
          + trigger.getRepeatInterval() / 1000
          + " seconds");

  // the same job (job3) will be scheduled by another trigger;
  // this one repeats twice at a 10 second interval
  trigger =
      newTrigger()
          .withIdentity("trigger3", "group2")
          .startAt(startTime)
          .withSchedule(simpleSchedule().withIntervalInSeconds(10).withRepeatCount(2))
          .forJob(job)
          .build();
  ft = sched.scheduleJob(trigger);
  log.info(
      job.getKey()
          + " will [also] run at: "
          + ft
          + " and repeat: "
          + trigger.getRepeatCount()
          + " times, every "
          + trigger.getRepeatInterval() / 1000
          + " seconds");

  // job4 will run 6 times (run once and repeat 5 more times),
  // repeating every 10 seconds
  job = newJob(SimpleJob.class).withIdentity("job4", "group1").build();
  trigger =
      newTrigger()
          .withIdentity("trigger4", "group1")
          .startAt(startTime)
          .withSchedule(simpleSchedule().withIntervalInSeconds(10).withRepeatCount(5))
          .build();
  ft = sched.scheduleJob(job, trigger);
  log.info(
      job.getKey()
          + " will run at: "
          + ft
          + " and repeat: "
          + trigger.getRepeatCount()
          + " times, every "
          + trigger.getRepeatInterval() / 1000
          + " seconds");

  // job5 will run once, five minutes in the future
  job = newJob(SimpleJob.class).withIdentity("job5", "group1").build();
  trigger =
      (SimpleTrigger)
          newTrigger()
              .withIdentity("trigger5", "group1")
              .startAt(futureDate(5, IntervalUnit.MINUTE))
              .build();
  ft = sched.scheduleJob(job, trigger);
  log.info(
      job.getKey()
          + " will run at: "
          + ft
          + " and repeat: "
          + trigger.getRepeatCount()
          + " times, every "
          + trigger.getRepeatInterval() / 1000
          + " seconds");

  // job6 will run indefinitely, every 40 seconds
  job = newJob(SimpleJob.class).withIdentity("job6", "group1").build();
  trigger =
      newTrigger()
          .withIdentity("trigger6", "group1")
          .startAt(startTime)
          .withSchedule(simpleSchedule().withIntervalInSeconds(40).repeatForever())
          .build();
  ft = sched.scheduleJob(job, trigger);
  log.info(
      job.getKey()
          + " will run at: "
          + ft
          + " and repeat: "
          + trigger.getRepeatCount()
          + " times, every "
          + trigger.getRepeatInterval() / 1000
          + " seconds");

  log.info("------- Starting Scheduler ----------------");

  // All of the jobs have been added to the scheduler, but none of the jobs
  // will run until the scheduler has been started
  sched.start();

  log.info("------- Started Scheduler -----------------");

  // jobs can also be scheduled after start() has been called...
  // job7 will repeat 20 times, repeating every five minutes
  job = newJob(SimpleJob.class).withIdentity("job7", "group1").build();
  trigger =
      newTrigger()
          .withIdentity("trigger7", "group1")
          .startAt(startTime)
          .withSchedule(simpleSchedule().withIntervalInMinutes(5).withRepeatCount(20))
          .build();
  ft = sched.scheduleJob(job, trigger);
  log.info(
      job.getKey()
          + " will run at: "
          + ft
          + " and repeat: "
          + trigger.getRepeatCount()
          + " times, every "
          + trigger.getRepeatInterval() / 1000
          + " seconds");

  // jobs can be fired directly... (rather than waiting for a trigger);
  // the job must be durable to exist without a trigger
  job = newJob(SimpleJob.class).withIdentity("job8", "group1").storeDurably().build();
  sched.addJob(job, true);
  log.info("'Manually' triggering job8...");
  sched.triggerJob(jobKey("job8", "group1"));

  log.info("------- Waiting 30 seconds... --------------");
  try {
    // wait 30 seconds to show jobs executing
    Thread.sleep(30L * 1000L);
    // NOTE(review): an interrupt here is swallowed; consider restoring it.
  } catch (Exception e) {
  }

  // jobs can be re-scheduled...
  log.info("------- Rescheduling... --------------------");
  trigger =
      newTrigger()
          .withIdentity("trigger7", "group1")
          .startAt(startTime)
          .withSchedule(simpleSchedule().withIntervalInMinutes(5).withRepeatCount(20))
          .build();
  ft = sched.rescheduleJob(trigger.getKey(), trigger);
  log.info("job7 rescheduled to run at: " + ft);

  log.info("------- Waiting five minutes... ------------");
  try {
    // wait five minutes to show jobs executing
    Thread.sleep(300L * 1000L);
  } catch (Exception e) {
  }

  log.info("------- Shutting Down ---------------------");
  sched.shutdown(true);
  log.info("------- Shutdown Complete -----------------");

  // display some stats about the schedule that just ran
  SchedulerMetaData metaData = sched.getMetaData();
  log.info("Executed " + metaData.getNumberOfJobsExecuted() + " jobs.");
}
public void run() throws Exception { Logger log = LoggerFactory.getLogger(SimpleTriggerExample.class); log.info("------- Initializing -------------------"); // First we must get a reference to a scheduler SchedulerFactory sf = new StdSchedulerFactory(); Scheduler sched = sf.getScheduler(); log.info("------- Initialization Complete --------"); log.info("------- Scheduling Jobs ----------------"); // jobs can be scheduled before sched.start() has been called // get a "nice round" time a few seconds in the future... long ts = TriggerUtils.getNextGivenSecondDate(null, 5).getTime(); sched.start(); log.info("------- Started Scheduler -----------------"); // jobs can also be scheduled after start() has been called... // job7 will repeat 20 times, repeat every 3 sec JobDetail job = new JobDetail("job7", "group1", SimpleJob.class); SimpleTrigger trigger = new SimpleTrigger("trigger7", "group1", "job7", "group1", new Date(ts), null, 30, 2000L); Date ft = sched.scheduleJob(job, trigger); log.info( job.getFullName() + " will run at: " + ft + " and repeat: " + trigger.getRepeatCount() + " times, every " + trigger.getRepeatInterval() / 1000 + " seconds"); log.info("------- Waiting 15 seconds... --------------"); try { Thread.sleep(15L * 1000L); } catch (Exception e) { } // jobs can be re-scheduled... // job 7 will run immediately and repeat 10 times for every second log.info("------- Rescheduling... --------------------"); trigger = new SimpleTrigger("trigger7", "group1", "job7", "group1", new Date(), null, 10, 1000L); ft = sched.rescheduleJob("trigger7", "group1", trigger); log.info("job7 rescheduled to run at: " + ft); log.info("------- Waiting five minutes... 
------------"); try { Thread.sleep(300L * 1000L); } catch (Exception e) { } log.info("------- Shutting Down ---------------------"); sched.shutdown(true); log.info("------- Shutdown Complete -----------------"); // display some stats about the schedule that just ran SchedulerMetaData metaData = sched.getMetaData(); log.info("Executed " + metaData.getNumberOfJobsExecuted() + " jobs."); }
/**
 * Demo of per-job state: schedules two ColorJob instances, each running 5
 * times at 10-second intervals with different data-map contents, lets them
 * run for 60 seconds, then shuts down and reports execution stats.
 *
 * @throws Exception if the scheduler cannot be created or a job scheduled
 */
public void run() throws Exception {
  Logger log = LoggerFactory.getLogger(JobStateExample.class);

  log.info("------- Initializing -------------------");

  // First we must get a reference to a scheduler
  SchedulerFactory sf = new StdSchedulerFactory();
  Scheduler sched = sf.getScheduler();

  log.info("------- Initialization Complete --------");
  log.info("------- Scheduling Jobs ----------------");

  // get a "nice round" time a few seconds in the future....
  Date startTime = nextGivenSecondDate(null, 10);

  // job1 will only run 5 times (at start time, plus 4 repeats), every 10 seconds
  JobDetail job1 = newJob(ColorJob.class).withIdentity("job1", "group1").build();
  SimpleTrigger trigger1 =
      newTrigger()
          .withIdentity("trigger1", "group1")
          .startAt(startTime)
          .withSchedule(simpleSchedule().withIntervalInSeconds(10).withRepeatCount(4))
          .build();

  // pass initialization parameters into the job
  job1.getJobDataMap().put(ColorJob.FAVORITE_COLOR, "Green");
  job1.getJobDataMap().put(ColorJob.EXECUTION_COUNT, 1);

  // schedule the job to run
  Date scheduleTime1 = sched.scheduleJob(job1, trigger1);
  log.info(
      job1.getKey()
          + " will run at: "
          + scheduleTime1
          + " and repeat: "
          + trigger1.getRepeatCount()
          + " times, every "
          + trigger1.getRepeatInterval() / 1000
          + " seconds");

  // job2 will also run 5 times, every 10 seconds
  JobDetail job2 = newJob(ColorJob.class).withIdentity("job2", "group1").build();
  SimpleTrigger trigger2 =
      newTrigger()
          .withIdentity("trigger2", "group1")
          .startAt(startTime)
          .withSchedule(simpleSchedule().withIntervalInSeconds(10).withRepeatCount(4))
          .build();

  // pass initialization parameters into the job;
  // this job has a different favorite color!
  job2.getJobDataMap().put(ColorJob.FAVORITE_COLOR, "Red");
  job2.getJobDataMap().put(ColorJob.EXECUTION_COUNT, 1);

  // schedule the job to run
  Date scheduleTime2 = sched.scheduleJob(job2, trigger2);
  log.info(
      job2.getKey().toString()
          + " will run at: "
          + scheduleTime2
          + " and repeat: "
          + trigger2.getRepeatCount()
          + " times, every "
          + trigger2.getRepeatInterval() / 1000
          + " seconds");

  log.info("------- Starting Scheduler ----------------");

  // All of the jobs have been added to the scheduler, but none of the jobs
  // will run until the scheduler has been started
  sched.start();

  log.info("------- Started Scheduler -----------------");

  log.info("------- Waiting 60 seconds... -------------");
  try {
    // wait for the jobs to execute
    Thread.sleep(60L * 1000L);
    // NOTE(review): an interrupt here is swallowed; consider restoring it.
  } catch (Exception e) {
    //
  }

  log.info("------- Shutting Down ---------------------");
  sched.shutdown(true);
  log.info("------- Shutdown Complete -----------------");

  SchedulerMetaData metaData = sched.getMetaData();
  log.info("Executed " + metaData.getNumberOfJobsExecuted() + " jobs.");
}
/**
 * Exercises the monitoring report with a realistic mix of Quartz jobs:
 * paused jobs, never-firing triggers, and a batch of short-lived jobs, some
 * of which fail. Verifies the HTML report renders non-empty at each stage.
 *
 * @throws IOException e
 * @throws SchedulerException e
 */
@Test
public void testJob() throws IOException, SchedulerException {
  // Quartz job listener setup; start from a clean counter.
  JobGlobalListener.initJobGlobalListener();
  JobGlobalListener.getJobCounter().clear();

  // Grab the Scheduler instance from the Factory
  final Scheduler scheduler = StdSchedulerFactory.getDefaultScheduler();

  try {
    // and start it off
    scheduler.start();

    final Random random = new Random();

    // Define a Trigger that will fire "later" (one minute from now, then
    // every two days) — the job stays pending for the duration of the test.
    final JobDetail job2 = new JobDetail("job" + random.nextInt(), null, JobTestImpl.class);
    final SimpleTrigger trigger2 =
        new SimpleTrigger(
            "trigger" + random.nextInt(), null, new Date(System.currentTimeMillis() + 60000));
    trigger2.setRepeatInterval(2 * 24L * 60 * 60 * 1000);
    scheduler.scheduleJob(job2, trigger2);
    scheduler.pauseJob(job2.getName(), job2.getGroup());
    try {
      final JobDetail job3 = new JobDetail("job" + random.nextInt(), null, JobTestImpl.class);
      // cron trigger that will never fire (year 2030)
      final Trigger trigger3 =
          new CronTrigger("crontrigger" + random.nextInt(), null, "0 0 0 * * ? 2030");
      scheduler.scheduleJob(job3, trigger3);

      // another trigger that will never fire (first included day, yearly)
      final NthIncludedDayTrigger trigger4 =
          new NthIncludedDayTrigger("nth trigger" + random.nextInt(), null);
      trigger4.setN(1);
      trigger4.setIntervalType(NthIncludedDayTrigger.INTERVAL_TYPE_YEARLY);
      trigger4.setJobName(job3.getName());
      scheduler.scheduleJob(trigger4);
    } catch (final ParseException e) {
      throw new IllegalStateException(e);
    }

    // JavaInformations must be re-created to pick up the jobs
    // (but "no job" yet in the counter)
    final List<JavaInformations> javaInformationsList2 =
        Collections.singletonList(new JavaInformations(null, true));
    final HtmlReport htmlReport =
        new HtmlReport(collector, null, javaInformationsList2, Period.TOUT, writer);
    htmlReport.toHtml(null, null);
    assertNotEmptyAndClear(writer);

    // launch 10 jobs to be reasonably sure at least one of them fails
    // (randomly, about 2 out of 10 fail)
    final Map<JobDetail, SimpleTrigger> triggersByJob =
        new LinkedHashMap<JobDetail, SimpleTrigger>();
    for (int i = 0; i < 10; i++) {
      // Define a Trigger that will fire "now"
      final JobDetail job = new JobDetail("job" + random.nextInt(), null, JobTestImpl.class);
      job.setDescription("description");
      final SimpleTrigger trigger =
          new SimpleTrigger("trigger" + random.nextInt(), null, new Date());
      // Schedule the job with the trigger
      scheduler.scheduleJob(job, trigger);
      triggersByJob.put(job, trigger);
    }
    // JobTestImpl sleeps at most 2s, so wait for the jobs in order to count them
    try {
      Thread.sleep(3000);
    } catch (final InterruptedException e) {
      throw new IllegalStateException(e);
    }
    for (final Map.Entry<JobDetail, SimpleTrigger> entry : triggersByJob.entrySet()) {
      // and re-launch them so that they are "in progress"
      entry.getValue().setRepeatInterval(60000);
      scheduler.scheduleJob(entry.getKey(), entry.getValue());
    }

    // JavaInformations must be re-created to pick up the jobs
    setProperty(Parameter.SYSTEM_ACTIONS_ENABLED, Boolean.FALSE.toString());
    final List<JavaInformations> javaInformationsList3 =
        Collections.singletonList(new JavaInformations(null, true));
    final HtmlReport htmlReport3 =
        new HtmlReport(collector, null, javaInformationsList3, Period.TOUT, writer);
    htmlReport3.toHtml(null, null);
    assertNotEmptyAndClear(writer);
  } finally {
    scheduler.shutdown();
    JobGlobalListener.getJobCounter().clear();
    JobGlobalListener.destroyJobGlobalListener();
  }
}
/**
 * Renders the grid of currently scheduled (non-paused) simple triggers.
 * Triggers whose job is not currently executing get a "Cancel Job" action
 * button that posts back to /pages/cancelScheduledJob.
 *
 * @return a ModelMap containing the rendered scheduled-jobs table
 * @throws SchedulerException if scheduler state cannot be queried
 */
@RequestMapping("/listCurrentScheduledJobs")
public ModelMap listScheduledJobs(HttpServletRequest request, HttpServletResponse response)
    throws SchedulerException {
  Locale locale = LocaleResolver.getLocale(request);
  ResourceBundleProvider.updateLocale(locale);
  ModelMap gridMap = new ModelMap();
  String[] triggerNames;
  boolean showMoreLink = false;
  if (request.getParameter("showMoreLink") != null) {
    showMoreLink = Boolean.parseBoolean(request.getParameter("showMoreLink").toString());
  } else {
    showMoreLink = true;
  }
  request.setAttribute("showMoreLink", showMoreLink + "");
  // request.setAttribute("studySubjectId",studySubjectId);
  /*SubjectIdSDVFactory tableFactory = new SubjectIdSDVFactory();
   * @RequestParam("studySubjectId") int studySubjectId,*/
  request.setAttribute("imagePathPrefix", "../");
  ArrayList<String> pageMessages = (ArrayList<String>) request.getAttribute("pageMessages");
  if (pageMessages == null) {
    pageMessages = new ArrayList<String>();
  }
  request.setAttribute("pageMessages", pageMessages);
  // Collect "<jobName><group>" keys for all currently executing jobs, so we
  // can mark matching triggers below.
  List<JobExecutionContext> listCurrentJobs = new ArrayList<JobExecutionContext>();
  listCurrentJobs = scheduler.getCurrentlyExecutingJobs();
  Iterator<JobExecutionContext> itCurrentJobs = listCurrentJobs.iterator();
  List<String> currentJobList = new ArrayList<String>();
  while (itCurrentJobs.hasNext()) {
    JobExecutionContext temp = itCurrentJobs.next();
    currentJobList.add(temp.getTrigger().getJobName() + temp.getTrigger().getGroup());
  }
  // Gather every non-paused trigger across all trigger groups.
  String[] triggerGroups = scheduler.getTriggerGroupNames();
  List<SimpleTrigger> simpleTriggers = new ArrayList<SimpleTrigger>();
  int index1 = 0;
  for (String triggerGroup : triggerGroups) {
    logger.debug("Group: " + triggerGroup + " contains the following triggers");
    triggerNames = scheduler.getTriggerNames(triggerGroup);
    for (String triggerName : triggerNames) {
      int state = scheduler.getTriggerState(triggerName, triggerGroup);
      logger.debug("- " + triggerName);
      if (state != Trigger.STATE_PAUSED) {
        simpleTriggers.add(
            index1, (SimpleTrigger) scheduler.getTrigger(triggerName, triggerGroup));
        index1++;
      }
    }
  }
  // Build one ScheduledJobs row per trigger that carries an extract bean.
  List<ScheduledJobs> jobsScheduled = new ArrayList<ScheduledJobs>();
  int index = 0;
  for (SimpleTrigger st : simpleTriggers) {
    boolean isExecuting = currentJobList.contains(st.getJobName() + st.getGroup());
    ScheduledJobs jobs = new ScheduledJobs();
    ExtractPropertyBean epBean = null;
    if (st.getJobDataMap() != null) {
      epBean = (ExtractPropertyBean) st.getJobDataMap().get(EP_BEAN);
    }
    if (epBean != null) {
      StringBuilder checkbox = new StringBuilder();
      checkbox.append("<input style='margin-right: 5px' type='checkbox'/>");
      StringBuilder actions = new StringBuilder("<table><tr><td>");
      if (isExecuting) {
        // Executing jobs cannot be cancelled from this page.
        actions.append(" ");
      } else {
        // Inline JS that fills the hidden form fields and submits a cancel
        // request for this trigger.
        String contextPath = request.getContextPath();
        StringBuilder jsCodeString =
            new StringBuilder("this.form.method='GET'; this.form.action='")
                .append(contextPath)
                .append("/pages/cancelScheduledJob")
                .append("';")
                .append("this.form.theJobName.value='")
                .append(st.getJobName())
                .append("';")
                .append("this.form.theJobGroupName.value='")
                .append(st.getJobGroup())
                .append("';")
                .append("this.form.theTriggerName.value='")
                .append(st.getName())
                .append("';")
                .append("this.form.theTriggerGroupName.value='")
                .append(st.getGroup())
                .append("';")
                .append("this.form.submit();");
        actions
            .append("<td><input type=\"submit\" class=\"button\" value=\"Cancel Job\" ")
            .append("name=\"cancelJob\" onclick=\"")
            .append(jsCodeString.toString())
            .append("\" />");
      }
      actions.append("</td></tr></table>");
      jobs.setCheckbox(checkbox.toString());
      jobs.setDatasetId(epBean.getDatasetName());
      String fireTime =
          st.getStartTime() != null ? longFormat(locale).format(st.getStartTime()) : "";
      jobs.setFireTime(fireTime);
      if (st.getNextFireTime() != null) {
        jobs.setScheduledFireTime(longFormat(locale).format(st.getNextFireTime()));
      }
      jobs.setExportFileName(epBean.getExportFileName()[0]);
      jobs.setAction(actions.toString());
      jobs.setJobStatus(isExecuting ? "Currently Executing" : "Scheduled");
      jobsScheduled.add(index, jobs);
      index++;
    }
  }
  logger.debug("totalRows" + index);
  request.setAttribute("totalJobs", index);
  request.setAttribute("jobs", jobsScheduled);
  TableFacade facade = scheduledJobTableFactory.createTable(request, response);
  String sdvMatrix = facade.render();
  gridMap.addAttribute(SCHEDULED_TABLE_ATTRIBUTE, sdvMatrix);
  return gridMap;
}
/**
 * Builds a SchedulerResponse describing the stored job identified by jobKey,
 * including trigger details when a trigger with the same name/group exists.
 * Returns {@code null} when no such job is stored.
 *
 * @param scheduler the Quartz scheduler to query
 * @param jobKey name/group key of the job
 * @return the populated response, or {@code null} if the job does not exist
 * @throws Exception if scheduler state cannot be read
 */
protected SchedulerResponse getScheduledJob(Scheduler scheduler, JobKey jobKey) throws Exception {
  JobDetail jobDetail = scheduler.getJobDetail(jobKey);
  if (jobDetail == null) {
    return null;
  }
  SchedulerResponse schedulerResponse = new SchedulerResponse();
  JobDataMap jobDataMap = jobDetail.getJobDataMap();
  String description = jobDataMap.getString(SchedulerEngine.DESCRIPTION);
  schedulerResponse.setDescription(description);
  String destinationName = jobDataMap.getString(SchedulerEngine.DESTINATION_NAME);
  schedulerResponse.setDestinationName(destinationName);
  Message message = getMessage(jobDataMap);
  JobState jobState = getJobState(jobDataMap);
  message.put(SchedulerEngine.JOB_STATE, jobState);
  schedulerResponse.setMessage(message);
  StorageType storageType =
      StorageType.valueOf(jobDataMap.getString(SchedulerEngine.STORAGE_TYPE));
  schedulerResponse.setStorageType(storageType);
  String jobName = jobKey.getName();
  String groupName = jobKey.getGroup();
  // Triggers are looked up under the same name/group as the job itself.
  TriggerKey triggerKey = new TriggerKey(jobName, groupName);
  Trigger trigger = scheduler.getTrigger(triggerKey);
  if (trigger == null) {
    // No trigger attached: return the job identity without fire-time data.
    schedulerResponse.setGroupName(groupName);
    schedulerResponse.setJobName(jobName);
    return schedulerResponse;
  }
  message.put(SchedulerEngine.END_TIME, trigger.getEndTime());
  message.put(SchedulerEngine.FINAL_FIRE_TIME, trigger.getFinalFireTime());
  message.put(SchedulerEngine.NEXT_FIRE_TIME, trigger.getNextFireTime());
  message.put(SchedulerEngine.PREVIOUS_FIRE_TIME, trigger.getPreviousFireTime());
  message.put(SchedulerEngine.START_TIME, trigger.getStartTime());
  // Translate the concrete Quartz trigger into the portal's trigger model.
  if (trigger instanceof CalendarIntervalTrigger) {
    CalendarIntervalTrigger calendarIntervalTrigger = CalendarIntervalTrigger.class.cast(trigger);
    IntervalUnit intervalUnit = calendarIntervalTrigger.getRepeatIntervalUnit();
    schedulerResponse.setTrigger(
        new IntervalTrigger(
            jobName,
            groupName,
            calendarIntervalTrigger.getStartTime(),
            calendarIntervalTrigger.getEndTime(),
            calendarIntervalTrigger.getRepeatInterval(),
            TimeUnit.valueOf(intervalUnit.name())));
  } else if (trigger instanceof CronTrigger) {
    CronTrigger cronTrigger = CronTrigger.class.cast(trigger);
    schedulerResponse.setTrigger(
        new com.liferay.portal.kernel.scheduler.CronTrigger(
            jobName,
            groupName,
            cronTrigger.getStartTime(),
            cronTrigger.getEndTime(),
            cronTrigger.getCronExpression()));
  } else if (trigger instanceof SimpleTrigger) {
    SimpleTrigger simpleTrigger = SimpleTrigger.class.cast(trigger);
    schedulerResponse.setTrigger(
        new IntervalTrigger(
            jobName,
            groupName,
            simpleTrigger.getStartTime(),
            simpleTrigger.getEndTime(),
            (int) simpleTrigger.getRepeatInterval(),
            TimeUnit.MILLISECOND));
  }
  return schedulerResponse;
}
/**
 * Registers a two-step Spring Batch job (loan arrears, then portfolio-at-risk) with the
 * job registry and schedules it on the Quartz scheduler to repeat indefinitely.
 *
 * @param initialTime first fire time of the trigger
 * @param delay repeat interval between executions, in milliseconds
 * @param jobRegistry registry the batch {@link JobFactory} is registered with
 * @param jobRepository repository backing both steps and the job
 * @param jobData data passed to the Quartz job detail
 * @param transactionManager transaction manager for both tasklet steps
 * @throws TaskSystemException wrapping any registration, reflection, or scheduling failure
 */
@Deprecated
public void scheduleLoanArrearsAndPortfolioAtRisk(
    Date initialTime,
    long delay,
    JobRegistry jobRegistry,
    final JobRepository jobRepository,
    Map<String, Object> jobData,
    ResourcelessTransactionManager transactionManager)
    throws TaskSystemException {
  final String jobName = "LoanArrearsAndPortfolioAtRiskTask";
  try {
    // The two steps only differ in name and tasklet class; build them via one helper.
    final TaskletStep step1 =
        createTaskletStep(jobName + "-step-1", "LoanArrearsTask", jobRepository, transactionManager);
    final TaskletStep step2 =
        createTaskletStep(
            jobName + "-step-2", "PortfolioAtRiskTask", jobRepository, transactionManager);
    jobRegistry.register(
        new JobFactory() {
          @Override
          public Job createJob() {
            SimpleJob job = new SimpleJob(jobName + "Job");
            job.setJobRepository(jobRepository);
            job.setRestartable(true);
            job.registerJobExecutionListener(new BatchJobListener());
            job.addStep(step1);
            job.addStep(step2);
            return job;
          }

          @Override
          public String getJobName() {
            return jobName + "Job";
          }
        });
  } catch (Exception e) {
    throw new TaskSystemException(e);
  }
  JobDetailBean jobDetailBean = new JobDetailBean();
  jobDetailBean.setJobDataAsMap(jobData);
  try {
    // NOTE(review): the Quartz job class is the PortfolioAtRisk tasklet even though the
    // registered batch job runs both steps -- confirm this is intentional.
    jobDetailBean.setJobClass(Class.forName(BATCH_JOB_CLASS_PATH_PREFIX + "PortfolioAtRiskTask"));
  } catch (ClassNotFoundException cnfe) {
    throw new TaskSystemException(cnfe);
  }
  jobDetailBean.setName(jobName + "Job");
  jobDetailBean.setGroup(Scheduler.DEFAULT_GROUP);
  jobDetailBean.afterPropertiesSet();
  SimpleTrigger trigger = new SimpleTrigger();
  trigger.setName(jobName + "Job");
  trigger.setGroup(Scheduler.DEFAULT_GROUP);
  trigger.setStartTime(initialTime);
  trigger.setRepeatInterval(delay);
  trigger.setRepeatCount(SimpleTrigger.REPEAT_INDEFINITELY);
  try {
    scheduler.scheduleJob(jobDetailBean, trigger);
  } catch (SchedulerException se) {
    throw new TaskSystemException(se);
  }
}

/**
 * Builds a fully initialized {@link TaskletStep} whose tasklet is instantiated
 * reflectively from {@code BATCH_JOB_CLASS_PATH_PREFIX + getHelperName(taskName)}.
 *
 * @throws Exception if the tasklet class cannot be loaded/instantiated or the step
 *     fails validation in {@code afterPropertiesSet()}
 */
private TaskletStep createTaskletStep(
    String stepName,
    String taskName,
    JobRepository jobRepository,
    ResourcelessTransactionManager transactionManager)
    throws Exception {
  TaskletStep step = new TaskletStep();
  step.setName(stepName);
  step.setTasklet(
      (Tasklet)
          Class.forName(BATCH_JOB_CLASS_PATH_PREFIX + getHelperName(taskName)).newInstance());
  step.setJobRepository(jobRepository);
  step.setTransactionManager(transactionManager);
  step.afterPropertiesSet();
  return step;
}
public SimpleTrigger generateTrigger( FormProcessor fp, UserAccountBean userAccount, StudyBean study, String locale) { Date startDateTime = fp.getDateTime(DATE_START_JOB); // check the above? int datasetId = fp.getInt(DATASET_ID); String period = fp.getString(PERIOD); String email = fp.getString(EMAIL); String jobName = fp.getString(JOB_NAME); String jobDesc = fp.getString(JOB_DESC); String spss = fp.getString(SPSS); String tab = fp.getString(TAB); String cdisc = fp.getString(CDISC); String cdisc12 = fp.getString(ExampleSpringJob.CDISC12); String cdisc13 = fp.getString(ExampleSpringJob.CDISC13); String cdisc13oc = fp.getString(ExampleSpringJob.CDISC13OC); BigInteger interval = new BigInteger("0"); if ("monthly".equalsIgnoreCase(period)) { interval = new BigInteger("2419200000"); // how many // milliseconds in // a month? should // be 24192000000 } else if ("weekly".equalsIgnoreCase(period)) { interval = new BigInteger("604800000"); // how many // milliseconds in // a week? should // be 6048000000 } else { // daily interval = new BigInteger("86400000"); // how many // milliseconds in a // day? } // set up and commit job here SimpleTrigger trigger = new SimpleTrigger(jobName, "DEFAULT", 64000, interval.longValue()); // set the job detail name, // based on our choice of format above // what if there is more than one detail? // what is the number of times it should repeat? 
// arbitrary large number, 64K should be enough :) trigger.setDescription(jobDesc); // set just the start date trigger.setStartTime(startDateTime); trigger.setName(jobName); // + datasetId); trigger.setGroup("DEFAULT"); // + datasetId); trigger.setMisfireInstruction( SimpleTrigger.MISFIRE_INSTRUCTION_RESCHEDULE_NEXT_WITH_EXISTING_COUNT); // set job data map JobDataMap jobDataMap = new JobDataMap(); jobDataMap.put(DATASET_ID, datasetId); jobDataMap.put(PERIOD, period); jobDataMap.put(EMAIL, email); jobDataMap.put(TAB, tab); jobDataMap.put(CDISC, cdisc); jobDataMap.put(ExampleSpringJob.CDISC12, cdisc12); jobDataMap.put(ExampleSpringJob.LOCALE, locale); // System.out.println("found 1.2: " + // jobDataMap.get(ExampleSpringJob.CDISC12)); jobDataMap.put(ExampleSpringJob.CDISC13, cdisc13); // System.out.println("found 1.3: " + // jobDataMap.get(ExampleSpringJob.CDISC13)); jobDataMap.put(ExampleSpringJob.CDISC13OC, cdisc13oc); // System.out.println("found 1.3oc: " + // jobDataMap.get(ExampleSpringJob.CDISC13OC)); jobDataMap.put(SPSS, spss); jobDataMap.put(USER_ID, userAccount.getId()); // StudyDAO studyDAO = new StudyDAO(); jobDataMap.put(STUDY_ID, study.getId()); jobDataMap.put(STUDY_NAME, study.getName()); jobDataMap.put(STUDY_OID, study.getOid()); trigger.setJobDataMap(jobDataMap); // trigger.setRepeatInterval(interval.longValue()); // System.out.println("default for volatile: " + trigger.isVolatile()); trigger.setVolatility(false); return trigger; }
/**
 * Quartz entry point for the scheduled dataset-export job.
 *
 * <p>Reads the export parameters from the merged job data map (see the matching
 * {@code generateTrigger}/CreateJobExportServlet data map), generates the requested
 * export files (tab, ODM oc1.2/1.2/1.3/oc1.3, SPSS), e-mails the configured recipient
 * links to each generated file, and records a success/failure audit row.
 *
 * <p>NOTE(review): any exception is only logged at debug level and printed -- the job
 * never signals failure to Quartz via {@link JobExecutionException}.
 */
@Override
protected void executeInternal(JobExecutionContext context) throws JobExecutionException {
    // need to generate a Locale so that user beans and other things will generate normally
    // NOTE(review): new Locale("en-US") yields language tag "en-us", not language "en" +
    // country "US" (that would be new Locale("en", "US")) -- confirm intended.
    Locale locale = new Locale("en-US");
    ResourceBundleProvider.updateLocale(locale);
    ResourceBundle pageMessages = ResourceBundleProvider.getPageMessagesBundle();
    JobDataMap dataMap = context.getMergedJobDataMap();
    SimpleTrigger trigger = (SimpleTrigger) context.getTrigger();
    try {
        // Pull collaborators out of the Spring context stashed in the scheduler context.
        ApplicationContext appContext =
            (ApplicationContext) context.getScheduler().getContext().get("applicationContext");
        String studySubjectNumber =
            ((CoreResources) appContext.getBean("coreResources")).getField("extract.number");
        coreResources = (CoreResources) appContext.getBean("coreResources");
        ruleSetRuleDao = (RuleSetRuleDao) appContext.getBean("ruleSetRuleDao");
        dataSource = (DataSource) appContext.getBean("dataSource");
        mailSender = (OpenClinicaMailSender) appContext.getBean("openClinicaMailSender");
        AuditEventDAO auditEventDAO = new AuditEventDAO(dataSource);

        // Data map contents must stay in sync with the map built in CreateJobExportServlet.
        String alertEmail = dataMap.getString(EMAIL);
        String localeStr = dataMap.getString(LOCALE);
        if (localeStr != null) {
            // Switch to the locale the job was scheduled with, if one was recorded.
            locale = new Locale(localeStr);
            ResourceBundleProvider.updateLocale(locale);
            pageMessages = ResourceBundleProvider.getPageMessagesBundle();
        }
        int dsId = dataMap.getInt(DATASET_ID);
        String tab = dataMap.getString(TAB);
        String cdisc = dataMap.getString(CDISC);
        // Older jobs may predate the 1.2/1.3 flags; treat missing as "off" ("0").
        String cdisc12 = dataMap.getString(CDISC12);
        if (cdisc12 == null) {
            cdisc12 = "0";
        }
        String cdisc13 = dataMap.getString(CDISC13);
        if (cdisc13 == null) {
            cdisc13 = "0";
        }
        String cdisc13oc = dataMap.getString(CDISC13OC);
        if (cdisc13oc == null) {
            cdisc13oc = "0";
        }
        String spss = dataMap.getString(SPSS);
        int userId = dataMap.getInt(USER_ID);
        int studyId = dataMap.getInt(STUDY_ID);
        logger.debug("-- found the job: " + dsId + " dataset id");
        // Maps generated file name -> file id; reused/overwritten by each export below.
        HashMap fileName = new HashMap<String, Integer>();
        if (dsId > 0) {
            // trying to not throw an error if there's no dataset id
            DatasetDAO dsdao = new DatasetDAO(dataSource);
            DatasetBean datasetBean = (DatasetBean) dsdao.findByPK(dsId);
            StudyDAO studyDao = new StudyDAO(dataSource);
            UserAccountDAO userAccountDAO = new UserAccountDAO(dataSource);
            String generalFileDir = "";
            String generalFileDirCopy = "";
            String exportFilePath = SQLInitServlet.getField("exportFilePath");
            // Per-run output directory: DATASET_DIR/<dataset id>/yyyy/MM/dd/HHmmssSSS/
            String pattern =
                "yyyy" + File.separator + "MM" + File.separator + "dd" + File.separator
                    + "HHmmssSSS" + File.separator;
            SimpleDateFormat sdfDir = new SimpleDateFormat(pattern);
            generalFileDir =
                DATASET_DIR + datasetBean.getId() + File.separator
                    + sdfDir.format(new java.util.Date());
            if (!"".equals(exportFilePath)) {
                // Optional second copy of every generated file under the configured path.
                generalFileDirCopy =
                    SQLInitServlet.getField("filePath") + exportFilePath + File.separator;
            }
            long sysTimeBegin = System.currentTimeMillis();
            // set up the user bean here; needs to also be captured by the servlet
            userBean = (UserAccountBean) userAccountDAO.findByPK(userId);
            generateFileService =
                new GenerateExtractFileService(dataSource, userBean, coreResources, ruleSetRuleDao);
            // tbh #5796 - covers a bug when the user changes studies, 10/2010:
            // always resolve the parent study so site-level datasets export correctly.
            StudyBean activeStudy = (StudyBean) studyDao.findByPK(studyId);
            StudyBean parentStudy = new StudyBean();
            logger.debug(
                "active study: " + studyId + " parent study: " + activeStudy.getParentStudyId());
            if (activeStudy.getParentStudyId() > 0) {
                parentStudy = (StudyBean) studyDao.findByPK(activeStudy.getParentStudyId());
            } else {
                parentStudy = activeStudy; // covers a bug in tab file creation, tbh 01/2009
            }
            logger.debug("-- found extract bean ");
            ExtractBean eb =
                generateFileService.generateExtractBean(datasetBean, activeStudy, parentStudy);
            MessageFormat mf = new MessageFormat("");
            // message: HTML e-mail body; auditMessage: text stored in the audit row.
            StringBuffer message = new StringBuffer();
            StringBuffer auditMessage = new StringBuffer();
            message.append(
                "<p>"
                    + pageMessages.getString("email_header_1")
                    + " "
                    + EmailEngine.getAdminEmail()
                    + " "
                    + pageMessages.getString("email_header_2")
                    + " Job Execution "
                    + pageMessages.getString("email_header_3")
                    + "</p>");
            message.append("<P>Dataset: " + datasetBean.getName() + "</P>");
            message.append("<P>Study: " + activeStudy.getName() + "</P>");
            message.append(
                "<p>"
                    + pageMessages.getString("html_email_body_1")
                    + datasetBean.getName()
                    + pageMessages.getString("html_email_body_2")
                    + SQLInitServlet.getField("sysURL")
                    + pageMessages.getString("html_email_body_3")
                    + "</p>");
            // Each requested format: generate the file, then append an e-mail paragraph
            // and an audit line linking to it via AccessFile?fileId=<id>.
            if ("1".equals(tab)) {
                logger.debug("-- gen tab file 01");
                fileName =
                    generateFileService.createTabFile(
                        eb,
                        sysTimeBegin,
                        generalFileDir,
                        datasetBean,
                        activeStudy.getId(),
                        parentStudy.getId(),
                        generalFileDirCopy);
                message.append(
                    "<p>"
                        + pageMessages.getString("html_email_body_4")
                        + " "
                        + getFileNameStr(fileName)
                        + pageMessages.getString("html_email_body_4_5")
                        + SQLInitServlet.getField("sysURL.base")
                        + "AccessFile?fileId="
                        + getFileIdInt(fileName)
                        + pageMessages.getString("html_email_body_3")
                        + "</p>");
                auditMessage.append(
                    pageMessages.getString("you_can_access_tab_delimited")
                        + getFileIdInt(fileName)
                        + pageMessages.getString("access_end"));
            }
            if ("1".equals(cdisc)) {
                // ODM 1.2 with OpenClinica extensions.
                String odmVersion = "oc1.2";
                fileName =
                    generateFileService.createODMFile(
                        odmVersion,
                        sysTimeBegin,
                        generalFileDir,
                        datasetBean,
                        activeStudy,
                        generalFileDirCopy,
                        eb,
                        activeStudy.getId(),
                        parentStudy.getId(),
                        studySubjectNumber,
                        true,
                        true,
                        true,
                        null);
                logger.debug("-- gen odm file");
                message.append(
                    "<p>"
                        + pageMessages.getString("html_email_body_4")
                        + " "
                        + getFileNameStr(fileName)
                        + pageMessages.getString("html_email_body_4_5")
                        + SQLInitServlet.getField("sysURL.base")
                        + "AccessFile?fileId="
                        + getFileIdInt(fileName)
                        + pageMessages.getString("html_email_body_3")
                        + "</p>");
                auditMessage.append(
                    pageMessages.getString("you_can_access_odm_12")
                        + getFileIdInt(fileName)
                        + pageMessages.getString("access_end"));
            }
            if ("1".equals(cdisc12)) {
                // Plain ODM 1.2 XML.
                String odmVersion = "1.2";
                fileName =
                    generateFileService.createODMFile(
                        odmVersion,
                        sysTimeBegin,
                        generalFileDir,
                        datasetBean,
                        activeStudy,
                        generalFileDirCopy,
                        eb,
                        activeStudy.getId(),
                        parentStudy.getId(),
                        studySubjectNumber,
                        true,
                        true,
                        true,
                        null);
                logger.debug("-- gen odm file 1.2 default");
                message.append(
                    "<p>"
                        + pageMessages.getString("html_email_body_4")
                        + " "
                        + getFileNameStr(fileName)
                        + pageMessages.getString("html_email_body_4_5")
                        + SQLInitServlet.getField("sysURL.base")
                        + "AccessFile?fileId="
                        + getFileIdInt(fileName)
                        + pageMessages.getString("html_email_body_3")
                        + "</p>");
                auditMessage.append(
                    pageMessages.getString("you_can_access_odm_12_xml")
                        + getFileIdInt(fileName)
                        + pageMessages.getString("access_end"));
            }
            if ("1".equals(cdisc13)) {
                // Plain ODM 1.3 XML.
                // NOTE(review): uses bundle key "you_can_access_odm_13" while the oc1.3
                // branch uses "..._odm_13_xml" -- the 1.2 branches pair the keys the other
                // way round; confirm the keys are not swapped.
                String odmVersion = "1.3";
                fileName =
                    generateFileService.createODMFile(
                        odmVersion,
                        sysTimeBegin,
                        generalFileDir,
                        datasetBean,
                        activeStudy,
                        generalFileDirCopy,
                        eb,
                        activeStudy.getId(),
                        parentStudy.getId(),
                        studySubjectNumber,
                        true,
                        true,
                        true,
                        null);
                logger.debug("-- gen odm file 1.3");
                message.append(
                    "<p>"
                        + pageMessages.getString("html_email_body_4")
                        + " "
                        + getFileNameStr(fileName)
                        + pageMessages.getString("html_email_body_4_5")
                        + SQLInitServlet.getField("sysURL.base")
                        + "AccessFile?fileId="
                        + getFileIdInt(fileName)
                        + pageMessages.getString("html_email_body_3")
                        + "</p>");
                auditMessage.append(
                    pageMessages.getString("you_can_access_odm_13")
                        + getFileIdInt(fileName)
                        + pageMessages.getString("access_end"));
            }
            if ("1".equals(cdisc13oc)) {
                // ODM 1.3 with OpenClinica extensions.
                String odmVersion = "oc1.3";
                fileName =
                    generateFileService.createODMFile(
                        odmVersion,
                        sysTimeBegin,
                        generalFileDir,
                        datasetBean,
                        activeStudy,
                        generalFileDirCopy,
                        eb,
                        activeStudy.getId(),
                        parentStudy.getId(),
                        studySubjectNumber,
                        true,
                        true,
                        true,
                        null);
                logger.debug("-- gen odm file 1.3 oc");
                message.append(
                    "<p>"
                        + pageMessages.getString("html_email_body_4")
                        + " "
                        + getFileNameStr(fileName)
                        + pageMessages.getString("html_email_body_4_5")
                        + SQLInitServlet.getField("sysURL.base")
                        + "AccessFile?fileId="
                        + getFileIdInt(fileName)
                        + pageMessages.getString("html_email_body_3")
                        + "</p>");
                auditMessage.append(
                    pageMessages.getString("you_can_access_odm_13_xml")
                        + getFileIdInt(fileName)
                        + pageMessages.getString("access_end"));
            }
            if ("1".equals(spss)) {
                SPSSReportBean answer = new SPSSReportBean();
                fileName =
                    generateFileService.createSPSSFile(
                        datasetBean,
                        eb,
                        activeStudy,
                        parentStudy,
                        sysTimeBegin,
                        generalFileDir,
                        answer,
                        generalFileDirCopy);
                logger.debug("-- gen spss file");
                message.append(
                    "<p>"
                        + pageMessages.getString("html_email_body_4")
                        + " "
                        + getFileNameStr(fileName)
                        + pageMessages.getString("html_email_body_4_5")
                        + SQLInitServlet.getField("sysURL.base")
                        + "AccessFile?fileId="
                        + getFileIdInt(fileName)
                        + pageMessages.getString("html_email_body_3")
                        + "</p>");
                auditMessage.append(
                    pageMessages.getString("you_can_access_spss")
                        + getFileIdInt(fileName)
                        + pageMessages.getString("access_end"));
            }
            // wrap up the message, and send the email
            message.append(
                "<p>"
                    + pageMessages.getString("html_email_body_5")
                    + "</P><P>"
                    + pageMessages.getString("email_footer"));
            try {
                mailSender.sendEmail(
                    alertEmail.trim(),
                    pageMessages.getString("job_ran_for") + " " + datasetBean.getName(),
                    message.toString(),
                    true);
            } catch (OpenClinicaSystemException ose) {
                // Do Nothing, In the future we might want to have an email
                // status added to system.
            }
            // Record a success audit row carrying the per-file access links.
            TriggerBean triggerBean = new TriggerBean();
            triggerBean.setDataset(datasetBean);
            triggerBean.setUserAccount(userBean);
            triggerBean.setFullName(trigger.getName());
            auditEventDAO.createRowForExtractDataJobSuccess(triggerBean, auditMessage.toString());
        } else {
            // No dataset id in the data map: record a failure audit row instead.
            // NOTE(review): userBean is only assigned inside the dsId > 0 branch, so this
            // uses whatever value the field already held -- possibly null.
            TriggerBean triggerBean = new TriggerBean();
            triggerBean.setUserAccount(userBean);
            triggerBean.setFullName(trigger.getName());
            auditEventDAO.createRowForExtractDataJobFailure(triggerBean);
        }
    } catch (Exception e) {
        // TODO Auto-generated catch block -- ideally should generate a fail
        // msg here, tbh 02/2009
        logger.debug("-- found exception: " + e.getMessage());
        e.printStackTrace();
    }
}