@Override
protected void processRequest() throws Exception {
  FormProcessor fp = new FormProcessor(request);
  TriggerService triggerService = new TriggerService();
  String action = fp.getString("action");
  String triggerName = fp.getString("tname");
  scheduler = getScheduler();
  System.out.println("found trigger name " + triggerName);
  Trigger trigger = scheduler.getTrigger(triggerName.trim(), TRIGGER_IMPORT_GROUP);
  System.out.println("found trigger from the other side " + trigger.getFullName());
  if (StringUtil.isBlank(action)) {
    setUpServlet(trigger);
    forwardPage(Page.UPDATE_JOB_IMPORT);
  } else if ("confirmall".equalsIgnoreCase(action)) {
    HashMap errors =
        triggerService.validateImportJobForm(
            fp, request, scheduler.getTriggerNames("DEFAULT"), trigger.getName());
    if (!errors.isEmpty()) {
      // send back
      addPageMessage(
          "Your modifications caused an error, please see the messages for more information.");
      setUpServlet(trigger);
      forwardPage(Page.UPDATE_JOB_IMPORT);
    } else {
      StudyDAO studyDAO = new StudyDAO(sm.getDataSource());
      int studyId = fp.getInt(CreateJobImportServlet.STUDY_ID);
      StudyBean study = (StudyBean) studyDAO.findByPK(studyId);
      // in place of the user's current study, tbh
      Date startDate = trigger.getStartTime();
      trigger =
          triggerService.generateImportTrigger(
              fp, sm.getUserBean(), study, startDate, request.getLocale().getLanguage());
      // scheduler = getScheduler();
      JobDetailBean jobDetailBean = new JobDetailBean();
      jobDetailBean.setGroup(TRIGGER_IMPORT_GROUP);
      jobDetailBean.setName(trigger.getName());
      jobDetailBean.setJobClass(org.akaza.openclinica.web.job.ImportStatefulJob.class);
      jobDetailBean.setJobDataMap(trigger.getJobDataMap());
      jobDetailBean.setDurability(true); // need durability?
      jobDetailBean.setVolatility(false);
      try {
        scheduler.deleteJob(triggerName, TRIGGER_IMPORT_GROUP);
        Date dateStart = scheduler.scheduleJob(jobDetailBean, trigger);
        addPageMessage("Your job has been successfully modified.");
        forwardPage(Page.VIEW_IMPORT_JOB_SERVLET);
      } catch (SchedulerException se) {
        se.printStackTrace();
        // set a message here with the exception message
        setUpServlet(trigger);
        addPageMessage(
            "There was an unspecified error with your creation, please contact an administrator.");
        forwardPage(Page.UPDATE_JOB_IMPORT);
      }
    }
  }
}
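// --- Hedged sketch (not part of the servlet above) ---
// The servlet replaces an existing import job by deleting the old Quartz job and scheduling a
// regenerated trigger. Below is a minimal standalone sketch of that delete-then-schedule pattern
// using the plain Quartz 1.x API; the job/trigger/group names and the NoOpJob class are
// illustrative placeholders, not identifiers from the code above.
import java.util.Date;

import org.quartz.Job;
import org.quartz.JobDetail;
import org.quartz.JobExecutionContext;
import org.quartz.Scheduler;
import org.quartz.SchedulerException;
import org.quartz.SimpleTrigger;
import org.quartz.impl.StdSchedulerFactory;

public class RescheduleImportSketch {

  /** Placeholder job so the sketch is self-contained. */
  public static class NoOpJob implements Job {
    public void execute(JobExecutionContext context) {
      // no work; stands in for the real import job class
    }
  }

  public static void main(String[] args) throws SchedulerException {
    Scheduler scheduler = new StdSchedulerFactory().getScheduler();
    scheduler.start();

    // Deleting the job also unschedules any triggers still pointing at it.
    scheduler.deleteJob("importJob", "importGroup");

    // Re-register the rebuilt job detail together with its new trigger.
    JobDetail detail = new JobDetail("importJob", "importGroup", NoOpJob.class);
    SimpleTrigger trigger = new SimpleTrigger("importTrigger", "importGroup", new Date());
    trigger.setJobName(detail.getName());
    trigger.setJobGroup(detail.getGroup());

    Date firstFireTime = scheduler.scheduleJob(detail, trigger);
    System.out.println("first fire time: " + firstFireTime);

    scheduler.shutdown(true);
  }
}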
/**
 * Resume (un-pause) the <code>{@link org.quartz.Job}</code> with the given name.
 *
 * <p>If any of the <code>Job</code>'s <code>Trigger</code>s missed one or more fire-times, then
 * the <code>Trigger</code>'s misfire instruction will be applied.
 *
 * @see #pauseJob(org.quartz.core.SchedulingContext, String, String)
 */
public void resumeJob(SchedulingContext ctxt, String jobName, String groupName)
    throws JobPersistenceException {
  synchronized (m_triggerLock) {
    Trigger[] triggers = getTriggersForJob(ctxt, jobName, groupName);
    for (int i = 0, length = triggers.length; i < length; i++) {
      Trigger t = triggers[i];
      resumeTrigger(ctxt, t.getName(), t.getGroup());
    }
  }
}
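// --- Hedged sketch (illustration only) ---
// Client code normally reaches the JobStore method above through the Scheduler facade. A minimal
// standalone example of pausing and then resuming a job with the Quartz 1.x API follows; the
// "reportJob"/"reportGroup" names are placeholders.
import org.quartz.Scheduler;
import org.quartz.SchedulerException;
import org.quartz.impl.StdSchedulerFactory;

public class PauseResumeSketch {

  public static void main(String[] args) throws SchedulerException {
    Scheduler scheduler = new StdSchedulerFactory().getScheduler();
    scheduler.start();

    // Pausing a job pauses all of its triggers; resuming it applies each trigger's
    // misfire instruction for any fire times missed while paused.
    scheduler.pauseJob("reportJob", "reportGroup");
    scheduler.resumeJob("reportJob", "reportGroup");

    scheduler.shutdown();
  }
}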
protected boolean triggerExists(Trigger checkTrigger) {
  try {
    Trigger trigger =
        resourceScheduler.getTrigger(checkTrigger.getName(), checkTrigger.getGroup());
    return (trigger != null);
  } catch (SchedulerException e) {
    log.error("Failed to check if the trigger exists", e);
  }
  return false;
}
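// --- Hedged companion sketch ---
// One way a guard like triggerExists() might be used inside the same class: only schedule a
// trigger if it is not registered yet, avoiding an ObjectAlreadyExistsException. The method,
// the cron expression and the "cleanup*" names are assumptions for illustration; the referenced
// job is expected to be stored in the scheduler already.
protected void scheduleIfAbsent() throws java.text.ParseException, SchedulerException {
  CronTrigger candidate = new CronTrigger("cleanupTrigger", "maintenanceGroup", "0 0 2 * * ?");
  candidate.setJobName("cleanupJob");
  candidate.setJobGroup("maintenanceGroup");
  if (!triggerExists(candidate)) {
    // scheduleJob(Trigger) attaches the trigger to the job named above.
    resourceScheduler.scheduleJob(candidate);
  }
}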
/**
 * Inform the <code>JobStore</code> that the scheduler has completed the firing of the given
 * <code>Trigger</code> (and the execution of its associated <code>Job</code>), and that the
 * <code>{@link org.quartz.JobDataMap}</code> in the given <code>JobDetail</code> should be
 * updated if the <code>Job</code> is stateful.
 */
public void triggeredJobComplete(
    SchedulingContext ctxt, Trigger trigger, JobDetail jobDetail, int triggerInstCode)
    throws JobPersistenceException {
  synchronized (m_triggerLock) {
    if (trigger.getNextFireTime() != null) {
      storeTrigger(ctxt, trigger, true);
    } else {
      removeTrigger(ctxt, trigger.getName(), trigger.getGroup());
    }
  }
}
protected boolean triggerChanged(Trigger checkTrigger) {
  try {
    Trigger trigger =
        resourceScheduler.getTrigger(checkTrigger.getName(), checkTrigger.getGroup());
    if (trigger instanceof CronTrigger) {
      return !((CronTrigger) trigger)
          .getCronExpression()
          .equals(((CronTrigger) checkTrigger).getCronExpression());
    }
  } catch (SchedulerException e) {
    log.error("Failed to check if the trigger has changed", e);
  }
  return false;
}
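// --- Hedged companion sketch ---
// If triggerChanged() reports that the stored cron expression differs from the desired one, the
// registered trigger can be swapped in place. This method is an assumption for illustration and
// is meant to sit next to triggerChanged() in the same class.
protected void rescheduleIfChanged(CronTrigger desiredTrigger) throws SchedulerException {
  if (triggerChanged(desiredTrigger)) {
    // rescheduleJob() removes the old trigger and stores the new one under the same key;
    // it returns the next fire time, or null if no trigger with that key was found.
    resourceScheduler.rescheduleJob(
        desiredTrigger.getName(), desiredTrigger.getGroup(), desiredTrigger);
  }
}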
public static Trigger convertTriggerFromNativeObject(org.quartz.Trigger quartzTrigger) {
  Trigger spagobiTrigger = new Trigger();
  spagobiTrigger.setName(quartzTrigger.getName());
  spagobiTrigger.setGroupName(quartzTrigger.getGroup());
  spagobiTrigger.setDescription(quartzTrigger.getDescription());
  // spagobiTrigger.setCalendarName( quartzTrigger.getCalendarName() );
  Assert.assertTrue(
      quartzTrigger.getCalendarName() == null,
      "quartz trigger calendar name is not null: " + quartzTrigger.getCalendarName());
  spagobiTrigger.setStartTime(quartzTrigger.getStartTime());
  spagobiTrigger.setEndTime(quartzTrigger.getEndTime());
  // Triggers that run immediately have a generated name that starts with schedule_uuid_
  // (see TriggerXMLDeserializer). It would be better to rely on a dedicated property to
  // recognize whether a trigger is meant to run immediately.
  spagobiTrigger.setRunImmediately(spagobiTrigger.getName().startsWith("schedule_uuid_"));
  if (quartzTrigger instanceof org.quartz.CronTrigger) {
    org.quartz.CronTrigger quartzCronTrigger = (org.quartz.CronTrigger) quartzTrigger;
    // dirty trick
    String expression = (String) quartzCronTrigger.getJobDataMap().get(SPAGOBI_CRON_EXPRESSION);
    if (expression != null) {
      quartzCronTrigger.getJobDataMap().remove(SPAGOBI_CRON_EXPRESSION);
    } else {
      // for backward compatibility
      expression =
          (String) quartzCronTrigger.getJobDataMap().get(SPAGOBI_CRON_EXPRESSION_DEPRECATED);
      quartzCronTrigger.getJobDataMap().remove(SPAGOBI_CRON_EXPRESSION_DEPRECATED);
    }
    spagobiTrigger.setCronExpression(new CronExpression(expression));
  }
  Job job = new Job();
  job.setName(quartzTrigger.getJobName());
  job.setGroupName(quartzTrigger.getJobGroup());
  job.setVolatile(quartzTrigger.isVolatile());
  Map<String, String> parameters =
      convertParametersFromNativeObject(quartzTrigger.getJobDataMap());
  job.addParameters(parameters);
  spagobiTrigger.setJob(job);
  return spagobiTrigger;
}
private void setUpServlet(Trigger trigger) throws Exception {
  FormProcessor fp2 = new FormProcessor(request);
  request.setAttribute(CreateJobImportServlet.JOB_NAME, trigger.getName());
  request.setAttribute(CreateJobImportServlet.JOB_DESC, trigger.getDescription());
  dataMap = trigger.getJobDataMap();
  String contactEmail = dataMap.getString(ImportSpringJob.EMAIL);
  System.out.println("found email: " + contactEmail);
  int userId = dataMap.getInt(ImportSpringJob.USER_ID);
  int hours = dataMap.getInt(CreateJobImportServlet.HOURS);
  int minutes = dataMap.getInt(CreateJobImportServlet.MINUTES);
  String directory = dataMap.getString(ImportSpringJob.DIRECTORY);
  String studyName = dataMap.getString(ImportSpringJob.STUDY_NAME);
  request.setAttribute(ImportSpringJob.EMAIL, contactEmail);
  request.setAttribute(ImportSpringJob.STUDY_NAME, studyName);
  request.setAttribute("filePath", directory);
  request.setAttribute("firstFilePath", IMPORT_DIR);
  request.setAttribute("hours", Integer.toString(hours));
  request.setAttribute("minutes", Integer.toString(minutes));
  Date jobDate = trigger.getNextFireTime();
  UserAccountDAO udao = new UserAccountDAO(sm.getDataSource());
  StudyDAO sdao = new StudyDAO(sm.getDataSource());
  // ArrayList studies = udao.findStudyByUser(ub.getName(), (ArrayList) sdao.findAll());
  // request.setAttribute("studies", studies);
  ArrayList<StudyBean> all = (ArrayList<StudyBean>) sdao.findAll();
  ArrayList<StudyBean> finalList = new ArrayList<StudyBean>();
  for (StudyBean sb : all) {
    if (!(sb.getParentStudyId() > 0)) {
      finalList.add(sb);
      // System.out.println("found study name: " + sb.getName());
      finalList.addAll(sdao.findAllByParent(sb.getId()));
    }
  }
  // System.out.println("found list of studies: " + finalList.toString());
  addEntityList(
      "studies",
      finalList,
      respage.getString("a_user_cannot_be_created_no_study_as_active"),
      Page.ADMIN_SYSTEM);
  // tbh >>
  // HashMap presetValues = new HashMap();
  // Calendar calendar = new GregorianCalendar();
  // calendar.setTime(jobDate);
  // presetValues.put(CreateJobImportServlet.DATE_START_JOB + "Hour",
  //     calendar.get(Calendar.HOUR_OF_DAY));
  // presetValues.put(CreateJobImportServlet.DATE_START_JOB + "Minute",
  //     calendar.get(Calendar.MINUTE));
  // // TODO this will have to match l10n formatting
  // presetValues.put(CreateJobImportServlet.DATE_START_JOB + "Date",
  //     (calendar.get(Calendar.MONTH) + 1) + "/" + calendar.get(Calendar.DATE) + "/"
  //         + calendar.get(Calendar.YEAR));
  // fp2.setPresetValues(presetValues);
  // setPresetValues(fp2.getPresetValues());
}
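// --- Hedged sketch (illustration only) ---
// setUpServlet() reads these keys back out of the trigger's JobDataMap, so the creation side is
// presumably populating them when the import trigger is built. A rough sketch of that write path
// is shown below; the method, the literal values and the trigger name are placeholders, not code
// from the project.
private Trigger buildImportTriggerSketch() {
  JobDataMap dataMap = new JobDataMap();
  dataMap.put(ImportSpringJob.EMAIL, "admin@example.org");
  dataMap.put(ImportSpringJob.USER_ID, 1);
  dataMap.put(CreateJobImportServlet.HOURS, 0);
  dataMap.put(CreateJobImportServlet.MINUTES, 30);
  dataMap.put(ImportSpringJob.DIRECTORY, "/tmp/import");
  dataMap.put(ImportSpringJob.STUDY_NAME, "Demo Study");

  SimpleTrigger trigger = new SimpleTrigger("importTrigger", TRIGGER_IMPORT_GROUP);
  trigger.setJobDataMap(dataMap);
  return trigger;
}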
/**
 * Schedules a given job and trigger (both wrapped by a <code>JobSchedulingBundle</code>).
 *
 * @param job job wrapper.
 * @param sched job scheduler.
 * @param localOverWriteExistingJobs locally overwrite existing jobs.
 * @exception SchedulerException if the Job or Trigger cannot be added to the Scheduler, or there
 *     is an internal Scheduler error.
 */
public void scheduleJob(
    JobSchedulingBundle job, Scheduler sched, boolean localOverWriteExistingJobs)
    throws SchedulerException {
  if ((job != null) && job.isValid()) {
    JobDetail detail = job.getJobDetail();
    JobDetail dupeJ = sched.getJobDetail(detail.getName(), detail.getGroup());
    if ((dupeJ != null) && !localOverWriteExistingJobs) {
      getLog().info("Not overwriting existing job: " + dupeJ.getFullName());
      return;
    }
    if (dupeJ != null) {
      getLog().info("Replacing job: " + detail.getFullName());
    } else {
      getLog().info("Adding job: " + detail.getFullName());
    }
    if (job.getTriggers().size() == 0 && !job.getJobDetail().isDurable()) {
      if (dupeJ == null) {
        throw new SchedulerException(
            "A new job defined without any triggers must be durable: " + detail.getFullName());
      }
      if ((dupeJ.isDurable()
          && (sched.getTriggersOfJob(detail.getName(), detail.getGroup()).length == 0))) {
        throw new SchedulerException(
            "Can't make a durable job without triggers non-durable: " + detail.getFullName());
      }
    }
    sched.addJob(detail, true);
    for (Iterator iter = job.getTriggers().iterator(); iter.hasNext(); ) {
      Trigger trigger = (Trigger) iter.next();
      trigger.setJobName(detail.getName());
      trigger.setJobGroup(detail.getGroup());
      if (trigger.getStartTime() == null) {
        trigger.setStartTime(new Date());
      }
      boolean addedTrigger = false;
      while (!addedTrigger) {
        Trigger dupeT = sched.getTrigger(trigger.getName(), trigger.getGroup());
        if (dupeT != null) {
          if (getLog().isDebugEnabled()) {
            getLog()
                .debug(
                    "Rescheduling job: "
                        + detail.getFullName()
                        + " with updated trigger: "
                        + trigger.getFullName());
          }
          if (!dupeT.getJobGroup().equals(trigger.getJobGroup())
              || !dupeT.getJobName().equals(trigger.getJobName())) {
            getLog().warn("Possibly duplicately named triggers in jobs xml file!");
          }
          sched.rescheduleJob(trigger.getName(), trigger.getGroup(), trigger);
        } else {
          if (getLog().isDebugEnabled()) {
            getLog()
                .debug(
                    "Scheduling job: "
                        + detail.getFullName()
                        + " with trigger: "
                        + trigger.getFullName());
          }
          try {
            sched.scheduleJob(trigger);
          } catch (ObjectAlreadyExistsException e) {
            if (getLog().isDebugEnabled()) {
              getLog()
                  .debug(
                      "Adding trigger: "
                          + trigger.getFullName()
                          + " for job: "
                          + detail.getFullName()
                          + " failed because the trigger already existed. "
                          + "This is likely due to a race condition between multiple instances "
                          + "in the cluster. Will try to reschedule instead.");
            }
            continue;
          }
        }
        addedTrigger = true;
      }
    }
    addScheduledJob(job);
  }
}
@Override
protected void processRequest() throws Exception {
  FormProcessor fp = new FormProcessor(request);
  // changes to this servlet: we now look at the group name too, tbh 05/2009
  String triggerName = fp.getString("tname");
  String gName = fp.getString("gname");
  String groupName = "";
  if (gName.equals("") || gName.equals("0")) {
    groupName = TRIGGER_GROUP;
  } else {
    // if (gName.equals("1")) {
    groupName = TRIGGER_IMPORT_GROUP;
  }
  // << tbh 09/03/2009 #4143
  scheduler = getScheduler();
  Trigger trigger = scheduler.getTrigger(triggerName.trim(), groupName);
  // TriggerBean is a wrapper for the trigger, serving as a link between the
  // Quartz classes and the OpenClinica classes.
  // is it really necessary? DRY
  if (trigger == null) {
    System.out.println("*** reset trigger group name");
    groupName = TRIGGER_GROUP;
    trigger = scheduler.getTrigger(triggerName.trim(), groupName);
  }
  // << tbh 09/03/2009 #4143
  // The above is a hack; if we add more trigger groups this will have to be redone.
  System.out.println("found trigger name: " + triggerName);
  System.out.println("found group name: " + groupName);
  // System.out.println("found trigger on the other side, full name: " + trigger.getFullName());
  TriggerBean triggerBean = new TriggerBean();
  JobDataMap dataMap = new JobDataMap();
  AuditEventDAO auditEventDAO = new AuditEventDAO(sm.getDataSource());
  try {
    triggerBean.setFullName(trigger.getName());
    triggerBean.setPreviousDate(trigger.getPreviousFireTime());
    triggerBean.setNextDate(trigger.getNextFireTime());
    // >> set active here, tbh 10/08/2009
    if (scheduler.getTriggerState(triggerName, groupName) == Trigger.STATE_PAUSED) {
      triggerBean.setActive(false);
      System.out.println("setting active to false for trigger: " + trigger.getName());
    } else {
      triggerBean.setActive(true);
      System.out.println("setting active to TRUE for trigger: " + trigger.getName());
    }
    // <<
    if (trigger.getDescription() != null) {
      triggerBean.setDescription(trigger.getDescription());
    }
    if (trigger.getJobDataMap().size() > 0) {
      dataMap = trigger.getJobDataMap();
      String contactEmail = dataMap.getString(ExampleSpringJob.EMAIL);
      System.out.println("found email: " + contactEmail);
      // String datasetId = dataMap.getString(ExampleSpringJob.DATASET_ID);
      // int dsId = new Integer(datasetId).intValue();
      if (gName.equals("") || gName.equals("0")) {
        String tab = dataMap.getString(ExampleSpringJob.TAB);
        String cdisc = dataMap.getString(ExampleSpringJob.CDISC);
        String spss = dataMap.getString(ExampleSpringJob.SPSS);
        String periodToRun = dataMap.getString(ExampleSpringJob.PERIOD);
        // int userId = new Integer(userAcctId).intValue();
        int dsId = dataMap.getInt(ExampleSpringJob.DATASET_ID);
        triggerBean.setCdisc(cdisc);
        triggerBean.setSpss(spss);
        triggerBean.setTab(tab);
        triggerBean.setPeriodToRun(periodToRun);
        DatasetDAO datasetDAO = new DatasetDAO(sm.getDataSource());
        DatasetBean dataset = (DatasetBean) datasetDAO.findByPK(dsId);
        triggerBean.setDataset(dataset);
      }
      int userId = dataMap.getInt(ExampleSpringJob.USER_ID);
      // need to set information: extract bean, user account bean
      UserAccountDAO userAccountDAO = new UserAccountDAO(sm.getDataSource());
      triggerBean.setContactEmail(contactEmail);
      UserAccountBean userAccount = (UserAccountBean) userAccountDAO.findByPK(userId);
      triggerBean.setUserAccount(userAccount);
      ArrayList<AuditEventBean> triggerLogs =
          auditEventDAO.findAllByAuditTable(trigger.getName());
      // set the table for the audit event beans here
      ArrayList allRows = AuditEventRow.generateRowsFromBeans(triggerLogs);
      EntityBeanTable table = fp.getEntityBeanTable();
      String[] columns = {
        resword.getString("date_and_time"),
        resword.getString("action_message"),
        resword.getString("entity_operation"),
        // resword.getString("study_site"),
        // resword.getString("study_subject_ID"),
        resword.getString("changes_and_additions"),
        resword.getString("actions")
      };
      table.setColumns(new ArrayList(Arrays.asList(columns)));
      table.setAscendingSort(false);
      table.hideColumnLink(1);
      table.hideColumnLink(3);
      table.hideColumnLink(4);
      table.setQuery("ViewSingleJob?tname=" + triggerName + "&gname=" + gName, new HashMap());
      table.setRows(allRows);
      table.computeDisplay();
      request.setAttribute("table", table);
    }
  } catch (NullPointerException e) {
    // TODO Auto-generated catch block
    System.out.println(" found NPE " + e.getMessage());
    e.printStackTrace();
  }
  // We need to show the extract this job runs, which files it produces, etc. -
  // in other words, the contents of the job data map.
  request.setAttribute("triggerBean", triggerBean);
  request.setAttribute("groupName", groupName);
  forwardPage(Page.VIEW_SINGLE_JOB);
}
private Pair<String, String> getTriggerKey(Trigger t) {
  return getTriggerKey(t.getGroup(), t.getName());
}
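// --- Hedged companion sketch ---
// One plausible use of the (group, name) key: de-duplicating triggers gathered from several
// jobs. This assumes the project's Pair type implements value-based equals()/hashCode(), as most
// tuple classes do; the method itself is an illustration, not code from the project.
private List<Trigger> withoutDuplicates(Trigger[] triggers) {
  Set<Pair<String, String>> seenKeys = new HashSet<Pair<String, String>>();
  List<Trigger> unique = new ArrayList<Trigger>();
  for (Trigger trigger : triggers) {
    if (seenKeys.add(getTriggerKey(trigger))) {
      unique.add(trigger);
    }
  }
  return unique;
}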