public void executeJob(final ScheduledJobDetail scheduledJobDetail, String triggerType) {
    try {
      final JobDataMap jobDataMap = new JobDataMap();
      if (triggerType == null) {
        triggerType = SchedulerServiceConstants.TRIGGER_TYPE_APPLICATION;
      }
      jobDataMap.put(SchedulerServiceConstants.TRIGGER_TYPE_REFERENCE, triggerType);
      jobDataMap.put(
          SchedulerServiceConstants.TENANT_IDENTIFIER,
          ThreadLocalContextUtil.getTenant().getTenantIdentifier());
      final String key = scheduledJobDetail.getJobKey();
      final JobKey jobKey = constructJobKey(key);
      final String schedulerName = getSchedulerName(scheduledJobDetail);
      final Scheduler scheduler = this.schedulers.get(schedulerName);
      if (scheduler == null || !scheduler.checkExists(jobKey)) {
        final JobDetail jobDetail = createJobDetail(scheduledJobDetail);
        final String tempSchedulerName = "temp" + scheduledJobDetail.getId();
        final Scheduler tempScheduler =
            createScheduler(tempSchedulerName, 1, schedulerJobListener, schedulerStopListener);
        tempScheduler.addJob(jobDetail, true);
        jobDataMap.put(SchedulerServiceConstants.SCHEDULER_NAME, tempSchedulerName);
        this.schedulers.put(tempSchedulerName, tempScheduler);
        tempScheduler.triggerJob(jobDetail.getKey(), jobDataMap);
      } else {
        scheduler.triggerJob(jobKey, jobDataMap);
      }

    } catch (final Exception e) {
      final String msg = "Job execution failed for job with id:" + scheduledJobDetail.getId();
      logger.error(msg, e);
      throw new PlatformInternalServerException(
          "error.msg.sheduler.job.execution.failed", msg, scheduledJobDetail.getId());
    }
  }
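
The values placed in the JobDataMap above are read back inside the job when it fires. A minimal sketch of the consuming side, assuming a hypothetical TenantAwareJob class and the same SchedulerServiceConstants keys:

  import org.quartz.Job;
  import org.quartz.JobDataMap;
  import org.quartz.JobExecutionContext;
  import org.quartz.JobExecutionException;

  // Hypothetical job class; the key names come from the snippet above.
  public class TenantAwareJob implements Job {
    @Override
    public void execute(JobExecutionContext context) throws JobExecutionException {
      // getMergedJobDataMap() merges the JobDetail's map with the trigger-supplied map,
      // so the values passed to triggerJob(jobKey, jobDataMap) are visible here.
      JobDataMap data = context.getMergedJobDataMap();
      String tenantIdentifier = data.getString(SchedulerServiceConstants.TENANT_IDENTIFIER);
      String triggerType = data.getString(SchedulerServiceConstants.TRIGGER_TYPE_REFERENCE);
      // ... run the tenant-scoped work using tenantIdentifier and triggerType ...
    }
  }
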
Example #2
 @Test
 public void doTest() throws SchedulerException, MeasurementException {
   PollJob job = new PollJob();
   JobExecutionContext context = mock(JobExecutionContext.class);
   Map<String, Object> params = new HashMap<>();
   JobDataMap jobMap = new JobDataMap(params);
   MeasurementDefinition def = MocksFactory.createMockMeasurementDefinition();
   MeasurementListener listener = mock(MeasurementListener.class);
   Scheduler scheduler = mock(Scheduler.class);
   CoreMeasurementService service = mock(CoreMeasurementService.class);
   SchedulerContext schedulerContext =
       new SchedulerContext(
           Collections.singletonMap(PollJob.MEASUREMENT_SERVICE_ATTR_NAME, service));
   jobMap.put(PollJob.LISTENER_ATTR_NAME, listener);
   jobMap.put(PollJob.MEASUREMENT_DEF_ATTR_NAME, def);
   jobMap.put(PollJob.MEASUREMENT_SERVICE_ATTR_NAME, service);
   when(context.getMergedJobDataMap()).thenReturn(jobMap);
   when(context.getScheduler()).thenReturn(scheduler);
   when(scheduler.getContext()).thenReturn(schedulerContext);
   CapabilityValue capValue = new CapabilityValue(RandomUtils.nextLong());
   when(service.getCapabilityValue(Matchers.<String>any(), Matchers.<String>any()))
       .thenReturn(capValue);
   job.execute(context);
   verify(context).getMergedJobDataMap();
   verify(service).getCapabilityValue(def.getResourceUri(), def.getCapabilityUri());
   verify(listener).newCapabilityValue(capValue);
   assertEquals(capValue.getMetricsId(), def.getId());
 }
    public final void addJob(
        String jobName, String groupName, StorageType storageType, org.quartz.Trigger trigger) {

      JobKey jobKey = new JobKey(jobName, groupName);

      JobBuilder jobBuilder = JobBuilder.newJob(MessageSenderJob.class);

      jobBuilder = jobBuilder.withIdentity(jobKey);

      JobDetail jobDetail = jobBuilder.build();

      JobDataMap jobDataMap = jobDetail.getJobDataMap();

      jobDataMap.put(SchedulerEngine.MESSAGE, _jsonFactory.serialize(new Message()));
      jobDataMap.put(SchedulerEngine.DESTINATION_NAME, _TEST_DESTINATION_NAME);
      jobDataMap.put(SchedulerEngine.STORAGE_TYPE, storageType.toString());

      JobState jobState = new JobState(TriggerState.NORMAL);

      jobState.addException(new Exception(), new Date());

      jobDataMap.put(SchedulerEngine.JOB_STATE, JobStateSerializeUtil.serialize(jobState));

      _jobs.put(jobKey, new Tuple(jobDetail, trigger, TriggerState.NORMAL));
    }
  @Override
  public void registerSchedule(Schedule schedule, Map<String, Serializable> parameters) {
    log.info("Registering " + schedule);
    JobDetail job = new JobDetail(schedule.getId(), "nuxeo", EventJob.class);
    JobDataMap map = job.getJobDataMap();
    map.put("eventId", schedule.getEventId());
    map.put("eventCategory", schedule.getEventCategory());
    map.put("username", schedule.getUsername());

    if (parameters != null) {
      map.putAll(parameters);
    }

    Trigger trigger;
    try {
      trigger = new CronTrigger(schedule.getId(), "nuxeo", schedule.getCronExpression());
    } catch (ParseException e) {
      log.error(
          String.format(
              "invalid cron expresion '%s' for schedule '%s'",
              schedule.getCronExpression(), schedule.getId()),
          e);
      return;
    }
    // This is useful when testing to avoid multiple threads:
    // trigger = new SimpleTrigger(schedule.getId(), "nuxeo");

    try {
      scheduler.scheduleJob(job, trigger);
    } catch (ObjectAlreadyExistsException e) {
      log.trace("Overriding scheduler with id: " + schedule.getId());
      // when jobs are persisted in a database, the job should already
      // be there
      // remove existing job and re-schedule
      boolean unregistered = unregisterSchedule(schedule.getId());
      if (unregistered) {
        try {
          scheduler.scheduleJob(job, trigger);
        } catch (SchedulerException e1) {
          log.error(
              String.format(
                  "failed to schedule job with id '%s': %s", schedule.getId(), e.getMessage()),
              e);
        }
      }

    } catch (SchedulerException e) {
      log.error(
          String.format(
              "failed to schedule job with id '%s': %s", schedule.getId(), e.getMessage()),
          e);
    }
  }
  public void startNonQuartzJob(
      String domainId, String projectId, String userId, String instanceId, long timeout)
      throws SchedulerException {

    JobDataMap jobDataMap = new JobDataMap();
    jobDataMap.put(JobParamName.DOMAIN_ID, domainId);
    jobDataMap.put(JobParamName.PROJECT_ID, projectId);
    jobDataMap.put(JobParamName.USER_ID, userId);
    jobDataMap.put(JobParamName.INSTANCE_ID, instanceId);
    jobDataMap.put(JobParamName.TIMEOUT, timeout);
    FirstJob generalJob = new FirstJob();
    generalJob.executeNonQuartzJob(jobDataMap);
  }
Example #6
  private JobDetail createJobDetail(JobBeanWrapper job, String jobName) {
    JobDetail jd =
        JobBuilder.newJob(job.getJobClass())
            .withIdentity(new JobKey(jobName, Scheduler.DEFAULT_GROUP))
            .storeDurably()
            .requestRecovery()
            .build();
    JobDataMap map = jd.getJobDataMap();

    map.put(JobBeanWrapper.SPRING_BEAN_NAME, job.getBeanId());
    map.put(JobBeanWrapper.JOB_TYPE, job.getJobType());

    return jd;
  }
  public static JobDetail forProduct(Product product, Boolean lazy) {
    JobDataMap map = new JobDataMap();
    map.put(JobStatus.TARGET_TYPE, JobStatus.TargetType.PRODUCT);
    map.put(JobStatus.TARGET_ID, product.getId());
    map.put(LAZY_REGEN, lazy);

    JobDetail detail =
        newJob(RefreshPoolsForProductJob.class)
            .withIdentity("refresh_pools_for_product" + Util.generateUUID())
            .requestRecovery(true) // recover the job upon restarts
            .usingJobData(map)
            .build();

    return detail;
  }
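
Building the JobDetail is only half the story: nothing runs until it is paired with a trigger and handed to a scheduler. A minimal, hypothetical caller (the Scheduler and Product instances and the trigger name are assumptions, not part of the original):

  // Hypothetical usage: fire the refresh job once, immediately.
  // org.quartz.Scheduler, Trigger, and TriggerBuilder imports are assumed.
  void refreshNow(Scheduler scheduler, Product product) throws SchedulerException {
    JobDetail detail = forProduct(product, Boolean.TRUE);
    Trigger trigger =
        TriggerBuilder.newTrigger()
            .withIdentity("refresh_pools_for_product_trigger" + Util.generateUUID())
            .startNow()
            .build();
    scheduler.scheduleJob(detail, trigger);
  }
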
Example #8
  /**
   * Called by the <code>{@link org.quartz.Scheduler}</code> when a <code>{@link org.quartz.Trigger}
   * </code> fires that is associated with the <code>Job</code>.
   *
   * @throws JobExecutionException if there is an exception while executing the job.
   */
  public void execute(JobExecutionContext context) throws JobExecutionException {

    // This job simply prints out its job name and the
    // date and time that it is running
    JobKey jobKey = context.getJobDetail().getKey();

    // Grab and print passed parameters
    JobDataMap data = context.getJobDetail().getJobDataMap();
    String favoriteColor = data.getString(FAVORITE_COLOR);
    int count = data.getInt(EXECUTION_COUNT);
    _log.info(
        "ColorJob: "
            + jobKey
            + " executing at "
            + new Date()
            + "\n"
            + "  favorite color is "
            + favoriteColor
            + "\n"
            + "  execution count (from job map) is "
            + count
            + "\n"
            + "  execution count (from job member variable) is "
            + _counter);

    // increment the count and store it back into the
    // job map so that job state can be properly maintained
    count++;
    data.put(EXECUTION_COUNT, count);

    // Increment the local member variable
    // This serves no real purpose since job state can not
    // be maintained via member variables!
    _counter++;
  }
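
Writing the incremented count back into the JobDataMap only survives across firings if the job opts into state persistence. In Quartz 2.x that means annotating the job class (Quartz 1.x used the StatefulJob marker interface instead); a minimal sketch:

  import org.quartz.DisallowConcurrentExecution;
  import org.quartz.Job;
  import org.quartz.JobExecutionContext;
  import org.quartz.PersistJobDataAfterExecution;

  // Without @PersistJobDataAfterExecution the updated EXECUTION_COUNT is discarded after
  // each firing; @DisallowConcurrentExecution keeps overlapping runs from clobbering it.
  @PersistJobDataAfterExecution
  @DisallowConcurrentExecution
  public class ColorJob implements Job {
    public void execute(JobExecutionContext context) {
      // body as shown above
    }
  }
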
  public void run() throws Exception {

    Log log = LogFactory.getLog(RemoteClientExample.class);

    // First we must get a reference to a scheduler
    SchedulerFactory sf = new StdSchedulerFactory();
    Scheduler sched = sf.getScheduler();

    // define the job and ask it to run
    JobDetail job = new JobDetail("remotelyAddedJob", "default", SimpleJob.class);
    JobDataMap map = new JobDataMap();
    map.put("msg", "Your remotely added job has executed!");
    job.setJobDataMap(map);
    CronTrigger trigger =
        new CronTrigger(
            "remotelyAddedTrigger",
            "default",
            "remotelyAddedJob",
            "default",
            new Date(),
            null,
            "/5 * * ? * *");

    // schedule the job
    sched.scheduleJob(job, trigger);

    log.info("Remote job scheduled.");
  }
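
This run() method uses the Quartz 1.x constructor API (new JobDetail(...), new CronTrigger(...)), which was removed in Quartz 2.x. A rough builder-style equivalent for 2.x, keeping the same names, data, and cron expression, might look like the sketch below:

  import static org.quartz.CronScheduleBuilder.cronSchedule;
  import static org.quartz.JobBuilder.newJob;
  import static org.quartz.TriggerBuilder.newTrigger;

  // Sketch only: the same job and schedule, expressed with the Quartz 2.x builders.
  void scheduleRemoteJob(Scheduler sched) throws SchedulerException {
    JobDetail job =
        newJob(SimpleJob.class)
            .withIdentity("remotelyAddedJob", "default")
            .usingJobData("msg", "Your remotely added job has executed!")
            .build();
    Trigger trigger =
        newTrigger()
            .withIdentity("remotelyAddedTrigger", "default")
            .withSchedule(cronSchedule("/5 * * ? * *"))
            .forJob(job)
            .build();
    sched.scheduleJob(job, trigger);
  }
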
  /**
   * schedule a job
   *
   * @param aScheduler scheduler to start
   * @throws SchedulerException
   */
  public static void schedule(am.projects.webserver.report.vo.Scheduler aScheduler)
      throws SchedulerException {

    if (null == scheduler) return;

    JobDataMap dataMap = new JobDataMap();
    dataMap.put(am.projects.webserver.report.vo.Scheduler.class.getSimpleName(), aScheduler);

    // define the job and tie it to our HelloJob class
    String jobName = buildJobName(aScheduler);
    String groupName = buildJobGroupName(aScheduler);
    JobKey jobKey = new JobKey(jobName, groupName);

    JobDetail job = newJob(MonitorJob.class).withIdentity(jobKey).usingJobData(dataMap).build();

    TriggerKey triggerKey = new TriggerKey(jobName, groupName);
    Trigger trigger =
        newTrigger()
            .withIdentity(triggerKey)
            .startNow()
            .withSchedule(
                simpleSchedule()
                    .withIntervalInMinutes(aScheduler.getRequestRepeatIntervalInMinutes())
                    .repeatForever())
            .build();

    scheduler.scheduleJob(job, trigger);
  }
  /**
   * Add a new job.
   *
   * @param jobName the job name
   * @param type the job type
   * @param startTime the start time as epoch milliseconds
   * @param req the current request; all request parameters are copied into the JobDataMap
   * @return a result map containing a "success" flag
   */
  @RequestMapping(
      value = "/addJob",
      method = {RequestMethod.POST, RequestMethod.GET})
  @ResponseBody
  public Map<String, Object> addJob(
      @RequestParam("jobName") String jobName,
      @RequestParam("type") String type,
      @RequestParam("startTime") String startTime,
      HttpServletRequest req) {
    Map<String, Object> m = new HashMap<String, Object>();
    JobDataMap jobData = new JobDataMap();
    String key = null;
    String[] value = null;
    for (Entry<String, String[]> entry : req.getParameterMap().entrySet()) {
      key = entry.getKey();
      value = entry.getValue();
      if (value != null && value.length > 0) {
        jobData.put(key, value[0]);
      }
    }
    JobType jt = null;
    if (GloabConstant.JOB_TYPE_0.equals(type)) {
      jt = JobType.CONSULT;
    } else {
      jt = JobType.ACTIVITY;
    }
    JobManager.addJob(jobName, jt, new Date(Long.valueOf(startTime)), jobData);

    m.put("success", true);
    return m;
  }
Example #12
  private void scheduleCronJob(String cronString, String elementId) {
    try {
      SchedulerFactory schedulerFactory = new StdSchedulerFactory();
      scheduler = schedulerFactory.getScheduler();
      scheduler.start();

      JobDataMap dataMap = new JobDataMap();
      dataMap.put("trigger", this);

      jobName = "TriggerJob_" + elementId;
      JobDetail job =
          org.quartz.JobBuilder.newJob(CronEventTrigger.class)
              .withIdentity(jobName, jobGroup)
              .usingJobData(dataMap)
              .build();

      Trigger trigger =
          org.quartz.TriggerBuilder.newTrigger()
              .withIdentity("TriggerJob_" + elementId, jobGroup)
              .withSchedule(CronScheduleBuilder.cronSchedule(cronString))
              .build();

      scheduler.scheduleJob(job, trigger);

    } catch (SchedulerException e) {
      log.error(
          "Error while instantiating quartz scheduler for trigger '"
              + triggerDefinition.getId()
              + "',"
              + e.getMessage(),
          e);
    }
  }
Example #13
  public void testCreation() throws Exception {
    Scheduler scheduler = (Scheduler) lookup(Scheduler.ROLE, "test");

    assertNotNull(scheduler);

    JobDataMap dataMap = new JobDataMap();

    dataMap.put("project", "continuum");

    JobDetail jobDetail = new JobDetail("job", "group", JobOne.class);

    jobDetail.setJobDataMap(dataMap);

    Trigger trigger = new SimpleTrigger("trigger", "group");

    scheduler.addGlobalTriggerListener(this);

    scheduler.scheduleJob(jobDetail, trigger);

    while (!triggerFired) {
      // System.out.println("! triggerFired");
      Thread.sleep(10);
    }
    System.out.println("ok triggerFired");
  }
  protected void updateJobState(
      Scheduler scheduler, JobKey jobKey, TriggerState triggerState, boolean suppressError)
      throws Exception {

    JobDetail jobDetail = scheduler.getJobDetail(jobKey);

    if (jobDetail == null) {
      return;
    }

    JobDataMap jobDataMap = jobDetail.getJobDataMap();

    JobState jobState = getJobState(jobDataMap);

    if (triggerState != null) {
      jobState.setTriggerState(triggerState);
    }

    if (suppressError) {
      jobState.clearExceptions();
    }

    jobDataMap.put(SchedulerEngine.JOB_STATE, JobStateSerializeUtil.serialize(jobState));

    scheduler.addJob(jobDetail, true);
  }
  protected void schedule(
      Scheduler scheduler,
      StorageType storageType,
      Trigger trigger,
      String description,
      String destinationName,
      Message message)
      throws Exception {

    if (_jsonFactory == null) {
      throw new IllegalStateException("JSON factory not initialized");
    }

    try {
      JobBuilder jobBuilder = JobBuilder.newJob(MessageSenderJob.class);

      jobBuilder.withIdentity(trigger.getJobKey());

      jobBuilder.storeDurably();

      JobDetail jobDetail = jobBuilder.build();

      JobDataMap jobDataMap = jobDetail.getJobDataMap();

      jobDataMap.put(SchedulerEngine.DESCRIPTION, description);
      jobDataMap.put(SchedulerEngine.DESTINATION_NAME, destinationName);
      jobDataMap.put(SchedulerEngine.MESSAGE, _jsonFactory.serialize(message));
      jobDataMap.put(SchedulerEngine.STORAGE_TYPE, storageType.toString());

      JobState jobState =
          new JobState(
              TriggerState.NORMAL, message.getInteger(SchedulerEngine.EXCEPTIONS_MAX_SIZE));

      jobDataMap.put(SchedulerEngine.JOB_STATE, JobStateSerializeUtil.serialize(jobState));

      unregisterMessageListener(scheduler, trigger.getJobKey());

      synchronized (this) {
        scheduler.deleteJob(trigger.getJobKey());
        scheduler.scheduleJob(jobDetail, trigger);
      }
    } catch (ObjectAlreadyExistsException oaee) {
      if (_log.isInfoEnabled()) {
        _log.info("Message is already scheduled");
      }
    }
  }
Example #16
 /**
  * Create the JobDetail for the configured job.
  *
  * @return the built JobDetail
  */
 private JobDetail createJobDetail() {
   JobDataMap jobDataMap = new JobDataMap();
   jobDataMap.put("jobConfiguration", jobConfiguration);
   JobDetail result =
       JobBuilder.newJob(jobConfiguration.getJobClass())
           .setJobData(jobDataMap)
           .withIdentity(jobConfiguration.getJobName())
           .build();
   return result;
 }
  public static org.quartz.JobDataMap convertParametersToNativeObject(
      Map<String, String> spagobiParameters) {
    JobDataMap quartzParameters = new JobDataMap();

    Set<String> parameterNames = spagobiParameters.keySet();
    for (String parameterName : parameterNames) {
      String parameterValue = spagobiParameters.get(parameterName);
      quartzParameters.put(parameterName, parameterValue);
    }
    return quartzParameters;
  }
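
Since JobDataMap implements java.util.Map, the explicit copy loop above can be collapsed into a single call; an equivalent sketch:

  // Equivalent conversion in one call (a copying constructor, new JobDataMap(map), also works).
  public static org.quartz.JobDataMap convertParametersToNativeObject(
      Map<String, String> spagobiParameters) {
    JobDataMap quartzParameters = new JobDataMap();
    quartzParameters.putAll(spagobiParameters);
    return quartzParameters;
  }
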
 private void completeWithMissingParams(JobDataMap jobDataMap, JobDataMap previousJobDataMap) {
   for (Iterator<Entry<String, Object>> iterator = previousJobDataMap.entrySet().iterator();
       iterator.hasNext(); ) {
     Entry<String, Object> entryFromPrevious = (Entry<String, Object>) iterator.next();
     if (!jobDataMap.containsKey(entryFromPrevious.getKey())) {
       if (!FOR_EACH_LISTENER.equals(entryFromPrevious.getKey())) {
         jobDataMap.put(entryFromPrevious.getKey(), entryFromPrevious.getValue());
       }
     }
   }
 }
 JobDataMap createJobDataMap(String parameters) {
   JobDataMap map = new JobDataMap();
   if (!StringUtils.isNullOrEmpty(parameters)) {
     JSONArray jsonArray = JSON.parseArray(parameters);
     for (int i = 0; i < jsonArray.size(); i++) {
       JSONObject o = jsonArray.getJSONObject(i);
       map.put(o.getString("key"), o.get("value"));
     }
   }
   return map;
 }
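
A hypothetical call, assuming the fastjson-style JSON.parseArray used above, where each array element contributes one key/value pair:

  // Hypothetical usage of createJobDataMap(String); the keys and values are illustrative only.
  JobDataMap map =
      createJobDataMap("[{\"key\":\"reportId\",\"value\":42},{\"key\":\"notify\",\"value\":true}]");
  // map.get("reportId") -> 42 (Integer), map.get("notify") -> true (Boolean)
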
  @Override
  @OverridingMethodsMustInvokeSuper
  protected void beforeExecuteInScope(
      @Nonnull final JobDataMap aJobDataMap, @Nonnull final JobExecutionContext aContext) {
    final String sUserID = getCurrentUserID(aJobDataMap);

    // Remember that a long running job is starting
    final String sLongRunningJobID = getLongRunningJobManager().onStartJob(this, sUserID);

    // Store in JobDataMap
    aJobDataMap.put(KEY_LONG_RUNNING_JOB_ID, sLongRunningJobID);
  }
Example #21
  public SimpleTrigger generateImportTrigger(
      FormProcessor fp,
      UserAccountBean userAccount,
      StudyBean study,
      Date startDateTime,
      String locale) {

    String jobName = fp.getString(JOB_NAME);

    String email = fp.getString(EMAIL);
    String jobDesc = fp.getString(JOB_DESC);
    String directory = fp.getString(DIRECTORY);

    // what kinds of periods do we have? hourly, daily, weekly?
    long interval = 0;
    int hours = fp.getInt("hours");
    int minutes = fp.getInt("minutes");
    if (hours > 0) {
      long hoursInt = hours * 3600000L;
      interval = interval + hoursInt;
    }
    if (minutes > 0) {
      long minutesInt = minutes * 60000L;
      interval = interval + minutesInt;
    }
    SimpleTrigger trigger = new SimpleTrigger(jobName, IMPORT_TRIGGER, 64000, interval);
    trigger.setDescription(jobDesc);
    // set just the start date
    trigger.setStartTime(startDateTime);
    trigger.setName(jobName); // + datasetId);
    trigger.setGroup(IMPORT_TRIGGER); // + datasetId);
    trigger.setMisfireInstruction(
        SimpleTrigger.MISFIRE_INSTRUCTION_RESCHEDULE_NEXT_WITH_EXISTING_COUNT);
    // set job data map
    JobDataMap jobDataMap = new JobDataMap();

    jobDataMap.put(EMAIL, email);
    jobDataMap.put(USER_ID, userAccount.getId());
    jobDataMap.put(STUDY_NAME, study.getName());
    jobDataMap.put(STUDY_OID, study.getOid());
    jobDataMap.put(DIRECTORY, directory);
    jobDataMap.put(ExampleSpringJob.LOCALE, locale);
    jobDataMap.put("hours", hours);
    jobDataMap.put("minutes", minutes);

    trigger.setJobDataMap(jobDataMap);
    trigger.setVolatility(false);
    return trigger;
  }
Example #22
  public boolean start() {

    try {
      scheduler.start();
    } catch (SchedulerException e) {
      throw new SynapseException("Error starting the scheduler", e);
    }

    Trigger trigger;
    TriggerBuilder<Trigger> triggerBuilder = newTrigger().withIdentity(name + "-trigger");

    if (cronExpression == null || "".equals(cronExpression)) {
      trigger =
          triggerBuilder
              .withSchedule(
                  simpleSchedule()
                      .withIntervalInMilliseconds(interval)
                      .repeatForever()
                      .withMisfireHandlingInstructionNextWithRemainingCount())
              .build();
    } else {
      trigger =
          triggerBuilder
              .startNow()
              .withSchedule(
                  CronScheduleBuilder.cronSchedule(cronExpression)
                      .withMisfireHandlingInstructionDoNothing())
              .build();
    }

    JobDataMap jobDataMap = getJobDataMap();
    jobDataMap.put(MessageProcessorConstants.PARAMETERS, parameters);

    JobBuilder jobBuilder = getJobBuilder();
    JobDetail jobDetail = jobBuilder.usingJobData(jobDataMap).build();

    try {
      scheduler.scheduleJob(jobDetail, trigger);
    } catch (SchedulerException e) {
      throw new SynapseException(
          "Error scheduling job : " + jobDetail + " with trigger " + trigger, e);
    }
    if (logger.isDebugEnabled()) {
      logger.debug("Started message processor. [" + getName() + "].");
    }

    return true;
  }
 /**
  * schedules a job with a configurable delay.
  *
  * @param instance - the instance to activate the method on timeout
  * @param methodName - the name of the method to activate on the instance
  * @param inputTypes - the method input types
  * @param inputParams - the method input parameters
  * @param initialDelay - the initial delay before the first activation
  * @param configurableDelayKeyName - the name of the config value that sets the delay between jobs
  * @param timeUnit - the unit of time used for initialDelay and the configured delay.
  * @return the scheduled job id
  */
 @Override
 public String scheduleAConfigurableDelayJob(
     Object instance,
     String methodName,
     Class<?>[] inputTypes,
     Object[] inputParams,
     long initialDelay,
     String configurableDelayKeyName,
     TimeUnit timeUnit) {
   long configurableDelay = getConfigurableDelay(configurableDelayKeyName);
   JobDetail job =
       createJobForDelayJob(
           instance, methodName, inputTypes, inputParams, configurableDelay, timeUnit);
   JobDataMap data = job.getJobDataMap();
   data.put(CONFIGURABLE_DELAY_KEY_NAME, configurableDelayKeyName);
   scheduleJobWithTrigger(initialDelay, timeUnit, instance, job);
   return job.getKey().getName();
 }
Example #24
  private void startJobs() {
    try {
      // Get a new scheduler
      scheduler = new StdSchedulerFactory().getScheduler();
      // Start it up. This won't start any jobs though.
      scheduler.start();

      for (GuanxiJobConfig gxJob : gxJobs) {
        // Need a new JobDetail to hold custom data to send to the job we're controlling
        JobDetail jobDetail =
            new JobDetail(
                gxJob.getKey(), Scheduler.DEFAULT_GROUP, Class.forName(gxJob.getJobClass()));

        // Create a new JobDataMap for custom data to be sent to the job...
        JobDataMap jobDataMap = new JobDataMap();
        // ...and add the job's custom config object
        jobDataMap.put(GuanxiJobConfig.JOB_KEY_JOB_CONFIG, gxJob);

        // Put the job's custom data in its JobDetail
        jobDetail.setJobDataMap(jobDataMap);

        /* Tell the scheduler when this job will run. Nothing will happen
         * until the start method is called.
         */
        Trigger trigger =
            new CronTrigger(gxJob.getKey(), Scheduler.DEFAULT_GROUP, gxJob.getCronLine());

        // Start the job
        scheduler.scheduleJob(jobDetail, trigger);

        if (gxJob.isStartImmediately()) {
          scheduler.triggerJob(gxJob.getKey(), Scheduler.DEFAULT_GROUP);
        }
      }
    } catch (ClassNotFoundException cnfe) {
      logger.error("Error locating job class", cnfe);
    } catch (SchedulerException se) {
      logger.error("Job scheduling error", se);
    } catch (ParseException pe) {
      logger.error("Error parsing job cronline", pe);
    }
  }
Example #25
  private static void doSomethingInteresting(Scheduler scheduler) throws SchedulerException {

    logger.debug("Let's do something interesting with Quarz...");

    JobDetail job = newJob(HelloJob.class).withIdentity("malJob", "malGroup1").build();

    Trigger trigger =
        newTrigger()
            .withIdentity("malTrigger", "malGroup1")
            .startNow()
            .withSchedule(simpleSchedule().withIntervalInSeconds(2).repeatForever())
            .build();

    JobDetail dumbJob =
        newJob(DumbJob.class)
            .withIdentity("dumbJob", "malGroup1")
            .usingJobData("jobSays", "Hello World!")
            .usingJobData("myFloatValue", 3.141f)
            .build();

    JobDataMap jobDataMap = new JobDataMap();
    jobDataMap.put("state", new ArrayList<Date>());

    CronTrigger cronTrigger =
        newTrigger()
            .withIdentity("dumbTrigger", "malGroup1")
            .withSchedule(cronSchedule("0/5 * * * * ?"))
            .usingJobData(jobDataMap)
            .build();

    scheduler.scheduleJob(job, trigger);
    scheduler.scheduleJob(dumbJob, cronTrigger);

    try {
      Thread.sleep(10000);
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt();
    }

    logger.debug("Let's do something interesting with Quarz. Done.");
  }
  @Override
  public void afterPropertiesSet() throws Exception {
    // Parameter validation
    Assert.notNull(this.triggerRule);
    Assert.notNull(this.mainTableName);
    Class<?> jClass = Class.forName(jobClass);
    Assert.isAssignable(Job.class, jClass);

    if (this.name == null) {
      this.name = this.beanName;
    }
    if (this.group == null) {
      this.group = Scheduler.DEFAULT_GROUP;
    }
    if (this.startDelay > 0 || this.startTime == null) {
      this.startTime = new Date(System.currentTimeMillis() + this.startDelay);
    }
    this.triggerClass = this.triggerRule.getTriggerClass();

    // jobDetail
    JobDetailImpl jdi = new JobDetailImpl();
    jdi.setName(this.name);
    jdi.setGroup(this.group);
    jdi.setJobClass(QuartzJob.class);
    jdi.setJobDataMap(this.jobDataMap);
    //        jdi.setDurability(true);
    jdi.setDescription(this.description);
    this.jobDetail = jdi;

    // jobDataMap
    jobDataMap.put("jobDetail", jdi);
    jobDataMap.put(QuartzJob.JOB_CLASS, jClass);
    jobDataMap.put(QuartzJob.MAIN_TABLE_NAME, mainTableName);
    jobDataMap.put(QuartzJob.THREAD_COUNT, threadCount);
    jobDataMap.put(QuartzJob.TIMEOUT, timeoutMinutesForMultiThread);
    jobDataMap.put(QuartzJob.ALLOW_RETRY, allowRetry);

    // trigger
    this.trigger = triggerRule.initializeTrigger();
    trigger.setName(this.name);
    trigger.setGroup(this.group);
    trigger.setJobKey(this.jobDetail.getKey());
    trigger.setJobDataMap(this.jobDataMap);
    trigger.setStartTime(this.startTime);
    trigger.setPriority(this.priority);
    trigger.setMisfireInstruction(this.misfireInstruction);
    trigger.setDescription(this.description);
  }
Example #27
 private Trigger createTrigger(
     final ScheduledJobDetail scheduledJobDetails, final JobDetail jobDetail) {
   try {
     final MifosPlatformTenant tenant = ThreadLocalContextUtil.getTenant();
     final CronTriggerFactoryBean cronTriggerFactoryBean = new CronTriggerFactoryBean();
     cronTriggerFactoryBean.setName(scheduledJobDetails.getJobName() + "Trigger" + tenant.getId());
     cronTriggerFactoryBean.setJobDetail(jobDetail);
     final JobDataMap jobDataMap = new JobDataMap();
     jobDataMap.put(SchedulerServiceConstants.TENANT_IDENTIFIER, tenant.getTenantIdentifier());
     cronTriggerFactoryBean.setJobDataMap(jobDataMap);
     final TimeZone timeZone = TimeZone.getTimeZone(tenant.getTimezoneId());
     cronTriggerFactoryBean.setTimeZone(timeZone);
     cronTriggerFactoryBean.setGroup(scheduledJobDetails.getGroupName());
     cronTriggerFactoryBean.setCronExpression(scheduledJobDetails.getCronExpression());
     cronTriggerFactoryBean.setPriority(scheduledJobDetails.getTaskPriority());
     cronTriggerFactoryBean.afterPropertiesSet();
     return cronTriggerFactoryBean.getObject();
   } catch (ParseException e) {
     throw new RuntimeException(e);
   }
 }
Example #28
  /**
   * Schedules the generation of a consumer export. This job starts immediately.
   *
   * @param consumer the target consumer
   * @param cdnLabel
   * @param webAppPrefix
   * @param apiUrl
   * @return a JobDetail representing the job to be started.
   */
  public static JobDetail scheduleExport(
      Consumer consumer,
      String cdnLabel,
      String webAppPrefix,
      String apiUrl,
      Map<String, String> extensionData) {
    JobDataMap map = new JobDataMap();
    map.put(JobStatus.OWNER_ID, consumer.getOwner().getKey());
    map.put(JobStatus.TARGET_TYPE, JobStatus.TargetType.CONSUMER);
    map.put(JobStatus.TARGET_ID, consumer.getUuid());
    map.put(CDN_LABEL, cdnLabel);
    map.put(WEBAPP_PREFIX, webAppPrefix);
    map.put(API_URL, apiUrl);
    map.put(EXTENSION_DATA, extensionData);

    return newJob(ExportJob.class)
        .withIdentity("export_" + Util.generateUUID())
        .usingJobData(map)
        .build();
  }
  protected void unschedule(Scheduler scheduler, JobKey jobKey) throws Exception {

    JobDetail jobDetail = scheduler.getJobDetail(jobKey);

    TriggerKey triggerKey = new TriggerKey(jobKey.getName(), jobKey.getGroup());

    if (jobDetail == null) {
      return;
    }

    unregisterMessageListener(scheduler, jobKey);

    JobDataMap jobDataMap = jobDetail.getJobDataMap();

    JobState jobState = getJobState(jobDataMap);

    Trigger trigger = scheduler.getTrigger(triggerKey);

    if (trigger == null) {
      return;
    }

    jobState.setTriggerDate(SchedulerEngine.END_TIME, new Date());
    jobState.setTriggerDate(SchedulerEngine.FINAL_FIRE_TIME, trigger.getPreviousFireTime());
    jobState.setTriggerDate(SchedulerEngine.NEXT_FIRE_TIME, null);
    jobState.setTriggerDate(SchedulerEngine.PREVIOUS_FIRE_TIME, trigger.getPreviousFireTime());
    jobState.setTriggerDate(SchedulerEngine.START_TIME, trigger.getStartTime());

    jobState.setTriggerState(TriggerState.UNSCHEDULED);

    jobState.clearExceptions();

    jobDataMap.put(SchedulerEngine.JOB_STATE, JobStateSerializeUtil.serialize(jobState));

    scheduler.unscheduleJob(triggerKey);

    scheduler.addJob(jobDetail, true);
  }
  public void launchSuperviseSmsSending(final List<SMSBroker> smsMessageList) {
    // build a unique job name from the current timestamp

    final long now = System.currentTimeMillis();

    final String jobName = "superviseSmsSending" + now;
    final String keyName = SuperviseSmsSending.SUPERVISE_SMS_BROKER_KEY;
    final String groupName = Scheduler.DEFAULT_GROUP;

    try {
      for (SMSBroker smsMessage : smsMessageList) {
        if (logger.isDebugEnabled()) {
          logger.debug(
              "smsMessage in launchSuperviseSmsSending is : "
                  + " - smsMessage id is : "
                  + smsMessage.getId()
                  + " - smsMessage content is : "
                  + smsMessage.getMessage()
                  + " - smsMessage phone is : "
                  + smsMessage.getRecipient());
        }
      }

      // create DataMap
      final JobDataMap jobDataMap = new JobDataMap();
      jobDataMap.put(keyName, smsMessageList);

      // create trigger
      final Trigger trigger = new SimpleTrigger(jobName, groupName);

      trigger.setVolatility(false);
      trigger.setStartTime(new Date(now));

      if (logger.isDebugEnabled()) {
        logger.debug(
            "Launching job with parameter : \n"
                + " - jobName : "
                + jobName
                + "\n"
                + " - groupName : "
                + groupName
                + "\n");
      }

      JobDetail jobDetail = new JobDetail(jobName, groupName, SuperviseSmsSending.class);
      jobDetail.setJobDataMap(jobDataMap);

      scheduler.scheduleJob(jobDetail, trigger);

      if (logger.isDebugEnabled()) {
        logger.debug("Job successfully launched");
      }
    } catch (SchedulerException e) {
      logger.warn(
          "An error occurs launching the job with parameter : \n"
              + " - jobName : "
              + jobName
              + "\n"
              + " - groupName : "
              + groupName
              + "\n");
    }
  }