protected void executeInternal(JobExecutionContext context) throws JobExecutionException {
   try {
     SchedulerContext schCtx = context.getScheduler().getContext();
     // Get the Spring application context
     ApplicationContext appCtx = (ApplicationContext) schCtx.get("applicationContext");
     this.staticPageSvc = (StaticPageSvc) appCtx.getBean("staticPageSvc");
     JobDataMap jdm = context.getJobDetail().getJobDataMap();
     // Get the channel id
     String channelIdStr = (String) jdm.get(CmsTask.TASK_PARAM_CHANNEL_ID);
     if (!StringUtils.isBlank(channelIdStr)) {
       this.channelId = Integer.parseInt(channelIdStr);
       if (channelId.equals(0)) {
         channelId = null;
       }
     }
     // Get the site id
     String siteIdStr = (String) jdm.get(CmsTask.TASK_PARAM_SITE_ID);
     if (!StringUtils.isBlank(siteIdStr)) {
       this.siteId = Integer.parseInt(siteIdStr);
     }
   } catch (SchedulerException e1) {
     // Fail the job instead of swallowing the exception; without the
     // scheduler context the dependencies above are never wired.
     throw new JobExecutionException("Error obtaining scheduler context.", e1, false);
   }
   staitcChannel();
 }
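
The lookup above only works if something stored the Spring ApplicationContext in the Quartz SchedulerContext under the key "applicationContext" at startup. With Spring's SchedulerFactoryBean that wiring is a single setter; a minimal sketch (bean method name is illustrative):

import org.springframework.scheduling.quartz.SchedulerFactoryBean;

// Minimal sketch: expose the Spring ApplicationContext to Quartz jobs under
// the key "applicationContext", matching the lookup in executeInternal above.
public SchedulerFactoryBean schedulerFactoryBean() {
  SchedulerFactoryBean factory = new SchedulerFactoryBean();
  // SchedulerFactoryBean is ApplicationContextAware; this key tells it to copy
  // the injected ApplicationContext into the Quartz SchedulerContext.
  factory.setApplicationContextSchedulerContextKey("applicationContext");
  return factory;
}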
 @Override
 public void execute(JobExecutionContext context) throws JobExecutionException {
   try {
     SchedulerContext schedulerContext = context.getScheduler().getContext();
     schedulerContext.put(JOB_THREAD, Thread.currentThread());
     CyclicBarrier barrier = (CyclicBarrier) schedulerContext.get(BARRIER);
     barrier.await(TEST_TIMEOUT_SECONDS, TimeUnit.SECONDS);
    } catch (Throwable e) {
      // Preserve the original failure as the cause rather than printing it
      throw new AssertionError("Await on barrier was interrupted: " + e, e);
    }
 }
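
For the barrier handshake above to work, the driving test must have put the CyclicBarrier into the SchedulerContext before the job fires. A minimal sketch of that setup, reusing the snippet's BARRIER and TEST_TIMEOUT_SECONDS constants (the actual job scheduling is elided):

import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.TimeUnit;
import org.quartz.Scheduler;

// Minimal sketch: a barrier with two parties, so the test thread and the
// job thread each block until the other arrives (or the timeout expires).
void runBarrierTest(Scheduler scheduler) throws Exception {
  CyclicBarrier barrier = new CyclicBarrier(2);
  scheduler.getContext().put(BARRIER, barrier);
  scheduler.start();
  // ... schedule the job here ...
  barrier.await(TEST_TIMEOUT_SECONDS, TimeUnit.SECONDS);
}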
  /** @see org.quartz.Job#execute(org.quartz.JobExecutionContext) */
  public void execute(JobExecutionContext context) throws JobExecutionException {
    JobDataMap mergedJobDataMap = context.getMergedJobDataMap();
    SchedulerContext schedCtxt = null;
    try {
      schedCtxt = context.getScheduler().getContext();
    } catch (SchedulerException e) {
      throw new JobExecutionException("Error obtaining scheduler context.", e, false);
    }

    String fileName = mergedJobDataMap.getString(FILE_NAME);
    String listenerName = mergedJobDataMap.getString(FILE_SCAN_LISTENER_NAME);

    if (fileName == null) {
      throw new JobExecutionException(
          "Required parameter '" + FILE_NAME + "' not found in merged JobDataMap");
    }
    if (listenerName == null) {
      throw new JobExecutionException(
          "Required parameter '" + FILE_SCAN_LISTENER_NAME + "' not found in merged JobDataMap");
    }

    FileScanListener listener = (FileScanListener) schedCtxt.get(listenerName);

    if (listener == null) {
      throw new JobExecutionException(
          "FileScanListener named '" + listenerName + "' not found in SchedulerContext");
    }

    long lastDate = -1;
    if (mergedJobDataMap.containsKey(LAST_MODIFIED_TIME)) {
      lastDate = mergedJobDataMap.getLong(LAST_MODIFIED_TIME);
    }

    long newDate = getLastModifiedDate(fileName);

    if (newDate < 0) {
      log.warn("File '" + fileName + "' does not exist.");
      return;
    }

    if (lastDate > 0 && (newDate != lastDate)) {
      // notify call back...
      log.info("File '" + fileName + "' updated, notifying listener.");
      listener.fileUpdated(fileName);
    } else if (log.isDebugEnabled()) {
      log.debug("File '" + fileName + "' unchanged.");
    }

    // It is the JobDataMap on the JobDetail which is actually stateful
    context.getJobDetail().getJobDataMap().put(LAST_MODIFIED_TIME, newDate);
  }
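
For this job to fire the callback, the FileScanListener must be registered in the SchedulerContext under the same name passed via the job data. A minimal usage sketch with the Quartz 2.x builder API (MyFileScanListener and the file path are illustrative):

import static org.quartz.JobBuilder.newJob;
import static org.quartz.SimpleScheduleBuilder.simpleSchedule;
import static org.quartz.TriggerBuilder.newTrigger;

import org.quartz.JobDetail;
import org.quartz.Scheduler;
import org.quartz.SchedulerException;
import org.quartz.Trigger;
import org.quartz.jobs.FileScanJob;

// Minimal sketch: register the listener, then poll the file once a minute.
void scheduleFileScan(Scheduler scheduler) throws SchedulerException {
  scheduler.getContext().put("configScanner", new MyFileScanListener()); // hypothetical listener
  JobDetail job = newJob(FileScanJob.class)
      .withIdentity("configFileScan")
      .usingJobData(FileScanJob.FILE_NAME, "conf/app.properties")
      .usingJobData(FileScanJob.FILE_SCAN_LISTENER_NAME, "configScanner")
      .build();
  Trigger trigger = newTrigger()
      .withSchedule(simpleSchedule().withIntervalInSeconds(60).repeatForever())
      .build();
  scheduler.scheduleJob(job, trigger);
}

Because the job writes LAST_MODIFIED_TIME back into the JobDetail's JobDataMap (the stateful one, per the last comment above), it depends on job data being persisted between executions; in Quartz 2.x the class is annotated @PersistJobDataAfterExecution for exactly this reason.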
 protected void executeInternal(JobExecutionContext context) throws JobExecutionException {
   try {
     SchedulerContext schCtx = context.getScheduler().getContext();
     JobDataMap jdm = context.getJobDetail().getJobDataMap();
     // Get the acquisition source id
     this.acquId = Integer.parseInt((String) jdm.get(CmsTask.TASK_PARAM_ACQU_ID));
     // Get the Spring application context
     ApplicationContext appCtx = (ApplicationContext) schCtx.get("applicationContext");
     this.acquisitionSvc = (AcquisitionSvc) appCtx.getBean("acquisitionSvc");
   } catch (SchedulerException e1) {
     // Fail the job instead of swallowing the exception
     throw new JobExecutionException("Error obtaining scheduler context.", e1, false);
   }
   acquStart();
 }
 protected void executeInternal(JobExecutionContext context) throws JobExecutionException {
   try {
     SchedulerContext schCtx = context.getScheduler().getContext();
     JobDataMap jdm = context.getJobDetail().getJobDataMap();
     // Get the Spring application context
     ApplicationContext appCtx = (ApplicationContext) schCtx.get("applicationContext");
     this.cmsSiteMng = (CmsSiteMng) appCtx.getBean("cmsSiteMng");
     this.staticPageSvc = (StaticPageSvc) appCtx.getBean("staticPageSvc");
     this.sessionFactory = (SessionFactory) appCtx.getBean("sessionFactory");
     this.siteId = Integer.parseInt((String) jdm.get(CmsTask.TASK_PARAM_SITE_ID));
   } catch (SchedulerException e1) {
     // Fail the job instead of swallowing the exception
     throw new JobExecutionException("Error obtaining scheduler context.", e1, false);
   }
   staticIndex();
 }
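
The three CMS jobs above repeat the same parse-the-parameter boilerplate, with the first additionally treating 0 as "no channel". A hypothetical helper that centralizes the blank/zero handling (assuming the same commons-lang StringUtils used above) could look like this:

import org.apache.commons.lang.StringUtils;
import org.quartz.JobDataMap;

// Hypothetical helper: parse an optional integer task parameter, returning
// null when the value is blank or 0 ("not set"), as the channel logic above does.
private Integer parseOptionalId(JobDataMap jdm, String key) {
  String raw = (String) jdm.get(key);
  if (StringUtils.isBlank(raw)) {
    return null;
  }
  int id = Integer.parseInt(raw);
  return id == 0 ? null : id;
}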
 public void execute(JobExecutionContext context) throws JobExecutionException {
   // The RAM index is lazy loaded so that application startup is not blocked.
   // The index can also be interrupted if the application needs to shutdown
   // before the index is loaded.
   SchedulerContext schedulerContext = null;
   try {
     schedulerContext = context.getScheduler().getContext();
     ApplicationPrefs prefs = (ApplicationPrefs) schedulerContext.get("ApplicationPrefs");
     ServletContext servletContext = (ServletContext) schedulerContext.get("ServletContext");
     // @todo remove the servlet context and use the indexer
     if (servletContext != null) {
       IIndexerService indexer = IndexerFactory.getInstance().getIndexerService();
       if (indexer == null) {
          throw new JobExecutionException("Indexer Configuration error: No indexer defined.");
       }
       // Determine the database connection to use
       Connection db = null;
       ConnectionPool commonCP =
           (ConnectionPool) servletContext.getAttribute(Constants.CONNECTION_POOL);
       ConnectionElement ce = new ConnectionElement();
       ce.setDriver(prefs.get(ApplicationPrefs.CONNECTION_DRIVER));
       ce.setUrl(prefs.get(ApplicationPrefs.CONNECTION_URL));
       ce.setUsername(prefs.get(ApplicationPrefs.CONNECTION_USER));
       ce.setPassword(prefs.get(ApplicationPrefs.CONNECTION_PASSWORD));
       // Setup the directory index
       indexerContext = new IndexerContext(prefs);
       indexerContext.setIndexType(Constants.INDEXER_DIRECTORY);
       try {
         db = commonCP.getConnection(ce, true);
         indexer.initializeData(indexerContext, db);
       } catch (Exception e) {
         LOG.error("Could not load RAM index", e);
       } finally {
         commonCP.free(db);
       }
       // Tell the indexer it's ok to create other writers now
       servletContext.setAttribute(Constants.DIRECTORY_INDEX_INITIALIZED, "true");
     }
    } catch (Exception e) {
      LOG.error("Directory index job failed", e);
      throw new JobExecutionException(e);
   }
 }
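
This job in turn assumes that ApplicationPrefs and the ServletContext were put into the SchedulerContext when the webapp started. A minimal sketch of that startup wiring (the method name and surrounding listener are assumptions):

import javax.servlet.ServletContext;
import org.quartz.Scheduler;
import org.quartz.SchedulerException;
import org.quartz.impl.StdSchedulerFactory;

// Minimal sketch: make the prefs and servlet context visible to scheduled
// jobs under the same keys the indexer job reads them back with.
void initScheduler(ApplicationPrefs prefs, ServletContext servletContext)
    throws SchedulerException {
  Scheduler scheduler = StdSchedulerFactory.getDefaultScheduler();
  scheduler.getContext().put("ApplicationPrefs", prefs);
  scheduler.getContext().put("ServletContext", servletContext);
  scheduler.start();
}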
  private void createAndInitScheduler() throws SchedulerException {
    LOG.info("Create and initializing scheduler.");
    scheduler = createScheduler();

    // Store CamelContext into QuartzContext space
    SchedulerContext quartzContext = scheduler.getContext();
    String camelContextName = getCamelContext().getManagementName();
    LOG.debug("Storing camelContextName={} into Quartz Context space.", camelContextName);
    quartzContext.put(
        QuartzConstants.QUARTZ_CAMEL_CONTEXT + "-" + camelContextName, getCamelContext());

    // Initialize the Camel job count to zero. This prevents a premature shutdown when
    // multiple Camel contexts share the scheduler: only the last context to bring the
    // count back to zero should eventually shut it down.
    AtomicInteger number =
        (AtomicInteger) quartzContext.get(QuartzConstants.QUARTZ_CAMEL_JOBS_COUNT);
    if (number == null) {
      number = new AtomicInteger(0);
      quartzContext.put(QuartzConstants.QUARTZ_CAMEL_JOBS_COUNT, number);
    }
  }
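
On the job side, the stored CamelContext is read back with the same composite key. A minimal sketch of that lookup (mirroring the key built in createAndInitScheduler above; error handling elided):

import org.apache.camel.CamelContext;
import org.quartz.JobExecutionContext;
import org.quartz.SchedulerException;

// Minimal sketch: resolve the CamelContext that createAndInitScheduler stored
// under QUARTZ_CAMEL_CONTEXT + "-" + camelContextName.
CamelContext lookupCamelContext(JobExecutionContext context, String camelContextName)
    throws SchedulerException {
  return (CamelContext) context.getScheduler().getContext()
      .get(QuartzConstants.QUARTZ_CAMEL_CONTEXT + "-" + camelContextName);
}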