public static void scheduleJobAction(PortletRequest request, String action)
    throws SchedulerException, ParseException {

    // Checking all the rows to see which are selected

    String rowSelection;

    for (int i = 0;
            (rowSelection = request.getParameter(PARAMETER_JOB_SELECTED + i)) != null;
            i++) {

        boolean rowSelected = GetterUtil.get(rowSelection, false);

        if (rowSelected) {
            String jobName = ParamUtil.getString(request, PARAMETER_JOB_NAME + i);
            String groupName = ParamUtil.getString(request, PARAMETER_JOB_GROUP + i);
            String storageTypeText = ParamUtil.getString(
                request, PARAMETER_STORAGE_TYPE + i);

            StorageType storageType = StorageType.valueOf(storageTypeText);

            // Log debug messages

            if (_log.isDebugEnabled()) {
                _log.debug(
                    String.format(
                        LOG_JOB_FORMAT, action, LOG_ACTION_MSG, jobName, groupName,
                        storageType));
            }

            if (action.equals(ACTION_PAUSE)) {
                SchedulerEngineHelperUtil.pause(jobName, groupName, storageType);
            }
            else if (action.equals(ACTION_RESUME)) {
                SchedulerEngineHelperUtil.resume(jobName, groupName, storageType);
            }
            else if (action.equals(ACTION_RUN)) {
                runScheduledJob(jobName, groupName);
            }
        }
    }
}
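A minimal sketch of how scheduleJobAction might be wired up from a portlet action handler. The surrounding method and the "cmd" request parameter are assumptions for illustration, not part of the class above; the constants are the same ACTION_* values used there.

// Hypothetical caller: read the requested command from the action request
// and delegate to scheduleJobAction. The parameter name "cmd" is assumed.
public void processAction(ActionRequest actionRequest, ActionResponse actionResponse)
    throws Exception {

    String cmd = ParamUtil.getString(actionRequest, "cmd");

    if (cmd.equals(ACTION_PAUSE) || cmd.equals(ACTION_RESUME) ||
        cmd.equals(ACTION_RUN)) {

        // ActionRequest extends PortletRequest, so it can be passed directly
        scheduleJobAction(actionRequest, cmd);
    }
}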
@Override
public SchedulerEntry addingService(
    ServiceReference<SchedulerEntry> serviceReference) {

    Registry registry = RegistryUtil.getRegistry();

    SchedulerEntry schedulerEntry = registry.getService(serviceReference);

    // Default to MEMORY_CLUSTERED unless the entry declares its own storage type

    StorageType storageType = StorageType.MEMORY_CLUSTERED;

    if (schedulerEntry instanceof StorageTypeAware) {
        StorageTypeAware storageTypeAware = (StorageTypeAware)schedulerEntry;

        storageType = storageTypeAware.getStorageType();
    }

    addTrigger(schedulerEntry, serviceReference);

    String portletId = (String)serviceReference.getProperty("javax.portlet.name");

    try {
        SchedulerEngineHelperUtil.schedule(schedulerEntry, storageType, portletId, 0);

        return schedulerEntry;
    }
    catch (SchedulerException e) {
        _log.error(e, e);
    }

    return null;
}
public static void scheduleJobServiceAction(String action)
    throws SchedulerException {

    if (action.equals(ACTION_SHUTDOWN)) {
        _log.info(LOG_SHUTDOWN_ACTION_MSG);

        SchedulerEngineHelperUtil.shutdown();
    }
}
public static void getSchedulerJobs(PortletRequest request)
    throws SchedulerException {

    // Scheduler list

    List<SchedulerResponse> schedulerJobs =
        SchedulerEngineHelperUtil.getScheduledJobs();

    List<SchedulerJobBean> schedulerJobBeans = getSchedulerJobsList(schedulerJobs);

    request.setAttribute(ATTRIBUTE_JOBS_LIST, schedulerJobBeans);
    request.setAttribute(ATTRIBUTE_COUNT, schedulerJobBeans.size());
}
public static SchedulerJobBean getSchedulerJob(SchedulerResponse schedulerResponse) {
    TriggerState triggerState = SchedulerEngineHelperUtil.getJobState(
        schedulerResponse);

    Date startTime = SchedulerEngineHelperUtil.getStartTime(schedulerResponse);
    Date endTime = SchedulerEngineHelperUtil.getEndTime(schedulerResponse);
    Date previousFireTime = SchedulerEngineHelperUtil.getPreviousFireTime(
        schedulerResponse);
    Date nextFireTime = SchedulerEngineHelperUtil.getNextFireTime(schedulerResponse);

    StorageType storageType = schedulerResponse.getStorageType();

    SchedulerJobBean schedulerJobBean = new SchedulerJobBeanImpl();

    schedulerJobBean.setJobName(schedulerResponse.getJobName());
    schedulerJobBean.setGroupName(schedulerResponse.getGroupName());
    schedulerJobBean.setTriggerState(
        (triggerState == null) ? SchedulerJobBean.NULL_VALUE_DISPLAY :
            triggerState.toString());
    schedulerJobBean.setStartTime(startTime);
    schedulerJobBean.setEndTime(endTime);
    schedulerJobBean.setPreviousFireTime(previousFireTime);
    schedulerJobBean.setNextFireTime(nextFireTime);
    schedulerJobBean.setStorageType(
        (storageType == null) ? SchedulerJobBean.NULL_VALUE_DISPLAY :
            storageType.toString().trim());

    return schedulerJobBean;
}
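For completeness, here is a minimal sketch of what the getSchedulerJobsList helper referenced in getSchedulerJobs might look like, assuming it simply maps each SchedulerResponse through getSchedulerJob above; the actual implementation may differ.

// Hypothetical sketch of the getSchedulerJobsList helper used by
// getSchedulerJobs: convert each SchedulerResponse into a SchedulerJobBean.
private static List<SchedulerJobBean> getSchedulerJobsList(
    List<SchedulerResponse> schedulerResponses) {

    List<SchedulerJobBean> schedulerJobBeans =
        new ArrayList<SchedulerJobBean>(schedulerResponses.size());

    for (SchedulerResponse schedulerResponse : schedulerResponses) {
        schedulerJobBeans.add(getSchedulerJob(schedulerResponse));
    }

    return schedulerJobBeans;
}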
private ClusterSchedulerEngine _getClusterSchedulerEngine(
        boolean master, int memoryClusterJobs, int persistentJobs)
    throws Exception {

    MockSchedulerEngine mockSchedulerEngine = new MockSchedulerEngine(
        memoryClusterJobs, persistentJobs);

    MockClusterExecutor mockClusterExecutor = new MockClusterExecutor(
        true, mockSchedulerEngine);

    ClusterExecutorUtil clusterExecutorUtil = new ClusterExecutorUtil();

    clusterExecutorUtil.setClusterExecutor(mockClusterExecutor);

    ClusterSchedulerEngine clusterSchedulerEngine = new ClusterSchedulerEngine(
        mockSchedulerEngine);

    // Hold the cluster master lock with either the local node's address or
    // another node's address, depending on whether this engine should behave
    // as the master

    Address masterAddress = null;

    if (master) {
        masterAddress = ClusterExecutorUtil.getLocalClusterNodeAddress();
    }
    else {
        masterAddress = MockClusterExecutor._anotherAddress;
    }

    MockLockLocalService.setLock(AddressSerializerUtil.serialize(masterAddress));

    SchedulerEngineHelperImpl schedulerEngineHelperImpl =
        new SchedulerEngineHelperImpl();

    schedulerEngineHelperImpl.setSchedulerEngine(clusterSchedulerEngine);

    SchedulerEngineHelperUtil schedulerEngineHelperUtil =
        new SchedulerEngineHelperUtil();

    schedulerEngineHelperUtil.setSchedulerEngineHelper(schedulerEngineHelperImpl);

    clusterSchedulerEngine.initialize();
    clusterSchedulerEngine.start();

    return clusterSchedulerEngine;
}
@Override
public void removedService(
    ServiceReference<SchedulerEntry> serviceReference,
    SchedulerEntry schedulerEntry) {

    Registry registry = RegistryUtil.getRegistry();

    registry.ungetService(serviceReference);

    // Resolve the same storage type that was used when the entry was scheduled

    StorageType storageType = StorageType.MEMORY_CLUSTERED;

    if (schedulerEntry instanceof StorageTypeAware) {
        StorageTypeAware storageTypeAware = (StorageTypeAware)schedulerEntry;

        storageType = storageTypeAware.getStorageType();
    }

    try {
        SchedulerEngineHelperUtil.unschedule(schedulerEntry, storageType);
    }
    catch (SchedulerException e) {
        _log.error(e, e);
    }
}
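The two tracker callbacks above cover registration and removal. A minimal sketch of the remaining ServiceTrackerCustomizer callback, modifiedService, is shown below; re-registering the entry on modification is an assumption for illustration, not necessarily how the original class handles it.

// Hypothetical sketch of the third ServiceTrackerCustomizer callback.
// One simple strategy is to drop the old registration and schedule the
// entry again so its trigger and storage type are re-resolved.
@Override
public void modifiedService(
    ServiceReference<SchedulerEntry> serviceReference,
    SchedulerEntry schedulerEntry) {

    removedService(serviceReference, schedulerEntry);

    addingService(serviceReference);
}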
protected void doReceive(Message message) throws Exception {
    long kaleoTimerInstanceTokenId = message.getLong("kaleoTimerInstanceTokenId");

    try {
        KaleoTimerInstanceToken kaleoTimerInstanceToken = getKaleoTimerInstanceToken(
            message);

        Map<String, Serializable> workflowContext = WorkflowContextUtil.convert(
            kaleoTimerInstanceToken.getWorkflowContext());

        ServiceContext serviceContext = (ServiceContext)workflowContext.get(
            WorkflowConstants.CONTEXT_SERVICE_CONTEXT);

        _workflowEngine.executeTimerWorkflowInstance(
            kaleoTimerInstanceTokenId, serviceContext, workflowContext);
    }
    catch (Exception e) {
        if (_log.isWarnEnabled()) {
            _log.warn(
                "Unable to execute scheduled job. Unregistering job " + message, e);
        }

        // Remove the persisted job so a failing timer is not fired again

        String groupName = SchedulerUtil.getGroupName(kaleoTimerInstanceTokenId);

        SchedulerEngineHelperUtil.delete(groupName, StorageType.PERSISTED);
    }
}
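The getKaleoTimerInstanceToken helper is not shown above. A plausible sketch, assuming the token id carried in the message maps directly to a Service Builder lookup, could look like the following; the exact service call and exception handling are assumptions about the real helper.

// Hypothetical sketch of the getKaleoTimerInstanceToken helper used in
// doReceive: resolve the token entity from the id carried in the message.
protected KaleoTimerInstanceToken getKaleoTimerInstanceToken(Message message)
    throws Exception {

    long kaleoTimerInstanceTokenId = message.getLong("kaleoTimerInstanceTokenId");

    return KaleoTimerInstanceTokenLocalServiceUtil.getKaleoTimerInstanceToken(
        kaleoTimerInstanceTokenId);
}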
protected void doRun(String[] ids) throws Exception {

    // Print release information

    System.out.println("Starting " + ReleaseInfo.getReleaseInfo());

    // Portal resiliency

    DistributedRegistry.registerDistributed(
        ComponentConstants.COMPONENT_CONTEXT, Direction.DUPLEX, MatchType.POSTFIX);
    DistributedRegistry.registerDistributed(
        MimeResponse.MARKUP_HEAD_ELEMENT, Direction.DUPLEX, MatchType.EXACT);
    DistributedRegistry.registerDistributed(
        PortletRequest.LIFECYCLE_PHASE, Direction.DUPLEX, MatchType.EXACT);
    DistributedRegistry.registerDistributed(WebKeys.class);

    Intraband intraband = MPIHelperUtil.getIntraband();

    intraband.registerDatagramReceiveHandler(
        SystemDataType.MAILBOX.getValue(), new MailboxDatagramReceiveHandler());

    MessageBus messageBus = (MessageBus)PortalBeanLocatorUtil.locate(
        MessageBus.class.getName());

    intraband.registerDatagramReceiveHandler(
        SystemDataType.MESSAGE.getValue(),
        new MessageDatagramReceiveHandler(messageBus));

    intraband.registerDatagramReceiveHandler(
        SystemDataType.PROXY.getValue(), new IntrabandProxyDatagramReceiveHandler());

    intraband.registerDatagramReceiveHandler(
        SystemDataType.RPC.getValue(), new RPCDatagramReceiveHandler());

    // Shutdown hook

    if (_log.isDebugEnabled()) {
        _log.debug("Add shutdown hook");
    }

    Runtime runtime = Runtime.getRuntime();

    runtime.addShutdownHook(new Thread(new ShutdownHook()));

    // Template manager

    if (_log.isDebugEnabled()) {
        _log.debug("Initialize template manager");
    }

    TemplateManagerUtil.init();

    // Indexers

    IndexerRegistryUtil.register(new MBMessageIndexer());
    IndexerRegistryUtil.register(new PluginPackageIndexer());

    // Upgrade

    if (_log.isDebugEnabled()) {
        _log.debug("Upgrade database");
    }

    DBUpgrader.upgrade();

    // Clear locks

    if (_log.isDebugEnabled()) {
        _log.debug("Clear locks");
    }

    try {
        LockLocalServiceUtil.clear();
    }
    catch (Exception e) {
        if (_log.isWarnEnabled()) {
            _log.warn("Unable to clear locks because Lock table does not exist");
        }
    }

    // Messaging

    if (_log.isDebugEnabled()) {
        _log.debug("Initialize message bus");
    }

    MessageSender messageSender = (MessageSender)PortalBeanLocatorUtil.locate(
        MessageSender.class.getName());
    SynchronousMessageSender synchronousMessageSender =
        (SynchronousMessageSender)PortalBeanLocatorUtil.locate(
            SynchronousMessageSender.class.getName());

    MessageBusUtil.init(
        DoPrivilegedUtil.wrap(messageBus), DoPrivilegedUtil.wrap(messageSender),
        DoPrivilegedUtil.wrap(synchronousMessageSender));

    // Cluster executor

    ClusterExecutorUtil.initialize();

    if (!SPIUtil.isSPI()) {
        ClusterMasterExecutorUtil.initialize();
    }

    // Ehcache bootstrap

    EhcacheStreamBootstrapCacheLoader.start();

    // Scheduler

    if (_log.isDebugEnabled()) {
        _log.debug("Initialize scheduler engine lifecycle");
    }

    SchedulerEngineHelperUtil.initialize();

    // Verify

    if (_log.isDebugEnabled()) {
        _log.debug("Verify database");
    }

    DBUpgrader.verify();

    // Background tasks

    if (!ClusterMasterExecutorUtil.isEnabled()) {
        BackgroundTaskLocalServiceUtil.cleanUpBackgroundTasks();
    }

    // Liferay JspFactory

    JspFactorySwapper.swap();

    // Jericho

    CachedLoggerProvider.install();
}