/** @see org.imirsel.nema.flowservice.JobScheduler#abortJob(org.imirsel.nema.model.Job) */ public void abortJob(Job job) { MeandreServerProxy executingServer = findExecutingServer(job); try { executingServer.abortJob(job); } catch (MeandreServerException e) { // TODO Perhaps do something more intelligent here throw new RuntimeException(e); } }
/**
 * Locate the worker server that is currently executing the given {@link Job}.
 *
 * <p>The job records the host/port of the server it was dispatched to; the worker whose
 * configuration matches both is the executing server.
 *
 * @param job The {@link Job} whose executing server is sought.
 * @return The {@link MeandreServerProxy} currently executing the job, or <code>null</code> if no
 *     known server is executing it.
 */
private MeandreServerProxy findExecutingServer(Job job) {
  MeandreServerProxy match = null;
  workersLock.lock();
  try {
    for (MeandreServerProxy candidate : workers) {
      // A worker matches when both its configured host and port equal the job's.
      if (job.getHost().equals(candidate.getMeandreServerProxyConfig().getHost())
          && job.getPort().equals(candidate.getMeandreServerProxyConfig().getPort())) {
        match = candidate;
        break;
      }
    }
  } finally {
    // Always release the workers lock, even if a getter above throws.
    workersLock.unlock();
  }
  return match;
}
/** @see FlowService#removeFlow(Flow) */
@Override
public boolean removeFlow(String flowUri) {
  try {
    // Delegate straight to the head server; its result is the caller's result.
    return headServer.removeFlow(flowUri);
  } catch (MeandreServerException e) {
    // Fixed typo "occured" -> "occurred" to match the messages in the sibling service methods.
    throw new ServiceException("A problem occurred while trying to remove flow: " + flowUri, e);
  }
}
/**
 * Fetches the console output for the given job from the head server.
 *
 * @see FlowService#getConsole(Job)
 */
@Override
public String getConsole(Job job) {
  try {
    // The head server identifies the run by its execution instance id, not the job id.
    return headServer.getConsole(job.getExecutionInstanceId());
  } catch (MeandreServerException e) {
    throw new ServiceException("Could not retrieve the console for job " + job.getId(), e);
  }
}
/**
 * Retrieves the components of the given flow from the head server.
 *
 * @see FlowService#getComponents(Credentials,Flow)
 */
@Override
public List<Component> getComponents(Credentials credentials, String flowUri) {
  try {
    return headServer.getComponents(credentials, flowUri);
  } catch (MeandreServerException e) {
    throw new ServiceException(
        "A problem occurred while retrieving " + "components for flow: " + flowUri, e);
  }
}
/**
 * Retrieves the property data types of one component of a flow from the head server.
 *
 * @see FlowService#getComponentPropertyDataType(Credentials,Component, Flow)
 */
@Override
public Map<String, Property> getComponentPropertyDataType(
    Credentials credentials, Component component, String flowUri) {
  try {
    return headServer.getComponentPropertyDataType(credentials, component, flowUri);
  } catch (MeandreServerException e) {
    throw new ServiceException(
        "A problem occurred while retrieving " + "component data types for flow: " + flowUri, e);
  }
}
/**
 * Retrieves, from the head server, every component of the given flow together with the list of
 * property data types for each component.
 *
 * @param credentials Credentials passed through to the head server for the lookup.
 * @param flowUri URI of the flow whose components and property data types are requested.
 * @return A map from each {@link Component} of the flow to its list of {@link Property} entries.
 * @throws ServiceException if the head server reports a {@link MeandreServerException}.
 */
@Override
public Map<Component, List<Property>> getAllComponentsAndPropertyDataTypes(
    Credentials credentials, String flowUri) {
  Map<Component, List<Property>> componentPropertyDataTypes = null;
  try {
    componentPropertyDataTypes =
        headServer.getAllComponentsAndPropertyDataTypes(credentials, flowUri);
  } catch (MeandreServerException e) {
    throw new ServiceException(
        "A problem occurred while retrieving " + "component data types for flow: " + flowUri, e);
  }
  return componentPropertyDataTypes;
}
/**
 * Reacts to a configuration change: swaps in a new head server proxy if the head configuration
 * changed, then pushes the latest worker configurations to the job scheduler.
 *
 * @see ConfigChangeListener#configChanged()
 */
@Override
public void configChanged() {
  logger.info("Received configuration change notification.");
  if (headServer.getConfig().equals(flowServiceConfig.getHeadConfig())) {
    logger.info("Head server has not changed.");
  } else {
    MeandreServerProxy replacement;
    try {
      replacement =
          meandreServerProxyFactory.getServerProxyInstance(flowServiceConfig.getHeadConfig(), true);
    } catch (MeandreServerException e) {
      throw new RuntimeException("Could not instantiate head server.", e);
    }
    // Only release the old proxy once the replacement has been created successfully.
    meandreServerProxyFactory.release(headServer);
    headServer = replacement;
    logger.info(
        "Head server configuration has changed. New head " + "server is " + headServer.toString());
  }
  // Worker configurations are forwarded unconditionally, whether or not the head changed.
  jobScheduler.setWorkerConfigs(flowServiceConfig.getWorkerConfigs());
}
/**
 * Creates a new flow: asks the head server to materialize the flow definition, reads the
 * produced definition back as bytes, stores it in the artifact (content repository) service,
 * rewrites the flow's URI to point at the stored copy, and persists the flow instance.
 *
 * @see FlowService#createNewFlow(Credentials, Flow, HashMap, String, long)
 */
@Override
public Flow createNewFlow(
    Credentials credentials, Flow flow, HashMap<String, String> paramMap, String flowUri,
    long userId) {
  String result = null;
  try {
    // The head server creates the flow; the returned value is read back from disk below,
    // so it is presumably a local file path/URI — TODO confirm against createFlow's contract.
    result = headServer.createFlow(credentials, paramMap, flowUri, userId);
    byte[] flowContent = readFileAsBytes(result);
    // NOTE(review): these validations use `assert`, which is a no-op unless the JVM runs
    // with -ea; with assertions disabled, a null here surfaces later as an NPE instead.
    assert getArtifactService() != null : "Artifact service is null.";
    assert flow != null : "Flow is null.";
    assert credentials != null : "Credentials are null.";
    assert flowContent != null : "Flowcontent is null.";
    // Store the flow bytes under a fresh random id in the content repository.
    String id = UUID.randomUUID().toString();
    ResourcePath resourcePath =
        getArtifactService().saveFlow((SimpleCredentials) credentials, flow, id, flowContent);
    // Rebuild the flow's URI from the repository location: protocol:workspace://path.
    String uri =
        resourcePath.getProtocol() + ":" + resourcePath.getWorkspace() + "://"
            + resourcePath.getPath();
    flow.setUri(uri);
    storeFlowInstance(flow);
  } catch (MeandreServerException e) {
    throw new ServiceException("Could not create flow: " + flowUri, e);
  } catch (ContentRepositoryServiceException e) {
    throw new ServiceException("Could not create flow: " + flowUri, e);
  } catch (IOException e) {
    throw new ServiceException("Could not create flow: " + flowUri, e);
  }
  return flow;
}
/**
 * Reports the current status of the head server.
 *
 * @see FlowService#getHeadStatus()
 */
@Override
public MeandreServerProxyStatus getHeadStatus() {
  MeandreServerProxyStatus status = headServer.getStatus();
  return status;
}
/**
 * Reports the configuration of the head server.
 *
 * @see FlowService#getHeadConfig()
 */
@Override
public MeandreServerProxyConfig getHeadConfig() {
  MeandreServerProxyConfig config = headServer.getConfig();
  return config;
}
/**
 * Attempt to run any queued jobs.
 *
 * <p>Under both the queue lock and the workers lock, repeatedly takes the head of the job queue,
 * marks it submitted (persisted in its own transaction), dispatches it to the next available
 * worker, and records the execution response. A job is only removed from the queue once its
 * execution response has been persisted, or once it has exhausted {@code MAX_EXECUTION_TRIES}
 * attempts and is marked failed. Returns early when the queue is empty, when no worker is
 * available, or when any persistence step fails.
 *
 * <p>NOTE(review): lock ordering is queueLock then workersLock; the matching unlocks happen in
 * the finally block below. The lock() calls sit inside the try, so if the second lock() were to
 * throw, the finally would attempt to unlock a lock that was never acquired — presumably
 * acceptable here, but worth confirming.
 */
public void runJobs() {
  Session session = null;
  JobDao jobDao = daoFactory.getJobDao();
  try {
    queueLock.lock();
    workersLock.lock();
    if (jobQueue.size() < 1) {
      logger.fine("No queued jobs.");
      return;
    }
    // Open a Hibernate session and hand it to the DAO for the duration of this run.
    session = jobDao.getSessionFactory().openSession();
    jobDao.startManagedSession(session);
    while (!jobQueue.isEmpty()) {
      logger.fine("Found " + jobQueue.size() + " queued jobs.");
      MeandreServerProxy server = loadBalancer.nextAvailableServer();
      if (server == null) {
        // No capacity right now; leave the queue intact and let the next poll retry.
        logger.info("All servers are busy. Will try again in " + POLL_PERIOD + " seconds.");
        return;
      }
      logger.fine("Server " + server + " is available for processing.");
      // Peek (not remove): the job stays queued until its dispatch is fully recorded.
      Job job = jobQueue.peek();
      job.incrementNumTries();
      job.setJobStatus(JobStatus.SUBMITTED);
      job.setSubmitTimestamp(new Date());
      logger.fine("Preparing to update job " + job.getId() + " as submitted.");
      // NOTE(review): beginTransaction() already starts the transaction, so the extra
      // transaction.begin() call (here and in the two blocks below) looks redundant — confirm
      // against the Hibernate version in use.
      Transaction transaction = session.beginTransaction();
      transaction.begin();
      try {
        jobDao.makePersistent(job);
        transaction.commit();
        logger.fine("Job " + job.getId() + " updated.");
      } catch (HibernateException e) {
        logger.warning("Data access exception: " + e.getMessage());
        rollback(transaction);
        return;
      } catch (DataAccessException e) {
        logger.warning("Data access exception: " + e.getMessage());
        rollback(transaction);
        return;
      } catch (Exception e) {
        logger.warning(e.getMessage());
        rollback(transaction);
        return;
      }
      try {
        logger.fine("Attempting to contact server " + server + " to execute job.");
        ExecResponse response = server.executeJob(job);
        logger.fine("Execution response received.");
        // If executeJob() succeeded, the Meandre server has (most likely) changed the job
        // status to "started". If the status changes, the NEMA probe running on the Meandre
        // server will have written the new status to the same database this service uses.
        // Refresh the job here to pick up that change; otherwise the old status (submitted)
        // would be rewritten to the database and the "started" state lost.
        session.refresh(job);
        logger.fine("Attempting to record job execution response.");
        // Record where the job is running so it can be found/aborted later.
        job.setHost(server.getMeandreServerProxyConfig().getHost());
        job.setPort(server.getMeandreServerProxyConfig().getPort());
        job.setExecPort(response.getPort());
        job.setExecutionInstanceId(response.getUri());
        transaction = session.beginTransaction();
        transaction.begin();
        try {
          jobDao.makePersistent(job);
          transaction.commit();
          logger.fine("Job execution response recorded in database.");
          // Dispatch fully recorded: now it is safe to drop the job from the queue.
          jobQueue.remove();
        } catch (HibernateException e) {
          logger.warning("Data access exception: " + e.getMessage());
          rollback(transaction);
          return;
        } catch (DataAccessException e) {
          logger.warning("Data access exception: " + e.getMessage());
          rollback(transaction);
          return;
        } catch (Exception e) {
          logger.warning(e.getMessage());
          rollback(transaction);
          return;
        }
      } catch (MeandreServerException serverException) {
        // Dispatch failed: revert the job to "scheduled" so a later run retries it...
        logger.warning(serverException.getMessage());
        job.setSubmitTimestamp(null);
        job.setJobStatus(JobStatus.SCHEDULED);
        if (job.getNumTries() == MAX_EXECUTION_TRIES) {
          // ...unless it has used up all its tries, in which case it is marked failed and
          // removed from the queue for good.
          logger.info(
              "Unsuccessfully tried " + MAX_EXECUTION_TRIES + " times to execute job "
                  + job.getId() + ". Will mark job as failed.");
          job.setJobStatus(JobStatus.FAILED);
          job.setEndTimestamp(new Date());
          job.setUpdateTimestamp(new Date());
          jobQueue.remove();
        }
        transaction = session.beginTransaction();
        transaction.begin();
        try {
          jobDao.makePersistent(job);
          transaction.commit();
        } catch (HibernateException e) {
          logger.warning("Data access exception: " + e.getMessage());
          rollback(transaction);
          return;
        } catch (DataAccessException e) {
          logger.warning("Data access exception: " + e.getMessage());
          rollback(transaction);
          return;
        } catch (Exception e) {
          logger.warning(e.getMessage());
          rollback(transaction);
          return;
        }
      }
    }
  } catch (Exception e) {
    // NOTE(review): this broad catch swallows unexpected errors with only a stack trace to
    // stderr; consider routing through `logger` like the handlers above.
    e.printStackTrace();
  } finally {
    // Unlock in reverse acquisition order, then tear down the managed session.
    workersLock.unlock();
    queueLock.unlock();
    if (session != null) {
      try {
        jobDao.endManagedSession();
        session.close();
      } catch (HibernateException e) {
        logger.warning(e.getMessage());
      }
    }
  }
}