    public void statusChanged(StatusEvent e) {
        Task t = (Task) e.getSource();
        try {
            if (logger.isDebugEnabled()) {
                logger.debug("Got task status change for " + t.getIdentity());
            }
            LinkedList cluster = null;
            synchronized (tasks) {
                cluster = (LinkedList) tasks.get(t);
            }
            if (cluster == null) {
                // Not a cluster task; let the parent handler deal with it.
                super.statusChanged(e);
            }
            else {
                if (logger.isDebugEnabled()) {
                    logger.debug("Got cluster status change for " + t.getIdentity());
                }
                Status clusterMemberStatus = e.getStatus();
                if (clusterMemberStatus.getStatusCode() == Status.FAILED) {
                    // A failure of the cluster job is not propagated as a failure of
                    // each member; members are reported as completed, presumably so
                    // that per-job outcomes can be determined separately.
                    clusterMemberStatus = new StatusImpl(Status.COMPLETED);
                }
                // Fan the (possibly adjusted) status out to every task in the cluster.
                Iterator i = cluster.iterator();
                while (i.hasNext()) {
                    Object[] h = (Object[]) i.next();
                    Task ct = (Task) h[0];
                    StatusEvent nse = new StatusEvent(ct, clusterMemberStatus);
                    ct.setStatus(clusterMemberStatus);
                    fireJobStatusChangeEvent(nse);
                }
                if (e.getStatus().isTerminal()) {
                    if (logger.isInfoEnabled()) {
                        logger.info("Removing cluster " + t.getIdentity());
                    }
                    synchronized (tasks) {
                        tasks.remove(t);
                    }
                }
            }
        }
        catch (Exception ex) {
            failTask(t, ex.getMessage(), ex);
        }
    }
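    /*
     * Overview (added note, inferred from the code in this class): tasks entering
     * enqueue() that qualify for clustering are parked in the delay queue (dq);
     * processDelayQueue() later combines compatible entries into a single synthetic
     * "cluster-N" job; statusChanged() above fans the cluster job's status back out
     * to the member tasks and drops the cluster mapping once the status is terminal.
     */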
    public void enqueue(Task task, Object constraints) {
        if (shouldBeClustered(task, constraints)) {
            startTimer();
            if (logger.isDebugEnabled()) {
                logger.debug("Adding task to clustering queue: " + task.getIdentity());
            }
            synchronized (dq) {
                dq.addLast(new Object[] { task, constraints });
            }
        }
        else {
            super.enqueue(task, constraints);
        }
    }
    protected void failTask(Task t, String message, Exception e) {
        if (logger.isDebugEnabled()) {
            logger.debug("Failing task " + t.getIdentity());
        }
        LinkedList cluster = null;
        synchronized (tasks) {
            cluster = (LinkedList) tasks.get(t);
        }
        if (cluster != null) {
            // Failing a cluster fails each of its member tasks.
            Iterator i = cluster.iterator();
            while (i.hasNext()) {
                Object[] h = (Object[]) i.next();
                super.failTask((Task) h[0], message, e);
            }
        }
        else {
            super.failTask(t, message, e);
        }
    }
    private boolean shouldBeClustered(Task task, Object constraints) {
        if (!clusteringEnabled) {
            return false;
        }
        String reason = null;
        try {
            if (task.getType() != Task.JOB_SUBMISSION) {
                reason = "not a job";
                return false;
            }
            if (((JobSpecification) task.getSpecification()).getAttribute("maxwalltime") == null) {
                reason = "no maxwalltime";
                return false;
            }
            if (!(constraints instanceof Contact[])) {
                reason = "weird constraints";
                return false;
            }
            if (((Contact[]) constraints).length != 1) {
                reason = "constraints size != 1";
                return false;
            }
            // Only jobs shorter than the minimum cluster time are worth batching.
            boolean cluster = getMaxWallTime(task) < minClusterTime;
            if (!cluster) {
                reason = "not short enough";
            }
            return cluster;
        }
        finally {
            if (reason != null) {
                if (logger.isDebugEnabled()) {
                    logger.debug("Task is not suitable for clustering (" + reason + ") "
                            + task.getIdentity());
                }
            }
        }
    }
    /*
     * TODO Add maxmemory=max(maxmemory), minmemory=max(minmemory) and all other
     * similar attributes
     */
    private void processDelayQueue() {
        if (logger.isDebugEnabled()) {
            logger.debug("Processing clustering queue");
        }
        synchronized (dq) {
            while (!dq.isEmpty()) {
                int clusterTime = 0;
                LinkedList cluster = new LinkedList();
                Map env = new HashMap();
                Map attrs = new HashMap();
                Object constraints = null;
                String dir = null;
                Iterator dqi = dq.iterator();
                // Greedily pull compatible jobs from the delay queue until the
                // combined wall time reaches the minimum cluster time. Jobs must
                // share the same contact and directory and must not conflict in
                // environment or attributes.
                while (clusterTime < minClusterTime && dqi.hasNext()) {
                    Object[] h = (Object[]) dqi.next();
                    Task task = (Task) h[0];
                    JobSpecification js = (JobSpecification) task.getSpecification();
                    if (constraints == null) {
                        constraints = ((Object[]) h[1])[0];
                    }
                    else if (!constraints.equals(((Object[]) h[1])[0])) {
                        continue;
                    }
                    if (dir == null) {
                        dir = js.getDirectory() == null ? "" : js.getDirectory();
                    }
                    else if ((js.getDirectory() != null || !dir.equals(""))
                            && !dir.equals(js.getDirectory())) {
                        continue;
                    }
                    if (detectConflict(js, env, attrs)) {
                        continue;
                    }
                    else {
                        dqi.remove();
                    }
                    merge(js, env, attrs);
                    clusterTime += getMaxWallTime(task);
                    cluster.addLast(h);
                }
                if (logger.isDebugEnabled()) {
                    logger.debug("Got a cluster with size " + cluster.size());
                }
                if (cluster.size() == 0) {
                    continue;
                }
                else if (cluster.size() == 1) {
                    // A cluster of one is submitted as a plain job.
                    Object[] h = (Object[]) cluster.removeFirst();
                    super.enqueue((Task) h[0], h[1]);
                }
                else if (cluster.size() > 1) {
                    // Build a synthetic job that runs the member jobs via the
                    // shared/_swiftseq wrapper script.
                    Task t = new TaskImpl();
                    int thisClusterId = clusterId++;
                    t.setIdentity(new IdentityImpl("cluster-" + thisClusterId));
                    t.setType(Task.JOB_SUBMISSION);
                    t.setRequiredService(1);

                    JobSpecification js = new JobSpecificationImpl();
                    t.setSpecification(js);
                    js.setExecutable("/bin/sh");
                    js.addArgument("shared/_swiftseq");
                    js.addArgument("cluster-" + thisClusterId);
                    js.addArgument("/clusters/");
                    // slice path more here TODO
                    js.setDirectory(dir);
                    js.setAttribute("maxwalltime", secondsToTime(clusterTime));

                    if (logger.isInfoEnabled()) {
                        logger.info("Creating cluster " + t.getIdentity() + " with size "
                                + cluster.size());
                    }

                    Iterator i = cluster.iterator();
                    while (i.hasNext()) {
                        Object[] h = (Object[]) i.next();
                        Task st = (Task) h[0];
                        if (logger.isInfoEnabled()) {
                            logger.info("Task " + st.getIdentity() + " clustered in "
                                    + t.getIdentity());
                        }
                        JobSpecification sjs = (JobSpecification) st.getSpecification();
                        js.addArgument(sjs.getExecutable());
                        List args = sjs.getArgumentsAsList();
                        Iterator j = args.iterator();
                        while (j.hasNext()) {
                            String arg = (String) j.next();
                            // Escape member arguments that consist of the separator itself.
                            if (arg.equals("|")) {
                                arg = "||";
                            }
                            js.addArgument(arg);
                        }
                        // Member jobs are separated by a literal "|" argument.
                        js.addArgument("|");
                    }

                    // Apply the merged environment and attributes to the cluster job.
                    i = env.entrySet().iterator();
                    while (i.hasNext()) {
                        Map.Entry e = (Map.Entry) i.next();
                        js.addEnvironmentVariable((String) e.getKey(), (String) e.getValue());
                    }
                    i = attrs.entrySet().iterator();
                    while (i.hasNext()) {
                        Map.Entry e = (Map.Entry) i.next();
                        js.setAttribute((String) e.getKey(), (String) e.getValue());
                    }

                    synchronized (tasks) {
                        tasks.put(t, cluster);
                    }
                    super.enqueue(t, new Contact[] { (Contact) constraints });
                }
            }
        }
    }
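    /*
     * Illustration (hypothetical example, not from the original source): a cluster
     * built from two jobs, "/bin/echo hello" and "/usr/bin/wc -l data.txt", with
     * cluster id 7, would carry roughly this argument list:
     *
     *     /bin/sh shared/_swiftseq cluster-7 /clusters/
     *         /bin/echo hello |
     *         /usr/bin/wc -l data.txt |
     *
     * Each member's executable and arguments are appended in order and terminated
     * by a literal "|" argument; a member argument that is itself "|" is passed as
     * "||". The executables and cluster id above are made up for the example.
     */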
    private void submitJob(CoasterChannel channel, Task task, String configId)
            throws ProtocolException {
        NotificationManager.getDefault().registerListener(task.getIdentity(), task, this);
        // Declared locally here; the original assigned to an undeclared "jsc",
        // which would not compile unless it is a field elsewhere in the class.
        SubmitJobCommand jsc = new SubmitJobCommand(task, configId);
        jsc.executeAsync(channel, this);
    }