public Item confluence(List inItems, List itemKeys, Term node, FlowGraph graph) {
    // If any predecessor is reachable, so is this one, and if any
    // predecessor is normally reachable and the edge key is not an
    // exception edge key, then so is this one.
    List l = this.filterItemsNonException(inItems, itemKeys);

    for (Iterator i = l.iterator(); i.hasNext(); ) {
        if (i.next() == DataFlowItem.REACHABLE) {
            // This term is reachable via a non-exception edge.
            return DataFlowItem.REACHABLE;
        }
    }

    // If we fall through to here, then there were no non-exception edges
    // that were normally reachable. We now need to determine whether this
    // node is reachable via an exception edge key, or not reachable at all.
    for (Iterator i = inItems.iterator(); i.hasNext(); ) {
        if (((DataFlowItem) i.next()).reachable) {
            // This term is reachable, but only through an exception edge.
            return DataFlowItem.REACHABLE_EX_ONLY;
        }
    }

    return DataFlowItem.NOT_REACHABLE;
}
/** Run all pending passes on <code>job</code>. */
public boolean runAllPasses(Job job) {
    List pending = job.pendingPasses();

    // Run until there are no more passes.
    if (!pending.isEmpty()) {
        Pass lastPass = (Pass) pending.get(pending.size() - 1);
        return runToPass(job, lastPass);
    }

    return true;
}
private Node translateSJOutwhile(SJOutwhile outwhile, QQ qq) {
    String unique = UniqueID.newID("loopCond");
    boolean isInterruptible = outwhile.isInterruptible();
    BooleanLit interruptible = new BooleanLit_c(outwhile.position(), isInterruptible);

    // RAY
    List targets = outwhile.targets();
    String translation;
    Object[] mapping;

    if (targets.size() == 1) {
        if (isInterruptible) {
            translation =
                "{ sessionj.runtime.net.LoopCondition %s = "
                    + "sessionj.runtime.net.SJRuntime.negotiateOutsync(%E, %s);"
                    + " while (%s.call(%E)) %S }";
            mapping =
                new Object[] {
                    unique,
                    interruptible,
                    ((Local) targets.get(0)).name(),
                    unique,
                    outwhile.cond(),
                    outwhile.body()
                };
        } else {
            translation =
                "{ sessionj.runtime.net.SJRuntime.negotiateOutsync(%E, %s);"
                    + " while (sessionj.runtime.net.SJRuntime.outsync(%E, %s)) %S }";
            String sockName = ((Local) targets.get(0)).name();
            mapping =
                new Object[] {interruptible, sockName, outwhile.cond(), sockName, outwhile.body()};
        }
    } else {
        Expr sockArray = buildNewArray(outwhile.position(), targets);
        translation =
            "{ sessionj.runtime.net.LoopCondition %s = "
                + "sessionj.runtime.net.SJRuntime.negotiateOutsync(%E, %E);"
                + " while (%s.call(%E)) %S }";
        mapping =
            new Object[] {unique, interruptible, sockArray, unique, outwhile.cond(), outwhile.body()};

        /*BooleanLit interruptible = new BooleanLit_c(outwhile.position(), outwhile.isInterruptible());

        return qq.parseStmt(
            "{ sessionj.runtime.net.LoopCondition %s = "
                + "sessionj.runtime.net.SJRuntime.negotiateOutsync(%E, %E);"
                + " while (%s.call(%E)) %S }",
            unique, interruptible, sockArray, unique, outwhile.cond(), outwhile.body());*/
    }

    return qq.parseStmt(translation, mapping);
    // YAR
}
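Since every translation in this file funnels through quasiquote templates, a compact reminder of the placeholder convention may help when reading the mapping arrays. The sketch below is illustrative and not taken from the SessionJ sources; the placeholder meanings, however, match the templates above (%s splices a string such as a variable name, %E an expression, %S a statement).

import polyglot.ast.Expr;
import polyglot.ast.Stmt;
import polyglot.qq.QQ;

// A minimal sketch (not from the SessionJ sources) of the quasiquote
// convention the translations above rely on: %s splices a String, %E an
// Expr, and %S a Stmt, with values taken positionally from the mapping.
class QQConventionSketch {
    Stmt buildWhile(QQ qq, Expr cond, Stmt body) {
        // Same mechanism as the templates above, but with a trivial
        // statement so the placeholder-to-argument pairing is obvious.
        return qq.parseStmt("while (%E) %S", new Object[] {cond, body});
    }
}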
// FIXME: does not integrate with recursive session method calls: recursionEnter/Exit and also
// recurse do not match the control flow of recursive calls, and hence runtime type monitoring
// does not work.
private Node translateSJRecursion(SJRecursion r, QQ qq)
// recursionEnter inserted by node factory, but translation is finished here..
{
    SJSessionOperationExt soe = getSJSessionOperationExt(r);
    Position pos = r.position();

    Collection<Object> mapping = new LinkedList<Object>();

    String bname = getRecursionBooleanName(soe.targetNames(), r.label());

    mapping.add(bname);
    mapping.add(bname);

    String translation = "for (boolean %s = true; %s; ) { }";
    For f = (For) qq.parseStmt(translation, mapping.toArray());

    mapping.clear();

    r = (SJRecursion) r.inits(f.inits());
    r = (SJRecursion) r.cond(f.cond());

    List stmts = new LinkedList();
    stmts.addAll(r.body().statements());

    translation = "%s = %E;";
    mapping.add(bname);
    mapping.add(((Eval) stmts.remove(0)).expr()); // Factor out constant.

    Eval e = (Eval) qq.parseStmt(translation, mapping.toArray());

    stmts.add(0, e);

    r = (SJRecursion) r.body(sjnf.Block(pos, stmts));

    /*// Appending the recursion-exit hook.
    // Disabled to support delegation from within recursion scopes (socket will be null on recursion-exit).
    List<Local> targets = new LinkedList<Local>(); // FIXME: should be SJLocalSockets.

    for (String sjname : soe.targetNames()) // Unicast optimisation for SJRecursionExit is done within the NodeFactory method - this pass comes after SJUnicastOptimiser.
    {
        targets.add(sjnf.Local(pos, sjnf.Id(pos, sjname))); // Would it be bad to instead alias the recursionEnter targets?
    }

    SJRecursionExit re = sjnf.SJRecursionExit(pos, targets); // Problem: the sockets argument array is not yet filled (for other (internal) basic operations, this was done earlier by SJSessionOperationParser)...
    re = (SJRecursionExit) SJVariableParser.parseSJSessionOperation(this, re); // ...Current fix: use those routines from those earlier passes.
    re = (SJRecursionExit) SJSessionOperationParser.fixSJBasicOperationArguments(this, re);*/

    // return sjnf.Block(pos, r, sjnf.Eval(pos, re));
    return sjnf.Block(pos, r);
}
/**
 * The confluence operator for <code>Initializer</code>s and <code>Constructor</code>s needs to be
 * a little special, as we are only concerned with non-exceptional flows in these cases. This
 * method ensures that a slightly different confluence is performed for these <code>Term</code>s;
 * otherwise <code>confluence(List, Term, FlowGraph)</code> is called instead.
 */
protected Item confluence(List items, List itemKeys, Term node, FlowGraph graph) {
    if (node instanceof Initializer || node instanceof ConstructorDecl) {
        List filtered = filterItemsNonException(items, itemKeys);

        if (filtered.isEmpty()) {
            return createInitDFI();
        } else if (filtered.size() == 1) {
            return (Item) filtered.get(0);
        } else {
            return confluence(filtered, node, graph);
        }
    }

    return confluence(items, node, graph);
}
private Node translateSJRecurse(Node parent, SJRecurse r, QQ qq) {
    if (!(parent instanceof Eval)) {
        throw new RuntimeException("[SJCompoundOperationTranslator] Shouldn't get here.");
    }

    String translation = "%s = %E";
    List<Object> mapping = new LinkedList<Object>();

    mapping.add(getRecursionBooleanName(getSJSessionOperationExt(r).targetNames(), r.label()));
    mapping.add(r);

    return qq.parseExpr(translation, mapping.toArray());
}
/**
 * Insert the list of <code>newPasses</code> into <code>passes</code> immediately after the pass
 * named <code>id</code>.
 */
public void afterPass(List passes, Pass.ID id, List newPasses) {
    for (ListIterator i = passes.listIterator(); i.hasNext(); ) {
        Pass p = (Pass) i.next();

        if (p.id() == id) {
            for (Iterator j = newPasses.iterator(); j.hasNext(); ) {
                i.add(j.next());
            }
            return;
        }
    }

    throw new InternalCompilerError("Pass " + id + " not found.");
}
/**
 * The confluence operator is essentially the union of all of the inItems. However, if two or more
 * of the initCount maps from the inItems each have a MinMaxInitCount entry for the same
 * VarInstance, the conflict must be resolved by using the minimum of all mins and the maximum of
 * all maxs.
 */
public Item confluence(List inItems, Term node, FlowGraph graph) {
    // Resolve any conflicts pairwise.
    Iterator iter = inItems.iterator();
    Map m = null;

    while (iter.hasNext()) {
        Item itm = (Item) iter.next();
        if (itm == BOTTOM) continue;

        if (m == null) {
            m = new HashMap(((DataFlowItem) itm).initStatus);
        } else {
            Map n = ((DataFlowItem) itm).initStatus;
            for (Iterator iter2 = n.entrySet().iterator(); iter2.hasNext(); ) {
                Map.Entry entry = (Map.Entry) iter2.next();
                VarInstance v = (VarInstance) entry.getKey();
                MinMaxInitCount initCount1 = (MinMaxInitCount) m.get(v);
                MinMaxInitCount initCount2 = (MinMaxInitCount) entry.getValue();
                m.put(v, MinMaxInitCount.join(initCount1, initCount2));
            }
        }
    }

    if (m == null) return BOTTOM;

    return new DataFlowItem(m);
}
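To make the join concrete, here is a minimal sketch of the min/max resolution the Javadoc describes, under the assumption that a MinMaxInitCount behaves like a simple (min, max) pair of initialization counts. The SimpleCount class below is a stand-in invented for illustration, not the real Polyglot type.

// Illustrative only: a stand-in for MinMaxInitCount.join, assuming each
// count is just a (min, max) pair. The real MinMaxInitCount class is not
// shown in this section.
final class SimpleCount {
    final int min;
    final int max;

    SimpleCount(int min, int max) {
        this.min = min;
        this.max = max;
    }

    // Join two branches: after the merge a variable is at least as
    // initialized as the smaller min, and at most as initialized as the
    // larger max.
    static SimpleCount join(SimpleCount a, SimpleCount b) {
        if (a == null) return b;
        if (b == null) return a;
        return new SimpleCount(Math.min(a.min, b.min), Math.max(a.max, b.max));
    }
}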
private Expr buildNewArray(Position pos, List contents) {
    NewArray na = sjnf.makeSocketsArray(pos, contents.size());
    ArrayInit ai = sjnf.ArrayInit(pos, contents);
    na = na.init(ai).dims(Collections.emptyList()).additionalDims(1);
    return na;
}
/**
 * Inform this <code>Job</code> that pass <code>p</code> has finished. If <code>okay</code> is
 * <code>true</code>, then the pass completed successfully; if it is <code>false</code>, the pass
 * did not complete successfully.
 *
 * <p>Pass <code>p</code> may be any pending pass.
 */
public void finishPass(Pass p, boolean okay) {
    List passes = passes();

    status &= okay;

    for (int i = nextPass; i < passes.size(); i++) {
        Pass pass = (Pass) passes.get(i);

        if (pass == p) {
            nextPass = i + 1;
            return;
        }
    }

    throw new InternalCompilerError("Pass " + p + " was not a pending pass.");
}
/** Flatten complex expressions within the AST. */
public Node leave(Node old, Node n, NodeVisitor v) {
    if (n == noFlatten) {
        noFlatten = null;
        return n;
    }

    if (n instanceof Block) {
        List l = (List) stack.removeFirst();
        return ((Block) n).statements(l);
    } else if (n instanceof Stmt && !(n instanceof LocalDecl)) {
        List l = (List) stack.getFirst();
        l.add(n);
        return n;
    } else if (n instanceof Expr
        && !(n instanceof Lit)
        && !(n instanceof Special)
        && !(n instanceof Local)) {

        Expr e = (Expr) n;

        if (e instanceof Assign) {
            return n;
        }

        // Create a local temp, initialized to the value of the complex expression.
        String name = newID();
        LocalDecl def =
            nf.LocalDecl(
                e.position(),
                Flags.FINAL,
                nf.CanonicalTypeNode(e.position(), e.type()),
                name,
                e);
        def = def.localInstance(ts.localInstance(e.position(), Flags.FINAL, e.type(), name));

        List l = (List) stack.getFirst();
        l.add(def);

        // Return the local temp instead of the complex expression.
        Local use = nf.Local(e.position(), name);
        use = (Local) use.type(e.type());
        use = use.localInstance(ts.localInstance(e.position(), Flags.FINAL, e.type(), name));
        return use;
    }

    return n;
}
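For intuition, the sketch below shows the effect the flattener aims for on ordinary Java source. The temporary names and the f/g methods are invented for this illustration (the real names come from newID()); literals and locals are left in place because the visitor skips Lit, Local, and Special nodes.

// Illustrative only: the intended before/after effect of flattening,
// written as plain Java with invented temp names.
class FlattenSketch {
    int g(int x) { return x + 1; }
    int f(int x) { return x * 2; }

    // Before flattening: one statement with nested expressions.
    int before(int x) {
        return f(g(x) + 1);
    }

    // After flattening: each intermediate value is bound to a final temp.
    int after(int x) {
        final int tmp1 = g(x);
        final int tmp2 = tmp1 + 1;
        final int tmp3 = f(tmp2);
        return tmp3;
    }
}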
/** Write the statement to an output file. */
public void prettyPrint(CodeWriter w, PrettyPrinter tr) {
    w.write("for (");
    w.begin(0);

    if (inits != null) {
        boolean first = true;
        for (Iterator i = inits.iterator(); i.hasNext(); ) {
            ForInit s = (ForInit) i.next();
            printForInit(s, w, tr, first);
            first = false;

            if (i.hasNext()) {
                w.write(",");
                w.allowBreak(2, " ");
            }
        }
    }

    w.write(";");
    w.allowBreak(0);

    if (cond != null) {
        printBlock(cond, w, tr);
    }

    w.write(";");
    w.allowBreak(0);

    if (iters != null) {
        for (Iterator i = iters.iterator(); i.hasNext(); ) {
            ForUpdate s = (ForUpdate) i.next();
            printForUpdate(s, w, tr);

            if (i.hasNext()) {
                w.write(",");
                w.allowBreak(2, " ");
            }
        }
    }

    w.end();
    w.write(")");

    printSubStmt(body, w, tr);
}
/**
 * Insert the list of <code>newPasses</code> into <code>passes</code> immediately before the pass
 * named <code>id</code>.
 */
public void beforePass(List passes, Pass.ID id, List newPasses) {
    for (ListIterator i = passes.listIterator(); i.hasNext(); ) {
        Pass p = (Pass) i.next();

        if (p.id() == id) {
            // Back up one position.
            i.previous();

            for (Iterator j = newPasses.iterator(); j.hasNext(); ) {
                i.add(j.next());
            }
            return;
        }
    }

    throw new InternalCompilerError("Pass " + id + " not found.");
}
private Node translateSJInwhile(SJInwhile inwhile, QQ qq) {
    List targets = inwhile.targets();

    /*return qq.parseStmt(
        "{ sessionj.runtime.net.SJRuntime.negotiateNormalInwhile(%E);"
            + " while (sessionj.runtime.net.SJRuntime.insync(%E)) %S }",
        sockArray, sockArray, inwhile.body());*/

    // RAY
    String translation;
    Object[] mapping;

    String target = ((SJInwhile_c) inwhile).arguments().get(0).toString(); // <By MQ>

    if (targets.size() == 1) {
        translation =
            "{ sessionj.runtime.net.SJRuntime.negotiateNormalInwhile(" + target + ", %s);" // <By MQ>
                + " while (sessionj.runtime.net.SJRuntime.insync(" + target + ", %s)) %S }"; // <By MQ>
        // + "\n" + "%s.flush();"; // <By MQ> to flush sends after inwhile

        String sockName = ((Local) targets.get(0)).name();
        return qq.parseStmt(
            translation, new Object[] {sockName, sockName, inwhile.body() /*, sockName*/}); // <By MQ>
    } else {
        Expr sockArray = buildNewArray(inwhile.position(), targets);
        String tmpVarName = SJConstants.SJ_TMP_LOCAL + INWHILE_VAR + (inwhileCounter++);

        translation =
            "{ sessionj.runtime.net.SJSocket[] %s = %E;"
                + " sessionj.runtime.net.SJRuntime.negotiateNormalInwhile(" + target + ", %s);" // <By MQ>
                + " while (sessionj.runtime.net.SJRuntime.insync(" + target + ", %s)) %S }"; // <By MQ>

        mapping = new Object[] {tmpVarName, sockArray, tmpVarName, tmpVarName, inwhile.body()};
    }

    return qq.parseStmt(translation, mapping);
    // YAR
}
/**
 * Replace the pass named <code>id</code> in <code>passes</code> with the list of
 * <code>newPasses</code>.
 */
public void replacePass(List passes, Pass.ID id, List newPasses) {
    for (ListIterator i = passes.listIterator(); i.hasNext(); ) {
        Pass p = (Pass) i.next();

        if (p.id() == id) {
            if (p instanceof BarrierPass) {
                throw new InternalCompilerError("Cannot replace a barrier pass.");
            }

            i.remove();

            for (Iterator j = newPasses.iterator(); j.hasNext(); ) {
                i.add(j.next());
            }
            return;
        }
    }

    throw new InternalCompilerError("Pass " + id + " not found.");
}
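For context on how afterPass, beforePass, and replacePass are typically used: an extension can override its pass-list construction and splice custom passes around the base schedule. The sketch below is an assumption-laden illustration, not code from this section; Pass.TYPE_CHECK is assumed to be one of the Pass.ID constants available in this Polyglot version, and extraCheckPasses/extraPrepPasses are hypothetical helpers returning lists of custom passes.

// Hypothetical sketch: assumed to live in the same class as the helpers
// above, so afterPass/beforePass are in scope. Pass.TYPE_CHECK and the
// extra*Passes helpers are illustrative assumptions, not real API from
// this section.
public List passes(Job job) {
    List passes = super.passes(job);

    // Run extension-specific checks immediately after base type checking.
    afterPass(passes, Pass.TYPE_CHECK, extraCheckPasses(job));

    // Ensure a preparatory pass has run before type checking begins.
    beforePass(passes, Pass.TYPE_CHECK, extraPrepPasses(job));

    return passes;
}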
/**
 * Get the sub-list of passes for the job between passes <code>begin</code> and <code>end</code>,
 * inclusive.
 */
public List passes(Job job, Pass.ID begin, Pass.ID end) {
    List l = passes(job);
    Pass p = null;

    Iterator i = l.iterator();

    // Remove the passes before begin, but keep barrier passes.
    while (i.hasNext()) {
        p = (Pass) i.next();

        if (begin == p.id()) break;
        if (!(p instanceof BarrierPass)) i.remove();
    }

    // Skip forward until end is reached.
    while (p.id() != end && i.hasNext()) {
        p = (Pass) i.next();
    }

    // Remove the passes after end.
    while (i.hasNext()) {
        p = (Pass) i.next();
        i.remove();
    }

    return l;
}
/** Type check the statement. */
public Node typeCheck(TypeChecker tc) throws SemanticException {
    TypeSystem ts = tc.typeSystem();

    // Check that all initializers have the same type.
    // This should be enforced by the parser, but check again here,
    // just to be sure.
    Type t = null;

    for (Iterator i = inits.iterator(); i.hasNext(); ) {
        ForInit s = (ForInit) i.next();

        if (s instanceof LocalDecl) {
            LocalDecl d = (LocalDecl) s;
            Type dt = d.type().type();

            if (t == null) {
                t = dt;
            } else if (!t.equals(dt)) {
                throw new InternalCompilerError(
                    "Local variable declarations in a for loop initializer must all "
                        + "be the same type, in this case " + t + ", not " + dt + ".",
                    d.position());
            }
        }
    }

    if (cond != null && !ts.isImplicitCastValid(cond.type(), ts.Boolean())) {
        throw new SemanticException(
            "The condition of a for statement must have boolean type.", cond.position());
    }

    return this;
}
private Node translateSJOutinwhile(SJOutInwhile outinwhile, QQ qq) {
    List sources = outinwhile.insyncSources();
    List targets = outinwhile.outsyncTargets();

    String loopCond = UniqueID.newID("loopCond");
    String peerInterruptible = UniqueID.newID("peerInterruptible");

    List<Object> subst = new LinkedList<Object>();
    String code;

    // FIXME: this should be better factored. Sources and targets should be treated separately
    // here, and this should also integrate better with the same "optimisations" in the
    // translation of in/outwhile.
    if (sources.size() == 1 && targets.size() == 1) {
        String sourceName = ((Local) sources.get(0)).name();
        String targetName = ((Local) targets.get(0)).name();

        code = "{ sessionj.runtime.net.SJRuntime.negotiateOutsync(false, %s); ";
        subst.add(targetName);

        if (outinwhile.hasCondition()) {
            /*code += "boolean %s = ";
            subst.add(peerInterruptible);*/
            throw new RuntimeException("[SJCompoundOperation] TODO.");
        }

        code += "sessionj.runtime.net.SJRuntime.";

        if (outinwhile.hasCondition()) {
            // code += "negotiateInterruptingInwhile"
            throw new RuntimeException("[SJCompoundOperation] TODO.");
        } else {
            code += "negotiateNormalInwhile";
        }

        code += "(%s); while(sessionj.runtime.net.SJRuntime.outsync(";
        subst.add(sourceName);

        if (outinwhile.hasCondition()) {
            /*code += "interruptingInsync(%E, %s, %E)";
            subst.add(outinwhile.cond());
            subst.add(peerInterruptible);
            subst.add(sourcesArray);*/
            throw new RuntimeException("[SJCompoundOperation] TODO.");
        } else {
            code += "sessionj.runtime.net.SJRuntime.insync(%s)";
            subst.add(sourceName);
        }

        code += ", %s)) %S }";
        code += "\n" + "%s.flush();"; // <By MQ> to flush sends after outwhile

        subst.add(targetName);
        subst.add(outinwhile.body());
        subst.add(targetName); // <By MQ>
    } else {
        Expr sourcesArray = buildNewArray(outinwhile.position(), sources); // inwhile sockets
        Expr targetsArray = buildNewArray(outinwhile.position(), targets); // outwhile sockets

        subst = new LinkedList<Object>(Arrays.asList(loopCond, targetsArray));

        code =
            "{ sessionj.runtime.net.LoopCondition %s = "
                + "sessionj.runtime.net.SJRuntime.negotiateOutsync(false, %E); ";

        if (outinwhile.hasCondition()) {
            code += "boolean %s = ";
            subst.add(peerInterruptible);
        }

        code += "sessionj.runtime.net.SJRuntime.";
        code += outinwhile.hasCondition() ? "negotiateInterruptingInwhile" : "negotiateNormalInwhile";
        code += "(%E); while(%s.call(sessionj.runtime.net.SJRuntime.";
        subst.add(sourcesArray);
        subst.add(loopCond);

        if (outinwhile.hasCondition()) {
            code += "interruptingInsync(%E, %s, %E)";
            subst.add(outinwhile.cond());
            subst.add(peerInterruptible);
            subst.add(sourcesArray);
        } else {
            code += "insync(%E)";
            subst.add(sourcesArray);
        }

        code += ")) %S }";
        subst.add(outinwhile.body());
    }

    return qq.parseStmt(code, subst);
}
/**
 * Before running <code>Pass pass</code> on <code>SourceJob job</code>, make sure that all
 * appropriate scheduling invariants are satisfied, to ensure that all passes of other jobs that
 * <code>job</code> depends on will have already been done.
 */
protected void enforceInvariants(Job job, Pass pass) throws CyclicDependencyException {
    SourceJob srcJob = job.sourceJob();
    if (srcJob == null) {
        return;
    }

    BarrierPass lastBarrier = srcJob.lastBarrier();
    if (lastBarrier != null) {
        // Make sure that _all_ dependent jobs have completed at least up to
        // the last barrier (not just children).
        //
        // Ideally the invariant should be that only the source jobs that
        // job _depends on_ should be brought up to the last barrier.
        // This is work to be done in the future...
        List allDependentSrcs = new ArrayList(srcJob.dependencies());
        Iterator i = allDependentSrcs.iterator();

        while (i.hasNext()) {
            Source s = (Source) i.next();
            Object o = jobs.get(s);
            if (o == COMPLETED_JOB) continue;
            if (o == null) {
                throw new InternalCompilerError("Unknown source " + s);
            }

            SourceJob sj = (SourceJob) o;
            if (sj.pending(lastBarrier.id())) {
                // Make the job run up to the last barrier.
                // We ignore the return result, since even if the job
                // fails, we will keep on going and see how far we get...
                if (Report.should_report(Report.frontend, 3)) {
                    Report.report(3, "Running " + sj + " to " + lastBarrier.id() + " for " + srcJob);
                }
                runToPass(sj, lastBarrier.id());
            }
        }
    }

    if (pass instanceof GlobalBarrierPass) {
        // Need to make sure that _all_ jobs have completed just up to
        // this global barrier.
        //
        // If we hit a cyclic dependency, ignore it and run the other
        // jobs up to that pass. Then try again to run the cyclic
        // pass. If we hit the cycle again for the same job, stop.
        LinkedList barrierWorklist = new LinkedList(jobs.values());

        while (!barrierWorklist.isEmpty()) {
            Object o = barrierWorklist.removeFirst();
            if (o == COMPLETED_JOB) continue;

            SourceJob sj = (SourceJob) o;
            if (sj.completed(pass.id()) || sj.nextPass() == sj.passByID(pass.id())) {
                // The source job has either done this global pass
                // (which is possible if the job was loaded late in the
                // game), or is right up to the global barrier.
                continue;
            }

            // Make the job run up to just before the global barrier.
            // We ignore the return result, since even if the job
            // fails, we will keep on going and see how far we get...
            Pass beforeGlobal = sj.getPreviousTo(pass.id());

            if (Report.should_report(Report.frontend, 3)) {
                Report.report(3, "Running " + sj + " to " + beforeGlobal.id() + " for " + srcJob);
            }

            // Don't use runToPass, since that catches the
            // CyclicDependencyException that we should report
            // back to the caller.
            while (!sj.pendingPasses().isEmpty()) {
                Pass p = (Pass) sj.pendingPasses().get(0);
                runPass(sj, p);

                if (p == beforeGlobal) {
                    break;
                }
            }
        }
    }
}