/**
 * This routine is called to split blocks during register allocation. At this point we know that
 * the block was legal before the spill stores and loads were inserted. For every hyperblock with
 * spills that is no longer legal, we pick its largest predicate block and split that in half. One
 * half is then placed into a new hyperblock.
 *
 * <p>The case that was causing trouble results from the classic diamond-shaped predicate flow
 * graph:
 *
 * <pre>
 *      a
 *     / \
 *    b   c
 *     \ /
 *      d
 * </pre>
 *
 * where predicate block <code>a</code> contains most of the instructions, blocks <code>b</code>
 * and <code>c</code> each contain a predicated branch, and block <code>d</code> is empty. Because
 * of spilling, the hyperblock was too big, but all of its predicate blocks were legal. As a
 * result we attempted to split block <code>d</code>, which did no good.
 *
 * @param hbStart is the entry hyperblock of the hyperblock flow graph
 * @param blocks is a list of hyperblocks that have had spills inserted
 * @return true if a block was split
 */
public final boolean splitBlocksWithSpills(Hyperblock hbStart, Vector<Hyperblock> blocks) {
  // If all the blocks with spills are legal we are done.
  boolean illegal = false;
  int bs = blocks.size();

  for (int i = 0; i < bs; i++) {
    Hyperblock hb = blocks.get(i);
    if (!hb.isLegalBlock(true)) {
      illegal = true;
    }
  }

  if (!illegal) {
    return false;
  }

  // Remove any spill code because the register allocator will run again,
  // and remove any blocks which are legal from the set to split.
  for (int i = bs - 1; i > -1; i--) {
    Hyperblock hb = blocks.get(i);
    if (hb.isLegalBlock(true)) {
      blocks.remove(i);
    }
    hb.removeSpillCode();
    workingSet.add(hb);
  }

  // Split hyperblocks that have violations.
  int bl = blocks.size();
  for (int i = bl - 1; i > -1; i--) {
    Hyperblock hb = blocks.remove(i);
    splitHyperblock(hb);
  }

  // The register allocator and block splitter are going to run again.
  // Analyze all the hyperblocks that were created or changed.
  DataflowAnalysis df = new DataflowAnalysis(hbStart, regs);
  df.computeLiveness3();

  int sl = workingSet.size();
  for (int i = sl - 1; i > -1; i--) {
    Hyperblock hb = workingSet.remove(i);
    hb.enterSSA();
    hb.analyzeLeaveSSA();
  }

  return true;
}
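// A minimal sketch of how the register allocator might drive the routine above.
// The names 'allocateRegisters' and 'spilled' are hypothetical and only
// illustrate the retry loop implied by splitBlocksWithSpills(): spill code is
// stripped, the offending hyperblocks are split, and allocation runs again.
//
//   Vector<Hyperblock> spilled = allocateRegisters(hbStart);  // blocks given spill code
//   while (splitBlocksWithSpills(hbStart, spilled)) {
//     spilled = allocateRegisters(hbStart);                   // allocate again after splitting
//   }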
/**
 * Find a split point for a block with spills. <br>
 * We know the hyperblock was legal before the insertion of spill code. Return the "deepest"
 * split point in the predicate flow graph, or if there are no split points, the block with the
 * most instructions.
 */
private PredicateBlock findSplitPointSpills(Hyperblock hb) {
  Vector<PredicateBlock> wl = new Vector<PredicateBlock>();
  PredicateBlock start = hb.getFirstBlock();
  PredicateBlock last = hb.getLastBlock();
  PredicateBlock biggest = null;
  PredicateBlock splitPoint = null;
  int biggestSize = 0;

  if (start.numOutEdges() == 0) {
    return start;
  }

  start.nextVisit();
  start.setVisited();
  wl.add(start);

  // Find the block with the most "real" instructions.
  // We do not include fanout, nulls, or spill code.
  while (!wl.isEmpty()) {
    int sl = wl.size();
    for (int i = 0; i < sl; i++) {
      PredicateBlock block = wl.get(i);
      int size = 0;

      for (Instruction inst = block.getFirstInstruction(); inst != null; inst = inst.getNext()) {
        size++; // TODO? Should we use the real size of the instruction?
      }

      if (size >= biggestSize) {
        if (block != last) {
          biggestSize = size;
          biggest = block;
        }
      }

      if (block.isSplitPoint() && (block != start)) {
        splitPoint = block;
      }
    }

    wl = hb.getNextPFGLevel(wl);
  }

  return (splitPoint != null) ? splitPoint : biggest;
}
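// Worked example of the selection rule above, using the diamond PFG described in
// splitBlocksWithSpills() (purely illustrative):
//
//        a        With spill code removed, 'a' holds most of the real
//       / \       instructions, 'b' and 'c' hold predicated branches, and the
//      b   c      exit block 'd' is empty.  If 'b' (or 'c') is marked as a
//       \ /       split point it is returned, since the deepest split point
//        d        wins; otherwise 'a' is returned as the block with the most
//                 instructions (the exit block is never chosen as 'biggest').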
/** The main routine for block splitting. */
public final void split(Hyperblock hbStart) {
  Stack<Node> wl = WorkArea.<Node>getStack("split");
  DataflowAnalysis df = new DataflowAnalysis(hbStart, regs);
  int trips = 0;

  // Add all the hyperblocks to the working set.
  hbStart.nextVisit();
  hbStart.setVisited();
  wl.push(hbStart);

  while (!wl.isEmpty()) {
    Hyperblock hb = (Hyperblock) wl.pop();
    hb.pushOutEdges(wl);
    workingSet.add(hb);
  }

  // Split the hyperblocks.
  while (!workingSet.isEmpty()) {
    df.computeLiveness3();
    wl.addAll(workingSet);

    while (!wl.isEmpty()) {
      Hyperblock hb = (Hyperblock) wl.pop();
      hb.enterSSA();
      hb.analyzeLeaveSSA();

      if (!hb.isLegalBlock(true)) {
        splitHyperblock(hb);
      } else {
        workingSet.remove(hb);
      }
    }

    if ((++trips % WARN_SPLIT_ATTEMPTS) == 0) {
      System.err.println(
          "** Warning: the block splitter has run "
              + trips
              + " times for "
              + gen.getCurrentRoutine().getName()
              + "().");
    }
  }

  WorkArea.<Node>returnStack(wl);
}
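// A minimal sketch of where split() sits in the backend, assuming 'splitter' is
// an instance of this class and hyperblock formation has already produced the
// flow graph rooted at 'hbStart'; the surrounding pipeline is hypothetical.
//
//   splitter.split(hbStart);   // iterate until every hyperblock is legal
//   // Register allocation runs next; if spill code makes a hyperblock illegal
//   // again, splitBlocksWithSpills() handles the retry.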
/** Split an unpredicated predicate block. */
private void splitBlock(Hyperblock hb, PredicateBlock block) {
  int chunkSize = (block.getBlockSize() + block.getFanout()) / 2;
  int maxChunk = (int) (Trips2Machine.maxBlockSize * MAXFILL);

  if (chunkSize > maxChunk) {
    chunkSize = maxChunk;
  }

  Instruction splitLocation = findSplitLocation(hb, block, chunkSize);
  PredicateBlock start = block.cut(splitLocation, gen);
  Hyperblock nhb = new Hyperblock(start, regs);

  // Insert the new hyperblock into the HFG.
  for (int i = hb.numOutEdges() - 1; i > -1; i--) {
    Hyperblock out = (Hyperblock) hb.getOutEdge(i);
    out.replaceInEdge(hb, nhb);
    hb.deleteOutEdge(out);
    nhb.addOutEdge(out);
  }

  hb.addOutEdge(nhb);
  nhb.addInEdge(hb);
  workingSet.add(nhb);

  hb.invalidateDomination();
  hb.findLastBlock();
  hb.determinePredicatesBranches();
  nhb.findLastBlock();
  nhb.determinePredicatesBranches();

  // Update the return block if it has changed.
  if (gen.getReturnBlock() == hb) {
    gen.setReturnBlock(nhb);
  }
}
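// Worked example of the size computation in splitBlock(), with illustrative
// numbers only (the real Trips2Machine.maxBlockSize and MAXFILL may differ):
// with maxBlockSize = 128 and MAXFILL = 0.75, a block whose instructions plus
// fanout total 300 gives chunkSize = 150, capped to maxChunk = 96 before the
// cut.  The HFG rewiring then turns
//
//   hb -> {s1, s2}      into      hb -> nhb -> {s1, s2}
//
// so the new hyperblock takes over all of hb's successors.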
/** Reverse if-convert the given predicate block from the hyperblock. */
private void reverseIfConvert(Hyperblock hb, PredicateBlock start) {
  Stack<Node> wl = WorkArea.<Node>getStack("reverseIfConvert");
  Stack<PredicateBlock> reverse = WorkArea.<PredicateBlock>getStack("reverseIfConvert");
  Vector<PredicateBlock> blocks = new Vector<PredicateBlock>();
  Vector<Hyperblock> hbs = new Vector<Hyperblock>();

  // Find the blocks which need to be reverse if-converted.
  start.nextVisit();
  start.setVisited();
  wl.add(start);

  while (!wl.isEmpty()) {
    PredicateBlock block = (PredicateBlock) wl.pop();
    block.pushOutEdges(wl);

    for (int i = 0; i < block.numInEdges(); i++) {
      PredicateBlock pred = (PredicateBlock) block.getInEdge(i);
      if (!pred.visited()) {
        blocks.add(block);
        break;
      } else if (blocks.contains(pred) && block.numInEdges() > 1) {
        blocks.add(block);
        break;
      }
    }
  }

  // Order the blocks to reverse if-convert based on their depth from the root.
  PredicateBlock head = hb.getFirstBlock();
  Vector<PredicateBlock> wl2 = new Vector<PredicateBlock>();

  head.nextVisit();
  head.setVisited();
  wl2.add(head);

  while (!wl2.isEmpty()) {
    int l = wl2.size();
    for (int i = 0; i < l; i++) {
      PredicateBlock block = wl2.get(i);
      if (blocks.contains(block)) {
        blocks.remove(block);
        reverse.push(block);
      }
    }
    wl2 = hb.getNextPFGLevel(wl2);
  }

  // Remove the special "dummy" last block from the PFG.
  PredicateBlock last = hb.getLastBlock();
  assert (last.numOutEdges() == 0 && !last.isPredicated());

  if (last.getFirstInstruction() == null) {
    for (int i = last.numInEdges() - 1; i > -1; i--) {
      PredicateBlock pred = (PredicateBlock) last.getInEdge(i);
      pred.deleteOutEdge(last);
      last.deleteInEdge(pred);
    }
    reverse.remove(last);
  }

  // Reverse if-convert.
  while (!reverse.isEmpty()) {
    PredicateBlock block = reverse.pop();
    Hyperblock hbn = reverseIfConvertBlock(block);
    hbs.add(hbn);
    workingSet.add(hbn);
  }

  // Update the PFG.
  hb.updateLastBlock();
  hb.invalidateDomination(); // The dominators are now invalid.

  // Insert the new hyperblocks in the HFG.
  HashMap<Instruction, Hyperblock> entries = computeEntries(hb, hbs);
  hbs.add(hb);
  Hyperblock.computeHyperblockFlowGraph(hbs, entries);

  // Update the return block. Since 'hbs' is an ordered list, the
  // first element in the list is the hyperblock with the return
  // because this was the original tail of the PFG which was reverse
  // if-converted.
  if (hb == gen.getReturnBlock()) {
    gen.setReturnBlock(hbs.firstElement());
  }

  WorkArea.<Node>returnStack(wl);
  WorkArea.<PredicateBlock>returnStack(reverse);
}
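// Notes on reverseIfConvert().  Reverse if-conversion undoes predication: each
// block handed to reverseIfConvertBlock() is carved out of hb into its own
// hyperblock that is reached through explicit control flow instead of being
// executed under a predicate inside hb.  Details that follow from the code
// above:
//   - blocks are pushed onto 'reverse' in level order from the PFG root, so the
//     deepest blocks are converted first when the stack is popped;
//   - the empty "dummy" exit block is detached from the PFG rather than
//     converted;
//   - if hb held the routine's return, the return moves to hbs.firstElement(),
//     the hyperblock built from the original tail of the PFG.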
/** Find the predicate block in a hyperblock to split. */
private PredicateBlock findSplitPoint(Hyperblock hb) {
  Vector<PredicateBlock> wl = new Vector<PredicateBlock>();
  int totalSize = hb.getFanout() + hb.getBlockSize();
  int splits = (totalSize / Trips2Machine.maxBlockSize) + 1;
  int splitSize = totalSize / splits;
  int hbSize = 0;
  PredicateBlock start = hb.getFirstBlock();
  PredicateBlock lastUnpredicated = null;
  int lastUnpredicatedHBSize = 0;

  assert (hb.numSpills() == 0) : "This method should not be called for blocks with spills.";

  start.nextVisit();
  start.setVisited();
  wl.add(start);

  while (!wl.isEmpty()) {
    int l = wl.size();
    int levelSize = 0;
    int levelLSID = 0;

    // Compute the statistics for this level of the PFG.
    for (int i = 0; i < l; i++) {
      PredicateBlock block = wl.get(i);
      int blockSize = block.getBlockSize() + block.getFanout();
      int id = block.maxLSID();

      levelSize += blockSize;
      if (id > levelLSID) {
        levelLSID = id;
      }

      // Remember the block and the hyperblock size if this block is unpredicated
      // and not the special exit block.
      // TODO - Can we remove the restriction on being the last block now?
      if (!block.isPredicated()) {
        if (block.numOutEdges() > 0) {
          if (lastUnpredicatedHBSize < (blockSize + hbSize)) {
            lastUnpredicatedHBSize = blockSize + hbSize;
            lastUnpredicated = block;
          }
        }
      }
    }

    // Determine if all the blocks can be added to the hyperblock.
    int size = hbSize + levelSize;
    if ((size > Trips2Machine.maxBlockSize) || (levelLSID >= Trips2Machine.maxLSQEntries)) {
      break;
    }

    hbSize = size;
    wl = hb.getNextPFGLevel(wl);
  }

  assert (!wl.isEmpty()) : "This block does not need to be split?";

  // If there is only one unpredicated block in the level and it is not the
  // special exit block, use it.  Also use it if it is the only block in the PFG.
  int l = wl.size();
  if (l == 1) {
    PredicateBlock block = wl.get(0);
    if (!block.isPredicated()) {
      if ((start == block) || (block.numOutEdges() > 0)) {
        // System.out.println("block");
        return block;
      }
    }
  }

  // If there is a last known unpredicated block of adequate size, use it.
  if (lastUnpredicated != null) {
    if (lastUnpredicatedHBSize >= splitSize) {
      // System.out.println("*** last unpred is greater than split sz " + splitSize);
      return lastUnpredicated;
    }
  }

  // Try to find a parent that's unpredicated unless the parent is
  // the first block.
  for (int i = 0; i < l; i++) {
    PredicateBlock block = wl.get(i);
    int pl = block.numInEdges();
    for (int j = 0; j < pl; j++) {
      PredicateBlock pred = (PredicateBlock) block.getInEdge(j);
      if (!pred.isPredicated() && pred.numInEdges() > 1) {
        // System.out.println("unpred parent not start");
        return pred;
      }
    }
  }

  // Reverse if-convert the largest block in this level which is not
  // an exit.  Although this seems like a good idea, there is not
  // always enough room in the hyperblock to fanout the live-outs to
  // the write instructions.  Don't do this for now. -- Aaron
  PredicateBlock candidate = null;
  int largest = 0;

  // for (int i = 0; i < l; i++) {
  //   PredicateBlock block = (PredicateBlock) wl.get(i);
  //   int bsize = block.getBlockSize() + block.getFanout() + block.getSpillSize();
  //   if ((bsize > largest) && (block.numBranches() == 0)) {
  //     largest = bsize;
  //     candidate = block;
  //   }
  // }
  // if (candidate != null) {
  //   // System.out.println("level no exit");
  //   return candidate;
  // }

  // Reverse if-convert a parent which is not start.
  // Prefer parents that are split points.
  for (int i = 0; i < l; i++) {
    PredicateBlock block = wl.get(i);
    int pl = block.numInEdges();
    for (int j = 0; j < pl; j++) {
      PredicateBlock pred = (PredicateBlock) block.getInEdge(j);
      if (pred != start) {
        if (pred.isSplitPoint()) {
          // System.out.println("pred out isSplit not start");
          return pred;
        }
        candidate = pred;
      }
    }
  }

  if (candidate != null) {
    // System.out.println("pred out not start");
    return candidate;
  }

  // Reverse if-convert the largest successor of start without an exit.
  largest = 0;
  for (int i = 0; i < start.numOutEdges(); i++) {
    PredicateBlock block = (PredicateBlock) start.getOutEdge(i);
    int bsize = block.getBlockSize() + block.getFanout() + block.getSpillSize();
    if ((bsize > largest) && !block.hasBranch()) {
      largest = bsize;
      candidate = block;
    }
  }

  if (candidate != null) {
    // System.out.println("start successor no exit");
    return candidate;
  }

  // System.out.println("1st start successor ?");
  return (PredicateBlock) start.getOutEdge(0);
}
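// Summary of the heuristic cascade in findSplitPoint(), in order of preference:
//   1. the single unpredicated, non-exit block at the level where the size or
//      LSQ limit was exceeded (or 'start' itself when it is the whole PFG);
//   2. the best unpredicated, non-exit block seen on the way down, provided the
//      hyperblock up to and including it is at least 'splitSize';
//   3. an unpredicated parent with more than one in-edge (this excludes the
//      first block, which has none);
//   4. a parent that is a split point, then any parent other than 'start';
//   5. the largest successor of 'start' without a branch, then start's first
//      successor (the last cases are candidates for reverse if-conversion).
// For the splitSize arithmetic: with an illustrative totalSize of 300 and a
// maxBlockSize of 128 (the real value comes from Trips2Machine), splits = 3 and
// splitSize = 100, i.e. the splitter aims for roughly equal thirds.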