@SuppressWarnings("try")
  private int addLiveValueToBlock(Value operand, AbstractBlockBase<?> block) {
    try (Indent indent = Debug.logAndIndent("add incoming value!")) {
      int index = -1;
      for (AbstractBlockBase<?> pred : block.getPredecessors()) {
        try (Indent indent1 = Debug.logAndIndent("Add outgoing operand to %s", pred)) {
          BlockData predData = getOrInit(pred);
          int predIndex = predData.addOutgoing(operand);

          if (index == -1) {
            index = predIndex;
          } else {
            assert predIndex == index;
          }

          for (AbstractBlockBase<?> succ : pred.getSuccessors()) {
            Debug.log("Add incoming operand to %s", succ);
            BlockData succData = getOrInit(succ);
            if (!succData.contains(operand)) {
              int succIndex = succData.addIncoming(operand);
              assert succIndex == predIndex;
            }
          }
        }
      }
      Debug.log("new index: %d", index);
      return index;
    }
  }
Example No. 2
 /**
  * Applies {@linkplain LoweringPhase lowering} to a replacement graph.
  *
  * @param replacementGraph a replacement (i.e., snippet or method substitution) graph
  */
 @SuppressWarnings("try")
 protected StructuredGraph lowerReplacement(
     final StructuredGraph replacementGraph, LoweringTool tool) {
   final PhaseContext c =
       new PhaseContext(
           tool.getMetaAccess(),
           tool.getConstantReflection(),
           tool.getConstantFieldProvider(),
           tool.getLowerer(),
           tool.getReplacements(),
           tool.getStampProvider(),
           tool.getNodeCostProvider());
   if (!graph().hasValueProxies()) {
     new RemoveValueProxyPhase().apply(replacementGraph);
   }
   GuardsStage guardsStage = graph().getGuardsStage();
   if (!guardsStage.allowsFloatingGuards()) {
     new GuardLoweringPhase().apply(replacementGraph, null);
     if (guardsStage.areFrameStatesAtDeopts()) {
       new FrameStateAssignmentPhase().apply(replacementGraph);
     }
   }
   try (Scope s = Debug.scope("LoweringSnippetTemplate", replacementGraph)) {
     new LoweringPhase(new CanonicalizerPhase(), tool.getLoweringStage())
         .apply(replacementGraph, c);
   } catch (Throwable e) {
     throw Debug.handle(e);
   }
   return replacementGraph;
 }
  /** Build a new trace starting at {@code block}. */
  @SuppressWarnings("try")
  private Collection<T> startTrace(T block, int traceNumber) {
    ArrayDeque<T> trace = new ArrayDeque<>();
    try (Indent i = Debug.logAndIndent("StartTrace: " + block)) {
      try (Indent indentFront = Debug.logAndIndent("Head:")) {
        for (T currentBlock = block;
            currentBlock != null;
            currentBlock = selectPredecessor(currentBlock)) {
          addBlockToTrace(currentBlock, traceNumber);
          trace.addFirst(currentBlock);
        }
      }
      /* Number head blocks. Cannot do this in the loop above as it walks backwards. */
      int blockNr = 0;
      for (T b : trace) {
        b.setLinearScanNumber(blockNr++);
      }

      try (Indent indentBack = Debug.logAndIndent("Tail:")) {
        for (T currentBlock = selectSuccessor(block);
            currentBlock != null;
            currentBlock = selectSuccessor(currentBlock)) {
          addBlockToTrace(currentBlock, traceNumber);
          trace.addLast(currentBlock);
          /* This time we can number the blocks immediately as we go forwards. */
          currentBlock.setLinearScanNumber(blockNr++);
        }
      }
    }
    Debug.log("Trace: %s", trace);
    return trace;
  }
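For intuition, here is a self-contained sketch of the same bidirectional construction, with a hypothetical Block type and selector functions standing in for selectPredecessor/selectSuccessor (this is not the Graal trace-builder API):

import java.util.ArrayDeque;
import java.util.Deque;
import java.util.function.UnaryOperator;

final class TraceSketch {
  static final class Block {
    final String name;
    int linearScanNumber = -1;
    Block(String name) { this.name = name; }
    @Override public String toString() { return name; }
  }

  static Deque<Block> startTrace(Block block,
      UnaryOperator<Block> pickPredecessor, UnaryOperator<Block> pickSuccessor) {
    Deque<Block> trace = new ArrayDeque<>();
    // Head: grow the trace backwards; blocks are prepended, so number them afterwards.
    for (Block b = block; b != null; b = pickPredecessor.apply(b)) {
      trace.addFirst(b);
    }
    int nr = 0;
    for (Block b : trace) {
      b.linearScanNumber = nr++;
    }
    // Tail: grow forwards; here each block can be numbered immediately.
    for (Block b = pickSuccessor.apply(block); b != null; b = pickSuccessor.apply(b)) {
      b.linearScanNumber = nr++;
      trace.addLast(b);
    }
    return trace;
  }
}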
  @SuppressWarnings("try")
  private void accessRecursive(
      Value operand,
      AbstractBlockBase<?> defBlock,
      AbstractBlockBase<?> block,
      Deque<AbstractBlockBase<?>> worklist) {
    try (Indent indent = Debug.logAndIndent("get operand %s in block %s", operand, block)) {
      if (block.equals(defBlock)) {
        Debug.log("found definition!");
        return;
      }
      BlockData data = getOrInit(block);
      Integer index = data.getIndex(operand);
      if (index != null) {
        // value is live at block begin but might not be initialized
        Value in = data.getIncoming(index);
        if (Value.ILLEGAL.equals(in)) {
          data.setIncoming(index, operand);
          Debug.log("uninitialized incoming value -> initialize!");
        } else {
          Debug.log("incoming value already initialized!");
        }
        return;
      }

      // the value is not yet live at the current block
      int idx = addLiveValueToBlock(operand, block);
      data.setIncoming(idx, operand);

      for (AbstractBlockBase<?> pred : block.getPredecessors()) {
        worklist.addLast(pred);
      }
    }
  }
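A minimal sketch of the worklist-driven back-propagation that accessRecursive and addLiveValueToBlock implement together, using plain maps and sets in place of the BlockData bookkeeping (all names below are hypothetical simplifications):

import java.util.ArrayDeque;
import java.util.Deque;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

final class LivenessSketch {
  /** Marks {@code value} as live-in on every block between its use and its definition. */
  static void makeLive(String value, String defBlock, String useBlock,
      Map<String, List<String>> predecessors, Map<String, Set<String>> liveIn) {
    Deque<String> worklist = new ArrayDeque<>();
    worklist.add(useBlock);
    while (!worklist.isEmpty()) {
      String block = worklist.removeFirst();
      if (block.equals(defBlock)) {
        continue; // reached the definition, stop propagating
      }
      // If the value was already live at this block, its predecessors were queued before.
      if (!liveIn.computeIfAbsent(block, k -> new HashSet<>()).add(value)) {
        continue;
      }
      worklist.addAll(predecessors.getOrDefault(block, List.of()));
    }
  }
}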
Example No. 5
 @SuppressWarnings("try")
 public boolean verify() {
   try (Scope s = Debug.scope("SSAVerifier", lir)) {
     for (AbstractBlockBase<?> block : lir.getControlFlowGraph().getBlocks()) {
       doBlock(block);
     }
   } catch (Throwable e) {
     throw Debug.handle(e);
   }
   return true;
 }
Example No. 6
 @Override
 protected void handleSpillSlot(Interval interval) {
    assert interval.location() != null : "interval not assigned " + interval;
   if (interval.canMaterialize()) {
     assert !isStackSlot(interval.location())
         : "interval can materialize but assigned to a stack slot " + interval;
     return;
   }
   assert isStackSlot(interval.location()) : "interval not assigned to a stack slot " + interval;
   try (Scope s1 = Debug.scope("LSRAOptimization")) {
     Debug.log("adding stack to unhandled list %s", interval);
     unhandledLists.addToListSortedByStartAndUsePositions(RegisterBinding.Stack, interval);
   }
 }
Example No. 7
  private void splitRegisterInterval(Interval interval, Register reg) {
    // collect current usage of registers
    initVarsForAlloc(interval);
    initUseLists(false);
    spillExcludeActiveFixed();
    // spillBlockUnhandledFixed(cur);
    assert unhandledLists.get(RegisterBinding.Fixed) == Interval.EndMarker
        : "must not have unhandled fixed intervals because all fixed intervals have a use at position 0";
    spillBlockInactiveFixed(interval);
    spillCollectActiveAny();
    spillCollectInactiveAny(interval);

    if (Debug.isLogEnabled()) {
      try (Indent indent2 = Debug.logAndIndent("state of registers:")) {
        for (Register register : availableRegs) {
          int i = register.number;
          try (Indent indent3 =
              Debug.logAndIndent(
                  "reg %d: usePos: %d, blockPos: %d, intervals: ", i, usePos[i], blockPos[i])) {
            for (int j = 0; j < spillIntervals[i].size(); j++) {
              Debug.log("%d ", spillIntervals[i].get(j).operandNumber);
            }
          }
        }
      }
    }

    // the register must be free at least until this position
    boolean needSplit = blockPos[reg.number] <= interval.to();

    int splitPos = blockPos[reg.number];

    assert splitPos > 0 : "invalid splitPos";
    assert needSplit || splitPos > interval.from() : "splitting interval at from";

    Debug.log("assigning interval %s to %s", interval, reg);
    interval.assignLocation(reg.asValue(interval.kind()));
    if (needSplit) {
      // register not available for the full interval, so split it
      splitWhenPartialRegisterAvailable(interval, splitPos);
    }

    // perform splitting and spilling for all affected intervals
    splitAndSpillIntersectingIntervals(reg);

    // activate interval
    activeLists.addToListSortedByCurrentFromPositions(RegisterBinding.Any, interval);
    interval.state = State.Active;
  }
Example No. 8
  @SuppressWarnings("try")
  private ScheduleResult getFinalSchedule(
      final String snippet, final TestMode mode, final SchedulingStrategy schedulingStrategy) {
    final StructuredGraph graph = parseEager(snippet, AllowAssumptions.NO);
    try (Scope d = Debug.scope("FloatingReadTest", graph)) {
      try (OverrideScope s =
          OptionValue.override(
              OptScheduleOutOfLoops,
              schedulingStrategy == SchedulingStrategy.LATEST_OUT_OF_LOOPS,
              OptImplicitNullChecks,
              false)) {
        HighTierContext context = getDefaultHighTierContext();
        CanonicalizerPhase canonicalizer = new CanonicalizerPhase();
        canonicalizer.apply(graph, context);
        if (mode == TestMode.INLINED_WITHOUT_FRAMESTATES) {
          new InliningPhase(canonicalizer).apply(graph, context);
        }
        new LoweringPhase(canonicalizer, LoweringTool.StandardLoweringStage.HIGH_TIER)
            .apply(graph, context);
        if (mode == TestMode.WITHOUT_FRAMESTATES || mode == TestMode.INLINED_WITHOUT_FRAMESTATES) {
          graph.clearAllStateAfter();
        }
        Debug.dump(graph, "after removal of framestates");

        new FloatingReadPhase().apply(graph);
        new RemoveValueProxyPhase().apply(graph);

        MidTierContext midContext =
            new MidTierContext(
                getProviders(),
                getTargetProvider(),
                OptimisticOptimizations.ALL,
                graph.getProfilingInfo());
        new GuardLoweringPhase().apply(graph, midContext);
        new LoweringPhase(canonicalizer, LoweringTool.StandardLoweringStage.MID_TIER)
            .apply(graph, midContext);
        new LoweringPhase(canonicalizer, LoweringTool.StandardLoweringStage.LOW_TIER)
            .apply(graph, midContext);

        SchedulePhase schedule = new SchedulePhase(schedulingStrategy);
        schedule.apply(graph);
        assertDeepEquals(1, graph.getNodes().filter(StartNode.class).count());
        return graph.getLastSchedule();
      }
    } catch (Throwable e) {
      throw Debug.handle(e);
    }
  }
 @SuppressWarnings("try")
 private TraceBuilderResult<T> build(T startBlock) {
   try (Indent indent = Debug.logAndIndent("start trace building: " + startBlock)) {
     ArrayList<Trace<T>> traces = buildTraces(startBlock);
     return new TraceBuilderResult<>(traces, blockToTrace);
   }
 }
Example No. 10
 private void test(final String snippet) {
   // No debug scope to reduce console noise for @Test(expected = ...) tests
   StructuredGraph graph = parseEager(snippet, AllowAssumptions.YES);
   Debug.dump(graph, "Graph");
   new CanonicalizerPhase(true).apply(graph, new PhaseContext(getProviders()));
   StructuredGraph referenceGraph = parseEager(REFERENCE_SNIPPET, AllowAssumptions.YES);
   assertEquals(referenceGraph, graph);
 }
Example No. 11
 @SuppressWarnings("unused")
 private static void printRegisterBindingList(RegisterBindingLists list, RegisterBinding binding) {
   for (Interval interval = list.get(binding);
       interval != Interval.EndMarker;
       interval = interval.next) {
     Debug.log("%s", interval);
   }
 }
Example No. 12
  /**
   * Creates a graph for this stub.
   *
   * <p>If the stub returns an object, the graph created corresponds to this pseudo code:
   *
   * <pre>
   *     Object foreignFunctionStub(args...) {
   *         foreignFunction(currentThread,  args);
   *         if (clearPendingException(thread())) {
   *             getAndClearObjectResult(thread());
   *             DeoptimizeCallerNode.deopt(InvalidateReprofile, RuntimeConstraint);
   *         }
   *         return verifyObject(getAndClearObjectResult(thread()));
   *     }
   * </pre>
   *
   * If the stub returns a primitive or word, the graph created corresponds to this pseudo code
   * (using {@code int} as the primitive return type):
   *
   * <pre>
   *     int foreignFunctionStub(args...) {
   *         int result = foreignFunction(currentThread,  args);
   *         if (clearPendingException(thread())) {
   *             DeoptimizeCallerNode.deopt(InvalidateReprofile, RuntimeConstraint);
   *         }
   *         return result;
   *     }
   * </pre>
   *
   * If the stub is void, the graph created corresponds to this pseudo code:
   *
   * <pre>
   *     void foreignFunctionStub(args...) {
   *         foreignFunction(currentThread,  args);
   *         if (clearPendingException(thread())) {
   *             DeoptimizeCallerNode.deopt(InvalidateReprofile, RuntimeConstraint);
   *         }
   *     }
   * </pre>
   *
   * In each example above, the {@code currentThread} argument is the C++ JavaThread value (i.e.,
   * %r15 on AMD64) and is only prepended if {@link #prependThread} is true.
   */
  @Override
  protected StructuredGraph getGraph() {
    WordTypes wordTypes = providers.getWordTypes();
    Class<?>[] args = linkage.getDescriptor().getArgumentTypes();
    boolean isObjectResult =
        !linkage.getOutgoingCallingConvention().getReturn().getLIRKind().isValue();

    StructuredGraph graph = new StructuredGraph(toString(), null, AllowAssumptions.NO);
    graph.disableUnsafeAccessTracking();

    GraphKit kit = new GraphKit(graph, providers, wordTypes, providers.getGraphBuilderPlugins());
    ParameterNode[] params = createParameters(kit, args);

    ReadRegisterNode thread =
        kit.append(
            new ReadRegisterNode(
                providers.getRegisters().getThreadRegister(),
                wordTypes.getWordKind(),
                true,
                false));
    ValueNode result = createTargetCall(kit, params, thread);
    kit.createInvoke(
        StubUtil.class,
        "handlePendingException",
        thread,
        ConstantNode.forBoolean(isObjectResult, graph));
    if (isObjectResult) {
      InvokeNode object =
          kit.createInvoke(HotSpotReplacementsUtil.class, "getAndClearObjectResult", thread);
      result = kit.createInvoke(StubUtil.class, "verifyObject", object);
    }
    kit.append(
        new ReturnNode(linkage.getDescriptor().getResultType() == void.class ? null : result));

    if (Debug.isDumpEnabled()) {
      Debug.dump(graph, "Initial stub graph");
    }

    kit.inlineInvokes();

    if (Debug.isDumpEnabled()) {
      Debug.dump(graph, "Stub graph before compilation");
    }

    return graph;
  }
Example No. 13
 @Override
 void walk() {
   try (Scope s = Debug.scope("OptimizingLinearScanWalker")) {
     for (AbstractBlock<?> block : allocator.sortedBlocks) {
       optimizeBlock(block);
     }
   }
   super.walk();
 }
Example No. 14
  @SuppressWarnings("try")
  protected LIRGenerationResult getLIRGenerationResult(final StructuredGraph graph) {
    try (Scope s = Debug.scope("FrontEnd")) {
      GraalCompiler.emitFrontEnd(
          getProviders(),
          getBackend(),
          graph,
          getDefaultGraphBuilderSuite(),
          OptimisticOptimizations.NONE,
          graph.getProfilingInfo(),
          getSuites());
    } catch (Throwable e) {
      throw Debug.handle(e);
    }

    LIRGenerationResult lirGen =
        GraalCompiler.emitLIR(getBackend(), graph, null, null, getLIRSuites(), null);
    return lirGen;
  }
Example No. 15
 @Override
 public DebugCloseable start() {
   if (!isConditional() || Debug.isMemUseTrackingEnabled()) {
     MemUseCloseableCounterImpl result = new MemUseCloseableCounterImpl(this);
     currentTracker.set(result);
     return result;
   } else {
     return VOID_CLOSEABLE;
   }
 }
Example No. 16
 @SuppressWarnings("try")
 private void doBlock(AbstractBlockBase<?> b) {
   if (visited.get(b.getId())) {
     return;
   }
   for (AbstractBlockBase<?> pred : b.getPredecessors()) {
     if (!b.isLoopHeader() || !pred.isLoopEnd()) {
       doBlock(pred);
     }
   }
   try (Indent indent = Debug.logAndIndent(Debug.INFO_LOG_LEVEL, "handle block %s", b)) {
     assert verifyBlock(b);
   }
 }
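The visiting order used above can be sketched on its own: a block is handled only after all of its forward predecessors, and back edges into a loop header are skipped so the recursion terminates (Block and its methods are hypothetical stand-ins, not the Graal API):

import java.util.BitSet;
import java.util.List;

final class VisitOrderSketch {
  interface Block {
    int id();
    List<Block> predecessors();
    boolean isLoopHeader();
    boolean isLoopEnd();
  }

  private final BitSet visited = new BitSet();

  void doBlock(Block b) {
    if (visited.get(b.id())) {
      return;
    }
    for (Block pred : b.predecessors()) {
      if (!b.isLoopHeader() || !pred.isLoopEnd()) {
        doBlock(pred); // forward predecessors are handled first
      }
    }
    visited.set(b.id());
    handle(b);
  }

  void handle(Block b) {
    // per-block verification would go here
  }
}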
Example No. 17
  public LoopsData(final StructuredGraph graph) {
    cfg =
        Debug.scope(
            "ControlFlowGraph",
            new Callable<ControlFlowGraph>() {

              @Override
              public ControlFlowGraph call() throws Exception {
                return ControlFlowGraph.compute(graph, true, true, true, true);
              }
            });
    for (Loop lirLoop : cfg.getLoops()) {
      LoopEx ex = new LoopEx(lirLoop, this);
      lirLoopToEx.put(lirLoop, ex);
      loopBeginToEx.put(ex.loopBegin(), ex);
    }
  }
Example No. 18
 public void finish(LIRGeneratorTool gen) {
   Debug.dump(gen.getResult().getLIR(), "Before SSI operands");
   AbstractControlFlowGraph<?> cfg = gen.getResult().getLIR().getControlFlowGraph();
   for (AbstractBlockBase<?> block : cfg.getBlocks()) {
     // set label
     BlockData data = blockData.get(block);
     if (data != null) {
       if (data.incoming != null && data.incoming.size() > 0) {
         LabelOp label = getLabel(gen, block);
         label.addIncomingValues(data.incoming.toArray(new Value[data.incoming.size()]));
       }
       // set block end
       if (data.outgoing != null && data.outgoing.size() > 0) {
         BlockEndOp blockEndOp = getBlockEnd(gen, block);
         blockEndOp.addOutgoingValues(data.outgoing.toArray(new Value[data.outgoing.size()]));
       }
     }
   }
 }
Example No. 19
 private static void printNode(Node n) {
   Formatter buf = new Formatter();
   buf.format("%s", n);
   if (n instanceof MemoryCheckpoint.Single) {
     buf.format(" // kills %s", ((MemoryCheckpoint.Single) n).getLocationIdentity());
   } else if (n instanceof MemoryCheckpoint.Multi) {
     buf.format(" // kills ");
     for (LocationIdentity locid : ((MemoryCheckpoint.Multi) n).getLocationIdentities()) {
       buf.format("%s, ", locid);
     }
   } else if (n instanceof FloatingReadNode) {
     FloatingReadNode frn = (FloatingReadNode) n;
     buf.format(" // from %s", frn.getLocationIdentity());
     buf.format(", lastAccess: %s", frn.getLastLocationAccess());
     buf.format(", address: %s", frn.getAddress());
   } else if (n instanceof GuardNode) {
     buf.format(", anchor: %s", ((GuardNode) n).getAnchor());
   }
   Debug.log("%s", buf);
 }
Example No. 20
  @Override
  public void lower(LoweringTool tool) {
    StructuredGraph replacementGraph = getLoweredSnippetGraph(tool);

    InvokeNode invoke = replaceWithInvoke();
    assert invoke.verify();

    if (replacementGraph != null) {
      // Pull out the receiver null check so that a replaced
      // receiver can be lowered if necessary
      if (!targetMethod.isStatic()) {
        ValueNode nonNullReceiver = InliningUtil.nonNullReceiver(invoke);
        if (nonNullReceiver instanceof Lowerable) {
          ((Lowerable) nonNullReceiver).lower(tool);
        }
      }
      InliningUtil.inline(invoke, replacementGraph, false, null);
      Debug.dump(Debug.INFO_LOG_LEVEL, graph(), "After inlining replacement %s", replacementGraph);
    } else {
      if (isPlaceholderBci(invoke.bci())) {
        throw new GraalError("%s: cannot lower to invoke with placeholder BCI: %s", graph(), this);
      }

      if (invoke.stateAfter() == null) {
        ResolvedJavaMethod method = graph().method();
        if (method.getAnnotation(MethodSubstitution.class) != null
            || method.getAnnotation(Snippet.class) != null) {
          // One cause for this is that a MacroNode is created for a method that
          // no longer needs a MacroNode. For example, Class.getComponentType()
          // only needs a MacroNode prior to JDK9 as it was given a non-native
          // implementation in JDK9.
          throw new GraalError(
              "%s macro created for call to %s in %s must be lowerable to a snippet or intrinsic graph. "
                  + "Maybe a macro node is not needed for this method in the current JDK?",
              getClass().getSimpleName(), targetMethod.format("%h.%n(%p)"), graph());
        }
        throw new GraalError("%s: cannot lower to invoke without state: %s", graph(), this);
      }
      invoke.lower(tool);
    }
  }
Example No. 21
  private void optimizeBlock(AbstractBlock<?> block) {
    if (block.getPredecessorCount() == 1) {
      int nextBlock = allocator.getFirstLirInstructionId(block);
      try (Scope s1 = Debug.scope("LSRAOptimization")) {
        Debug.log("next block: %s (%d)", block, nextBlock);
      }
      try (Indent indent0 = Debug.indent()) {
        walkTo(nextBlock);

        try (Scope s1 = Debug.scope("LSRAOptimization")) {
          boolean changed = true;
          // we need to do this because the active lists might change
          loop:
          while (changed) {
            changed = false;
            try (Indent indent1 =
                Debug.logAndIndent("Active intervals: (block %s [%d])", block, nextBlock)) {
              for (Interval active = activeLists.get(RegisterBinding.Any);
                  active != Interval.EndMarker;
                  active = active.next) {
                Debug.log("active   (any): %s", active);
                if (optimize(nextBlock, block, active, RegisterBinding.Any)) {
                  changed = true;
                  break loop;
                }
              }
              for (Interval active = activeLists.get(RegisterBinding.Stack);
                  active != Interval.EndMarker;
                  active = active.next) {
                Debug.log("active (stack): %s", active);
                if (optimize(nextBlock, block, active, RegisterBinding.Stack)) {
                  changed = true;
                  break loop;
                }
              }
            }
          }
        }
      }
    }
  }
  public void setFactory() {
    /*
     * Set a custom debug value factory that creates a constant timer, to check that scope
     * creation and the inlining of scopes with metric interception work.
     */
    factory = Debug.getDebugValueFactory();
    Debug.setDebugValueFactory(
        new DebugValueFactory() {
          @Override
          public DebugTimer createTimer(String name, boolean conditional) {
            // can still use together with real timer
            // TimerImpl realTimer = new TimerImpl(name, conditional, true);
            return new DebugTimer() {
              int runs = 0;

              // private DebugCloseable t;

              @Override
              public DebugCloseable start() {
                // t = realTimer.start();
                return new DebugCloseable() {
                  @Override
                  public void close() {
                    // t.close();
                    runs++;
                    MethodMetricsImpl.addToCurrentScopeMethodMetrics(name, 1);
                  }
                };
              }

              @Override
              public void setConditional(boolean flag) {}

              @Override
              public boolean isConditional() {
                return false;
              }

              @Override
              public TimeUnit getTimeUnit() {
                return TimeUnit.MILLISECONDS;
              }

              @Override
              public long getCurrentValue() {
                return runs;
              }
            };
          }

          @Override
          public DebugCounter createCounter(String name, boolean conditional) {
            return factory.createCounter(name, conditional);
          }

          @Override
          public DebugMethodMetrics createMethodMetrics(ResolvedJavaMethod method) {
            return factory.createMethodMetrics(method);
          }

          @Override
          public DebugMemUseTracker createMemUseTracker(String name, boolean conditional) {
            return factory.createMemUseTracker(name, conditional);
          }
        });
  }
 @Override
 public void afterTest() {
   super.afterTest();
   Debug.setDebugValueFactory(factory);
 }
Example No. 24
  private boolean optimize(
      int currentPos,
      AbstractBlock<?> currentBlock,
      Interval currentInterval,
      RegisterBinding binding) {
    // BEGIN initialize and sanity checks
    assert currentBlock != null : "block must not be null";
    assert currentInterval != null : "interval must not be null";

    assert currentBlock.getPredecessorCount() == 1
        : "more than one predecessors -> optimization not possible";

    if (!currentInterval.isSplitChild()) {
      // interval is not a split child -> no need for optimization
      return false;
    }

    if (currentInterval.from() == currentPos) {
      // the interval starts at the current position so no need for splitting
      return false;
    }

    // get current location
    AllocatableValue currentLocation = currentInterval.location();
    assert currentLocation != null : "active intervals must have a location assigned!";

    // get predecessor stuff
    AbstractBlock<?> predecessorBlock = currentBlock.getPredecessors().get(0);
    int predEndId = allocator.getLastLirInstructionId(predecessorBlock);
    Interval predecessorInterval = currentInterval.getIntervalCoveringOpId(predEndId);
    assert predecessorInterval != null
        : "variable not live at the end of the only predecessor! "
            + predecessorBlock
            + " -> "
            + currentBlock
            + " interval: "
            + currentInterval;
    AllocatableValue predecessorLocation = predecessorInterval.location();
    assert predecessorLocation != null : "handled intervals must have a location assigned!";

    // END initialize and sanity checks

    if (currentLocation.equals(predecessorLocation)) {
      // locations are already equal -> nothing to optimize
      return false;
    }

    if (!isStackSlot(predecessorLocation) && !isRegister(predecessorLocation)) {
      assert predecessorInterval.canMaterialize();
      // value is materialized -> no need for optimization
      return false;
    }

    assert isStackSlot(currentLocation) || isRegister(currentLocation)
        : "current location not a register or stack slot " + currentLocation;

    try (Indent indent =
        Debug.logAndIndent("location differs: %s vs. %s", predecessorLocation, currentLocation)) {
      // split current interval at current position
      Debug.log("splitting at position %d", currentPos);

      assert allocator.isBlockBegin(currentPos) && ((currentPos & 1) == 0)
          : "split pos must be even when on block boundary";

      Interval splitPart = currentInterval.split(currentPos, allocator);
      activeLists.remove(binding, currentInterval);

      assert splitPart.from() >= currentPosition
          : "cannot append new interval before current walk position";

      // the currentSplitChild is needed later when moves are inserted for reloading
      assert splitPart.currentSplitChild() == currentInterval
          : "overwriting wrong currentSplitChild";
      splitPart.makeCurrentSplitChild();

      if (Debug.isLogEnabled()) {
        Debug.log("left interval  : %s", currentInterval.logString(allocator));
        Debug.log("right interval : %s", splitPart.logString(allocator));
      }

      if (Options.LSRAOptSplitOnly.getValue()) {
        // just add the split interval to the unhandled list
        unhandledLists.addToListSortedByStartAndUsePositions(RegisterBinding.Any, splitPart);
      } else {
        if (isRegister(predecessorLocation)) {
          splitRegisterInterval(splitPart, asRegister(predecessorLocation));
        } else {
          assert isStackSlot(predecessorLocation);
          Debug.log("assigning interval %s to %s", splitPart, predecessorLocation);
          splitPart.assignLocation(predecessorLocation);
          // activate interval
          activeLists.addToListSortedByCurrentFromPositions(RegisterBinding.Stack, splitPart);
          splitPart.state = State.Active;

          splitStackInterval(splitPart);
        }
      }
    }
    return true;
  }
 private void addBlockToTrace(T currentBlock, int traceNumber) {
   Debug.log("add %s (prob: %f)", currentBlock, currentBlock.probability());
   processed.set(currentBlock.getId());
   blockToTrace[currentBlock.getId()] = traceNumber;
 }
public class GreedyInliningPolicy extends AbstractInliningPolicy {

  private static final DebugMetric metricInliningStoppedByMaxDesiredSize =
      Debug.metric("InliningStoppedByMaxDesiredSize");

  public GreedyInliningPolicy(Map<Invoke, Double> hints) {
    super(hints);
  }

  public boolean continueInlining(StructuredGraph currentGraph) {
    if (currentGraph.getNodeCount() >= MaximumDesiredSize.getValue()) {
      InliningUtil.logInliningDecision("inlining is cut off by MaximumDesiredSize");
      metricInliningStoppedByMaxDesiredSize.increment();
      return false;
    }
    return true;
  }

  @Override
  public boolean isWorthInlining(
      Replacements replacements,
      MethodInvocation invocation,
      int inliningDepth,
      boolean fullyProcessed) {

    final InlineInfo info = invocation.callee();
    final double probability = invocation.probability();
    final double relevance = invocation.relevance();

    if (InlineEverything.getValue()) {
      InliningUtil.logInlinedMethod(info, inliningDepth, fullyProcessed, "inline everything");
      return true;
    }

    if (isIntrinsic(replacements, info)) {
      InliningUtil.logInlinedMethod(info, inliningDepth, fullyProcessed, "intrinsic");
      return true;
    }

    if (info.shouldInline()) {
      InliningUtil.logInlinedMethod(info, inliningDepth, fullyProcessed, "forced inlining");
      return true;
    }

    double inliningBonus = getInliningBonus(info);
    int nodes = info.determineNodeCount();
    int lowLevelGraphSize = previousLowLevelGraphSize(info);

    if (SmallCompiledLowLevelGraphSize.getValue() > 0
        && lowLevelGraphSize > SmallCompiledLowLevelGraphSize.getValue() * inliningBonus) {
      InliningUtil.logNotInlinedMethod(
          info,
          inliningDepth,
          "too large previous low-level graph (low-level-nodes: %d, relevance=%f, probability=%f, bonus=%f, nodes=%d)",
          lowLevelGraphSize,
          relevance,
          probability,
          inliningBonus,
          nodes);
      return false;
    }

    if (nodes < TrivialInliningSize.getValue() * inliningBonus) {
      InliningUtil.logInlinedMethod(
          info,
          inliningDepth,
          fullyProcessed,
          "trivial (relevance=%f, probability=%f, bonus=%f, nodes=%d)",
          relevance,
          probability,
          inliningBonus,
          nodes);
      return true;
    }

    /*
     * TODO (chaeubl): invoked methods that are on important paths but not yet compiled -> they
     * will be compiled anyway, and it is likely that we are the only caller. It might be useful
     * to inline those methods, but that increases bootstrap time (maybe those methods are also
     * being queued in the compilation queue concurrently).
     */
    double invokes = determineInvokeProbability(info);
    if (LimitInlinedInvokes.getValue() > 0
        && fullyProcessed
        && invokes > LimitInlinedInvokes.getValue() * inliningBonus) {
      InliningUtil.logNotInlinedMethod(
          info,
          inliningDepth,
          "callee invoke probability is too high (invokeP=%f, relevance=%f, probability=%f, bonus=%f, nodes=%d)",
          invokes,
          relevance,
          probability,
          inliningBonus,
          nodes);
      return false;
    }

    double maximumNodes =
        computeMaximumSize(relevance, (int) (MaximumInliningSize.getValue() * inliningBonus));
    if (nodes <= maximumNodes) {
      InliningUtil.logInlinedMethod(
          info,
          inliningDepth,
          fullyProcessed,
          "relevance-based (relevance=%f, probability=%f, bonus=%f, nodes=%d <= %f)",
          relevance,
          probability,
          inliningBonus,
          nodes,
          maximumNodes);
      return true;
    }

    InliningUtil.logNotInlinedMethod(
        info,
        inliningDepth,
        "relevance-based (relevance=%f, probability=%f, bonus=%f, nodes=%d > %f)",
        relevance,
        probability,
        inliningBonus,
        nodes,
        maximumNodes);
    return false;
  }
}
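A compressed, purely illustrative sketch of the decision cascade above: the constants stand in for the TrivialInliningSize, LimitInlinedInvokes and MaximumInliningSize options, the previous-low-level-graph-size check is omitted, and maximumSize is only a placeholder for the real computeMaximumSize:

final class GreedyPolicySketch {
  static final int TRIVIAL_SIZE = 10;      // hypothetical TrivialInliningSize
  static final double INVOKE_LIMIT = 5.0;  // hypothetical LimitInlinedInvokes
  static final int MAX_SIZE = 300;         // hypothetical MaximumInliningSize

  static boolean isWorthInlining(int nodes, double relevance, double invokes, double bonus) {
    if (nodes < TRIVIAL_SIZE * bonus) {
      return true;                         // trivial callee: always inline
    }
    if (invokes > INVOKE_LIMIT * bonus) {
      return false;                        // callee performs too many calls itself
    }
    double maximumNodes = maximumSize(relevance, (int) (MAX_SIZE * bonus));
    return nodes <= maximumNodes;          // relevance-based size budget
  }

  // Placeholder only: the real computeMaximumSize scales the budget by relevance differently.
  static double maximumSize(double relevance, int configuredMaximum) {
    return configuredMaximum * Math.min(1.0, relevance);
  }
}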
  private void visitDeoptBegin(
      AbstractBeginNode deoptBegin,
      DeoptimizationAction deoptAction,
      DeoptimizationReason deoptReason,
      JavaConstant speculation,
      StructuredGraph graph) {
    if (deoptBegin.predecessor() instanceof AbstractBeginNode) {
      /* Walk up chains of LoopExitNodes to the "real" BeginNode that leads to deoptimization. */
      visitDeoptBegin(
          (AbstractBeginNode) deoptBegin.predecessor(),
          deoptAction,
          deoptReason,
          speculation,
          graph);
      return;
    }

    if (deoptBegin instanceof AbstractMergeNode) {
      AbstractMergeNode mergeNode = (AbstractMergeNode) deoptBegin;
      Debug.log("Visiting %s", mergeNode);
      FixedNode next = mergeNode.next();
      while (mergeNode.isAlive()) {
        AbstractEndNode end = mergeNode.forwardEnds().first();
        AbstractBeginNode newBeginNode = findBeginNode(end);
        visitDeoptBegin(newBeginNode, deoptAction, deoptReason, speculation, graph);
      }
      assert next.isAlive();
      AbstractBeginNode newBeginNode = findBeginNode(next);
      visitDeoptBegin(newBeginNode, deoptAction, deoptReason, speculation, graph);
      return;
    } else if (deoptBegin.predecessor() instanceof IfNode) {
      IfNode ifNode = (IfNode) deoptBegin.predecessor();
      AbstractBeginNode otherBegin = ifNode.trueSuccessor();
      LogicNode conditionNode = ifNode.condition();
      FixedGuardNode guard =
          graph.add(
              new FixedGuardNode(
                  conditionNode,
                  deoptReason,
                  deoptAction,
                  speculation,
                  deoptBegin == ifNode.trueSuccessor()));
      FixedWithNextNode pred = (FixedWithNextNode) ifNode.predecessor();
      AbstractBeginNode survivingSuccessor;
      if (deoptBegin == ifNode.trueSuccessor()) {
        survivingSuccessor = ifNode.falseSuccessor();
      } else {
        survivingSuccessor = ifNode.trueSuccessor();
      }
      graph.removeSplitPropagate(ifNode, survivingSuccessor);

      Node newGuard = guard;
      if (survivingSuccessor instanceof LoopExitNode) {
        newGuard = ProxyNode.forGuard(guard, (LoopExitNode) survivingSuccessor, graph);
      }
      survivingSuccessor.replaceAtUsages(InputType.Guard, newGuard);

      Debug.log(
          "Converting deopt on %-5s branch of %s to guard for remaining branch %s.",
          deoptBegin == ifNode.trueSuccessor() ? "true" : "false", ifNode, otherBegin);
      FixedNode next = pred.next();
      pred.setNext(guard);
      guard.setNext(next);
      survivingSuccessor.simplify(simplifierTool);
      return;
    }

    // We could not convert the control split - at least cut off control flow after the split.
    FixedWithNextNode deoptPred = deoptBegin;
    FixedNode next = deoptPred.next();

    if (!(next instanceof DeoptimizeNode)) {
      DeoptimizeNode newDeoptNode =
          graph.add(new DeoptimizeNode(deoptAction, deoptReason, speculation));
      deoptPred.setNext(newDeoptNode);
      assert deoptPred == newDeoptNode.predecessor();
      GraphUtil.killCFG(next);
    }
  }
Example No. 28
 private static void dumpGraph(final StructuredGraph graph) {
   Debug.dump(graph, "Graph");
 }
Example No. 29
 public void detectedCountedLoops() {
   for (LoopEx loop : loops()) {
     InductionVariables ivs = new InductionVariables(loop);
     LoopBeginNode loopBegin = loop.loopBegin();
     FixedNode next = loopBegin.next();
     while (next instanceof FixedGuardNode || next instanceof ValueAnchorNode) {
       next = ((FixedWithNextNode) next).next();
     }
     if (next instanceof IfNode) {
       IfNode ifNode = (IfNode) next;
       boolean negated = false;
       if (!loopBegin.isLoopExit(ifNode.falseSuccessor())) {
         if (!loopBegin.isLoopExit(ifNode.trueSuccessor())) {
           continue;
         }
         negated = true;
       }
       LogicNode ifTest = ifNode.condition();
       if (!(ifTest instanceof IntegerLessThanNode)) {
         if (ifTest instanceof IntegerBelowThanNode) {
           Debug.log("Ignored potential Counted loop at %s with |<|", loopBegin);
         }
         continue;
       }
       IntegerLessThanNode lessThan = (IntegerLessThanNode) ifTest;
       Condition condition = null;
       InductionVariable iv = null;
       ValueNode limit = null;
       if (loop.isOutsideLoop(lessThan.x())) {
         iv = ivs.get(lessThan.y());
         if (iv != null) {
           condition = lessThan.condition().mirror();
           limit = lessThan.x();
         }
       } else if (loop.isOutsideLoop(lessThan.y())) {
         iv = ivs.get(lessThan.x());
         if (iv != null) {
           condition = lessThan.condition();
           limit = lessThan.y();
         }
       }
       if (condition == null) {
         continue;
       }
       if (negated) {
         condition = condition.negate();
       }
       boolean oneOff = false;
       switch (condition) {
         case LE:
           oneOff = true; // fall through
         case LT:
           if (iv.direction() != Direction.Up) {
             continue;
           }
           break;
         case GE:
           oneOff = true; // fall through
         case GT:
           if (iv.direction() != Direction.Down) {
             continue;
           }
           break;
         default:
           throw GraalInternalError.shouldNotReachHere();
       }
       loop.setCounted(
           new CountedLoopInfo(
               loop,
               iv,
               limit,
               oneOff,
               negated ? ifNode.falseSuccessor() : ifNode.trueSuccessor()));
     }
   }
 }
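For reference, the source-level loop shapes this detection targets: an induction variable compared against a loop-invariant limit with < or <= (counting up) or > or >= (counting down); unsigned |<| comparisons are only logged and ignored. A trivial illustration:

final class CountedLoopShapes {
  static int examples(int n) {
    int sum = 0;
    for (int i = 0; i < n; i++) {  // induction variable direction Up, condition LT
      sum += i;
    }
    for (int i = n; i > 0; i--) {  // induction variable direction Down, condition GT
      sum += i;
    }
    return sum;
  }
}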
public class DeadCodeEliminationPhase extends Phase {

  public static class Options {
    // @formatter:off
    @Option(help = "Disable optional dead code eliminations", type = OptionType.Debug)
    public static final OptionValue<Boolean> ReduceDCE = new OptionValue<>(true);
    // @formatter:on
  }

  // Metrics
  private static final DebugMetric metricNodesRemoved = Debug.metric("NodesRemoved");

  public enum Optionality {
    Optional,
    Required;
  }

  /**
   * Creates a dead code elimination phase that will be run irrespective of {@link
   * Options#ReduceDCE}.
   */
  public DeadCodeEliminationPhase() {
    this(Optionality.Required);
  }

  /**
   * Creates a dead code elimination phase that will be run only if it is {@linkplain
   * Optionality#Required non-optional} or {@link Options#ReduceDCE} is false.
   */
  public DeadCodeEliminationPhase(Optionality optionality) {
    this.optional = optionality == Optionality.Optional;
  }

  private final boolean optional;

  @Override
  public void run(StructuredGraph graph) {
    if (optional && Options.ReduceDCE.getValue()) {
      return;
    }

    NodeFlood flood = graph.createNodeFlood();
    int totalNodeCount = graph.getNodeCount();
    flood.add(graph.start());
    iterateSuccessorsAndInputs(flood);
    int totalMarkedCount = flood.getTotalMarkedCount();
    if (totalNodeCount == totalMarkedCount) {
      // All nodes are live => nothing more to do.
      return;
    } else {
      // Some nodes are not marked alive and are therefore dead => proceed.
      assert totalNodeCount > totalMarkedCount;
    }

    deleteNodes(flood, graph);
  }

  private static void iterateSuccessorsAndInputs(NodeFlood flood) {
    BiConsumer<Node, Node> consumer =
        (n, succOrInput) -> {
          assert succOrInput.isAlive() : "dead successor or input " + succOrInput + " in " + n;
          flood.add(succOrInput);
        };
    for (Node current : flood) {
      if (current instanceof AbstractEndNode) {
        AbstractEndNode end = (AbstractEndNode) current;
        flood.add(end.merge());
      } else {
        current.acceptSuccessors(consumer);
        current.acceptInputs(consumer);
      }
    }
  }

  private static void deleteNodes(NodeFlood flood, StructuredGraph graph) {
    BiConsumer<Node, Node> consumer =
        (n, input) -> {
          if (input.isAlive() && flood.isMarked(input)) {
            input.removeUsage(n);
          }
        };

    for (Node node : graph.getNodes()) {
      if (!flood.isMarked(node)) {
        node.markDeleted();
        node.acceptInputs(consumer);
        metricNodesRemoved.increment();
      }
    }
  }
}
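The phase boils down to a flood fill followed by a sweep. A toy, self-contained illustration of the same idea, with a hypothetical Node class in place of the Graal node model:

import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Deque;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

final class DceSketch {
  static final class Node {
    final String name;
    final List<Node> successors = new ArrayList<>();
    final List<Node> inputs = new ArrayList<>();
    boolean deleted;
    Node(String name) { this.name = name; }
  }

  static void eliminateDeadCode(Node start, Collection<Node> allNodes) {
    Set<Node> marked = new HashSet<>();
    Deque<Node> worklist = new ArrayDeque<>();
    marked.add(start);
    worklist.add(start);
    while (!worklist.isEmpty()) {           // flood fill: everything reachable is live
      Node current = worklist.removeFirst();
      for (Node next : current.successors) {
        if (marked.add(next)) {
          worklist.add(next);
        }
      }
      for (Node next : current.inputs) {
        if (marked.add(next)) {
          worklist.add(next);
        }
      }
    }
    for (Node node : allNodes) {            // sweep: unmarked nodes are dead
      if (!marked.contains(node)) {
        node.deleted = true;
      }
    }
  }
}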