Example #1
  /**
   * The confluence operator is essentially the union of all of the inItems. However, if two or
   * more of the initCount maps from the inItems each have a MinMaxInitCount entry for the same
   * VarInstance, the conflict must be resolved by taking the minimum of all mins and the maximum
   * of all maxs.
   */
  public Item confluence(List inItems, Term node, FlowGraph graph) {
    // Resolve any conflicts pairwise.
    Iterator iter = inItems.iterator();
    Map m = null;
    while (iter.hasNext()) {
      Item itm = (Item) iter.next();
      if (itm == BOTTOM) continue;
      if (m == null) {
        m = new HashMap(((DataFlowItem) itm).initStatus);
      } else {
        Map n = ((DataFlowItem) itm).initStatus;
        for (Iterator iter2 = n.entrySet().iterator(); iter2.hasNext(); ) {
          Map.Entry entry = (Map.Entry) iter2.next();
          VarInstance v = (VarInstance) entry.getKey();
          MinMaxInitCount initCount1 = (MinMaxInitCount) m.get(v);
          MinMaxInitCount initCount2 = (MinMaxInitCount) entry.getValue();
          m.put(v, MinMaxInitCount.join(initCount1, initCount2));
        }
      }
    }

    if (m == null) return BOTTOM;

    return new DataFlowItem(m);
  }
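
The pairwise merge above relies on MinMaxInitCount.join taking the minimum of the two mins and the maximum of the two maxs, with a null argument standing for "no entry in that branch". A minimal stand-in sketch of such a join, using plain int counts rather than Polyglot's types (an illustration, not the actual implementation):

  // Stand-in sketch, not Polyglot's MinMaxInitCount: illustrates the
  // min-of-mins / max-of-maxs join performed by confluence above.
  final class MinMax {
    final int min;
    final int max;

    MinMax(int min, int max) {
      this.min = min;
      this.max = max;
    }

    // A null operand means that item had no entry for the variable, so the
    // non-null operand is returned unchanged.
    static MinMax join(MinMax a, MinMax b) {
      if (a == null) return b;
      if (b == null) return a;
      return new MinMax(Math.min(a.min, b.min), Math.max(a.max, b.max));
    }
  }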
Example #2
  /**
   * Add a <code>SourceJob</code> for the <code>Source source</code>, with AST <code>ast</code>. A
   * new job is created only if one does not already exist. If <code>source</code> has already been
   * processed, and its job discarded to release resources, then <code>null</code> is returned.
   */
  public SourceJob addJob(Source source, Node ast) {
    Object o = jobs.get(source);
    SourceJob job = null;
    if (o == COMPLETED_JOB) {
      // the job has already been completed.
      // We don't need to add a job
      return null;
    } else if (o == null) {
      // No appropriate job yet exists, we will create one.

      job = this.createSourceJob(source, ast);

      // record the job in the map and the worklist.
      jobs.put(source, job);
      worklist.addLast(job);

      if (Report.should_report(Report.frontend, 3)) {
        Report.report(3, "Adding job for " + source + " at the " + "request of job " + currentJob);
      }
    } else {
      job = (SourceJob) o;
    }

    // if the current source job caused this job to load, record the
    // dependency.
    if (currentJob instanceof SourceJob) {
      ((SourceJob) currentJob).addDependency(source);
    }

    return job;
  }
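
The jobs map distinguishes three states for a source: absent, mapped to a live SourceJob, or mapped to the COMPLETED_JOB marker once the job has finished and been discarded. A small stand-in illustration of that sentinel pattern, using the raw java.util collections seen in these examples (the names here are hypothetical, not Polyglot's):

  // Stand-in illustration of the COMPLETED_JOB sentinel pattern above: a
  // distinguished marker object records "finished and discarded" so later
  // lookups return null instead of rebuilding the value.
  class SentinelCache {
    static final Object COMPLETED = new Object();
    private final Map cache = new HashMap();

    Object get(Object key) {
      Object o = cache.get(key);
      return o == COMPLETED ? null : o; // null also when key was never added
    }

    void markCompleted(Object key) {
      cache.put(key, COMPLETED); // drop the real value, keep only the marker
    }
  }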
Example #3
  /** Perform the appropriate flow operations for declaration of a local variable */
  protected Map flowLocalDecl(
      DataFlowItem inItem, FlowGraph graph, LocalDecl ld, Set succEdgeKeys) {
    Map m = new HashMap(inItem.initStatus);
    MinMaxInitCount initCount = (MinMaxInitCount) m.get(ld.localInstance());
    // if (initCount == null) {
    if (ld.init() != null) {
      // declaration of local var with initialization.
      initCount = new MinMaxInitCount(InitCount.ONE, InitCount.ONE);
    } else {
      // declaration of local var with no initialization.
      initCount = new MinMaxInitCount(InitCount.ZERO, InitCount.ZERO);
    }

    m.put(ld.localInstance(), initCount);
    //        }
    //        else {
    // the initCount is not null. We now have a problem. Why is the
    // initCount not null? Has this variable been assigned in its own
    // initialization, or is this a declaration inside a loop body?
    // XXX@@@ THIS IS A BUG THAT NEEDS TO BE FIXED.
    // Currently, the declaration "final int i = (i=5);" will
    // not be rejected, as we cannot distinguish between that and
    // "while (true) {final int i = 4;}"
    //        }

    // record the fact that we have seen a local declaration
    currCBI.localDeclarations.add(ld.localInstance());

    return itemToMap(new DataFlowItem(m), succEdgeKeys);
  }
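
Each of these flow methods finishes by calling itemToMap(item, succEdgeKeys). A hedged sketch of what such a helper presumably does, namely map every successor edge key to the same out-item (an assumption about its behavior, not code taken from Polyglot):

  // Assumed behavior of the itemToMap helper used by the flow methods in
  // these examples: every successor edge key maps to the same out-item.
  protected static Map itemToMap(Item item, Set succEdgeKeys) {
    Map m = new HashMap();
    for (Iterator i = succEdgeKeys.iterator(); i.hasNext(); ) {
      m.put(i.next(), item);
    }
    return m;
  }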
Example #4
  /** Perform the appropriate flow operations for declaration of a formal parameter */
  protected Map flowFormal(DataFlowItem inItem, FlowGraph graph, Formal f, Set succEdgeKeys) {
    Map m = new HashMap(inItem.initStatus);
    // a formal argument is always defined.
    m.put(f.localInstance(), new MinMaxInitCount(InitCount.ONE, InitCount.ONE));

    // record the fact that we have seen the formal declaration
    currCBI.localDeclarations.add(f.localInstance());

    return itemToMap(new DataFlowItem(m), succEdgeKeys);
  }
Example #5
  /**
   * Run all jobs in the work list (and any children they have) to completion. This method returns
   * <code>true</code> if all jobs were successfully completed. If all jobs were successfully
   * completed, then the worklist will be empty.
   *
   * <p>The scheduling of <code>Job</code>s uses two methods to maintain scheduling invariants:
   * <code>selectJobFromWorklist</code> selects a <code>SourceJob</code> from <code>worklist</code>
   * (a list of jobs that still need to be processed); <code>enforceInvariants</code> is called
   * before a pass is performed on a <code>SourceJob</code> and is responsible for ensuring all
   * dependencies are satisfied before the pass proceeds, i.e. enforcing any scheduling invariants.
   */
  public boolean runToCompletion() {
    boolean okay = true;

    while (okay && !worklist.isEmpty()) {
      SourceJob job = selectJobFromWorklist();

      if (Report.should_report(Report.frontend, 1)) {
        Report.report(1, "Running job " + job);
      }

      okay &= runAllPasses(job);

      if (job.completed()) {
        // the job has finished. Let's remove it from the map so it
        // can be garbage collected, and free up the AST.
        jobs.put(job.source(), COMPLETED_JOB);

        if (Report.should_report(Report.frontend, 1)) {
          Report.report(1, "Completed job " + job);
        }
      } else {
        // the job is not yet completed (although, it really
        // should be...)
        if (Report.should_report(Report.frontend, 1)) {
          Report.report(1, "Failed to complete job " + job);
        }
        worklist.add(job);
      }
    }

    if (Report.should_report(Report.frontend, 1))
      Report.report(1, "Finished all passes -- " + (okay ? "okay" : "failed"));

    return okay;
  }
Example #6
  /**
   * Check that the conditions of initialization are not broken.
   *
   * <p>To summarize the conditions:
   *
   * <ul>
   *   <li>Local variables must be initialized before use (i.e. min count > 0)
   *   <li>Final local variables (including Formals) cannot be assigned to more than once (i.e. max
   *       count <= 1)
   *   <li>Final non-static fields whose target is <code>this</code> cannot be assigned to more
   *       than once
   *   <li>Final static fields whose target is the current class cannot be assigned to more than
   *       once
   * </ul>
   *
   * <p>This method is also responsible for maintaining state between the dataflows over
   * Initializers, by copying back the appropriate MinMaxInitCounts to the map
   * currClassFinalFieldInitCounts.
   */
  public void check(FlowGraph graph, Term n, Item inItem, Map outItems) throws SemanticException {
    DataFlowItem dfIn = (DataFlowItem) inItem;
    if (dfIn == null) {
      // There is no input data flow item. This can happen if we are
      // checking an unreachable term, and so no Items have flowed
      // through the term. For example, in the code fragment:
      //     a: do { break a; } while (++i < 10);
      // the expression "++i < 10" is unreachable, but as there is
      // no unreachable statement, the Java Language Spec permits it.

      // Set inItem to a default Item
      dfIn = createInitDFI();
    }

    DataFlowItem dfOut = null;
    if (outItems != null && !outItems.isEmpty()) {
      // due to the flow equations, all DataFlowItems in the outItems map
      // are the same, so just take the first one.
      dfOut = (DataFlowItem) outItems.values().iterator().next();
    }

    if (n instanceof Local) {
      checkLocal(graph, (Local) n, dfIn, dfOut);
    } else if (n instanceof LocalAssign) {
      checkLocalAssign(graph, (LocalAssign) n, dfIn, dfOut);
    } else if (n instanceof FieldAssign) {
      checkFieldAssign(graph, (FieldAssign) n, dfIn, dfOut);
    } else if (n instanceof ClassBody) {
      // we need to check that the locals used inside this class body
      // have all been defined at this point.
      Set localsUsed = (Set) currCBI.localsUsedInClassBodies.get(n);

      if (localsUsed != null) {
        checkLocalsUsedByInnerClass(graph, (ClassBody) n, localsUsed, dfIn, dfOut);
      }
    }

    if (n == graph.finishNode()) {
      if (currCBI.currCodeDecl instanceof Initializer) {
        finishInitializer(graph, (Initializer) currCBI.currCodeDecl, dfIn, dfOut);
      }
      if (currCBI.currCodeDecl instanceof ConstructorDecl) {
        finishConstructorDecl(graph, (ConstructorDecl) currCBI.currCodeDecl, dfIn, dfOut);
      }
    }
  }
Example #7
 /** Initialize the <code>passes</code> field and the <code>passMap</code> field. */
 protected void init() {
   passes = new ArrayList(getPasses());
   passMap = new HashMap();
   for (int i = 0; i < passes.size(); i++) {
     Pass pass = (Pass) passes.get(i);
     passMap.put(pass.id(), new Integer(i));
   }
 }
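
Once passMap is populated, finding a pass by its id reduces to an index lookup into passes. A hypothetical helper showing that lookup (the method name and error handling are illustrative assumptions, not part of the source):

 // Hypothetical lookup sketch (not taken from the source): uses passMap to
 // find the index recorded by init() and returns the corresponding Pass.
 protected Pass getPassByID(Object id) {
   Integer index = (Integer) passMap.get(id);
   if (index == null) {
     throw new InternalCompilerError("No pass with id " + id);
   }
   return (Pass) passes.get(index.intValue());
 }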
Example #8
  /** Perform the appropriate flow operations for assignment to a local variable */
  protected Map flowLocalAssign(
      DataFlowItem inItem, FlowGraph graph, LocalAssign a, Set succEdgeKeys) {
    Local l = (Local) a.left();
    Map m = new HashMap(inItem.initStatus);
    MinMaxInitCount initCount = (MinMaxInitCount) m.get(l.localInstance());

    // initCount could be null if the local is defined in the outer
    // class, or if we have not yet seen its declaration (i.e. the
    // local is used in its own initialization)
    if (initCount == null) {
      initCount = new MinMaxInitCount(InitCount.ZERO, InitCount.ZERO);
    }

    initCount = new MinMaxInitCount(initCount.getMin().increment(), initCount.getMax().increment());

    m.put(l.localInstance(), initCount);
    return itemToMap(new DataFlowItem(m), succEdgeKeys);
  }
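
Incrementing both the min and the max count on every assignment fits a small saturating counter: the checks summarized in Example #6 only ask whether the min is greater than zero and whether the max exceeds one. A stand-in sketch of such a counter (InitCount.ZERO and InitCount.ONE appear in the source; the MANY value and this enum shape are assumptions):

  // Stand-in sketch of a saturating assignment counter; not Polyglot's
  // InitCount, just the lattice that the min/max checks appear to rely on.
  enum Count {
    ZERO, ONE, MANY;

    Count increment() {
      switch (this) {
        case ZERO:
          return ONE;
        default:
          return MANY; // ONE and MANY both saturate at MANY
      }
    }
  }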
Example #9
  /** Perform the appropriate flow operations for assignment to a field */
  protected Map flowFieldAssign(
      DataFlowItem inItem, FlowGraph graph, FieldAssign a, Set succEdgeKeys) {
    Field f = (Field) a.left();
    FieldInstance fi = f.fieldInstance();

    if (fi.flags().isFinal() && isFieldsTargetAppropriate(f)) {
      // this field is final and the target for this field is
      // appropriate for what we are interested in.
      Map m = new HashMap(inItem.initStatus);
      MinMaxInitCount initCount = (MinMaxInitCount) m.get(fi);
      // initCount may be null if the field is defined in an
      // outer class.
      if (initCount != null) {
        initCount =
            new MinMaxInitCount(initCount.getMin().increment(), initCount.getMax().increment());
        m.put(fi, initCount);
        return itemToMap(new DataFlowItem(m), succEdgeKeys);
      }
    }
    return null;
  }
Example #10
  public Map flow(Item in, FlowGraph graph, Term n, Set succEdgeKeys) {
    if (in == DataFlowItem.NOT_REACHABLE) {
      return itemToMap(in, succEdgeKeys);
    }

    // in is either REACHABLE or REACHABLE_EX_ONLY.
    // return a map where all exception edges are REACHABLE_EX_ONLY,
    // and all non-exception edges are REACHABLE.
    Map m = itemToMap(DataFlowItem.REACHABLE_EX_ONLY, succEdgeKeys);

    if (succEdgeKeys.contains(FlowGraph.EDGE_KEY_OTHER)) {
      m.put(FlowGraph.EDGE_KEY_OTHER, DataFlowItem.REACHABLE);
    }
    if (succEdgeKeys.contains(FlowGraph.EDGE_KEY_TRUE)) {
      m.put(FlowGraph.EDGE_KEY_TRUE, DataFlowItem.REACHABLE);
    }
    if (succEdgeKeys.contains(FlowGraph.EDGE_KEY_FALSE)) {
      m.put(FlowGraph.EDGE_KEY_FALSE, DataFlowItem.REACHABLE);
    }

    return m;
  }
Example #11
 /** Adds a dependency from the current job to the given Source. */
 public void addDependencyToCurrentJob(Source s) {
   if (s == null) return;
   if (currentJob != null) {
     Object o = jobs.get(s);
     if (o != COMPLETED_JOB) {
       if (Report.should_report(Report.frontend, 2)) {
         Report.report(2, "Adding dependency from " + currentJob.source() + " to " + s);
       }
       currentJob.sourceJob().addDependency(s);
     }
   } else {
     throw new InternalCompilerError("No current job!");
   }
 }
Example #12
  /**
   * Before running <code>Pass pass</code> on <code>SourceJob job</code>, make sure that all
   * appropriate scheduling invariants are satisfied, so that all passes of other jobs that
   * <code>job</code> depends on have already been completed.
   */
  protected void enforceInvariants(Job job, Pass pass) throws CyclicDependencyException {
    SourceJob srcJob = job.sourceJob();
    if (srcJob == null) {
      return;
    }

    BarrierPass lastBarrier = srcJob.lastBarrier();
    if (lastBarrier != null) {
      // make sure that _all_ dependent jobs have completed at least up to
      // the last barrier (not just children).
      //
      // Ideally the invariant should be that only the source jobs that
      // job _depends on_ should be brought up to the last barrier.
      // This is work to be done in the future...
      List allDependentSrcs = new ArrayList(srcJob.dependencies());
      Iterator i = allDependentSrcs.iterator();
      while (i.hasNext()) {
        Source s = (Source) i.next();
        Object o = jobs.get(s);
        if (o == COMPLETED_JOB) continue;
        if (o == null) {
          throw new InternalCompilerError("Unknown source " + s);
        }
        SourceJob sj = (SourceJob) o;
        if (sj.pending(lastBarrier.id())) {
          // Make the job run up to the last barrier.
          // We ignore the return result, since even if the job
          // fails, we will keep on going and see
          // how far we get...
          if (Report.should_report(Report.frontend, 3)) {
            Report.report(3, "Running " + sj + " to " + lastBarrier.id() + " for " + srcJob);
          }
          runToPass(sj, lastBarrier.id());
        }
      }
    }

    if (pass instanceof GlobalBarrierPass) {
      // need to make sure that _all_ jobs have completed just up to
      // this global barrier.

      // If we hit a cyclic dependency, ignore it and run the other
      // jobs up to that pass.  Then try again to run the cyclic
      // pass.  If we hit the cycle again for the same job, stop.
      LinkedList barrierWorklist = new LinkedList(jobs.values());

      while (!barrierWorklist.isEmpty()) {
        Object o = barrierWorklist.removeFirst();
        if (o == COMPLETED_JOB) continue;
        SourceJob sj = (SourceJob) o;
        if (sj.completed(pass.id()) || sj.nextPass() == sj.passByID(pass.id())) {
          // the source job has either done this global pass
          // (which is possible if the job was loaded late in the
          // game), or is right up to the global barrier.
          continue;
        }

        // Make the job run up to just before the global barrier.
        // We ignore the return result, since even if the job
        // fails, we will keep on going and see
        // how far we get...
        Pass beforeGlobal = sj.getPreviousTo(pass.id());
        if (Report.should_report(Report.frontend, 3)) {
          Report.report(3, "Running " + sj + " to " + beforeGlobal.id() + " for " + srcJob);
        }

        // Don't use runToPass, since that catches the
        // CyclicDependencyException that we should report
        // back to the caller.
        while (!sj.pendingPasses().isEmpty()) {
          Pass p = (Pass) sj.pendingPasses().get(0);

          runPass(sj, p);

          if (p == beforeGlobal) {
            break;
          }
        }
      }
    }
  }
Example #13
 public int hashCode() {
   return (initStatus.hashCode());
 }
Example #14
 public String toString() {
   return initStatus.toString();
 }