Code example #1
File: UnitSolverDialog.java Project: ptII/ptII
  /**
   * Remove all the annotations from the graph. Actors, their ports, and relations are inspected to
   * see if they have a _color and/or an _explanation attribute. If so, the attribute is removed
   * via a MoML change request.
   */
  public void deAnnotateGraph() {
    StringBuffer moml = new StringBuffer();
    Iterator entities = _model.entityList(ComponentEntity.class).iterator();

    while (entities.hasNext()) {
      ComponentEntity entity = (ComponentEntity) (entities.next());
      String entityDeletes = _deletesIfNecessary(entity);
      moml.append("<entity name=\"" + entity.getName() + "\">");

      if (entityDeletes != null) {
        moml.append(entityDeletes);
      }

      Iterator ports = entity.portList().iterator();

      while (ports.hasNext()) {
        Port port = (Port) (ports.next());
        String portDeletes = _deletesIfNecessary(port);

        if (portDeletes != null) {
          moml.append("<port name=\"" + port.getName() + "\">" + portDeletes + "</port>");
        }
      }

      moml.append("</entity>");
    }

    Iterator relations = _model.relationList().iterator();

    while (relations.hasNext()) {
      Relation relation = (Relation) (relations.next());
      String relationDeletes = _deletesIfNecessary(relation);

      if (relationDeletes != null) {
        moml.append(
            "<relation name=\"" + relation.getName() + "\">" + relationDeletes + "</relation>");
      }
    }

    if (moml.length() > 0) {
      String momlUpdate = "<group>" + moml.toString() + "</group>";
      MoMLChangeRequest request = new MoMLChangeRequest(this, _model, momlUpdate);
      request.setUndoable(true);
      request.setPersistent(false);

      _model.requestChange(request);
    }
  }
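
The method above batches its deletions into a single MoML change request. The sketch below is a minimal, hypothetical reduction of that pattern to one entity and one attribute; the entity name and the <deleteProperty name="_color"/> element stand in for whatever _deletesIfNecessary would emit and are assumptions, not code from the project.

import ptolemy.kernel.CompositeEntity;
import ptolemy.moml.MoMLChangeRequest;

public class DeAnnotateSketch {
  // Delete a hypothetical _color attribute from one entity via a grouped MoML request.
  public static void removeColorAnnotation(CompositeEntity model, String entityName) {
    String momlUpdate =
        "<group>"
            + "<entity name=\"" + entityName + "\">"
            + "<deleteProperty name=\"_color\"/>"
            + "</entity>"
            + "</group>";
    MoMLChangeRequest request = new MoMLChangeRequest(DeAnnotateSketch.class, model, momlUpdate);
    request.setUndoable(true); // allow the user to undo the annotation removal
    request.setPersistent(false); // do not mark the model as modified
    model.requestChange(request);
  }
}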
Code example #2
File: PSDFScheduler.java Project: ptII/ptII
  /**
   * Return a string representation of the buffer sizes of the relations in the model. This
   * diagnostic method shows the buffer size expression for each relation along with the relation
   * itself.
   *
   * @return A string representation of the buffer sizes.
   */
  public String displayBufferSizes() {
    StringBuffer result = new StringBuffer();
    PSDFDirector director = (PSDFDirector) getContainer();
    CompositeActor model = (CompositeActor) director.getContainer();
    Iterator relations = model.relationList().iterator();

    while (relations.hasNext()) {
      Relation relation = (Relation) relations.next();
      Variable variable = (Variable) relation.getAttribute("bufferSize");
      result.append(relation.getName() + ": ");

      if (variable == null) {
        result.append("null");
      } else {
        result.append(variable.getExpression());
      }

      result.append("\n");
    }

    return result.toString();
  }
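
displayBufferSizes() only reads the "bufferSize" attribute back. Assuming that attribute is an ordinary ptolemy.data.expr.Variable, the sketch below shows how it could be attached to or updated on a relation; it is an illustration of the writer side, not the project's _saveBufferSizes implementation.

import ptolemy.data.expr.Variable;
import ptolemy.kernel.Relation;
import ptolemy.kernel.util.IllegalActionException;
import ptolemy.kernel.util.NameDuplicationException;

public class BufferSizeSketch {
  // Attach or update a Variable named "bufferSize" on the given relation.
  public static void setBufferSizeExpression(Relation relation, String expression)
      throws IllegalActionException, NameDuplicationException {
    Variable variable = (Variable) relation.getAttribute("bufferSize");
    if (variable == null) {
      // The constructor attaches the new attribute to the relation.
      variable = new Variable(relation, "bufferSize");
    }
    variable.setExpression(expression);
  }
}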
Code example #3
File: PSDFScheduler.java Project: ptII/ptII
  // Expand the P-APGAN-clustered graph. The schedule element that is
  // returned has an iteration count of 1. This iteration count expression
  // can be changed by the caller to iterate the schedule computed in
  // this method.
  // @param graph The graph containing the node.
  // @param node The super node to expand.
  // @param strategy The APGAN strategy that was used to build the cluster hierarchy.
  // @return The schedule resulting from the expansion.
  private SymbolicScheduleElement _expandAPGAN(
      PSDFGraph graph, ptolemy.graph.Node node, PSDFAPGANStrategy strategy) {
    PSDFGraph childGraph = (PSDFGraph) strategy.getClusterManager().getSubgraph(node);

    try {
      // Atomic node
      if (childGraph == null) {
        PSDFNodeWeight weight = (PSDFNodeWeight) node.getWeight();
        SymbolicFiring firing = new SymbolicFiring((Actor) weight.getComputation(), "1");
        return firing;

        // Super node
      } else {
        // FIXME: why call new Schedule here?
        /*Schedule schedule = */ new Schedule();

        // Expand the super node with adjacent nodes contained
        // within it.
        Edge edge = (Edge) childGraph.edges().iterator().next();
        ptolemy.graph.Node source = edge.source();
        ptolemy.graph.Node sink = edge.sink();
        SymbolicScheduleElement first = _expandAPGAN(childGraph, source, strategy);
        SymbolicScheduleElement second = _expandAPGAN(childGraph, sink, strategy);

        // Determine the iteration counts of the source and
        // sink clusters.
        String producedExpression = strategy.producedExpression(edge);
        String consumedExpression = strategy.consumedExpression(edge);

        // These errors should not occur.
        if (producedExpression == null) {
          throw new RuntimeException(
              "Internal error: null "
                  + "production rate expression. The offending edge "
                  + "follows.\n"
                  + edge);
        } else if (consumedExpression == null) {
          throw new RuntimeException(
              "Internal error: null "
                  + "consumption rate expression. The offending edge "
                  + "follows.\n"
                  + edge);
        }

        String denominator = PSDFGraphs.gcdExpression(producedExpression, consumedExpression);
        String firstIterations = "(" + consumedExpression + ") / (" + denominator + ")";
        String secondIterations = "(" + producedExpression + ") / (" + denominator + ")";

        first.setIterationCount(firstIterations);
        second.setIterationCount(secondIterations);

        SymbolicSchedule symbolicSchedule = new SymbolicSchedule("1");
        symbolicSchedule.add((ScheduleElement) first);
        symbolicSchedule.add((ScheduleElement) second);

        // Compute buffer sizes and associate them with the
        // corresponding relations.
        Iterator edges = childGraph.edges().iterator();

        while (edges.hasNext()) {
          Edge nextEdge = (Edge) edges.next();
          PSDFEdgeWeight weight = (PSDFEdgeWeight) nextEdge.getWeight();
          IOPort sourcePort = weight.getSourcePort();
          List relationList = sourcePort.linkedRelationList();

          if (relationList.size() != 1) {
            // FIXME: Need to generalize this?
            throw new RuntimeException(
                "Cannot handle relation "
                    + "lists that are not singletons.\n"
                    + "The size of this relation list is "
                    + relationList.size()
                    + "\nA dump of the offending edge follows.\n"
                    + nextEdge
                    + "\n");
          }

          Iterator relations = relationList.iterator();
          Relation relation = (Relation) relations.next();
          String produced = strategy.producedExpression(nextEdge);
          String consumed = strategy.consumedExpression(nextEdge);
          String bufferSizeExpression =
              "(("
                  + produced
                  + ") * ("
                  + consumed
                  + ")) / "
                  + PSDFGraphs.gcdExpression(produced, consumed);

          // Due to the bottom-up traversal in _expandAPGAN,
          // relations that are linked to multiple sink
          // nodes will have their buffer sizes
          // progressively replaced by those of outer
          // clusterings, and will end up with the buffer
          // size determined by the outermost clustering.
          _debug(
              "Associating buffer size expression '"
                  + bufferSizeExpression
                  + "' with relation '"
                  + relation.getName()
                  + "'\n");
          _bufferSizeMap.put(relation, bufferSizeExpression);
        }

        return symbolicSchedule;
      }
    } catch (Throwable throwable) {
      throw new KernelRuntimeException(
          throwable, "Error converting cluster hierarchy to " + "schedule.\n");
    }
  }
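
To make the symbolic expressions above concrete, here is a small numeric sketch of the same arithmetic with assumed rates of 6 tokens produced and 4 consumed per firing on an edge: the source cluster iterates consumed/gcd times, the sink cluster produced/gcd times, and the buffer size is (produced * consumed) / gcd.

public class ApganRateSketch {
  public static void main(String[] args) {
    int produced = 6; // tokens produced on the edge per source-cluster firing (assumed)
    int consumed = 4; // tokens consumed from the edge per sink-cluster firing (assumed)
    int gcd = java.math.BigInteger.valueOf(produced)
        .gcd(java.math.BigInteger.valueOf(consumed))
        .intValue();
    int sourceIterations = consumed / gcd; // corresponds to firstIterations above: 2
    int sinkIterations = produced / gcd;   // corresponds to secondIterations above: 3
    int bufferSize = produced * consumed / gcd; // 12 tokens
    System.out.println("source iterations = " + sourceIterations);
    System.out.println("sink iterations = " + sinkIterations);
    System.out.println("buffer size = " + bufferSize);
  }
}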
Code example #4
File: PSDFScheduler.java Project: ptII/ptII
  /**
   * Return the parameterized scheduling sequence. An exception will be thrown if the graph is not
   * schedulable.
   *
   * @return A schedule of the deeply contained opaque entities in the firing order.
   * @exception NotSchedulableException If a parameterized schedule cannot be derived for the model.
   * @exception IllegalActionException If the rate parameters of the model are not correct, or the
   *     computed rates for external ports are not correct.
   */
  @SuppressWarnings("unused")
  protected Schedule _getSchedule() throws NotSchedulableException, IllegalActionException {
    PSDFDirector director = (PSDFDirector) getContainer();
    CompositeActor model = (CompositeActor) director.getContainer();

    // Get the vectorization factor.
    String vectorizationFactorExpression = "1";

    String vectorizationName = director.vectorizationFactor.getName(model);
    vectorizationFactorExpression = vectorizationName.replaceAll("\\.", "::");

    if (vectorizationFactorExpression.indexOf(" ") != -1) {
      throw new InternalErrorException(
          "The vectorizationFactor "
              + "PSDFDirector parameter must "
              + "not have spaces in its value.  The original value "
              + "was \""
              + vectorizationName
              + "\". Try changing the name of "
              + "director.");
    }

    PSDFGraphReader graphReader = new PSDFGraphReader();
    PSDFGraph graph = (PSDFGraph) graphReader.convert(model);
    _debug("PSDF graph = \n" + graph.toString());

    if (_debugFlag) {
      graph.printEdgeRateExpressions();
    }

    PSDFAPGANStrategy strategy = new PSDFAPGANStrategy(graph);
    ptolemy.graph.sched.Schedule graphSchedule = strategy.schedule();
    _debug("P-APGAN schedule = \n" + graphSchedule.toString());

    SymbolicScheduleElement resultSchedule =
        _expandAPGAN(graph, strategy.getClusterManager().getRootNode(), strategy);
    resultSchedule.setIterationCount(vectorizationFactorExpression);

    _debug("Final schedule = \n" + resultSchedule.toString());

    if (_debugging) {
      _debug("The buffer size map:\n");

      Iterator relations = _bufferSizeMap.keySet().iterator();

      while (relations.hasNext()) {
        Relation relation = (Relation) relations.next();
        _debug(relation.getName() + ": " + _bufferSizeMap.get(relation) + "\n");
      }
    }

    _saveBufferSizes(_bufferSizeMap);

    // Crazy hack to infer firing counts for each actor.
    try {
      _inferFiringCounts(resultSchedule, null);
    } catch (NameDuplicationException ex) {
      throw new NotSchedulableException(new LinkedList(), ex, "Error recording firing counts");
    }

    // Crazy hack to infer port production. FIXME: This should be
    // done as part of the APGAN expansion, where the rates of
    // external ports are unknown. The reason is that it will make
    // rate information propagate from an actor input port to
    // another actor's input port when they are connected on the
    // inside to the same external input port. See
    // BaseSDFScheduler.setContainerRates.
    Iterator ports = model.portList().iterator();

    while (ports.hasNext()) {
      IOPort port = (IOPort) ports.next();

      if (_debugging && VERBOSE) {
        _debug("External Port " + port.getName());
      }

      if (port.isInput() && port.isOutput()) {
        throw new NotSchedulableException(
            port,
            "External port is both an input and an output, " + "which is not allowed in SDF.");
      } else if (port.isInput()) {
        List sinks = port.insideSinkPortList();

        if (sinks.size() > 0) {
          IOPort connectedPort = (IOPort) sinks.get(0);
          Entity entity = (Entity) connectedPort.getContainer();
          String name = connectedPort.getName(model);
          String identifier = name.replaceAll("\\.", "::");

          String sinkExpression;
          Variable sinkRateVariable =
              DFUtilities.getRateVariable(connectedPort, "tokenConsumptionRate");

          if (sinkRateVariable == null) {
            sinkExpression = "1";
          } else {
            sinkExpression = identifier + "::" + sinkRateVariable.getName();
          }

          String expression = sinkExpression + " * " + entity.getName() + "::firingsPerIteration";

          DFUtilities.setExpressionIfNotDefined(port, "tokenConsumptionRate", expression);

          if (_debugging && VERBOSE) {
            _debug("Setting tokenConsumptionRate to " + expression);
          }
        }
      } else if (port.isOutput()) {
        List sources = port.insideSourcePortList();

        if (sources.size() > 0) {
          IOPort connectedPort = (IOPort) sources.get(0);
          Entity entity = (Entity) connectedPort.getContainer();
          String name = connectedPort.getName(model);
          String identifier = name.replaceAll("\\.", "::");
          Variable sourceRateVariable =
              DFUtilities.getRateVariable(connectedPort, "tokenProductionRate");
          String sourceExpression;

          if (sourceRateVariable == null) {
            sourceExpression = "1";
          } else {
            sourceExpression = identifier + "::" + sourceRateVariable.getName();
          }

          String expression = sourceExpression + " * " + entity.getName() + "::firingsPerIteration";

          DFUtilities.setExpressionIfNotDefined(port, "tokenProductionRate", expression);

          if (_debugging && VERBOSE) {
            _debug("Setting tokenProductionRate to " + expression);
          }
        }

        // Infer init production.
        // Note that this is a very simple type of inference...
        // However, in general, we don't want to try to
        // flatten this model...
        //  Iterator connectedPorts =
        //                     port.insideSourcePortList().iterator();
        //                 IOPort foundOutputPort = null;
        //                 int inferredRate = 0;
        //                 while (connectedPorts.hasNext()) {
        //                     IOPort connectedPort = (IOPort) connectedPorts.next();
        //                     int newRate;
        //                     if (connectedPort.isOutput()) {
        //                         newRate =
        //                             DFUtilities.getTokenInitProduction(connectedPort);
        //                     } else {
        //                         newRate = 0;
        //                     }
        //                     // If we've already set the rate, then check that the
        //                     // rate for any other internal port is correct.
        //                     if (foundOutputPort != null &&
        //                             newRate != inferredRate) {
        //                         throw new NotSchedulableException(
        //                                 "External output port " + port
        //                                 + " is connected on the inside to ports "
        //                                 + "with different initial production: "
        //                                 + foundOutputPort + " and "
        //                                 + connectedPort);
        //                     }
        //                     foundOutputPort = connectedPort;
        //                     inferredRate = newRate;
        //                 }
        //                 DFUtilities._setIfNotDefined(
        //                         port, "tokenInitProduction", inferredRate);
        //                 if (_debugging && VERBOSE) {
        //                     _debug("Setting tokenInitProduction to "
        //                             + inferredRate);
        //                 }
      } else {
        throw new NotSchedulableException(
            port,
            "External port is neither an input and an output, " + "which is not allowed in SDF.");
      }
    }

    // Set the schedule to be valid.
    setValid(true);

    if (resultSchedule instanceof Schedule) {
      return (Schedule) resultSchedule;
    } else {
      // Must be ScheduleElement.
      Schedule schedule = new Schedule();
      schedule.add((ScheduleElement) resultSchedule);
      return schedule;
    }
  }
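
As a final illustration, the sketch below isolates the name rewriting that _getSchedule() applies to external-port rate expressions: a dotted Ptolemy name is turned into a "::"-separated identifier and combined with the containing actor's firingsPerIteration symbol. The sample names are invented for the example.

public class RateExpressionSketch {
  public static void main(String[] args) {
    String portName = "someActor.input"; // hypothetical result of connectedPort.getName(model)
    String identifier = portName.replaceAll("\\.", "::");
    String sinkExpression = identifier + "::" + "tokenConsumptionRate";
    String expression = sinkExpression + " * " + "someActor" + "::firingsPerIteration";
    // Prints: someActor::input::tokenConsumptionRate * someActor::firingsPerIteration
    System.out.println(expression);
  }
}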