/**
   * Compute the all-pairs shortest paths of the graph in the form of a two-dimensional array (matrix).
   *
   * @return The all-pairs shortest path matrix as a double[][] object.
   */
  @Override
  protected Object _compute() {
    int n = graph().nodeCount();

    // Initialize shortest path matrix
    _allPairShortestPath = new double[n + 1][n][n];
    _predecessors = new int[n + 1][n][n];

    for (int i = 0; i < n; i++) {
      for (int j = 0; j < n; j++) {
        _predecessors[0][i][j] = -1;
        _allPairShortestPath[0][i][j] = Double.MAX_VALUE;
      }

      Node node = graph().node(i);
      Iterator outputEdges = ((DirectedGraph) graph()).outputEdges(node).iterator();

      while (outputEdges.hasNext()) {
        Edge edge = (Edge) outputEdges.next();
        int sinkLabel = ((DirectedGraph) graph()).nodeLabel(edge.sink());

        if (_allPairShortestPath[0][i][sinkLabel] > _edgeLengths.toDouble(edge)) {
          _allPairShortestPath[0][i][sinkLabel] = _edgeLengths.toDouble(edge);
        }

        _predecessors[0][i][sinkLabel] = i;
      }
    }

    super._compute();
    _predecessorResult = _predecessors[n];
    return _allPairShortestPath[n];
  }
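For reference, here is a minimal sketch of the Floyd-Warshall recurrence that the superclass _compute() presumably applies to the layered matrices initialized above (layer k allows intermediate nodes 0 through k - 1, and layer n holds the result that is returned). The standalone class and method names below are illustrative assumptions, not part of the Ptolemy API.

// Illustrative sketch only: the standard Floyd-Warshall update over matrices
// shaped like _allPairShortestPath and _predecessors above. Hypothetical names.
final class FloydWarshallSketch {
  static void update(double[][][] shortestPath, int[][][] predecessors) {
    int n = shortestPath[0].length;

    for (int k = 1; k <= n; k++) {
      for (int i = 0; i < n; i++) {
        for (int j = 0; j < n; j++) {
          // Either route i -> j through node k - 1, or keep the previous layer's path.
          double throughK = shortestPath[k - 1][i][k - 1] + shortestPath[k - 1][k - 1][j];

          if (throughK < shortestPath[k - 1][i][j]) {
            shortestPath[k][i][j] = throughK;
            predecessors[k][i][j] = predecessors[k - 1][k - 1][j];
          } else {
            shortestPath[k][i][j] = shortestPath[k - 1][i][j];
            predecessors[k][i][j] = predecessors[k - 1][i][j];
          }
        }
      }
    }
  }
}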
Example #2
  /**
   * Expand the P-APGAN-clustered graph. The schedule element that is returned has an iteration
   * count of 1. This iteration count expression can be changed by the caller to iterate the
   * schedule computed in this method.
   *
   * @param graph The graph containing the node.
   * @param node The super node to expand.
   * @param strategy The APGAN strategy that was used to build the cluster hierarchy.
   * @return The schedule element representing the expansion result.
   */
  private SymbolicScheduleElement _expandAPGAN(
      PSDFGraph graph, ptolemy.graph.Node node, PSDFAPGANStrategy strategy) {
    PSDFGraph childGraph = (PSDFGraph) strategy.getClusterManager().getSubgraph(node);

    try {
      // Atomic node
      if (childGraph == null) {
        PSDFNodeWeight weight = (PSDFNodeWeight) node.getWeight();
        SymbolicFiring firing = new SymbolicFiring((Actor) weight.getComputation(), "1");
        return firing;

        // Super node
      } else {
        // FIXME: why call new Schedule here?
        /*Schedule schedule = */ new Schedule();

        // Expand the super node with adjacent nodes contained
        // within it.
        Edge edge = (Edge) childGraph.edges().iterator().next();
        ptolemy.graph.Node source = edge.source();
        ptolemy.graph.Node sink = edge.sink();
        SymbolicScheduleElement first = _expandAPGAN(childGraph, source, strategy);
        SymbolicScheduleElement second = _expandAPGAN(childGraph, sink, strategy);

        // Determine the iteration counts of the source and
        // sink clusters.
        String producedExpression = strategy.producedExpression(edge);
        String consumedExpression = strategy.consumedExpression(edge);

        // These errors should not occur.
        if (producedExpression == null) {
          throw new RuntimeException(
              "Internal error: null "
                  + "production rate expression. The offending edge "
                  + "follows.\n"
                  + edge);
        } else if (consumedExpression == null) {
          throw new RuntimeException(
              "Internal error: null "
                  + "consumption rate expression. The offending edge "
                  + "follows.\n"
                  + edge);
        }

        String denominator = PSDFGraphs.gcdExpression(producedExpression, consumedExpression);
        String firstIterations = "(" + consumedExpression + ") / (" + denominator + ")";
        String secondIterations = "(" + producedExpression + ") / (" + denominator + ")";

        first.setIterationCount(firstIterations);
        second.setIterationCount(secondIterations);

        SymbolicSchedule symbolicSchedule = new SymbolicSchedule("1");
        symbolicSchedule.add((ScheduleElement) first);
        symbolicSchedule.add((ScheduleElement) second);

        // Compute buffer sizes and associate them with the
        // corresponding relations.
        Iterator edges = childGraph.edges().iterator();

        while (edges.hasNext()) {
          Edge nextEdge = (Edge) edges.next();
          PSDFEdgeWeight weight = (PSDFEdgeWeight) nextEdge.getWeight();
          IOPort sourcePort = weight.getSourcePort();
          List relationList = sourcePort.linkedRelationList();

          if (relationList.size() != 1) {
            // FIXME: Need to generalize this?
            throw new RuntimeException(
                "Cannot handle relation "
                    + "lists that are not singletons.\n"
                    + "The size of this relation list is "
                    + relationList.size()
                    + "\nA dump of the offending edge follows.\n"
                    + nextEdge
                    + "\n");
          }

          Iterator relations = relationList.iterator();
          Relation relation = (Relation) relations.next();
          String produced = strategy.producedExpression(nextEdge);
          String consumed = strategy.consumedExpression(nextEdge);
          String bufferSizeExpression =
              "(("
                  + produced
                  + ") * ("
                  + consumed
                  + ")) / "
                  + PSDFGraphs.gcdExpression(produced, consumed);

          // Due to the bottom-up traversal in _expandAPGAN,
          // relations that are linked to multiple sink
          // nodes will have their buffer sizes
          // progressively replaced by those of outer
          // clusterings, and will end up with the buffer
          // size determined by the outermost clustering.
          _debug(
              "Associating buffer size expression '"
                  + bufferSizeExpression
                  + "' with relation '"
                  + relation.getName()
                  + "'\n");
          _bufferSizeMap.put(relation, bufferSizeExpression);
        }

        return symbolicSchedule;
      }
    } catch (Throwable throwable) {
      throw new KernelRuntimeException(
          throwable, "Error converting cluster hierarchy to " + "schedule.\n");
    }
  }
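To make the rate arithmetic above concrete, the following numeric sketch mirrors the gcd-based balancing that the symbolic expressions encode: for an edge whose source produces p tokens and whose sink consumes c tokens per firing, the source fires c / gcd(p, c) times, the sink fires p / gcd(p, c) times, and the buffer holds (p * c) / gcd(p, c) tokens. The class below is hypothetical; the scheduler itself builds string expressions so that the rates can stay symbolic.

// Numeric illustration of the expressions built in _expandAPGAN. Hypothetical class.
final class RateBalanceSketch {
  static void balance(int produced, int consumed) {
    int gcd =
        java.math.BigInteger.valueOf(produced)
            .gcd(java.math.BigInteger.valueOf(consumed))
            .intValue();
    int sourceIterations = consumed / gcd; // mirrors "(consumed) / (gcd)"
    int sinkIterations = produced / gcd; // mirrors "(produced) / (gcd)"
    int bufferSize = (produced * consumed) / gcd; // mirrors the buffer size expression

    System.out.println(
        "source fires " + sourceIterations
            + ", sink fires " + sinkIterations
            + ", buffer size " + bufferSize);
  }

  public static void main(String[] args) {
    // For produced = 3, consumed = 2: the source fires 2 times, the sink fires
    // 3 times, and the buffer needs 6 tokens per schedule iteration.
    balance(3, 2);
  }
}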