/**
   * Compute dimension-related information using slicing and redef info. In effect, this is where
   * projection constraints are applied.
   *
   * <p>Assume that the constraint compiler has given us the following info:
   *
   * <ol>
   *   <li>A list of the variables to include.
   *   <li>A pair (DapDimension, Slice) for each redef.
   *   <li>For each variable in #1, a list of slices taken from the constraint expression.
   * </ol>
   *
   * <p>Two products will be produced:
   *
   * <ol>
   *   <li>The variables map will be modified so that the slices properly reflect any original or
   *       redef dimensions.
   *   <li>A set, dimrefs, of all referenced original dimensions.
   * </ol>
   *
   * <p>The processing is as follows (a worked example follows the list):
   *
   * <ol>
   *   <li>For each redef, create a new redef dimension.
   *   <li>For each variable:
   *       <ol>
   *         <li>If the variable is scalar, do nothing.
   *         <li>If the variable has no associated slices, make its new dimensions the original
   *             dimensions.
   *         <li>Otherwise, walk the slices and create new dimensions from them, using redefs
   *             where indicated.
   *       </ol>
   * </ol>
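   *
   * <p>For example (hypothetical names, assuming DAP4 {@code [first:stride:last]} slice
   * notation): given a dimension {@code d} of size 10 and a redef pairing {@code d} with the
   * slice {@code [0:2:9]}, the redef map receives a clone of {@code d} whose size is the slice
   * count, 5 (indices 0, 2, 4, 6, 8). A variable {@code v(d)} with an unconstrained slice then
   * picks up that redef dimension, while a constrained slice instead yields an anonymous
   * dimension sized to its own count.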
   */
  protected void computedimensions() throws DapException {
    // Build the redefmap
    for (DapDimension key : redefslice.keySet()) {
      Slice slice = redefslice.get(key);
      DapDimension newdim = (DapDimension) key.clone();
      newdim.setSize(slice.getCount());
      redef.put(key, newdim);
    }

    // Process each variable
    for (int i = 0; i < segments.size(); i++) {
      Segment seg = segments.get(i);
      if (seg.var.getRank() == 0) continue;
      List<Slice> slices = seg.slices;
      List<DapDimension> orig = seg.var.getDimensions();
      List<DapDimension> newdims = new ArrayList<>();
      // If the slice list is short, pad it with default (unconstrained) slices
      if (slices == null) slices = new ArrayList<>();
      while (slices.size() < orig.size()) {
        slices.add(new Slice().setConstrained(false));
      }
      assert slices.size() == orig.size();
      for (int j = 0; j < slices.size(); j++) {
        Slice slice = slices.get(j);
        DapDimension dim0 = orig.get(j);
        DapDimension newdim = redef.get(dim0);
        if (newdim == null) newdim = dim0;
        // fill in the undefined last value
        slice.setMaxSize(newdim.getSize());
        slice.finish();

        if (slice.isConstrained()) {
          // Construct an anonymous dimension for this slice
          newdim = new DapDimension(slice.getCount());
        } else { // replace the slice with a new one computed from the dimension
          Slice newslice = new Slice(newdim);
          // track the set of referenced non-anonymous dimensions
          if (!dimrefs.contains(dim0)) dimrefs.add(dim0);
          slices.set(j, newslice);
        }
        // record the dimension per variable
        newdims.add(newdim);
      }
      seg.setDimset(newdims);
    }
  }
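
  /** Record the given slices, invoking {@code Slice.finish()} on each to complete it. */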
  void setSlices(List<Slice> slices) throws DapException {
    this.slices = slices;
    // Make sure the slices are finished (fully computed)
    for (Slice sl : slices) sl.finish();
  }
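
  /**
   * Traverse the stream graph from {@code top} in dataflow order, constructing a Slice for each
   * node: the node's InputSliceNode, FilterSliceNode, and OutputSliceNode are linked together,
   * inter-slice edges are built from the node's splitting and joining weights, a work estimate
   * is recorded for the filter, and file-I/O slices are collected into {@code ioList}.
   */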
  private void flattenInternal(FlatNode top) {
    Iterator<FlatNode> dataFlow = DataFlowTraversal.getTraversal(top).iterator();

    while (dataFlow.hasNext()) {
      FlatNode node = dataFlow.next();
      System.out.println(node);
      InputSliceNode input = sliceNodes.inputNodes.get(node.contents);
      OutputSliceNode output = sliceNodes.outputNodes.get(node.contents);
      FilterSliceNode filterNode = sliceNodes.filterNodes.get(node.contents);

      assert input != null && output != null && filterNode != null;

      // set up the slice
      Slice slice = new Slice(input);
      input.setNext(filterNode);
      filterNode.setPrevious(input);
      filterNode.setNext(output);
      output.setPrevious(filterNode);
      input.setParent(slice);
      output.setParent(slice);
      filterNode.setParent(slice);

      System.out.println("  outputs: " + node.ways);
      if (node.ways != 0) {
        assert node.ways == node.getEdges().length && node.ways == node.weights.length;

        // set up the i/o arcs
        // set up the splitting...
        LinkedList<InterSliceEdge> outEdges = new LinkedList<InterSliceEdge>();
        LinkedList<Integer> outWeights = new LinkedList<Integer>();
        HashMap<InputSliceNode, InterSliceEdge> newEdges =
            new HashMap<InputSliceNode, InterSliceEdge>();
        for (int i = 0; i < node.ways; i++) {
          if (node.weights[i] == 0) continue;
          InterSliceEdge edge =
              new InterSliceEdge(output, sliceNodes.inputNodes.get(node.getEdges()[i].contents));
          newEdges.put(sliceNodes.inputNodes.get(node.getEdges()[i].contents), edge);
          outEdges.add(edge);
          outWeights.add(node.weights[i]);
        }
        edges.put(output, newEdges);

        LinkedList<LinkedList<InterSliceEdge>> translatedEdges =
            new LinkedList<LinkedList<InterSliceEdge>>();
        if (node.isDuplicateSplitter()) {
          outWeights = new LinkedList<Integer>();
          outWeights.add(1); // single weight: a duplicate splitter sends all data on every edge
          translatedEdges.add(outEdges);
        } else {
          for (int i = 0; i < outEdges.size(); i++) {
            LinkedList<InterSliceEdge> link = new LinkedList<InterSliceEdge>();
            link.add(outEdges.get(i));
            translatedEdges.add(link);
          }
        }

        output.set(outWeights, translatedEdges);
      } else {
        // no outputs
        output.setWeights(new int[0]);
        output.setDests(new InterSliceEdge[0][0]);
      }

      // a filter that pushes nothing has no real outputs
      if (node.isFilter() && node.getFilter().getPushInt() == 0) {
        output.setWeights(new int[0]);
        output.setDests(new InterSliceEdge[0][0]);
      }

      // set up the joining, the edges should exist already from upstream
      System.out.println("  inputs: " + node.inputs);
      if (node.inputs != 0) {
        assert node.inputs == node.incoming.length && node.inputs == node.incomingWeights.length;

        LinkedList<Integer> inWeights = new LinkedList<Integer>();
        LinkedList<InterSliceEdge> inEdges = new LinkedList<InterSliceEdge>();
        for (int i = 0; i < node.inputs; i++) {
          if (node.incomingWeights[i] == 0) continue;
          inEdges.add(edges.get(sliceNodes.outputNodes.get(node.incoming[i].contents)).get(input));
          inWeights.add(node.incomingWeights[i]);
        }
        input.set(inWeights, inEdges);
      } else {
        input.setWeights(new int[0]);
        input.setSources(new InterSliceEdge[0]);
      }

      // a filter that pops nothing has no real inputs
      if (node.isFilter() && node.getFilter().getPopInt() == 0) {
        input.setWeights(new int[0]);
        input.setSources(new InterSliceEdge[0]);
      }

      // set up the work hashmaps
      int workEst;
      if (sliceNodes.generatedIds.contains(filterNode)) {
        // rough constant-cost estimate for compiler-generated filters
        workEst = 3 * filterNode.getFilter().getSteadyMult();
      } else {
        assert node.isFilter();
        workEst = work.getWork((SIRFilter) node.contents);
      }
      bottleNeckFilter.put(slice, filterNode);
      sliceBNWork.put(slice, workEst);
      workEstimation.put(filterNode.getFilter(), workEst);

      slice.finish();

      if (node.contents instanceof SIRFileReader || node.contents instanceof SIRFileWriter) {
        System.out.println("Found io " + node.contents);
        ioList.add(slice);
      }

      if (topSlice == null) topSlice = slice;
      sliceList.add(slice);
    }
  }