Example 1
 /** [parentNames] -> childName -> model -> params -> Object(JComponent) */
 public Value apply(Value v) {
    // Extract a String[] from the vector.
   Value.Vector vec = (Value.Vector) v;
   String[] parentName = new String[vec.length()];
   for (int i = 0; i < parentName.length; i++) {
     parentName[i] = ((Value.Str) vec.elt(i)).getString();
   }
   return new FN2(parentName);
 }
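The javadoc above describes a curried chain: the function consumes the vector of parent names and returns an FN2 (presumably the next Value.Function stage), which in turn consumes the child name, then the model, then the parameters, finally yielding a JComponent. A minimal conceptual sketch of that shape using plain java.util.function types; the names below are illustrative stand-ins, not the actual CDMS FN2 classes:

import java.util.function.Function;
import javax.swing.JComponent;
import javax.swing.JLabel;

public class CurriedVisualiserSketch {
  public static void main(String[] args) {
    // Illustrative stand-in for [parentNames] -> childName -> model -> params -> JComponent.
    // The real code builds this chain from CDMS Value.Function stages (FN2, ...).
    Function<String[], Function<String, Function<Object, Function<Object, JComponent>>>> fn =
        parentNames -> childName -> model -> params ->
            new JLabel(childName + " | parents: " + String.join(",", parentNames));

    JComponent leaf = fn.apply(new String[] {"a", "b"}).apply("c").apply(null).apply(null);
    System.out.println(leaf.getClass().getSimpleName()); // JLabel
  }
}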
Example 2
  /**
   * Extracts the parameters for the subModels from the parameters vector returned by
   * DTOM.makeParameters(...). Basically the same as BNet.makeSubParamList(...), but the input has
   * the form [ ( [intraslice parents], [temporal parents], (submodel, subparams) ) ].
   */
  protected static Value[] makeSubParamListDBN(Value.Vector params) {
    Value[] value = new Value[params.length()];

    for (int i = 0; i < value.length; i++) {
      Value.Structured temp = (Value.Structured) params.elt(i);
      // value[i] = ((Value.Structured)(temp.cmpnt(2))).cmpnt(1);
      // One extra component in input vs TOM params.
      value[i] = ((Value.Structured) temp.cmpnt(3)).cmpnt(1);
    }
    return value;
  }
Example 3
  /**
   * Extracts the subModels from the parameters vector returned by DTOM.makeParameters(...).
   * Basically the same as BNet.makeSubModelList(...), but the input has the form
   * [ ( [intraslice parents], [temporal parents], (submodel, subparams) ) ].
   */
  protected static Value.Model[] makeSubModelListDBN(Value.Vector params) {
    Value.Model[] model = new Value.Model[params.length()];

    for (int i = 0; i < model.length; i++) {
      Value.Structured temp = (Value.Structured) params.elt(i);
      // model[i] = (Value.Model)((Value.Structured)(temp.cmpnt(2))).cmpnt(0);
      // One extra component in input vs TOM params.
      model[i] = (Value.Model) ((Value.Structured) temp.cmpnt(3)).cmpnt(0);
    }
    return model;
  }
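The two helpers above and in Example 2 walk the same DBN parameter vector and are typically used together. A hedged usage sketch, assuming a hypothetical printNodeCPDs method living alongside the two helpers, and assuming dbnParams is the vector returned by DTOM.makeParameters(...) in which component 3 of each element is the (submodel, subparams) pair for one node:

  /** Hedged usage sketch: pair up each node's submodel with its parameters. */
  static void printNodeCPDs(Value.Vector dbnParams) {
    // dbnParams is assumed to come from DTOM.makeParameters(...), i.e. one
    // ( ..., [intraslice parents], [temporal parents], (submodel, subparams) ) per node.
    Value.Model[] subModels = makeSubModelListDBN(dbnParams);
    Value[] subParams = makeSubParamListDBN(dbnParams);

    for (int i = 0; i < subModels.length; i++) {
      // (subModels[i], subParams[i]) together describe the CPD of node i.
      System.out.println("node " + i + ": " + subModels[i] + " with " + subParams[i]);
    }
  }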
Example 4
  /** Do a quick traversal of the dTree to find the number of leaves on the tree. */
  public int findNumLeaves(Value.Structured params) {
    int splitAttribute = params.intCmpnt(0);

    int numLeaves = 0;

    // we have hit a leaf.
    if (splitAttribute == -1) {
      numLeaves = 1;
    } else {
      Value.Vector paramVector = (Value.Vector) params.cmpnt(2);
      for (int i = 0; i < paramVector.length(); i++) {
        numLeaves += findNumLeaves((Value.Structured) paramVector.elt(i));
      }
    }

    return numLeaves;
  }
Example 5
  /** Do a quick traversal of the dTree to find the maximum depth reached. */
  public int findTreeDepth(Value.Structured params) {
    int splitAttribute = params.intCmpnt(0);

    int maxDepth = 0;

    // we have hit a leaf.
    if (splitAttribute == -1) {
      return 0;
    } else {
      Value.Vector paramVector = (Value.Vector) params.cmpnt(2);
      for (int i = 0; i < paramVector.length(); i++) {
        int newDepth = findTreeDepth((Value.Structured) paramVector.elt(i));
        if (newDepth > maxDepth) {
          maxDepth = newDepth;
        }
      }
    }
    return maxDepth + 1;
  }
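The two traversals in Examples 4 and 5 mirror each other: a leaf (splitAttribute == -1) counts as one leaf at depth 0, while an internal node sums its children's leaves and takes its deepest child plus one. A self-contained sketch of the same recursion on a plain object tree; this Node class is purely illustrative and not the CDMS dTree parameter structure:

// Illustrative only: a plain tree with the same leaf-count / depth recursion as above.
final class Node {
  final Node[] children; // an empty array plays the role of splitAttribute == -1 (a leaf)

  Node(Node... children) {
    this.children = children;
  }

  int numLeaves() {
    if (children.length == 0) return 1;
    int leaves = 0;
    for (Node c : children) leaves += c.numLeaves();
    return leaves;
  }

  int depth() {
    if (children.length == 0) return 0;
    int max = 0;
    for (Node c : children) max = Math.max(max, c.depth());
    return max + 1;
  }
}

// Example: new Node(new Node(), new Node()) has numLeaves() == 2 and depth() == 1, so
// updateParams (below) lays the tree out on a grid of 2 rows by depth + 1 == 2 columns.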
Example 6
  /** Set the current parameters to a given value. */
  public void updateParams(Value newParams) {
    if (this.params == newParams) return;

    this.params = newParams;
    Value.Structured params = (Value.Structured) this.params; // save us doing repeated typecasts.
    int numLeaves = findNumLeaves(params);
    int depth = findTreeDepth(params) + 1;

    setLayout(new GridLayout(numLeaves, depth));

    int currentDepth = 0; // Keep track of how deep into the tree we are
    int currentLeaf = 0; // Keep track of how many leaf nodes have been placed so far.
    Component[][] component = new Component[depth][numLeaves];

    // Use a stack to keep track of parameters not yet drawn (could also be done recursively.)
    java.util.Stack paramStack = new java.util.Stack();
    paramStack.add(params);
    while (true) {
      Value.Structured currentParams = (Value.Structured) paramStack.pop();
      int splitAttribute = currentParams.intCmpnt(0);

      if (splitAttribute == -1) {
        Value.Structured subModelParams = (Value.Structured) currentParams.cmpnt(2);
        Value.Model subModel = (Value.Model) subModelParams.cmpnt(0);
        Value subParams = subModelParams.cmpnt(1);
        Component leaf = DualVisualiser.makeComponent(varName, parentName, subModel, subParams);

        component[currentDepth][currentLeaf] = leaf;

        currentLeaf++;

        if (currentLeaf < numLeaves) {
          while (component[currentDepth - 1][currentLeaf] == null) {
            currentDepth--;
          }
        }

      } else {
        Value.Vector paramVector = (Value.Vector) currentParams.cmpnt(2);
        for (int i = 0; i < paramVector.length(); i++) {
          Value.Structured elt = (Value.Structured) paramVector.elt(paramVector.length() - i - 1);
          paramStack.push(elt);
        }

        int x = currentLeaf;
        for (int value = 0; value < paramVector.length(); value++) {
          Value.Structured elt = (Value.Structured) paramVector.elt(value);
          int subLeaves = findNumLeaves(elt);
          Color colour = getColour(value);
          for (int j = 0; j < subLeaves; j++) {
            if (component[currentDepth][x] != null) {
              throw new RuntimeException(
                  "SHouldn't be overwriting! [" + currentDepth + "," + x + "]");
            }
            if (j == 0)
              component[currentDepth][x] =
                  new JLabel(currentParams.cmpnt(0).toString() + " = " + value);
            else component[currentDepth][x] = new JLabel("");
            component[currentDepth][x].setBackground(colour);
            ((JComponent) component[currentDepth][x]).setOpaque(true);
            x++;
          }
        }

        currentDepth++;
      }
      if (currentLeaf == numLeaves) break;
    }

    for (int i = 0; i < numLeaves; i++) {
      for (int j = 0; j < depth; j++) {
        //         if ( component[j][i] == null ) component[j][i] = new JLabel("("+j+","+i+")");
        if (component[j][i] == null) component[j][i] = new JLabel("");
        this.add(component[j][i]);
      }
    }
  }
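One detail worth noting in the traversal above: children are pushed onto the stack in reverse order so that they are popped, and therefore drawn, left to right. A minimal stand-alone illustration of that pattern, using plain strings instead of Value.Structured parameters:

import java.util.List;
import java.util.Stack;

public class ReversePushSketch {
  public static void main(String[] args) {
    // Children listed left to right, as they appear in the parameter vector.
    List<String> children = List.of("child0", "child1", "child2");

    Stack<String> stack = new Stack<>();
    // Push in reverse so that pop() yields child0 first, matching the left-to-right layout.
    for (int i = children.size() - 1; i >= 0; i--) {
      stack.push(children.get(i));
    }
    while (!stack.isEmpty()) {
      System.out.println(stack.pop()); // prints child0, child1, child2
    }
  }
}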
Example 7
  /** Set the structure of the given tom to mimic the 2-D vector [[]] passed in as structure. */
  public static void setTOMStructure(TOM tom, Value.Vector structure) {

    // Extract vector into arc matrix.
    boolean[][] arc = new boolean[structure.length()][structure.length()];
    for (int i = 0; i < arc.length; i++) {
      for (int j = 0; j < arc.length; j++) {
        Value v = ((Value.Vector) structure.elt(i)).elt(j);
        arc[i][j] = (((Value.Discrete) v).getDiscrete() != 0);
      }
    }

    // Attempt to find a consistent total ordering
    // Has the current variable been placed in ordering?
    boolean[] used = new boolean[arc.length];
    int[] order = new int[arc.length];
    int numPlaced = 0;

    // Perform a topological sort on the DAG
    while (numPlaced < arc.length) {
      int changes = 0;
      for (int i = 0; i < arc.length; i++) {
        if (used[i] == false) { // if var[i] not placed yet
          for (int j = 0; j < arc.length; j++) {
            // if arc j->i exists and j has not been placed, we canot place i.
            if ((i != j) && arc[i][j] && !used[j]) {
              break;
            }
            // If all Js have been tested with no problems, we can add i
            if (j == arc.length - 1) {
              order[numPlaced] = i;
              used[i] = true;
              numPlaced++;
              changes++;
            }
          }
        }
      }
      // If no changes are made the links specified are not consistent with a DAG structure.
      if (changes == 0) {
        throw new RuntimeException("Inconsistent network in setTOMStructure");
      }
    }

    // Remove all arcs in original network
    for (int i = 0; i < arc.length; i++) {
      for (int j = 0; j < arc.length; j++) {
        if ((i != j) && tom.isArc(i, j)) {
          tom.removeArc(i, j);
        }
      }
    }

    // Initialise order
    for (int i = 0; i < order.length; i++) {
      tom.swapOrder(tom.nodeAt(i), order[i], true);
    }

    // Add arcs as required.
    for (int i = 0; i < arc.length; i++) {
      for (int j = 0; j < arc.length; j++) {
        if ((i != j) && arc[i][j]) {
          tom.addArc(i, j);
        }
      }
    }
  }
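The ordering step above is a simple fixed-point topological sort over the arc matrix, with arc[i][j] == true meaning j is a parent of i. A stand-alone sketch of the same idea, separated from the TOM class so it can be compiled and run directly:

import java.util.Arrays;

public class TopologicalOrderSketch {
  /** Returns a total order consistent with arc[i][j] == true meaning "j is a parent of i". */
  static int[] topologicalOrder(boolean[][] arc) {
    int n = arc.length;
    boolean[] used = new boolean[n];
    int[] order = new int[n];
    int numPlaced = 0;

    while (numPlaced < n) {
      int changes = 0;
      for (int i = 0; i < n; i++) {
        if (used[i]) continue;
        boolean ready = true;
        for (int j = 0; j < n; j++) {
          if (i != j && arc[i][j] && !used[j]) { // an unplaced parent blocks i
            ready = false;
            break;
          }
        }
        if (ready) {
          order[numPlaced++] = i;
          used[i] = true;
          changes++;
        }
      }
      // No progress in a full pass means the arcs are not consistent with a DAG.
      if (changes == 0) throw new RuntimeException("Arc matrix contains a cycle");
    }
    return order;
  }

  public static void main(String[] args) {
    // Chain 0 -> 1 -> 2, encoded as arc[child][parent] = true.
    boolean[][] arc = new boolean[3][3];
    arc[1][0] = true;
    arc[2][1] = true;
    System.out.println(Arrays.toString(topologicalOrder(arc))); // [0, 1, 2]
  }
}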
Example 8
  /**
   * Returns a time series (a vector) sampled from the DBN (passed as a DTOM). Based on
   * BNetStochastic.generate(...). Used (perhaps amongst other things) during the calculation of
   * (approximate) KL divergence between models.
   *
   * @param rand RNG to use
   * @param dtom DTOM whose parameters to generate data from (must have caseInfo set in the DTOM)
   * @param n length of the time series to generate from the parameters
   * @return time series sampled/generated from the DTOM
   */
  public static Value.Vector generateTimeSeriesDTOM(Random rand, DTOM dtom, int n) {
    // Get the variable names and number of nodes; also get the ordering of the nodes (used for
    // forward sampling)
    int numNodes = dtom.getNumNodes();
    String[] name =
        ((cdms.core.Type.Structured) ((cdms.core.Type.Vector) (dtom.caseInfo.data).t).elt).labels;
    int[] order = dtom.getTotalOrderCopy();

    // Initialize the vectors and structures etc for storing the time series data: (Value.Vector)
    Value.Vector origData = dtom.caseInfo.data;
    // Type info for the generated data is the same as for the original data.
    Type.Structured inputTypes = (Type.Structured) ((Type.Vector) origData.t).elt;

    // Store the assignments in an integer array:
    int[][] newData = new int[numNodes][n];

    // Get the parameters for each node (parameters for first time slice, and second time slice)
    Value.Vector[] paramsT0 = new Value.Vector[numNodes];
    Value.Vector[] paramsT1 = new Value.Vector[numNodes];
    for (int i = 0; i < numNodes; i++) {
      DNode node = (DNode) dtom.getNode(i);
      try {
        // Learn parameters - T1 (inc. temporal arcs)
        Value.Structured model = node.learnModel(dtom.caseInfo.mmlModelLearner, dtom.caseInfo.data);
        paramsT1[i] = (Value.Vector) model.cmpnt(2);

        // Learn parameters - T0 (no temporal arcs)
        Value.Structured modelT0 =
            node.learnModelT0(dtom.caseInfo.mmlModelLearner, dtom.caseInfo.data);
        paramsT0[i] = (Value.Vector) modelT0.cmpnt(2);
      } catch (LearnerException e) {
        throw new RuntimeException("Error learning models. " + e);
      }
    }

    // Determine the arity of each node: (Using a very inelegant method...)
    final Type.Structured datatype = (Type.Structured) ((Type.Vector) (dtom.caseInfo.data).t).elt;
    int[] arity = new int[numNodes];
    for (int i = 0; i < numNodes; i++) {
      Type.Symbolic sType = (Type.Symbolic) datatype.cmpnts[i];
      arity[i] = NeticaFn.makeValidNeticaNames(sType.ids, true).length;
    }

    // Generate a set of assignments for the FIRST time slice
    // (This needs to be done in order, to avoid sampling children before parents...)
    int[] assignmentT0 = new int[numNodes];
    for (int i = 0; i < numNodes; i++) {
      DNode currNode = (DNode) dtom.getNode(order[i]); // ith node in total order
      Value.Vector currParams =
          paramsT0[order[i]].cmpnt(1); // parameters for ith node in total order

      // Get the appropriate distribution to sample from (given values of parents)
      Structured vals;
      if (currNode.getNumParents() == 0) {
        // No parents: the parameter vector holds a single element containing the probability
        // values.
        vals = (Value.Structured) currParams.elt(0);
      } else { // This node has parents (which already have assigned values)
        // Need to work out the index of the relevant parameters given the assignments of parents.
        // Parameters are in order of [0,0,0], [0,0,1], [0,0,2], ..., [A,B,C]
        // Index given by: sum_x( val[pa[x]] * prod( arity[pa[x+1...end]] ) )

        int[] currParents = currNode.getParentCopy(); // Intraslice parents

        // Collect assignments and arity for the current parents
        int[] assignment = new int[currParents.length];
        int[] ar = new int[currParents.length];
        for (int z = 0; z < currParents.length; z++) {
          assignment[z] = assignmentT0[currParents[z]];
          ar[z] = arity[currParents[z]];
        }
        int index = assignmentToIndexReverse(assignment, ar);

        // Find the set of parameters (the actual probability values) for the current parent
        // assignment:
        vals = (Value.Structured) currParams.elt(index);
      }

      // Now, sample a value according to the probability distribution:
      double rv = rand.nextDouble(); // Random value b/w 0 and 1
      double cumProb = 0.0;
      for (int idx = 0; idx < arity[order[i]]; idx++) { // i.e. loop through each value
        cumProb += vals.doubleCmpnt(idx);
        if (rv < cumProb) { // Assignment to node[ order[i] ] is idx
          assignmentT0[order[i]] = idx;
          break;
        }
      }
    }

    // Generate data from SECOND time slice CPDs - repeatedly...
    int[] assignmentT1 = new int[numNodes];
    for (int lineNum = 0; lineNum < n; lineNum++) {
      // First: record the first time slice assignments.
      // Then: copy the second time slice assignments to the first time slice assignments
      if (lineNum > 0) {
        // System.out.println("Assignments line " + (lineNum-1) + " - " +
        // Arrays.toString(assignmentT0) );
        for (int j = 0; j < numNodes; j++) { // j is variable number
          newData[j][lineNum - 1] = assignmentT0[j];
        }

        assignmentT0 = assignmentT1;
        assignmentT1 = new int[numNodes];
      }

      // Now, generate data for second time slice given values of first time slice:
      for (int i = 0; i < numNodes; i++) {
        DNode currNode = (DNode) dtom.getNode(order[i]); // ith node in total order
        Value.Vector currParams =
            paramsT1[order[i]].cmpnt(1); // parameters for ith node in total order

        // Get the appropriate distribution to sample from (given values of parents)
        Structured vals;
        if (currNode.getNumParents() == 0) {
          // No parents: the parameter vector holds a single element containing the probability
          // values.
          vals = (Value.Structured) currParams.elt(0);
        } else { // This node has parents (which already have assigned values)
          // Need to work out the index of the relevant parameters given the assignments of parents.
          // Parameters are in order of [0,0,0], [0,0,1], [0,0,2], ..., [A,B,C]
          // Index given by: sum_x( val[pa[x]] * prod( arity[pa[x+1...end]] ) )
          // Complicated by the fact that we have temporal parents and intraslice parents...
          // [intraslice_parents, temporal_parents]

          int[] currParents = currNode.getParentCopy(); // Intraslice parents
          int[] currParentsTemporal =
              currNode.getTemporalParentCopy(); // Temporal (interslice) parents

          // Collect the parent assignments and arities
          int numParents = currParents.length + currParentsTemporal.length;
          int[] assignment = new int[numParents];
          int[] ar = new int[numParents];
          for (int z = 0; z < numParents; z++) {
            if (z < currParents.length) { // Dealing with intraslice parents
              assignment[z] = assignmentT1[currParents[z]];
              ar[z] = arity[currParents[z]];
            } else { // Dealing with interslice (t0) parents
              assignment[z] = assignmentT0[currParentsTemporal[z - currParents.length]];
              ar[z] = arity[currParentsTemporal[z - currParents.length]];
            }
          }
          int index = assignmentToIndexReverse(assignment, ar);

          // Find the set of parameters (the actual probability values) for the current parent
          // assignment:
          vals = (Value.Structured) currParams.elt(index);
        }

        // Now, sample a value according to the probability distribution:
        double rv = rand.nextDouble(); // Random value b/w 0 and 1
        double cumProb = 0.0;
        for (int idx = 0; idx < arity[order[i]]; idx++) { // i.e. loop through each value
          cumProb += vals.doubleCmpnt(idx);
          if (rv < cumProb) { // Assignment to node[ order[i] ] is idx
            assignmentT1[order[i]] = idx;
            break;
          }
        }
      }
    }

    // Copy the very last line of data:
    for (int j = 0; j < numNodes; j++) {
      newData[j][n - 1] = assignmentT1[j];
    }

    // Now, combine type and value (i.e. assignments) together for each variable:
    Value.Vector[] vecArray = new Value.Vector[numNodes];
    for (int i = 0; i < numNodes; i++) {
      vecArray[i] =
          new VectorFN.FastDiscreteVector(newData[i], (Type.Symbolic) inputTypes.cmpnts[i]);
    }
    // And create the overall data structure:
    Value.Structured vecStruct = new Value.DefStructured(vecArray, name);
    Value.Vector newDataVector = new VectorFN.MultiCol(vecStruct);

    // Return the new time series vector...
    return newDataVector;
  }
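The parent-assignment index used above comes from assignmentToIndexReverse, which is not shown here. A hedged sketch of the arithmetic described in the comments (last listed parent varying fastest); it illustrates the stated formula and is not necessarily identical to the actual helper:

  /**
   * Hedged sketch of the index arithmetic described above: parameters are laid out as
   * [0,0,0], [0,0,1], [0,0,2], ..., so index = sum_x assignment[x] * prod(arity[x+1 .. end]).
   * Not necessarily the real assignmentToIndexReverse implementation.
   */
  static int assignmentToIndexSketch(int[] assignment, int[] arity) {
    int index = 0;
    for (int x = 0; x < assignment.length; x++) {
      index = index * arity[x] + assignment[x]; // Horner form of the sum/product formula
    }
    return index;
  }

  // Example: assignment {1, 0, 2} with arities {2, 3, 3} gives 1*(3*3) + 0*3 + 2 == 11.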