Example #1
  private void writeNode(Tree tree, NodeRef node, boolean attributes, Map<String, Integer> idMap) {
    if (tree.isExternal(node)) {
      int k = node.getNumber() + 1;
      if (idMap != null) k = idMap.get(tree.getTaxonId(k - 1));

      out.print(k);
    } else {
      out.print("(");
      writeNode(tree, tree.getChild(node, 0), attributes, idMap);
      for (int i = 1; i < tree.getChildCount(node); i++) {
        out.print(",");
        writeNode(tree, tree.getChild(node, i), attributes, idMap);
      }
      out.print(")");
    }

    if (writeAttributesAs == AttributeType.BRANCH_ATTRIBUTES && !tree.isRoot(node)) {
      out.print(":");
    }

    if (attributes) {
      Iterator<?> iter = tree.getNodeAttributeNames(node);
      if (iter != null) {
        boolean first = true;
        while (iter.hasNext()) {
          if (first) {
            out.print("[&");
            first = false;
          } else {
            out.print(",");
          }
          String name = (String) iter.next();
          out.print(name + "=");
          Object value = tree.getNodeAttribute(node, name);
          printValue(value);
        }
        out.print("]");
      }
    }

    if (writeAttributesAs == AttributeType.NODE_ATTRIBUTES && !tree.isRoot(node)) {
      out.print(":");
    }

    if (!tree.isRoot(node)) {
      double length = tree.getBranchLength(node);
      if (formatter != null) {
        out.print(formatter.format(length));
      } else {
        out.print(length);
      }
    }
  }
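  // A minimal sketch of the output produced by writeNode above (assuming a hypothetical
  // three-taxon binary tree with branch lengths and no attributes): the call
  //   writeNode(tree, tree.getRoot(), false, null);
  // would print something like
  //   ((1:0.1,2:0.2):0.05,3:0.3)
  // Taxa are written as 1-based node numbers (or as idMap entries when an idMap is supplied),
  // attributes appear as "[&name=value,...]" blocks placed after the ':' for BRANCH_ATTRIBUTES
  // and before it for NODE_ATTRIBUTES, and the branch length is printed last for non-root nodes.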
 @Override
 public List<Integer> getChildren(int i) {
   NodeRef n = tree.getNode(i);
   int childCount = tree.getChildCount(n);
   List<Integer> children = new ArrayList<Integer>(childCount);
   for (int j = 0; j < childCount; ++j) children.add(tree.getChild(n, j).getNumber());
   return children;
 }
 @Override
 public int getSibling(int i) {
   NodeRef n = tree.getNode(i);
   if (tree.isRoot(n)) return RootedTree.NULL;
   NodeRef p = tree.getParent(n);
   int c1 = tree.getChild(p, 0).getNumber();
   int c2 = tree.getChild(p, 1).getNumber();
   return n.getNumber() == c2 ? c1 : c2;
 }
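 // getSibling above assumes a strictly binary tree (exactly two children per internal node).
 // A minimal, more defensive sketch for arbitrary out-degree (hypothetical helper, not part of
 // the original interface): it returns the number of the first child of the parent that is not
 // this node.
 public int getFirstSibling(int i) {
   NodeRef n = tree.getNode(i);
   if (tree.isRoot(n)) return RootedTree.NULL;
   NodeRef p = tree.getParent(n);
   for (int j = 0; j < tree.getChildCount(p); ++j) {
     NodeRef c = tree.getChild(p, j);
     if (c.getNumber() != n.getNumber()) return c.getNumber();
   }
   return RootedTree.NULL;
 }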
  private static int mauCanonicalSub(
      Tree tree, NodeRef node, int loc, NodeRef[] order, boolean[] wasSwaped) {
    if (tree.isExternal(node)) {
      order[loc] = node;
      assert (loc & 0x1) == 0;
      return loc + 1;
    }

    final boolean swap = MathUtils.nextBoolean();
    // wasSwaped[(loc-1)/2] = swap;

    int l = mauCanonicalSub(tree, tree.getChild(node, swap ? 1 : 0), loc, order, wasSwaped);

    order[l] = node;
    assert (l & 0x1) == 1;
    wasSwaped[(l - 1) / 2] = swap;

    l = mauCanonicalSub(tree, tree.getChild(node, swap ? 0 : 1), l + 1, order, wasSwaped);
    return l;
  }
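  // mauCanonicalSub above lays the tree out in-order: tips end up at even positions of `order`
  // and internal nodes at odd positions, so a tree with n tips fills order[0..2n-2]. For each
  // internal node written at odd index l, wasSwaped[(l - 1) / 2] records whether its two
  // children were exchanged (randomly, via MathUtils.nextBoolean()) before recursing.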
  private double getUniqueBranches(Tree tree, NodeRef node) {

    if (tree.isExternal(node)) {
      return tree.getBranchLength(node);
    } else {
      double length = 0;
      if (isUnique(taxonList, tree, node)) {
        length = tree.getBranchLength(node);
        // System.out.println("length = " + length);
      }
      for (int i = 0; i < tree.getChildCount(node); i++) {
        length += getUniqueBranches(tree, tree.getChild(node, i));
      }
      // System.out.println("length of node " + node + " = " + length);
      return length;
    }
  }
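  // getUniqueBranches above returns, for the subtree rooted at `node`, the sum of all external
  // branch lengths plus the lengths of those internal branches whose node is judged unique by
  // isUnique(taxonList, tree, node); calling it on the root yields this total for the whole tree.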
 private void getDescendants(NodeRef n, List<Integer> descendants) {
   descendants.add(n.getNumber());
   if (tree.isExternal(n)) return;
   for (int i = 0; i < tree.getChildCount(n); ++i)
     getDescendants(tree.getChild(n, i), descendants);
 }
 @Override
 public int getRightChild(int i) {
   return tree.getChild(tree.getNode(i), 1).getNumber();
 }
 @Override
 public int getLeftChild(int i) {
   return tree.getChild(tree.getNode(i), 0).getNumber();
 }
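 // A minimal usage sketch of the index-based accessors above (hypothetical helper; needs
 // java.util.ArrayDeque and java.util.Deque): collects node numbers in pre-order, visiting the
 // left child before the right one.
 public List<Integer> preorderNumbers(int startIndex) {
   List<Integer> visited = new ArrayList<Integer>();
   Deque<Integer> stack = new ArrayDeque<Integer>();
   stack.push(startIndex);
   while (!stack.isEmpty()) {
     int i = stack.pop();
     visited.add(i);
     // push children in reverse order so the left-most child is popped (visited) first
     List<Integer> children = getChildren(i);
     for (int j = children.size() - 1; j >= 0; --j) {
       stack.push(children.get(j));
     }
   }
   return visited;
 }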
  /**
   * Traverse the tree calculating partial likelihoods.
   *
   * @param tree the tree to traverse
   * @param node the node whose subtree is being traversed
   * @param operatorNumber single-element output array; reset to -1 here (may be null)
   * @param flip whether to flip the matrix and partial buffer offsets before writing
   * @return true if the partials for this node were recalculated
   */
  private boolean traverse(Tree tree, NodeRef node, int[] operatorNumber, boolean flip) {

    boolean update = false;

    int nodeNum = node.getNumber();

    NodeRef parent = tree.getParent(node);

    if (operatorNumber != null) {
      operatorNumber[0] = -1;
    }

    // First update the transition probability matrix(ices) for this branch
    if (parent != null && updateNode[nodeNum]) {

      final double branchRate = branchRateModel.getBranchRate(tree, node);

      final double parentHeight = tree.getNodeHeight(parent);
      final double nodeHeight = tree.getNodeHeight(node);

      // Get the operational time of the branch
      final double branchLength = branchRate * (parentHeight - nodeHeight);
      if (branchLength < 0.0) {
        throw new RuntimeException("Negative branch length: " + branchLength);
      }

      if (flip) {
        substitutionModelDelegate.flipMatrixBuffer(nodeNum);
      }
      branchUpdateIndices[branchUpdateCount] = nodeNum;
      branchLengths[branchUpdateCount] = branchLength;
      branchUpdateCount++;

      update = true;
    }

    // If the node is internal, update the partial likelihoods.
    if (!tree.isExternal(node)) {

      // Traverse down the two child nodes
      NodeRef child1 = tree.getChild(node, 0);
      final int[] op1 = {-1};
      final boolean update1 = traverse(tree, child1, op1, flip);

      NodeRef child2 = tree.getChild(node, 1);
      final int[] op2 = {-1};
      final boolean update2 = traverse(tree, child2, op2, flip);

      // If either child node was updated then update this node too
      if (update1 || update2) {

        int x = operationCount[operationListCount] * Beagle.OPERATION_TUPLE_SIZE;

        if (flip) {
          // first flip the partialBufferHelper
          partialBufferHelper.flipOffset(nodeNum);
        }

        final int[] operations = this.operations[operationListCount];

        operations[x] = partialBufferHelper.getOffsetIndex(nodeNum);

        if (useScaleFactors) {
          // get the index of this scaling buffer
          int n = nodeNum - tipCount;

          if (recomputeScaleFactors) {
            // flip the indicator: can take either n or (internalNodeCount + 1) - n
            scaleBufferHelper.flipOffset(n);

            // store the index
            scaleBufferIndices[n] = scaleBufferHelper.getOffsetIndex(n);

            operations[x + 1] = scaleBufferIndices[n]; // Write new scaleFactor
            operations[x + 2] = Beagle.NONE;

          } else {
            operations[x + 1] = Beagle.NONE;
            operations[x + 2] = scaleBufferIndices[n]; // Read existing scaleFactor
          }

        } else {

          if (useAutoScaling) {
            scaleBufferIndices[nodeNum - tipCount] = partialBufferHelper.getOffsetIndex(nodeNum);
          }
          operations[x + 1] = Beagle.NONE; // Not using scaleFactors
          operations[x + 2] = Beagle.NONE;
        }

        operations[x + 3] = partialBufferHelper.getOffsetIndex(child1.getNumber()); // source node 1
        operations[x + 4] =
            substitutionModelDelegate.getMatrixIndex(child1.getNumber()); // source matrix 1
        operations[x + 5] = partialBufferHelper.getOffsetIndex(child2.getNumber()); // source node 2
        operations[x + 6] =
            substitutionModelDelegate.getMatrixIndex(child2.getNumber()); // source matrix 2

        operationCount[operationListCount]++;

        update = true;

        if (hasRestrictedPartials) {
          // Test if this set of partials should be restricted
          if (updateRestrictedNodePartials) {
            // Recompute map
            computeNodeToRestrictionMap();
            updateRestrictedNodePartials = false;
          }
          if (partialsMap[nodeNum] != null) {
            // restricted-partials handling is not implemented in this snippet
          }
        }
      }
    }

    return update;
  }
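  // Layout of each BEAGLE operation written above (Beagle.OPERATION_TUPLE_SIZE ints per entry):
  //   operations[x]     destination partials buffer for this node
  //   operations[x + 1] scale buffer to write (or Beagle.NONE)
  //   operations[x + 2] scale buffer to read  (or Beagle.NONE)
  //   operations[x + 3] partials buffer of child 1
  //   operations[x + 4] transition matrix of child 1
  //   operations[x + 5] partials buffer of child 2
  //   operations[x + 6] transition matrix of child 2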
  /**
   * Traverse the tree calculating partial likelihoods.
   *
   * @param tree the tree to traverse
   * @param node the node whose subtree is being traversed
   * @return whether the partials for this node were recalculated.
   */
  protected boolean traverse(Tree tree, NodeRef node) {

    boolean update = false;
    boolean rootUpdated = false;

    int nodeNum = node.getNumber();

    NodeRef parent = tree.getParent(node);

    // First update the transition probability matrix(ices) for this branch if it is a normal branch
    if (parent != null && updateNode[nodeNum]) {

      final double branchRate = branchRateModel.getBranchRate(tree, node);

      // Get the operational time of the branch
      final double branchTime =
          branchRate * (tree.getNodeHeight(parent) - tree.getNodeHeight(node));

      if (branchTime < 0.0) {
        throw new RuntimeException("Negative branch length: " + branchTime);
      }

      cenancestorlikelihoodCore.setNodeMatrixForUpdate(nodeNum);

      for (int i = 0; i < categoryCount; i++) {

        double branchLength = siteModel.getRateForCategory(i) * branchTime;
        siteModel.getSubstitutionModel().getTransitionProbabilities(branchLength, probabilities);
        cenancestorlikelihoodCore.setNodeMatrix(nodeNum, i, probabilities);
      }

      update = true;
    } else if (parent == null
        && cenancestorHeight != null
        && updateNode[nodeNum]) // The root has to be updated
    {
      // First update the transition probability matrix(ices) for the root-cenancestor fake branch
      rootUpdated = true;
      // Get the operational time of the fake branch from the root to the cenancestor
      double rootHeight = treeModel.getNodeHeight(treeModel.getRoot());
      double branchRate =
          branchRateModel.getBranchRate(
              rootHeight,
              getCenancestorHeight()); // TODO: Could this be easily improved? It would require
                                       // adapting the tree structure and the abstract tree
                                       // likelihood.
      double branchTime =
          branchRate
              * getCenancestorBranch(); // TODO: Could this be easily improved? The same as before

      for (int i = 0; i < categoryCount; i++) {
        double branchLength = siteModel.getRateForCategory(i) * branchTime;
        siteModel.getSubstitutionModel().getTransitionProbabilities(branchLength, probabilities);
        cenancestorlikelihoodCore.setNodeMatrix(nodeNum, i, probabilities);
      }
    }

    // If the node is internal, update the partial likelihoods.
    if (!tree.isExternal(node)) {

      // Traverse down the two child nodes
      NodeRef child1 = tree.getChild(node, 0);
      final boolean update1 = traverse(tree, child1);

      NodeRef child2 = tree.getChild(node, 1);
      final boolean update2 = traverse(tree, child2);

      // If either child node was updated then update this node too
      if (update1 || update2 || rootUpdated) {

        if (update1 || update2) {
          final int childNum1 = child1.getNumber();
          final int childNum2 = child2.getNumber();

          cenancestorlikelihoodCore.setNodePartialsForUpdate(nodeNum);

          if (integrateAcrossCategories) {
            cenancestorlikelihoodCore.calculatePartials(childNum1, childNum2, nodeNum);
          } else {
            cenancestorlikelihoodCore.calculatePartials(
                childNum1, childNum2, nodeNum, siteCategories);
          }

          if (COUNT_TOTAL_OPERATIONS) {
            totalOperationCount++;
          }
        }

        if (parent == null) {
          // No parent this is the root of the tree

          double[] partials;
          int nodeNumCenan = getCenancestorIndex();

          if (cenancestorHeight != null) {
            if (rootUpdated) {
              // Calculate the partials at the cenancestor. The transition matrix of the root was
              // calculated before.
              cenancestorlikelihoodCore.setNodePartialsForUpdate(nodeNumCenan);

              if (integrateAcrossCategories) {
                cenancestorlikelihoodCore.calculatePartials(nodeNum, nodeNumCenan);
              } else {
                cenancestorlikelihoodCore.calculatePartials(nodeNum, nodeNumCenan, siteCategories);
              }
            }

            partials = getCenancestorPartials();
          } else { // Using the cenancestor model without a cenancestor date. It assumes that the
                   // root of the tree is the cenancestor. Not tested; it should not normally be
                   // used either.
            partials = getRootPartials();
          }

          // calculate the pattern likelihoods
          double[] frequencies = frequencyModel.getFrequencies();
          cenancestorlikelihoodCore.calculateLogLikelihoods(
              partials, frequencies, patternLogLikelihoods);
        }

        update = true;
      }
    }

    return update;
  }
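  // A minimal driver sketch (hypothetical method; assumes the surrounding likelihood class
  // exposes treeModel and patternLogLikelihoods as used above): recompute the partials from the
  // root and sum the per-pattern log-likelihoods, here without pattern weights.
  protected double sumPatternLogLikelihoods() {
    traverse(treeModel, treeModel.getRoot());
    double logL = 0.0;
    for (double patternLogLikelihood : patternLogLikelihoods) {
      logL += patternLogLikelihood;
    }
    return logL;
  }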