/**
 * @param tree the tree
 * @param node the node
 * @return an array of the total amount of time spent in each of the discrete states along the
 *     branch above the given node.
 */
private double[] getProcessValues(final Tree tree, final NodeRef node) {

    double[] processValues = null;
    double branchTime = tree.getBranchLength(node);

    if (mode == Mode.MARKOV_JUMP_PROCESS) {
        processValues = (double[]) trait.getTrait(tree, node);
    } else if (mode == Mode.PARSIMONY) {
        // An approximation to dwell times using parsimony, assuming
        // the state changes at the midpoint of the branch. Does a weighted
        // average of the equally parsimonious state reconstructions
        // at the top and bottom of each branch.

        if (treeChanged) {
            fitchParsimony.initialize(tree);
            // Debugging test to count work
            // treeInitializeCounter += 1;
            // if (treeInitializeCounter % 10 == 0) {
            //     System.err.println("Cnt: " + treeInitializeCounter);
            // }
            treeChanged = false;
        }
        int[] states = fitchParsimony.getStates(tree, node);
        int[] parentStates = fitchParsimony.getStates(tree, tree.getParent(node));

        processValues = new double[fitchParsimony.getPatterns().getStateCount()];

        for (int state : states) {
            processValues[state] += branchTime / 2;
        }
        for (int state : parentStates) {
            processValues[state] += branchTime / 2;
        }

        for (int i = 0; i < processValues.length; i++) {
            // Normalize by the average number of equally parsimonious states at the two ends
            // of the branch, so that processValues adds up to the total branch length.
            // Divide by 2.0, not 2, to avoid integer division truncating the average.
            processValues[i] /= (states.length + parentStates.length) / 2.0;
        }
    } else if (mode == Mode.NODE_STATES) {
        processValues = new double[dataType.getStateCount()];
        // if (indicatorParameter != null) {
        //     // this array should be size #states NOT #rates
        //     processValues = new double[indicatorParameter.getDimension()];
        // } else {
        //     // this array should be size #states NOT #rates
        //     processValues = new double[rateParameter.getDimension()];
        // }

        // If the states are being sampled, then there is only one possible state at each
        // end of the branch.
        int state = ((int[]) trait.getTrait(tree, node))[traitIndex];
        processValues[state] += branchTime / 2;
        int parentState = ((int[]) trait.getTrait(tree, tree.getParent(node)))[traitIndex];
        processValues[parentState] += branchTime / 2;
    }

    return processValues;
}
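// For intuition, a minimal self-contained sketch of the parsimony dwell-time weighting
// used above: half the branch length is credited to the (possibly several) equally
// parsimonious states at each end, then the entries are normalized so they sum to the
// branch length. The class name, state count, and state sets below are illustrative
// assumptions, not part of this codebase.
public class DwellTimeSketch {
    public static void main(String[] args) {
        double branchTime = 1.0;      // assumed branch length
        int stateCount = 4;           // e.g. nucleotides
        int[] childStates = {0};      // parsimonious states at the child end
        int[] parentStates = {0, 2};  // two equally parsimonious states at the parent end

        double[] dwell = new double[stateCount];
        for (int s : childStates) dwell[s] += branchTime / 2;
        for (int s : parentStates) dwell[s] += branchTime / 2;

        // Normalize by the average number of states per end (2.0 forces double division)
        for (int i = 0; i < dwell.length; i++) {
            dwell[i] /= (childStates.length + parentStates.length) / 2.0;
        }

        double total = 0;
        for (double d : dwell) total += d;
        System.out.println(java.util.Arrays.toString(dwell) + " sums to " + total);
        // Prints [0.666..., 0.0, 0.333..., 0.0] sums to 1.0
    }
}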
@Override
public List<Integer> getAncestors(int i, boolean b) {
    List<Integer> ancestors = new ArrayList<Integer>();
    if (b) ancestors.add(i); // when b is true, the node itself is included
    for (NodeRef n = tree.getParent(tree.getNode(i)); n != null; n = tree.getParent(n)) {
        ancestors.add(n.getNumber());
    }
    return ancestors;
}
@Override
public int getSibling(int i) {
    NodeRef n = tree.getNode(i);
    if (tree.isRoot(n)) return RootedTree.NULL;
    NodeRef p = tree.getParent(n);
    int c1 = tree.getChild(p, 0).getNumber();
    int c2 = tree.getChild(p, 1).getNumber();
    return n.getNumber() == c2 ? c1 : c2;
}
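// The sibling lookup above relies on every internal node having exactly two children.
// A standalone sketch of the same idea on a plain array-based tree; the class name,
// NULL constant, and toy topology are assumptions for illustration only.
public class SiblingSketch {
    static final int NULL = -1;

    // children[p] holds the two child indices of internal node p; parent[i] is
    // the parent index of node i, or NULL for the root.
    static int sibling(int[][] children, int[] parent, int i) {
        int p = parent[i];
        if (p == NULL) return NULL; // the root has no sibling
        int c1 = children[p][0];
        int c2 = children[p][1];
        return i == c2 ? c1 : c2;
    }

    public static void main(String[] args) {
        // Toy tree: node 2 is the root with children 0 and 1
        int[] parent = {2, 2, NULL};
        int[][] children = {null, null, {0, 1}};
        System.out.println(sibling(children, parent, 0)); // 1
        System.out.println(sibling(children, parent, 2)); // -1 (root)
    }
}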
public void getTransitionProbabilities(
        Tree tree, NodeRef node, int rateCategory, double[] matrix) {

    NodeRef parent = tree.getParent(node);
    final double branchRate = branchModel.getBranchRate(tree, node);

    // Get the operational time of the branch
    final double startTime = tree.getNodeHeight(parent);
    final double endTime = tree.getNodeHeight(node);
    final double branchTime = branchRate * (startTime - endTime);
    if (branchTime < 0.0) {
        throw new RuntimeException("Negative branch length: " + branchTime);
    }

    double distance = siteModel.getRateForCategory(rateCategory) * branchTime;

    int matrixCount = 0;
    boolean oneMatrix = (getEpochWeights(startTime, endTime, weight) == 1);
    for (int m = 0; m < numberModels; m++) {
        if (weight[m] > 0) {
            SubstitutionModel model = modelList.get(m);
            if (matrixCount == 0) {
                if (oneMatrix) {
                    model.getTransitionProbabilities(distance, matrix);
                    break;
                } else {
                    model.getTransitionProbabilities(distance * weight[m], resultMatrix);
                }
                matrixCount++;
            } else {
                model.getTransitionProbabilities(distance * weight[m], stepMatrix);

                // Sum over unobserved state: productMatrix = resultMatrix * stepMatrix
                int index = 0;
                for (int i = 0; i < stateCount; i++) {
                    for (int j = 0; j < stateCount; j++) {
                        productMatrix[index] = 0;
                        for (int k = 0; k < stateCount; k++) {
                            productMatrix[index] +=
                                    resultMatrix[i * stateCount + k] * stepMatrix[k * stateCount + j];
                        }
                        index++;
                    }
                }

                // Swap pointers
                double[] tmpMatrix = resultMatrix;
                resultMatrix = productMatrix;
                productMatrix = tmpMatrix;
            }
        }
    }

    if (!oneMatrix) {
        System.arraycopy(resultMatrix, 0, matrix, 0, stateCount * stateCount);
    }
}
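// The epoch accumulation above is the Chapman-Kolmogorov product P(t1 + t2) = P(t1) P(t2),
// built up left to right with a resultMatrix/productMatrix pointer swap. A standalone
// sketch of that accumulation; the 2-state matrices below are made-up assumptions.
public class EpochProductSketch {
    static final int STATES = 2;

    // Returns a * b for row-major STATES x STATES matrices
    static double[] multiply(double[] a, double[] b) {
        double[] out = new double[STATES * STATES];
        for (int i = 0; i < STATES; i++)
            for (int j = 0; j < STATES; j++)
                for (int k = 0; k < STATES; k++)
                    out[i * STATES + j] += a[i * STATES + k] * b[k * STATES + j];
        return out;
    }

    public static void main(String[] args) {
        // Hypothetical per-epoch transition matrices (each row sums to 1)
        double[] epoch1 = {0.9, 0.1, 0.2, 0.8};
        double[] epoch2 = {0.7, 0.3, 0.4, 0.6};

        double[] result = multiply(epoch1, epoch2);
        for (int i = 0; i < STATES; i++) {
            System.out.printf("%.2f %.2f%n", result[i * STATES], result[i * STATES + 1]);
        }
        // Rows of the product still sum to 1.0, as required for a stochastic matrix
    }
}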
@Override
public int getParent(int i) {
    NodeRef n = tree.getParent(tree.getNode(i));
    return n != null ? n.getNumber() : RootedTree.NULL;
}
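// The RootedTree.NULL sentinel returned by getParent lets callers walk parent links
// without special-casing the root. A toy sketch computing node depth that way, using
// the same assumed array representation as the sibling example above.
public class DepthSketch {
    static final int NULL = -1;

    // Depth = number of parent steps from node i to the root (the root has depth 0)
    static int depth(int[] parent, int i) {
        int d = 0;
        for (int p = parent[i]; p != NULL; p = parent[p]) d++;
        return d;
    }

    public static void main(String[] args) {
        int[] parent = {2, 2, NULL}; // toy tree: node 2 is the root
        System.out.println(depth(parent, 0)); // 1
        System.out.println(depth(parent, 2)); // 0
    }
}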
/**
 * Traverse the tree calculating partial likelihoods.
 *
 * @param tree tree
 * @param node node
 * @param operatorNumber operatorNumber
 * @param flip flip
 * @return whether the partials for this node were recalculated.
 */
private boolean traverse(Tree tree, NodeRef node, int[] operatorNumber, boolean flip) {

    boolean update = false;

    int nodeNum = node.getNumber();

    NodeRef parent = tree.getParent(node);

    if (operatorNumber != null) {
        operatorNumber[0] = -1;
    }

    // First update the transition probability matrix(ices) for this branch
    if (parent != null && updateNode[nodeNum]) {

        final double branchRate = branchRateModel.getBranchRate(tree, node);
        final double parentHeight = tree.getNodeHeight(parent);
        final double nodeHeight = tree.getNodeHeight(node);

        // Get the operational time of the branch
        final double branchLength = branchRate * (parentHeight - nodeHeight);
        if (branchLength < 0.0) {
            throw new RuntimeException("Negative branch length: " + branchLength);
        }

        if (flip) {
            substitutionModelDelegate.flipMatrixBuffer(nodeNum);
        }
        branchUpdateIndices[branchUpdateCount] = nodeNum;
        branchLengths[branchUpdateCount] = branchLength;
        branchUpdateCount++;

        update = true;
    }

    // If the node is internal, update the partial likelihoods.
    if (!tree.isExternal(node)) {

        // Traverse down the two child nodes
        NodeRef child1 = tree.getChild(node, 0);
        final int[] op1 = {-1};
        final boolean update1 = traverse(tree, child1, op1, flip);

        NodeRef child2 = tree.getChild(node, 1);
        final int[] op2 = {-1};
        final boolean update2 = traverse(tree, child2, op2, flip);

        // If either child node was updated then update this node too
        if (update1 || update2) {

            int x = operationCount[operationListCount] * Beagle.OPERATION_TUPLE_SIZE;

            if (flip) {
                // first flip the partialBufferHelper
                partialBufferHelper.flipOffset(nodeNum);
            }

            final int[] operations = this.operations[operationListCount];

            operations[x] = partialBufferHelper.getOffsetIndex(nodeNum);

            if (useScaleFactors) {
                // get the index of this scaling buffer
                int n = nodeNum - tipCount;

                if (recomputeScaleFactors) {
                    // flip the indicator: can take either n or (internalNodeCount + 1) - n
                    scaleBufferHelper.flipOffset(n);

                    // store the index
                    scaleBufferIndices[n] = scaleBufferHelper.getOffsetIndex(n);

                    operations[x + 1] = scaleBufferIndices[n]; // Write new scaleFactor
                    operations[x + 2] = Beagle.NONE;
                } else {
                    operations[x + 1] = Beagle.NONE;
                    operations[x + 2] = scaleBufferIndices[n]; // Read existing scaleFactor
                }
            } else {
                if (useAutoScaling) {
                    scaleBufferIndices[nodeNum - tipCount] =
                            partialBufferHelper.getOffsetIndex(nodeNum);
                }
                operations[x + 1] = Beagle.NONE; // Not using scaleFactors
                operations[x + 2] = Beagle.NONE;
            }

            operations[x + 3] = partialBufferHelper.getOffsetIndex(child1.getNumber()); // source node 1
            operations[x + 4] = substitutionModelDelegate.getMatrixIndex(child1.getNumber()); // source matrix 1
            operations[x + 5] = partialBufferHelper.getOffsetIndex(child2.getNumber()); // source node 2
            operations[x + 6] = substitutionModelDelegate.getMatrixIndex(child2.getNumber()); // source matrix 2

            operationCount[operationListCount]++;

            update = true;

            if (hasRestrictedPartials) {
                // Test if this set of partials should be restricted
                if (updateRestrictedNodePartials) {
                    // Recompute map
                    computeNodeToRestrictionMap();
                    updateRestrictedNodePartials = false;
                }
                if (partialsMap[nodeNum] != null) {
                    // Intentionally empty: restricted-partials handling is not implemented here.
                }
            }
        }
    }

    return update;
}
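// Each entry queued in 'operations' above is one BEAGLE operation tuple: destination
// partials buffer, scale buffer to write, scale buffer to read, then a (partials, matrix)
// pair for each child. The code fills slots x through x+6, i.e. a tuple width of 7.
// A standalone sketch that packs one such tuple; TUPLE_SIZE, NONE, and all index values
// here are local stand-ins, not the real Beagle constants.
public class OperationTupleSketch {
    static final int TUPLE_SIZE = 7; // matches the x .. x+6 slots filled above
    static final int NONE = -1;      // stand-in for Beagle.NONE

    public static void main(String[] args) {
        int[] operations = new int[TUPLE_SIZE];
        int x = 0;
        operations[x]     = 10;   // destination partials buffer (this node)
        operations[x + 1] = NONE; // scale buffer to write (none)
        operations[x + 2] = NONE; // scale buffer to read (none)
        operations[x + 3] = 3;    // child 1 partials buffer
        operations[x + 4] = 3;    // child 1 transition matrix
        operations[x + 5] = 4;    // child 2 partials buffer
        operations[x + 6] = 4;    // child 2 transition matrix
        System.out.println(java.util.Arrays.toString(operations));
    }
}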
/**
 * Traverse the tree calculating partial likelihoods.
 *
 * @return whether the partials for this node were recalculated.
 */
protected boolean traverse(Tree tree, NodeRef node) {

    boolean update = false;
    boolean rootUpdated = false;

    int nodeNum = node.getNumber();

    NodeRef parent = tree.getParent(node);

    // First update the transition probability matrix(ices) for this branch if it is a normal branch
    if (parent != null && updateNode[nodeNum]) {

        final double branchRate = branchRateModel.getBranchRate(tree, node);

        // Get the operational time of the branch
        final double branchTime =
                branchRate * (tree.getNodeHeight(parent) - tree.getNodeHeight(node));

        if (branchTime < 0.0) {
            throw new RuntimeException("Negative branch length: " + branchTime);
        }

        cenancestorlikelihoodCore.setNodeMatrixForUpdate(nodeNum);

        for (int i = 0; i < categoryCount; i++) {
            double branchLength = siteModel.getRateForCategory(i) * branchTime;
            siteModel.getSubstitutionModel().getTransitionProbabilities(branchLength, probabilities);
            cenancestorlikelihoodCore.setNodeMatrix(nodeNum, i, probabilities);
        }

        update = true;

    } else if (parent == null && cenancestorHeight != null && updateNode[nodeNum]) {
        // The root has to be updated. First update the transition probability
        // matrix(ices) for the root-cenancestor fake branch.
        rootUpdated = true;

        // Get the operational time of the fake branch from the root to the cenancestor
        double rootHeight = treeModel.getNodeHeight(treeModel.getRoot());
        // TODO: Could this be easily improved? I would adapt the tree structure and
        // abstract tree likelihood.
        double branchRate = branchRateModel.getBranchRate(rootHeight, getCenancestorHeight());
        // TODO: Could this be easily improved? The same as before.
        double branchTime = branchRate * getCenancestorBranch();

        for (int i = 0; i < categoryCount; i++) {
            double branchLength = siteModel.getRateForCategory(i) * branchTime;
            siteModel.getSubstitutionModel().getTransitionProbabilities(branchLength, probabilities);
            cenancestorlikelihoodCore.setNodeMatrix(nodeNum, i, probabilities);
        }
    }

    // If the node is internal, update the partial likelihoods.
    if (!tree.isExternal(node)) {

        // Traverse down the two child nodes
        NodeRef child1 = tree.getChild(node, 0);
        final boolean update1 = traverse(tree, child1);

        NodeRef child2 = tree.getChild(node, 1);
        final boolean update2 = traverse(tree, child2);

        // If either child node was updated then update this node too
        if (update1 || update2 || rootUpdated) {

            if (update1 || update2) {
                final int childNum1 = child1.getNumber();
                final int childNum2 = child2.getNumber();

                cenancestorlikelihoodCore.setNodePartialsForUpdate(nodeNum);

                if (integrateAcrossCategories) {
                    cenancestorlikelihoodCore.calculatePartials(childNum1, childNum2, nodeNum);
                } else {
                    cenancestorlikelihoodCore.calculatePartials(
                            childNum1, childNum2, nodeNum, siteCategories);
                }

                if (COUNT_TOTAL_OPERATIONS) {
                    totalOperationCount++;
                }
            }

            if (parent == null) {
                // No parent, so this is the root of the tree
                double[] partials;
                int nodeNumCenan = getCenancestorIndex();

                if (cenancestorHeight != null) {
                    if (rootUpdated) {
                        // Calculate the partials at the cenancestor. The transition matrix of
                        // the root was calculated before.
                        cenancestorlikelihoodCore.setNodePartialsForUpdate(nodeNumCenan);

                        if (integrateAcrossCategories) {
                            cenancestorlikelihoodCore.calculatePartials(nodeNum, nodeNumCenan);
                        } else {
                            cenancestorlikelihoodCore.calculatePartials(
                                    nodeNum, nodeNumCenan, siteCategories);
                        }
                    }
                    partials = getCenancestorPartials();
                } else {
                    // Using the cenancestor model without a cenancestor date. It assumes that
                    // the root of the tree is the cenancestor. Not tested, and it shouldn't
                    // normally be used either.
                    partials = getRootPartials();
                }

                // Calculate the pattern likelihoods
                double[] frequencies = frequencyModel.getFrequencies();
                cenancestorlikelihoodCore.calculateLogLikelihoods(
                        partials, frequencies, patternLogLikelihoods);
            }

            update = true;
        }
    }

    return update;
}
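// The final calculateLogLikelihoods call reduces the (cen)ancestral partials to one
// log-likelihood per site pattern by weighting each state by its equilibrium frequency.
// A standalone sketch of that reduction; the state count, partials, and frequencies
// are made-up values, and the pattern-major layout is an assumption.
public class RootLikelihoodSketch {
    public static void main(String[] args) {
        int stateCount = 4;
        int patternCount = 2;

        // Hypothetical partials, laid out as partials[pattern * stateCount + state]
        double[] partials = {
            0.25, 0.05, 0.10, 0.02,  // pattern 0
            0.01, 0.40, 0.03, 0.08,  // pattern 1
        };
        double[] frequencies = {0.25, 0.25, 0.25, 0.25}; // assumed equilibrium frequencies

        double[] patternLogLikelihoods = new double[patternCount];
        for (int p = 0; p < patternCount; p++) {
            double sum = 0.0;
            for (int s = 0; s < stateCount; s++) {
                sum += frequencies[s] * partials[p * stateCount + s];
            }
            patternLogLikelihoods[p] = Math.log(sum);
        }

        for (double logL : patternLogLikelihoods) {
            System.out.println(logL);
        }
    }
}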