public PopsIOSpeciesTreeModel(
    PopsIOSpeciesBindings piosb, Parameter popPriorScale, PriorComponent[] priorComponents) {
  super(PopsIOSpeciesTreeModelParser.PIO_SPECIES_TREE);
  this.piosb = piosb;
  this.popPriorScale = popPriorScale;
  addVariable(popPriorScale);
  popPriorScale.addBounds(new Parameter.DefaultBounds(Double.POSITIVE_INFINITY, 0.0, 1));
  this.priorComponents = priorComponents;

  PopsIOSpeciesBindings.SpInfo[] species = piosb.getSpecies();
  int nTaxa = species.length;
  int nNodes = 2 * nTaxa - 1;
  pionodes = new PopsIONode[nNodes];
  for (int n = 0; n < nNodes; n++) {
    pionodes[n] = new PopsIONode(n);
  }

  ArrayList<Integer> tojoin = new ArrayList<Integer>(nTaxa);
  for (int n = 0; n < nTaxa; n++) {
    pionodes[n].setTaxon(species[n].name);
    pionodes[n].setHeight(0.0);
    pionodes[n].setUnion(piosb.tipUnionFromTaxon(pionodes[n].getTaxon()));
    tojoin.add(n);
  }

  // Build a random starting tree by repeatedly joining two randomly chosen lineages,
  // adding an exponential waiting time to the running tree height at each join.
  double rate = 1.0;
  double treeheight = 0.0;
  for (int i = 0; i < nTaxa - 1; i++) {
    int numtojoin = tojoin.size();
    int j = MathUtils.nextInt(numtojoin);
    Integer child0 = tojoin.get(j);
    tojoin.remove(j);
    int k = MathUtils.nextInt(numtojoin - 1);
    Integer child1 = tojoin.get(k);
    tojoin.remove(k);
    pionodes[nTaxa + i].addChildren(pionodes[child0], pionodes[child1]);
    pionodes[nTaxa + i].setHeight(treeheight + randomnodeheight(numtojoin * rate));
    treeheight = pionodes[nTaxa + i].getHeight();
    tojoin.add(nTaxa + i);
  }
  rootn = pionodes.length - 1;

  // Rescale all heights so the species-tree root sits just below the smallest
  // initial gene-tree node height.
  double scale = 0.99 * piosb.initialMinGeneNodeHeight() / pionodes[rootn].height;
  scaleAllHeights(scale);
  pionodes[rootn].fillinUnionsInSubtree(piosb.getSpecies().length);

  stree = makeSimpleTree();

  Logger.getLogger("dr.evomodel.speciation.popsio")
      .info(
          "\tConstructing a PopsIO Species Tree Model, please cite:\n"
              + Citable.Utils.getCitationString(this));
}
@Override
public double doOperation() throws OperatorFailedException {
  apspnet.beginNetworkEdit();

  // Draw a multiplier s with density proportional to s^(-1/2) on
  // [scalingFactor, 1/scalingFactor].
  double b = (1.0 - scalingFactor) * (1.0 - scalingFactor) / scalingFactor;
  double c = scalingFactor / (1.0 - scalingFactor);
  double y = MathUtils.nextDouble();
  double s = b * (y + c) * (y + c);

  int i = MathUtils.nextInt(apspnet.getNumberOfTetraTrees());
  apspnet.setOneHybPopValue(i, s * apspnet.getOneHybPopValue(i));

  apspnet.endNetworkEdit();
  // This way of scaling, with proposal proportional to x^(-1/2), has Hastings ratio 1.
  return 0.0;
}
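// Illustrative sketch (not part of the BEAST source): why the s = b*(y+c)^2 draw above has
// Hastings ratio 1. With y ~ Uniform(0,1), the CDF of s is sqrt(s/b) - c, so its density is
// 1/(2*sqrt(b*s)) on [f, 1/f] (f = scalingFactor). The proposed value x' = s*x then has
// density 1/(2*sqrt(b*x*x')), which is symmetric in x and x', so the forward and reverse
// proposal densities cancel. The Monte Carlo check below is self-contained; the class name,
// the value of f, and the use of java.util.Random are ours.
import java.util.Random;

public class SqrtScaleProposalCheck {
  public static void main(String[] args) {
    final double f = 0.75; // plays the role of scalingFactor
    final double b = (1.0 - f) * (1.0 - f) / f;
    final double c = f / (1.0 - f);
    Random rng = new Random(42);

    // Check that the multiplier stays in [f, 1/f] and that its CDF matches sqrt(s/b) - c.
    int n = 200_000;
    int below = 0;
    double sMid = 1.0; // evaluate the CDF at s = 1
    for (int i = 0; i < n; i++) {
      double y = rng.nextDouble();
      double s = b * (y + c) * (y + c);
      if (s < f - 1e-12 || s > 1.0 / f + 1e-12) {
        throw new IllegalStateException("multiplier out of range: " + s);
      }
      if (s <= sMid) {
        below++;
      }
    }
    double empirical = (double) below / n;
    double analytic = Math.sqrt(sMid / b) - c;
    System.out.printf("P(S <= 1): empirical %.4f vs analytic %.4f%n", empirical, analytic);
  }
}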
public double[][] simulateLocations() {
  NodeRef root = m_tree.getRoot();

  // assume a uniform distribution for the root location within the lat/long bounding box
  double[][] latLongs = new double[m_tree.getNodeCount()][2];
  double rootLat = MathUtils.nextDouble() * (maxLat - minLat) + minLat;
  double rootLong = MathUtils.nextDouble() * (maxLong - minLong) + minLong;
  int rootNum = root.getNumber();
  latLongs[rootNum][LATITUDE_INDEX] = rootLat;
  latLongs[rootNum][LONGITUDE_INDEX] = rootLong;

  traverse(root, latLongs[rootNum], latLongs);

  return latLongs;
}
public RandomBranchModel(
    TreeModel treeModel, //
    GY94CodonModel baseSubstitutionModel, //
    double rate, //
    boolean hasSeed,
    long seed) {
  super(RANDOM_BRANCH_MODEL);

  this.treeModel = treeModel;
  this.baseSubstitutionModel = baseSubstitutionModel;
  this.rate = rate;

  if (hasSeed) {
    // use fixed seed for e_i
    random = new MersenneTwister(seed);
  } else {
    // use BEAST seed
    random = new MersenneTwister(MathUtils.nextLong());
  } // END: seed check

  setup();
} // END: Constructor
/** Change the parameter and return the Hastings ratio. */
public double doOperation() throws OperatorFailedException {

  double[] mean = sccs.getMode();
  double[] currentValue = parameter.getParameterValues();
  double[] newValue = new double[dim];

  Set<Integer> updateSet = new HashSet<Integer>();

  if (setSizeMean != -1.0) {
    final int listLength = Poisson.nextPoisson(setSizeMean);
    while (updateSet.size() < listLength) {
      int newInt = MathUtils.nextInt(parameter.getDimension());
      if (!updateSet.contains(newInt)) {
        updateSet.add(newInt);
      }
    }
  } else {
    for (int i = 0; i < dim; ++i) {
      updateSet.add(i);
    }
  }

  double logq = 0;
  for (Integer i : updateSet) {
    newValue[i] = mean[i] + scaleFactor * MathUtils.nextGaussian();
    if (UPDATE_ALL) {
      parameter.setParameterValueQuietly(i, newValue[i]);
    } else {
      parameter.setParameterValue(i, newValue[i]);
    }

    logq +=
        (NormalDistribution.logPdf(currentValue[i], mean[i], scaleFactor)
            - NormalDistribution.logPdf(newValue[i], mean[i], scaleFactor));
  }

  //    for (Integer i : updateSet) {
  //        parameter.setParameterValueQuietly(i, newValue[i]);
  //    }

  if (UPDATE_ALL) {
    parameter.setParameterValueNotifyChangedAll(0, parameter.getParameterValue(0));
  }

  return logq;
}
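// Illustrative sketch (not from the BEAST source): the loop above is an independence-style
// proposal that draws each selected coordinate from N(mode, scaleFactor^2) regardless of its
// current value, so the per-coordinate Hastings ratio is q(current)/q(proposed) under that same
// Gaussian, exactly the difference of log-densities being accumulated. The class and method
// names below are ours; only java.util.Random is assumed.
import java.util.Random;

public class ModeCenteredProposal {

  static double normalLogPdf(double x, double mean, double sd) {
    double z = (x - mean) / sd;
    return -0.5 * z * z - Math.log(sd) - 0.5 * Math.log(2.0 * Math.PI);
  }

  /** Propose a new value around a fixed mode and return {newValue, logHastingsRatio}. */
  static double[] propose(double current, double mode, double sd, Random rng) {
    double proposed = mode + sd * rng.nextGaussian();
    // The forward density q(proposed | current) = N(proposed; mode, sd) does not depend on the
    // current value, so the ratio reduces to N(current; mode, sd) / N(proposed; mode, sd).
    double logHastings = normalLogPdf(current, mode, sd) - normalLogPdf(proposed, mode, sd);
    return new double[] {proposed, logHastings};
  }

  public static void main(String[] args) {
    Random rng = new Random(1);
    double[] step = propose(0.3, 1.0, 0.5, rng);
    System.out.printf("proposed %.4f, log Hastings ratio %.4f%n", step[0], step[1]);
  }
}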
public void drawTreeIndex() {
  //    System.err.print("Drawing new tree, (old tree = " + currentTreeIndex);

  currentTreeIndex = MathUtils.nextInt(trees.length);

  //    System.err.println(") new tree = " + currentTreeIndex);

  fireModelChanged(new TreeModel.TreeChangedEvent());
}
void traverse(NodeRef node, double[] parentSequence, double[][] latLongs) {
  for (int iChild = 0; iChild < m_tree.getChildCount(node); iChild++) {
    NodeRef child = m_tree.getChild(node, iChild);

    // find the branch length
    final double branchRate = m_branchRateModel.getBranchRate(m_tree, child);
    final double branchLength =
        branchRate * (m_tree.getNodeHeight(node) - m_tree.getNodeHeight(child));
    if (branchLength < 0.0) {
      throw new RuntimeException("Negative branch length: " + branchLength);
    }

    // Brownian displacement: variance proportional to branch length, hence sqrt(branchLength).
    double childLat =
        MathUtils.nextGaussian() * Math.sqrt(branchLength) + parentSequence[LATITUDE_INDEX];
    double childLong =
        MathUtils.nextGaussian() * Math.sqrt(branchLength) + parentSequence[LONGITUDE_INDEX];

    int childNum = child.getNumber();
    latLongs[childNum][LATITUDE_INDEX] = childLat;
    latLongs[childNum][LONGITUDE_INDEX] = childLong;

    traverse(m_tree.getChild(node, iChild), latLongs[childNum], latLongs);
  }
}
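// Illustrative sketch (not from the BEAST source): the Gaussian step above simulates Brownian
// motion along a branch, where the displacement variance equals the rate-scaled branch length,
// hence the Math.sqrt(branchLength) standard deviation. The sketch below walks a single lineage
// through a few branches; the class name and branch lengths are hypothetical and only
// java.util.Random is assumed.
import java.util.Random;

public class BrownianBranchSketch {
  public static void main(String[] args) {
    Random rng = new Random(7);
    double[] branchLengths = {0.5, 1.2, 0.3}; // hypothetical rate-scaled branch lengths

    double lat = 0.0;
    double lon = 0.0;
    for (double t : branchLengths) {
      // One Brownian step per branch: N(0, t) added independently to each coordinate, so
      // sd = sqrt(t). Variances add, so after all branches the per-coordinate variance is the
      // sum of the branch lengths.
      lat += rng.nextGaussian() * Math.sqrt(t);
      lon += rng.nextGaussian() * Math.sqrt(t);
      System.out.printf("after branch of length %.2f: (%.3f, %.3f)%n", t, lat, lon);
    }
  }
}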
public static void checkTree(TreeModel treeModel) {
  // todo Should only be run if there exists a zero-length interval

  //    TreeModel treeModel = (TreeModel) tree;
  for (int i = 0; i < treeModel.getInternalNodeCount(); i++) {
    NodeRef node = treeModel.getInternalNode(i);
    if (node != treeModel.getRoot()) {
      double parentHeight = treeModel.getNodeHeight(treeModel.getParent(node));
      double childHeight0 = treeModel.getNodeHeight(treeModel.getChild(node, 0));
      double childHeight1 = treeModel.getNodeHeight(treeModel.getChild(node, 1));
      double maxChild = childHeight0;
      if (childHeight1 > maxChild) maxChild = childHeight1;

      // Redraw the node height uniformly between its taller child and its parent.
      double newHeight = maxChild + MathUtils.nextDouble() * (parentHeight - maxChild);
      treeModel.setNodeHeight(node, newHeight);
    }
  }
  treeModel.pushTreeChangedEvent();
}
private static int mauCanonicalSub(
    Tree tree, NodeRef node, int loc, NodeRef[] order, boolean[] wasSwaped) {
  if (tree.isExternal(node)) {
    order[loc] = node;
    assert (loc & 0x1) == 0;
    return loc + 1;
  }

  final boolean swap = MathUtils.nextBoolean();
  // wasSwaped[(loc-1)/2] = swap;

  int l = mauCanonicalSub(tree, tree.getChild(node, swap ? 1 : 0), loc, order, wasSwaped);

  order[l] = node;
  assert (l & 0x1) == 1;
  wasSwaped[(l - 1) / 2] = swap;

  l = mauCanonicalSub(tree, tree.getChild(node, swap ? 0 : 1), l + 1, order, wasSwaped);
  return l;
}
/** Change the parameter and return the Hastings ratio. */
public final double doOperation() throws OperatorFailedException {

  final double scale =
      (scaleFactor + (MathUtils.nextDouble() * ((1.0 / scaleFactor) - scaleFactor)));

  int goingUp = 0, goingDown = 0;

  if (upParameter != null) {
    for (Scalable.Default up : upParameter) {
      goingUp += up.scaleAllAndNotify(scale, -1);
    }
  }

  if (downParameter != null) {
    for (Scalable.Default dn : downParameter) {
      goingDown += dn.scaleAllAndNotify(1.0 / scale, -1);
    }
  }

  return (goingUp - goingDown - 2) * Math.log(scale);
}
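// Illustrative sketch (not from the BEAST source): one way to see where the
// (goingUp - goingDown - 2) factor above comes from. Viewing the move as the deterministic map
// (x_up, x_down, s) -> (s * x_up, x_down / s, 1/s) with s drawn from the same distribution in
// both directions, the proposal densities cancel and the Hastings ratio reduces to the Jacobian
// of the map: s^nUp * s^(-nDown) * s^(-2), i.e. (nUp - nDown - 2) * log(s) on the log scale.
// The helper below just evaluates that expression; the names are ours.
public class UpDownHastings {

  /** Log Hastings ratio for scaling nUp values by s and nDown values by 1/s. */
  static double logHastings(int nUp, int nDown, double s) {
    return (nUp - nDown - 2) * Math.log(s);
  }

  public static void main(String[] args) {
    // Example: 5 node heights scaled up, 1 clock rate scaled down, multiplier 1.1.
    System.out.println(logHastings(5, 1, 1.1)); // (5 - 1 - 2) * log(1.1) ~ 0.1906
  }
}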
public void proposeTree() throws OperatorFailedException {
  TreeModel tree = c2cLikelihood.getTreeModel();
  BranchMapModel branchMap = c2cLikelihood.getBranchMap();
  NodeRef i;
  double oldMinAge, newMinAge, newRange, oldRange, newAge, q;

  // choose a random node avoiding root, and nodes that are ineligible for this move
  // because they have nowhere to go
  final int nodeCount = tree.getNodeCount();
  do {
    i = tree.getNode(MathUtils.nextInt(nodeCount));
  } while (tree.getRoot() == i || !eligibleForMove(i, tree, branchMap));
  final NodeRef iP = tree.getParent(i);

  // this one can go anywhere
  NodeRef j = tree.getNode(MathUtils.nextInt(tree.getNodeCount()));
  NodeRef k = tree.getParent(j);

  while ((k != null && tree.getNodeHeight(k) <= tree.getNodeHeight(i)) || (i == j)) {
    j = tree.getNode(MathUtils.nextInt(tree.getNodeCount()));
    k = tree.getParent(j);
  }

  if (iP == tree.getRoot() || j == tree.getRoot()) {
    throw new OperatorFailedException("Root changes not allowed!");
  }

  if (k == iP || j == iP || k == i) throw new OperatorFailedException("move failed");

  final NodeRef CiP = getOtherChild(tree, iP, i);
  NodeRef PiP = tree.getParent(iP);

  newMinAge = Math.max(tree.getNodeHeight(i), tree.getNodeHeight(j));
  newRange = tree.getNodeHeight(k) - newMinAge;
  newAge = newMinAge + (MathUtils.nextDouble() * newRange);
  oldMinAge = Math.max(tree.getNodeHeight(i), tree.getNodeHeight(CiP));
  oldRange = tree.getNodeHeight(PiP) - oldMinAge;
  q = newRange / Math.abs(oldRange);

  // need to account for the random repainting of iP
  if (branchMap.get(PiP.getNumber()) != branchMap.get(CiP.getNumber())) {
    q *= 0.5;
  }
  if (branchMap.get(k.getNumber()) != branchMap.get(j.getNumber())) {
    q *= 2;
  }

  tree.beginTreeEdit();

  if (j == tree.getRoot()) {
    // 1. remove edges <iP, CiP>
    tree.removeChild(iP, CiP);
    tree.removeChild(PiP, iP);

    // 2. add edges <k, iP>, <iP, j>, <PiP, CiP>
    tree.addChild(iP, j);
    tree.addChild(PiP, CiP);

    // iP is the new root
    tree.setRoot(iP);

  } else if (iP == tree.getRoot()) {
    // 1. remove edges <k, j>, <iP, CiP>, <PiP, iP>
    tree.removeChild(k, j);
    tree.removeChild(iP, CiP);

    // 2. add edges <k, iP>, <iP, j>, <PiP, CiP>
    tree.addChild(iP, j);
    tree.addChild(k, iP);

    // CiP is the new root
    tree.setRoot(CiP);

  } else {
    // 1. remove edges <k, j>, <iP, CiP>, <PiP, iP>
    tree.removeChild(k, j);
    tree.removeChild(iP, CiP);
    tree.removeChild(PiP, iP);

    // 2. add edges <k, iP>, <iP, j>, <PiP, CiP>
    tree.addChild(iP, j);
    tree.addChild(k, iP);
    tree.addChild(PiP, CiP);
  }

  tree.setNodeHeight(iP, newAge);

  tree.endTreeEdit();

  //    logq = Math.log(q);

  // repaint the parent to match either its new parent or its new child (50% chance of each).
  if (MathUtils.nextInt(2) == 0) {
    branchMap.set(iP.getNumber(), branchMap.get(k.getNumber()), true);
  } else {
    branchMap.set(iP.getNumber(), branchMap.get(j.getNumber()), true);
  }

  if (DEBUG) {
    c2cLikelihood.checkPartitions();
  }
}
private double randomnodeheight(double rate) {
  // 1e-6/rate to avoid very tiny heights
  return MathUtils.nextExponential(rate) + 1e-6 / rate;
}
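// Illustrative sketch (not from the BEAST source): an exponential waiting time can come out
// arbitrarily close to zero, so the method above adds a floor of 1e-6/rate before using the
// draw as a node-height increment. The sketch below shows the same idea with an inverse-CDF
// exponential draw, treating the argument as a rate (mean 1/rate), which is how the floor is
// scaled in the original comment; the class name is ours and only java.util.Random is assumed.
import java.util.Random;

public class FlooredExponential {

  /** Exponential(rate) draw via inverse CDF, plus a floor of 1e-6 / rate. */
  static double flooredExponential(double rate, Random rng) {
    double u = rng.nextDouble();
    double draw = -Math.log(1.0 - u) / rate; // plain Exponential(rate) sample
    return draw + 1e-6 / rate; // never smaller than 1e-6 / rate
  }

  public static void main(String[] args) {
    Random rng = new Random(3);
    double min = Double.POSITIVE_INFINITY;
    for (int i = 0; i < 100_000; i++) {
      min = Math.min(min, flooredExponential(2.0, rng));
    }
    System.out.println("smallest of 100000 draws: " + min); // always >= 5e-7
  }
}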
/** Change the parameter and return the Hastings ratio. */
public final double doOperation() {

  int index = MathUtils.nextInt(links.getDimension());
  int oldGroup = (int) assignments.getParameterValue(index);

  /*
   * Set index customer link to index and all connected to it to a new assignment (min value empty)
   */
  int minEmp = minEmpty(modelLikelihood.getLogLikelihoodsVector());
  links.setParameterValue(index, index);
  int[] visited = connected(index, links);
  int ii = 0;
  while (visited[ii] != 0) {
    assignments.setParameterValue(visited[ii] - 1, minEmp);
    ii++;
  }

  /*
   * Adjust likvector for group separated
   */
  modelLikelihood.setLogLikelihoodsVector(oldGroup, getLogLikGroup(oldGroup));
  modelLikelihood.setLogLikelihoodsVector(minEmp, getLogLikGroup(minEmp));

  int maxFull = maxFull(modelLikelihood.getLogLikelihoodsVector());
  double[] liks = modelLikelihood.getLogLikelihoodsVector();

  /*
   * computing likelihoods of joint groups
   */
  double[] crossedLiks = new double[maxFull + 1];
  for (int ll = 0; ll < maxFull + 1; ll++) {
    if (ll != minEmp) {
      crossedLiks[ll] = getLogLik2Group(ll, minEmp);
    }
  }

  /*
   * Add logPrior
   */
  double[] logP = new double[links.getDimension()];
  for (int jj = 0; jj < links.getDimension(); jj++) {
    logP[jj] += depMatrix[index][jj];

    int n = (int) assignments.getParameterValue(jj);
    if (n != minEmp) {
      logP[jj] += crossedLiks[n] - liks[n] - liks[minEmp];
    }
  }

  logP[index] = Math.log(chiParameter.getParameterValue(0));

  /*
   * option to set the very small probabilities to zero
   */

  /*
   * Gibbs sampling
   */
  this.rescale(logP); // Improve numerical stability
  this.exp(logP); // Transform back to probability-scale

  int k = MathUtils.randomChoicePDF(logP);
  links.setParameterValue(index, k);

  int newGroup = (int) assignments.getParameterValue(k);
  ii = 0;
  while (visited[ii] != 0) {
    assignments.setParameterValue(visited[ii] - 1, newGroup);
    ii++;
  }

  /*
   * updating conditional likelihood vector
   */
  modelLikelihood.setLogLikelihoodsVector(newGroup, getLogLikGroup(newGroup));
  if (newGroup != minEmp) {
    modelLikelihood.setLogLikelihoodsVector(minEmp, 0);
  }

  sampleMeans(maxFull);

  return 0.0;
}
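// Illustrative sketch (not from the BEAST source): the rescale/exp pair above is the usual
// trick for sampling a category from log-probabilities without underflow: subtract the maximum
// before exponentiating, then draw an index proportionally to the unnormalised weights.
// Everything below is self-contained; the names are ours and only java.util.Random is assumed.
import java.util.Random;

public class LogWeightSampler {

  /** Draw an index with probability proportional to exp(logWeights[i]). */
  static int sampleFromLogWeights(double[] logWeights, Random rng) {
    // Subtract the maximum so the largest exponent is 0; this only changes the normalising
    // constant, not the sampling probabilities.
    double max = Double.NEGATIVE_INFINITY;
    for (double lw : logWeights) {
      max = Math.max(max, lw);
    }
    double[] weights = new double[logWeights.length];
    double total = 0.0;
    for (int i = 0; i < logWeights.length; i++) {
      weights[i] = Math.exp(logWeights[i] - max);
      total += weights[i];
    }
    // Roulette-wheel draw over the unnormalised weights.
    double u = rng.nextDouble() * total;
    for (int i = 0; i < weights.length; i++) {
      u -= weights[i];
      if (u <= 0.0) {
        return i;
      }
    }
    return weights.length - 1; // guard against floating-point round-off
  }

  public static void main(String[] args) {
    double[] logP = {-1050.2, -1049.7, -1053.1}; // would all underflow via a plain exp()
    System.out.println(sampleFromLogWeights(logP, new Random(11)));
  }
}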
public double splitClade(Clade parent, Clade[] children) {
  // the number of all possible clades is 2^n with n the number of tips,
  // reduced by 2 because we won't consider the clades with all or no tips contained;
  // divide this number by 2 because every clade has a matching clade to form the split
  // #splits = 2^(n-1) - 1
  final double splits = Math.pow(2, parent.getSize() - 1) - 1;
  double prob = 0;

  if (cladeCoProbabilities.containsKey(parent.getBits())) {
    HashMap<BitSet, Clade> childClades = cladeCoProbabilities.get(parent.getBits());
    double noChildClades = 0.0;

    double sum = 0.0;
    Set<BitSet> keys = childClades.keySet();
    for (BitSet child : keys) {
      Clade tmp = childClades.get(child);
      if (parent.getSize() > tmp.getSize() + 1) {
        sum += (tmp.getSampleCount() + EPSILON) / 2.0;
        noChildClades += 0.5;
      } else {
        sum += (tmp.getSampleCount() + EPSILON);
        noChildClades += 1.0;
      }
    }

    // add epsilon for each not observed clade
    sum += EPSILON * (splits - noChildClades);

    // roulette wheel
    double randomNumber = Math.random() * sum;
    for (BitSet child : keys) {
      Clade tmp = childClades.get(child);
      if (parent.getSize() > tmp.getSize() + 1) {
        randomNumber -= (tmp.getSampleCount() + EPSILON) / 2.0;
      } else {
        randomNumber -= (tmp.getSampleCount() + EPSILON);
      }
      if (randomNumber < 0) {
        children[0] = tmp;
        prob = (tmp.getSampleCount() + EPSILON) / sum;
        break;
      }
    }

    if (randomNumber >= 0) {
      // randomNumber /= EPSILON;
      prob = EPSILON / sum;

      BitSet newChild;
      BitSet inverseBits;
      do {
        do {
          newChild = (BitSet) parent.getBits().clone();
          int index = -1;
          do {
            index = newChild.nextSetBit(index + 1);
            if (index > -1 && MathUtils.nextBoolean()) {
              newChild.clear(index);
            }
          } while (index > -1);
        } while (newChild.cardinality() == 0 || newChild.cardinality() == parent.getSize());
        inverseBits = (BitSet) newChild.clone();
        inverseBits.xor(parent.getBits());
      } while (childClades.containsKey(newChild) || childClades.containsKey(inverseBits));

      Clade randomClade = new Clade(newChild, 0.9999 * parent.getHeight());
      children[0] = randomClade;

      BitSet secondChild = (BitSet) children[0].getBits().clone();
      secondChild.xor(parent.getBits());
      children[1] = new Clade(secondChild, 0.9999 * parent.getHeight());
    } else {
      BitSet secondChild = (BitSet) children[0].getBits().clone();
      secondChild.xor(parent.getBits());
      children[1] = childClades.get(secondChild);
      if (children[1] == null) {
        children[1] = new Clade(secondChild, 0.9999 * parent.getHeight());
      }
    }
  } else {
    prob = 1.0 / splits;

    BitSet newChild;
    do {
      newChild = (BitSet) parent.getBits().clone();
      int index = -1;
      do {
        index = newChild.nextSetBit(index + 1);
        if (index > -1 && MathUtils.nextBoolean()) {
          newChild.clear(index);
        }
      } while (index > -1);
    } while (newChild.cardinality() == 0 || newChild.cardinality() == parent.getSize());

    Clade randomClade = new Clade(newChild, 0.9999 * parent.getHeight());
    //    randomClade.addSample();
    randomClade.addHeight(0.9999 * parent.getHeight());
    children[0] = randomClade;

    BitSet secondChild = (BitSet) children[0].getBits().clone();
    secondChild.xor(parent.getBits());
    children[1] = new Clade(secondChild, 0.9999 * parent.getHeight());
    //    children[1].addSample();
    randomClade.addHeight(0.9999 * parent.getHeight());
  }

  return Math.log(prob);
}
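// Illustrative sketch (not from the BEAST source): the fallback branch above draws a uniform
// random split of the parent clade by keeping or clearing each tip bit independently and
// rejecting the empty and full subsets; the complementary clade is then obtained with xor.
// The helper below is self-contained (java.util.BitSet, java.util.Random); the names are ours.
import java.util.BitSet;
import java.util.Random;

public class RandomCladeSplit {

  /** Split parentTips into two non-empty, complementary subsets chosen uniformly at random. */
  static BitSet[] randomSplit(BitSet parentTips, Random rng) {
    int size = parentTips.cardinality();
    BitSet child;
    do {
      child = (BitSet) parentTips.clone();
      for (int i = child.nextSetBit(0); i >= 0; i = child.nextSetBit(i + 1)) {
        if (rng.nextBoolean()) {
          child.clear(i); // drop this tip from the first child with probability 1/2
        }
      }
    } while (child.cardinality() == 0 || child.cardinality() == size); // reject trivial splits

    BitSet other = (BitSet) child.clone();
    other.xor(parentTips); // the second child is the complement within the parent
    return new BitSet[] {child, other};
  }

  public static void main(String[] args) {
    BitSet parent = new BitSet();
    parent.set(0, 6); // a hypothetical clade of six tips
    BitSet[] split = randomSplit(parent, new Random(5));
    System.out.println(split[0] + " | " + split[1]);
  }
}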
public void operateOneNode(final double factor) throws OperatorFailedException {

  //    #print "operate: tree", ut.treerep(t)
  //    if( verbose) System.out.println("  Mau at start: " + tree.getSimpleTree());

  final int count = multree.getExternalNodeCount();
  assert count == species.nSpSeqs();

  NodeRef[] order = new NodeRef[2 * count - 1];
  boolean[] swapped = new boolean[count - 1];
  mauCanonical(multree, order, swapped);

  // internal node to change
  // count-1 - number of internal nodes
  int which = MathUtils.nextInt(count - 1);

  FixedBitSet left = new FixedBitSet(count);
  FixedBitSet right = new FixedBitSet(count);

  for (int k = 0; k < 2 * which + 1; k += 2) {
    left.set(multree.speciesIndex(order[k]));
  }

  for (int k = 2 * (which + 1); k < 2 * count; k += 2) {
    right.set(multree.speciesIndex(order[k]));
  }

  double newHeight;

  if (factor > 0) {
    newHeight = multree.getNodeHeight(order[2 * which + 1]) * factor;
  } else {
    final double limit = species.speciationUpperBound(left, right);
    newHeight = MathUtils.nextDouble() * limit;
  }

  multree.beginTreeEdit();

  multree.setPreorderIndices(preOrderIndexBefore);

  final NodeRef node = order[2 * which + 1];

  multree.setNodeHeight(node, newHeight);

  mauReconstruct(multree, order, swapped);

  // restore pre-order of pops
  {
    multree.setPreorderIndices(preOrderIndexAfter);

    double[] splitPopValues = null;

    for (int k = 0; k < preOrderIndexBefore.length; ++k) {
      final int b = preOrderIndexBefore[k];
      if (b >= 0) {
        final int a = preOrderIndexAfter[k];
        if (a != b) {
          //    if( verbose) System.out.println("pops: " + a + " <- " + b);

          final Parameter p1 = multree.sppSplitPopulations;
          if (splitPopValues == null) {
            splitPopValues = p1.getParameterValues();
          }

          if (multree.constPopulation()) {
            p1.setParameterValue(count + a, splitPopValues[count + b]);
          } else {
            for (int i = 0; i < 2; ++i) {
              p1.setParameterValue(count + 2 * a + i, splitPopValues[count + 2 * b + i]);
            }
          }
        }
      }
    }
  }

  multree.endTreeEdit();
}