private void calcPatternPoints(int nPatterns) {
    patternPoints = new int[threadCount + 1];
    if (proportionsInput.get() == null) {
        int range = nPatterns / threadCount;
        for (int i = 0; i < threadCount - 1; i++) {
            patternPoints[i + 1] = range * (i + 1);
        }
        patternPoints[threadCount] = nPatterns;
    } else {
        String[] strs = proportionsInput.get().split("\\s+");
        double[] proportions = new double[threadCount];
        for (int i = 0; i < threadCount; i++) {
            proportions[i] = Double.parseDouble(strs[i % strs.length]);
        }
        // normalise
        double sum = 0;
        for (double d : proportions) {
            sum += d;
        }
        for (int i = 0; i < threadCount; i++) {
            proportions[i] /= sum;
        }
        // cumulative
        for (int i = 1; i < threadCount; i++) {
            proportions[i] += proportions[i - 1];
        }
        // calc ranges
        for (int i = 0; i < threadCount; i++) {
            patternPoints[i + 1] = (int) (proportions[i] * nPatterns + 0.5);
        }
    }
}
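// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the class above): a standalone, hypothetical
// helper that mirrors the normalise / cumulative / round arithmetic used by
// calcPatternPoints() to turn a whitespace-separated proportions string into
// per-thread pattern boundaries. The class name and main() driver are
// assumptions added purely for demonstration.
public class PatternPointsDemo {

    static int[] patternPoints(String proportionsSpec, int threadCount, int nPatterns) {
        String[] strs = proportionsSpec.split("\\s+");
        double[] p = new double[threadCount];
        for (int i = 0; i < threadCount; i++) {
            p[i] = Double.parseDouble(strs[i % strs.length]); // recycle values if too few are given
        }
        // normalise
        double sum = 0;
        for (double d : p) {
            sum += d;
        }
        for (int i = 0; i < threadCount; i++) {
            p[i] /= sum;
        }
        // cumulative
        for (int i = 1; i < threadCount; i++) {
            p[i] += p[i - 1];
        }
        // round each cumulative proportion to the nearest pattern index
        int[] points = new int[threadCount + 1];
        for (int i = 0; i < threadCount; i++) {
            points[i + 1] = (int) (p[i] * nPatterns + 0.5);
        }
        return points;
    }

    public static void main(String[] args) {
        // "1 2 1" over 100 patterns and 3 threads gives boundaries [0, 25, 75, 100]
        System.out.println(java.util.Arrays.toString(patternPoints("1 2 1", 3, 100)));
    }
}
// ---------------------------------------------------------------------------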
/**
 * Calculate probability of choosing region affected by the given conversion under the
 * ClonalOrigin model.
 *
 * @param conv conversion region is associated with
 * @return log probability density
 */
public double getAffectedRegionProb(Conversion conv) {
    double logP = 0.0;

    // Total effective number of possible start sites
    double alpha = acg.getTotalSequenceLength()
            + acg.getLoci().size() * deltaInput.get().getValue();

    // Calculate probability of converted region.
    if (conv.getStartSite() == 0)
        logP += Math.log((deltaInput.get().getValue() + 1) / alpha);
    else
        logP += Math.log(1.0 / alpha);

    // Probability of end site:
    double probEnd = Math.pow(1.0 - 1.0 / deltaInput.get().getValue(),
            conv.getEndSite() - conv.getStartSite())
            / deltaInput.get().getValue();

    // Include probability of going past the end:
    if (conv.getEndSite() == conv.getLocus().getSiteCount() - 1)
        probEnd += Math.pow(1.0 - 1.0 / deltaInput.get().getValue(),
                conv.getLocus().getSiteCount() - conv.getStartSite());

    logP += Math.log(probEnd);

    return logP;
}
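// ---------------------------------------------------------------------------
// Illustrative sketch (an assumption-laden simplification, not part of the model
// code): the same region-density arithmetic as getAffectedRegionProb(), restricted
// to a single locus of length siteCount with mean tract length delta. The helper
// name and parameters are hypothetical; it only restates the three terms computed
// above: the start-site term, the geometric end-site term, and the extra mass for
// regions truncated at the end of the locus.
static double regionLogDensity(int startSite, int endSite, int siteCount, double delta) {
    double alpha = siteCount + delta;               // one locus: siteCount + delta effective start sites
    double logP = (startSite == 0)
            ? Math.log((delta + 1) / alpha)         // extra mass for regions starting at site 0
            : Math.log(1.0 / alpha);

    // geometric end-site term with success probability 1/delta
    double probEnd = Math.pow(1.0 - 1.0 / delta, endSite - startSite) / delta;

    // regions reaching the last site also absorb the mass of all longer tracts
    if (endSite == siteCount - 1)
        probEnd += Math.pow(1.0 - 1.0 / delta, siteCount - startSite);

    return logP + Math.log(probEnd);
}
// ---------------------------------------------------------------------------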
public void initAndValidate() {
    testCorrect = testCorrectInput.get();
    paramList = paramListInput.get();
    modelList = modelListInput.get();
    freqsList = freqsListInput.get();
    paramPointers = paramPointersInput.get();
    modelPointers = modelPointersInput.get();
    freqsPointers = freqsPointersInput.get();
    pointerCount = paramPointers.getDimension();

    dp = dpInput.get();
    List<ParametricDistribution> distrs = dp.getBaseDistributions();
    paramBaseDistr = distrs.get(0);
    modelBaseDistr = distrs.get(1);
    freqsBaseDistr = distrs.get(2);

    tempLikelihood = tempLikelihoodInput.get();
    dpTreeLikelihood = dpTreeLikelihoodInput.get();

    modelNetworkMap.put(1.0, new double[] {3.0});
    modelNetworkMap.put(2.0, new double[] {3.0});
    modelNetworkMap.put(3.0, new double[] {1.0, 2.0, 4.0});
    modelNetworkMap.put(4.0, new double[] {3.0, 5.0});
    modelNetworkMap.put(5.0, new double[] {4.0});
    // System.out.println("is null? " + (modelNetworkMap.get(5.0) == null));
}
@Override
public void initAndValidate() {
    scaleFactor = scaleFactorInput.get();

    // determine taxon set to choose from
    if (taxonsetInput.get() != null) {
        List<String> taxaNames = new ArrayList<>();
        for (String taxon : treeInput.get().getTaxaNames()) {
            taxaNames.add(taxon);
        }

        List<String> set = taxonsetInput.get().asStringList();
        int nrOfTaxa = set.size();
        taxonIndices = new int[nrOfTaxa];
        int k = 0;
        for (String taxon : set) {
            int taxonIndex = taxaNames.indexOf(taxon);
            if (taxonIndex < 0) {
                throw new IllegalArgumentException("Cannot find taxon " + taxon + " in tree");
            }
            taxonIndices[k++] = taxonIndex;
        }
    } else {
        taxonIndices = new int[treeInput.get().getTaxaNames().length];
        for (int i = 0; i < taxonIndices.length; i++) {
            taxonIndices[i] = i;
        }
    }
}
/**
 * Choose region to be affected by this conversion.
 *
 * @param conv Conversion object where these sites are stored.
 * @return log probability density of chosen attachment.
 */
public double drawAffectedRegion(Conversion conv) {
    double logP = 0.0;

    // Total effective number of possible start sites
    double alpha = acg.getTotalSequenceLength()
            + acg.getLoci().size() * deltaInput.get().getValue();

    // Draw location of converted region.
    int startSite = -1;
    int endSite;
    Locus locus = null;

    double u = Randomizer.nextDouble() * alpha;
    for (Locus thisLocus : acg.getLoci()) {
        if (u < deltaInput.get().getValue() + thisLocus.getSiteCount()) {
            locus = thisLocus;
            if (u < deltaInput.get().getValue()) {
                startSite = 0;
                logP += Math.log(deltaInput.get().getValue() / alpha);
            } else {
                startSite = (int) (u - deltaInput.get().getValue());
                logP += Math.log(1.0 / alpha);
            }
            break;
        }
        u -= deltaInput.get().getValue() + thisLocus.getSiteCount();
    }

    if (locus == null)
        throw new IllegalStateException("Programmer error: "
                + "loop in drawAffectedRegion() fell through.");

    endSite = startSite + (int) Randomizer.nextGeometric(1.0 / deltaInput.get().getValue());
    endSite = Math.min(endSite, locus.getSiteCount() - 1);

    // Probability of end site:
    double probEnd = Math.pow(1.0 - 1.0 / deltaInput.get().getValue(), endSite - startSite)
            / deltaInput.get().getValue();

    // Include probability of going past the end:
    if (endSite == locus.getSiteCount() - 1)
        probEnd += Math.pow(1.0 - 1.0 / deltaInput.get().getValue(),
                locus.getSiteCount() - startSite);

    logP += Math.log(probEnd);

    conv.setLocus(locus);
    conv.setStartSite(startSite);
    conv.setEndSite(endSite);

    return logP;
}
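// ---------------------------------------------------------------------------
// Illustrative sketch (assumptions flagged): the sampling counterpart of the
// density above, restricted to a single locus and using java.util.Random instead
// of Randomizer. The geometric draw below uses standard inverse-CDF sampling with
// success probability 1/delta, which may differ in convention from
// Randomizer.nextGeometric(); the helper name is hypothetical.
static int[] drawRegion(java.util.Random rng, int siteCount, double delta) {
    // uniform draw over siteCount + delta effective start positions
    double u = rng.nextDouble() * (siteCount + delta);
    int startSite = u < delta ? 0 : (int) (u - delta);

    // geometric tract length with mean ~delta, truncated at the locus end
    int length = (int) Math.floor(Math.log(1.0 - rng.nextDouble()) / Math.log(1.0 - 1.0 / delta));
    int endSite = Math.min(startSite + length, siteCount - 1);

    return new int[] {startSite, endSite};
}
// ---------------------------------------------------------------------------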
@Override
public void initAndValidate() {
    meanRate = meanRateInput.get();
    speciesTreeRatesX = speciesTreeRatesInput.get();
    geneTree = geneTreeInput.get();

    geneNodeCount = geneTree.getNodeCount();
    branchRates = new double[geneNodeCount];
    storedBranchRates = new double[geneNodeCount];
    needsUpdate = true;
}
/**
 * Evaluate rate multiplier expression for the given variable values.
 *
 * @param scalarVarNames Names of scalar variables in expression
 * @param scalarVarVals  Values of scalar variables in expression
 * @param vectorVarNames Names of vector variables in expression
 * @param vectorVarVals  Values of vector variables in expression
 * @param functions
 * @return result of evaluating the expression
 */
public double evaluate(List<String> scalarVarNames, int[] scalarVarVals,
        List<String> vectorVarNames, List<Double[]> vectorVarVals,
        Map<String, Function> functions) {

    if (visitor == null) {
        // Parse predicate expression
        ANTLRInputStream input = new ANTLRInputStream(expInput.get());
        MASTERGrammarLexer lexer = new MASTERGrammarLexer(input);
        CommonTokenStream tokens = new CommonTokenStream(lexer);
        MASTERGrammarParser parser = new MASTERGrammarParser(tokens);
        ParseTree parseTree = parser.expression();
        visitor = new ExpressionEvaluator(parseTree, scalarVarNames, functions);
    }

    for (int i = 0; i < vectorVarNames.size(); i++)
        visitor.setVectorVar(vectorVarNames.get(i), vectorVarVals.get(i));

    Double[] res = visitor.evaluate(scalarVarVals);
    if (res.length != 1) {
        throw new IllegalArgumentException("Reaction rate multiplier must be scalar!");
    }

    return res[0];
}
@SuppressWarnings("unchecked")
public void sync(int iPartition) {
    if (parentInputs.size() > 0 && _input.get() != null) {
        Input<?> input = parentInputs.get(iPartition);
        if (bIsList) {
            List<Object> list = (List<Object>) _input.get();
            List<Object> targetList = ((List<Object>) input.get());
            // targetList.clear();
            // only clear former members
            for (BEASTInterface plugin : startInputs) {
                targetList.remove(plugin);
            }
            targetList.addAll(list);
            // sync outputs of items in list
            for (Object o : list) {
                if (o instanceof BEASTInterface) {
                    ((BEASTInterface) o).getOutputs().add(parentPlugins.get(iPartition));
                }
            }
        } else {
            try {
                // System.err.println("sync " + parentPlugins.get(iPartition) + "[" + input.getName() + "] = " + _input.get());
                input.setValue(_input.get(), parentPlugins.get(iPartition));
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }
}
@Override
public void init(PrintStream out) throws Exception {
    final Tree tree = treeInput.get();
    if (getID() == null || getID().matches("\\s*")) {
        out.print(tree.getID() + ".SAcount\t");
    } else {
        out.print(getID() + "\t");
    }
}
@Override
public void initAndValidate() {
    interpreter = new Interpreter();
    NamedFunction.evalFunctionInputs(interpreter, functionInputs.get());
    String script = valueInput.get();
    try {
        interpreter.eval(script);
    } catch (EvalError e) {
        throw new RuntimeException(e);
    }
}
public double proposal() {
    double logq = 0.0;

    // Pick two indices at random
    int index1 = Randomizer.nextInt(pointerCount);
    int index2 = index1;
    while (index2 == index1) {
        index2 = Randomizer.nextInt(pointerCount);
    }

    int clusterIndex1 = paramPointers.indexInList(index1, paramList);
    int clusterIndex2 = paramPointers.indexInList(index2, paramList);

    // If the two randomly drawn sites are from the same cluster, perform a split-move.
    if (clusterIndex1 == clusterIndex2) {
        int[] clusterSites = dpValuableInput.get().getClusterSites(clusterIndex1);
        double temp = split(index1, index2, clusterIndex1, clusterSites);
        // System.out.println("split: " + temp);
        logq += temp;
    } else {
        // If the two randomly drawn sites are not from the same cluster, perform a merge-move.
        int[] cluster1Sites = dpValuableInput.get().getClusterSites(clusterIndex1);
        int[] cluster2Sites = dpValuableInput.get().getClusterSites(clusterIndex2);

        // logq = merge(index1, index2, clusterIndex1, clusterIndex2, cluster1Sites, cluster2Sites);
        double temp = merge(index1, index2, clusterIndex1, clusterIndex2, cluster1Sites, cluster2Sites);
        // System.out.println("merge: " + temp);
        logq = temp;
    }
    return logq;
}
@Override
public void initAndValidate() throws Exception {
    sPathComponents = sPathInput.get().split("/");
    if (sPathComponents[0].equals("")) {
        sPathComponents = new String[0];
    }
    sConditionalAttribute = new String[sPathComponents.length];
    sConditionalValue = new String[sPathComponents.length];
    for (int i = 0; i < sPathComponents.length; i++) {
        int j = sPathComponents[i].indexOf('[');
        if (j >= 0) {
            String sConditionalComponents =
                    sPathComponents[i].substring(j + 1, sPathComponents[i].lastIndexOf(']'));
            String[] sStrs = sConditionalComponents.split("=");
            sConditionalAttribute[i] = sStrs[0];
            sConditionalValue[i] = sStrs[1].substring(1, sStrs[1].length() - 1);
            sPathComponents[i] = sPathComponents[i].substring(0, j);
        }
    }
    inputs = new ArrayList<BEASTInterface>();
    startInputs = new ArrayList<BEASTInterface>();
    BEASTObjectPanel.getID(this);
}
/**
 * Create a new instance of the src object, connecting all inputs from the src object.
 * Note that if an input is a SubstitutionModel, it is duplicated as well.
 *
 * @param src object to be copied
 * @param i   index used to extend the ID with.
 * @return copy of the src object
 */
private Object duplicate(BEASTInterface src, int i) {
    if (src == null) {
        return null;
    }
    BEASTInterface copy;
    try {
        copy = src.getClass().newInstance();
        copy.setID(src.getID() + "_" + i);
    } catch (InstantiationException | IllegalAccessException e) {
        e.printStackTrace();
        throw new RuntimeException(
                "Programmer error: every object in the model should have a default constructor that is publicly accessible: "
                        + src.getClass().getName());
    }
    for (Input<?> input : src.listInputs()) {
        if (input.get() != null) {
            if (input.get() instanceof List) {
                // handle lists
                // ((List) copy.getInput(input.getName())).clear();
                for (Object o : (List<?>) input.get()) {
                    if (o instanceof BEASTInterface) {
                        // make sure it is not already in the list
                        copy.setInputValue(input.getName(), o);
                    }
                }
            } else if (input.get() instanceof SubstitutionModel) {
                // duplicate subst models
                BEASTInterface substModel = (BEASTInterface) duplicate((BEASTInterface) input.get(), i);
                copy.setInputValue(input.getName(), substModel);
            } else {
                // it is some other value
                copy.setInputValue(input.getName(), input.get());
            }
        }
    }
    copy.initAndValidate();
    return copy;
}
@Override
public void initStateNodes() throws Exception {
    typeLabel = typeLabelInput.get();
    // nTypes = nTypesInput.get();

    BeastTreeFromMaster masterTree = masterTreeInput.get();

    TraitSet typeTrait = new TraitSet();
    TraitSet dateTrait = new TraitSet();

    String types = "";
    String dates = "";

    for (Node beastNode : masterTree.getExternalNodes()) {
        dates += beastNode.getID() + "=" + beastNode.getHeight() + ",";
        types += beastNode.getID() + "=" + (int) beastNode.getMetaData("location") + ",";
    }

    dates = dates.substring(0, dates.length() - 1);
    types = types.substring(0, types.length() - 1);

    typeTrait.initByName("value", types, "taxa", m_taxonset.get(), "traitname", "type");
    dateTrait.initByName("value", dates, "taxa", m_taxonset.get(), "traitname", "date-backward");

    SCMigrationModel migModel = new SCMigrationModel();
    Double[] temp = new Double[nTypes.get()];
    Arrays.fill(temp, muInput.get());
    migModel.setInputValue("rateMatrix", new RealParameter(temp));
    Arrays.fill(temp, popSizeInput.get());
    migModel.setInputValue("popSizes", new RealParameter(temp));
    migModel.initAndValidate();

    if (random.get()) {
        tree = new StructuredCoalescentMultiTypeTree();
        tree.setInputValue("migrationModel", migModel);
    } else {
        Node oldRoot = masterTree.getRoot();
        MultiTypeNode newRoot = new MultiTypeNode();
        newRoot.height = oldRoot.height;
        newRoot.nTypeChanges = 0;
        newRoot.changeTimes.addAll(new ArrayList<Double>());
        newRoot.changeTypes.addAll(new ArrayList<Integer>());
        newRoot.nodeType = 0;
        newRoot.labelNr = oldRoot.labelNr;

        newRoot.addChild(copyFromFlatNode(oldRoot.getLeft()));
        newRoot.addChild(copyFromFlatNode(oldRoot.getRight()));

        tree = new MultiTypeTree(newRoot);
    }

    tree.setInputValue("trait", typeTrait);
    tree.setInputValue("trait", dateTrait);
    tree.initAndValidate();

    setInputValue("trait", dateTrait);
    setInputValue("trait", typeTrait);

    assignFromWithoutID(tree);
}
public double split(int index1, int index2, int clusterIndex, int[] initClusterSites) {
    try {
        double logqSplit = 0.0;

        // Create a parameter by sampling from the prior
        QuietRealParameter newParam = getSample(paramBaseDistr, paramList.getUpper(), paramList.getLower());
        QuietRealParameter newModel = getSample(modelBaseDistr, modelList.getUpper(), modelList.getLower());
        QuietRealParameter newFreqs = getSample(freqsBaseDistr, freqsList.getUpper(), freqsList.getLower());

        // Perform a split
        // paramList.splitParameter(clusterIndex, newParam);
        // modelList.splitParameter(clusterIndex, newModel);
        // freqsList.splitParameter(clusterIndex, newFreqs);

        // Remove index 1 and index 2 from the cluster
        int[] clusterSites = new int[initClusterSites.length - 2];
        int k = 0;
        for (int i = 0; i < initClusterSites.length; i++) {
            if (initClusterSites[i] != index1 && initClusterSites[i] != index2) {
                clusterSites[k++] = initClusterSites[i];
            }
        }

        // Form a new cluster with index 1
        // paramPointers.point(index1, newParam);
        // modelPointers.point(index1, newModel);
        // freqsPointers.point(index1, newFreqs);

        // Shuffle the cluster_-{index_1, index_2} to obtain a random permutation
        Randomizer.shuffle(clusterSites);

        // Create the weight vector of site patterns according to the order of the shuffled index.
        /*int[] tempWeights = new int[tempLikelihood.m_data.get().getPatternCount()];
        int patIndex;
        for (int i = 0; i < clusterSites.length; i++) {
            patIndex = tempLikelihood.m_data.get().getPatternIndex(clusterSites[i]);
            tempWeights[patIndex] = 1;
        }*/
        tempLikelihood.setupPatternWeightsFromSites(clusterSites);

        // Site log likelihoods in the order of the shuffled sites
        double[] logLik1 = tempLikelihood.calculateLogP(newParam, newModel, newFreqs, clusterSites);

        double[] logLik2 = new double[clusterSites.length];
        for (int i = 0; i < logLik2.length; i++) {
            // logLik2[i] = dpTreeLikelihood.getSiteLogLikelihood(clusterIndex, clusterSites[i]);
            logLik2[i] = getSiteLogLikelihood(
                    paramList.getParameter(clusterIndex).getIDNumber(), clusterIndex, clusterSites[i]);
        }

        double[] lik1 = new double[logLik1.length];
        double[] lik2 = new double[logLik2.length];

        // scale it so it may be more accurate
        double maxLog;
        for (int i = 0; i < logLik1.length; i++) {
            maxLog = Math.max(logLik1[i], logLik2[i]);
            if (Math.exp(maxLog) < 1e-100) {
                if (maxLog == logLik1[i]) {
                    lik1[i] = 1.0;
                    lik2[i] = Math.exp(logLik2[i] - maxLog);
                } else {
                    lik1[i] = Math.exp(logLik1[i] - maxLog);
                    lik2[i] = 1.0;
                }
            } else {
                lik1[i] = Math.exp(logLik1[i]);
                lik2[i] = Math.exp(logLik2[i]);
            }
        }

        /*boolean ohCrap = false;
        for (int i = 0; i < logLik1.length; i++) {
            if (Double.isNaN(logLik1[i])) {
                return Double.NEGATIVE_INFINITY;
            }
            if (Double.isNaN(logLik2[i])) {
                return Double.NEGATIVE_INFINITY;
            }
            lik1[i] = Math.exp(logLik1[i]);
            lik2[i] = Math.exp(logLik2[i]);
        }
        if (ohCrap) {
            for (int i = 0; i < newParam.getDimension(); i++) {
                System.out.print(newParam.getValue(i) + " ");
            }
            System.out.println();
        }*/

        /*for (int i = 0; i < clusterSites.length; i++) {
            System.out.println("clusterSites: " + clusterSites[i]);
        }
        System.out.println("index 1: " + index1 + " index2: " + index2);*/

        int cluster1Count = 1;
        int cluster2Count = 1;

        // Assign members of the existing cluster (except for indices 1 and 2) randomly
        // to the existing and the new cluster
        double psi1, psi2, newClusterProb, draw;
        int[] newAssignment = new int[clusterSites.length];
        for (int i = 0; i < clusterSites.length; i++) {
            psi1 = cluster1Count * lik1[i];
            psi2 = cluster2Count * lik2[i];
            newClusterProb = psi1 / (psi1 + psi2);
            draw = Randomizer.nextDouble();
            if (draw < newClusterProb) {
                // System.out.println("in new cluster: " + clusterSites[i]);
                // paramPointers.point(clusterSites[i], newParam);
                // modelPointers.point(clusterSites[i], newModel);
                // freqsPointers.point(clusterSites[i], newFreqs);
                newAssignment[cluster1Count - 1] = clusterSites[i];
                logqSplit += Math.log(newClusterProb);
                cluster1Count++;
            } else {
                logqSplit += Math.log(1.0 - newClusterProb);
                cluster2Count++;
            }
        }

        // System.out.println("halfway: " + logqSplit);

        logqSplit += paramBaseDistr.calcLogP(newParam)
                + modelBaseDistr.calcLogP(newModel)
                + freqsBaseDistr.calcLogP(newFreqs);

        if (-logqSplit > Double.NEGATIVE_INFINITY) {
            paramList = paramListInput.get(this);
            modelList = modelListInput.get(this);
            freqsList = freqsListInput.get(this);
            paramPointers = paramPointersInput.get(this);
            modelPointers = modelPointersInput.get(this);
            freqsPointers = freqsPointersInput.get(this);

            // Perform a split
            paramList.splitParameter(clusterIndex, newParam);
            modelList.splitParameter(clusterIndex, newModel);
            freqsList.splitParameter(clusterIndex, newFreqs);

            // Form a new cluster with index 1
            paramPointers.point(index1, newParam);
            modelPointers.point(index1, newModel);
            freqsPointers.point(index1, newFreqs);
            for (int i = 0; i < (cluster1Count - 1); i++) {
                paramPointers.point(newAssignment[i], newParam);
                modelPointers.point(newAssignment[i], newModel);
                freqsPointers.point(newAssignment[i], newFreqs);
            }
        }

        return -logqSplit;

    } catch (Exception e) {
        // freqsBaseDistr.printDetails();
        throw new RuntimeException(e);
    }
}
public double merge(int index1, int index2, int clusterIndex1, int clusterIndex2,
        int[] cluster1Sites, int[] cluster2Sites) {

    double logqMerge = 0.0;

    HashMap<Integer, Integer> siteMap = new HashMap<Integer, Integer>();

    // The value of the merged cluster will have that of cluster 2 before the merge.
    QuietRealParameter mergedParam = paramList.getParameter(clusterIndex2);
    QuietRealParameter mergedModel = modelList.getParameter(clusterIndex2);
    QuietRealParameter mergedFreqs = freqsList.getParameter(clusterIndex2);
    QuietRealParameter mergedRates = ratesList.getParameter(clusterIndex2);

    // Create a vector that combines the site indices of the two clusters
    int[] mergedClusterSites = new int[cluster1Sites.length + cluster2Sites.length - 2];

    int k = 0;
    for (int i = 0; i < cluster1Sites.length; i++) {
        // Point every member in cluster 1 to cluster 2
        // paramPointers.point(cluster1Sites[i], mergedParam);
        // modelPointers.point(cluster1Sites[i], mergedModel);
        // freqsPointers.point(cluster1Sites[i], mergedFreqs);
        // ratesPointers.point(cluster1Sites[i], mergedRates);

        if (cluster1Sites[i] != index1) {
            // For all members that are not index 1,
            // record the cluster in which they have been before the merge,
            // and assign them to the combined vector.
            siteMap.put(cluster1Sites[i], clusterIndex1);
            mergedClusterSites[k++] = cluster1Sites[i];
        }
    }

    for (int i = 0; i < cluster2Sites.length; i++) {
        // All members in cluster 2 remain in cluster 2, so no new pointer assignments
        if (cluster2Sites[i] != index2) {
            // For all members that are not index 2,
            // record the cluster in which they have been before the merge,
            // and assign them to the combined vector.
            siteMap.put(cluster2Sites[i], clusterIndex2);
            mergedClusterSites[k++] = cluster2Sites[i];
        }
    }

    try {
        // Create a weight vector of patterns to inform the temporary tree likelihood
        // which set of pattern likelihoods are to be computed.
        // int[] tempWeights = dpTreeLikelihood.getClusterWeights(clusterIndex1);
        /*int[] tempWeights = new int[tempLikelihood.m_data.get().getPatternCount()];
        for (int i = 0; i < cluster1Sites.length; i++) {
            int patIndex = tempLikelihood.m_data.get().getPatternIndex(cluster1Sites[i]);
            tempWeights[patIndex] = 1;
        }
        tempLikelihood.setPatternWeights(tempWeights);
        double[] cluster1SitesCluster2ParamLogLik = tempLikelihood.calculateLogP(
                mergedParam, mergedModel, mergedFreqs, mergedRates, cluster1Sites, index1);*/

        k = 0;
        int[] sCluster1Sites = new int[cluster1Sites.length - 1];
        for (int i = 0; i < cluster1Sites.length; i++) {
            if (cluster1Sites[i] != index1) {
                sCluster1Sites[k++] = cluster1Sites[i];
            }
        }
        tempLikelihood.setupPatternWeightsFromSites(sCluster1Sites);
        double[] cluster1SitesCluster2ParamLogLik = tempLikelihood.calculateLogP(
                mergedParam, mergedModel, mergedFreqs, mergedRates, sCluster1Sites);

        // tempWeights = dpTreeLikelihood.getClusterWeights(clusterIndex2);
        /*tempWeights = new int[tempLikelihood.m_data.get().getPatternCount()];
        for (int i = 0; i < cluster2Sites.length; i++) {
            int patIndex = tempLikelihood.m_data.get().getPatternIndex(cluster2Sites[i]);
            tempWeights[patIndex] = 1;
        }
        tempLikelihood.setPatternWeights(tempWeights);
        RealParameter removedParam = paramList.getParameter(clusterIndex1);
        RealParameter removedModel = modelList.getParameter(clusterIndex1);
        RealParameter removedFreqs = freqsList.getParameter(clusterIndex1);
        RealParameter removedRates = ratesList.getParameter(clusterIndex1);
        double[] cluster2SitesCluster1ParamLogLik = tempLikelihood.calculateLogP(
                removedParam, removedModel, removedFreqs, removedRates, cluster2Sites, index2);*/

        k = 0;
        int[] sCluster2Sites = new int[cluster2Sites.length - 1];
        for (int i = 0; i < cluster2Sites.length; i++) {
            if (cluster2Sites[i] != index2) {
                sCluster2Sites[k++] = cluster2Sites[i];
            }
        }
        tempLikelihood.setupPatternWeightsFromSites(sCluster2Sites);
        RealParameter removedParam = paramList.getParameter(clusterIndex1);
        RealParameter removedModel = modelList.getParameter(clusterIndex1);
        RealParameter removedFreqs = freqsList.getParameter(clusterIndex1);
        RealParameter removedRates = ratesList.getParameter(clusterIndex1);
        double[] cluster2SitesCluster1ParamLogLik = tempLikelihood.calculateLogP(
                removedParam, removedModel, removedFreqs, removedRates, sCluster2Sites);

        // System.out.println("populate logLik1:");
        double[] logLik1 = new double[mergedClusterSites.length];
        for (int i = 0; i < (cluster1Sites.length - 1); i++) {
            // System.out.println(clusterIndex1 + " " + mergedClusterSites[i]);
            logLik1[i] = dpTreeLikelihood.getSiteLogLikelihood(clusterIndex1, mergedClusterSites[i]);
        }
        System.arraycopy(cluster2SitesCluster1ParamLogLik, 0,
                logLik1, cluster1Sites.length - 1, cluster2SitesCluster1ParamLogLik.length);

        double[] logLik2 = new double[mergedClusterSites.length];
        System.arraycopy(cluster1SitesCluster2ParamLogLik, 0,
                logLik2, 0, cluster1SitesCluster2ParamLogLik.length);

        // System.out.println("populate logLik2:");
        for (int i = cluster1SitesCluster2ParamLogLik.length; i < logLik2.length; i++) {
            // System.out.println(clusterIndex2 + " " + mergedClusterSites[i - cluster1SitesCluster2ParamLogLik.length]);
            logLik2[i] = dpTreeLikelihood.getSiteLogLikelihood(clusterIndex2, mergedClusterSites[i]);
        }

        double[] lik1 = new double[logLik1.length];
        double[] lik2 = new double[logLik2.length];

        // scale it so it may be more accurate
        double minLog;
        /*for (int i = 0; i < logLik1.length; i++) {
            minLog = Math.min(logLik1[i], logLik2[i]);
            if (minLog == logLik1[i]) {
                lik1[i] = 1.0;
                lik2[i] = Math.exp(logLik2[i] - minLog);
            } else {
                lik1[i] = Math.exp(logLik1[i] - minLog);
                lik2[i] = 1.0;
            }
        }*/

        for (int i = 0; i < logLik1.length; i++) {
            lik1[i] = Math.exp(logLik1[i]);
            lik2[i] = Math.exp(logLik2[i]);
            // System.out.println(lik1[i] + " " + lik2[i]);
        }

        // Create a set of indices for random permutation
        int[] shuffle = new int[mergedClusterSites.length];
        for (int i = 0; i < shuffle.length; i++) {
            shuffle[i] = i;
        }
        Randomizer.shuffle(shuffle);

        int cluster1Count = 1;
        int cluster2Count = 1;
        int cluster;
        double psi1, psi2, cluster1Prob;
        for (int i = 0; i < mergedClusterSites.length; i++) {
            cluster = siteMap.get(mergedClusterSites[shuffle[i]]);
            psi1 = cluster1Count * lik1[shuffle[i]];
            psi2 = cluster2Count * lik2[shuffle[i]];
            /*testCorrectness(i, cluster, clusterIndex1, clusterIndex2, shuffle,
                    mergedClusterSites, lik1, lik2);*/
            cluster1Prob = psi1 / (psi1 + psi2);
            // System.out.println(cluster1Prob);
            if (cluster == clusterIndex1) {
                logqMerge += Math.log(cluster1Prob);
                cluster1Count++;
            } else if (cluster == clusterIndex2) {
                logqMerge += Math.log(1 - cluster1Prob);
                cluster2Count++;
            } else {
                throw new RuntimeException("Something is wrong.");
            }
        }

        logqMerge +=
                // paramBaseDistr.calcLogP(removedParam)
                mergeValue(removedParam, mergedParam, paramBaseDistr)
                // + modelBaseDistr.calcLogP(removedModel)
                + mergeDiscreteValue(removedModel, mergedModel, modelDistrInput.get())
                + freqsBaseDistr.calcLogP(removedFreqs)
                // + ratesBaseDistr.calcLogP(removedRates);
                + mergeValueInLogSpace(removedRates, mergedRates, ratesBaseDistr);

        if (logqMerge > Double.NEGATIVE_INFINITY) {
            paramList.mergeParameter(clusterIndex1, clusterIndex2);
            modelList.mergeParameter(clusterIndex1, clusterIndex2);
            freqsList.mergeParameter(clusterIndex1, clusterIndex2);
            ratesList.mergeParameter(clusterIndex1, clusterIndex2);
            for (int i = 0; i < cluster1Sites.length; i++) {
                // Point every member in cluster 1 to cluster 2
                paramPointers.point(cluster1Sites[i], mergedParam);
                modelPointers.point(cluster1Sites[i], mergedModel);
                freqsPointers.point(cluster1Sites[i], mergedFreqs);
                ratesPointers.point(cluster1Sites[i], mergedRates);
            }
        }

    } catch (Exception e) {
        throw new RuntimeException(e);
    }
    return logqMerge;
}
/**
 * Override this for proposals.
 *
 * @return log of Hastings Ratio, or Double.NEGATIVE_INFINITY if proposal should not be accepted
 */
@Override
public double proposal() {
    testing = isTestInput.get();

    speciesTreeNodes = speciesTree.getNodesAsArray();
    nLeafNodes = speciesTree.getLeafNodeCount();
    nInternalNodes = speciesTree.getInternalNodeCount();
    nSpeciesNodes = speciesTree.getNodeCount();

    boolean isNarrow = isNarrowInput.get();
    double logHastingsRatio = 0.0;

    if (isNarrow) {
        // only proceed to rearrange gene trees if the species tree can be changed
        // doesn't execute if testing
        if (!testing && !pickNarrow())
            return Double.NEGATIVE_INFINITY;

        int validGP = 0;
        for (int i = nLeafNodes; i < nSpeciesNodes; ++i) {
            validGP += isg(speciesTree.getNode(i));
        }

        final int c2 = sisg(yNode) + sisg(cNode);

        fillNodes(); // fills in movedNodes and graftNodes
        pruneAndRegraft(yNode, cNode, bNode);

        final int validGPafter = validGP - c2 + sisg(yNode) + sisg(cNode);

        logHastingsRatio += Math.log(validGP) - Math.log(validGPafter);
    } else {
        // only proceed to rearrange gene trees if the species tree can be changed
        // doesn't execute if testing
        if (!testing && !pickWide())
            return Double.NEGATIVE_INFINITY;

        fillNodes(); // fills in movedNodes and graftNodes
        pruneAndRegraft(yNode, cNode, bNode);
    }

    for (final Tree geneTree : geneTreeInput.get())
        geneTree.startEditing(null); // hack to stop beast.core.State.Trie memory leak

    for (int i = 0; i < czBranchCount; i++) {
        final List<SortedMap<Node, Node>> perBranchMovedNodes = movedNodes.get(i);
        final SetMultimap<Integer, Node> perBranchGraftNodes = graftNodes.get(i);
        final double logForward = rearrangeGeneTrees(perBranchMovedNodes, perBranchGraftNodes, true);
        assert logForward != Double.NEGATIVE_INFINITY;
        if (logForward == Double.NEGATIVE_INFINITY)
            return Double.NEGATIVE_INFINITY;
        else
            logHastingsRatio += logForward;
    }

    // compute reverse move (Hastings ratio denominator)
    final Node bNodeTmp = bNode;
    final Node cNodeTmp = cNode;
    bNode = cNodeTmp;
    cNode = bNodeTmp;

    fillNodes(); // fills in movedNodes and graftNodes for reverse move

    for (int i = 0; i < czBranchCount; i++) {
        final List<SortedMap<Node, Node>> perBranchMovedNodes = movedNodes.get(i);
        final SetMultimap<Integer, Node> perBranchGraftNodes = graftNodes.get(i);
        final double logReverse = rearrangeGeneTrees(perBranchMovedNodes, perBranchGraftNodes, false);
        assert logReverse != Double.NEGATIVE_INFINITY;
        if (logReverse == Double.NEGATIVE_INFINITY)
            return Double.NEGATIVE_INFINITY;
        else
            logHastingsRatio -= logReverse;
    }

    return logHastingsRatio;
}
public Partition hasPartition() { return bHasPartitionsInput.get(); }
/** More elegant getters for resolving Input values. */
public String getName() {
    return sNameInput.get();
}
@Override public double getArrayValue(int iDim) { return treeInput.get().getDirectAncestorNodeCount(); }
@Override public void log(int nSample, PrintStream out) { final Tree tree = treeInput.get(); out.print(tree.getDirectAncestorNodeCount() + "\t"); }
/** Ensure the class behaves properly, even when inputs are not specified. */
@Override
public void initAndValidate() throws Exception {
    boolean sortNodesAlphabetically = false;

    if (dataInput.get() != null) {
        labels = dataInput.get().getTaxaNames();
    } else if (m_taxonset.get() != null) {
        if (labels == null) {
            labels = m_taxonset.get().asStringList();
        } else { // else labels were set by TreeParser c'tor
            sortNodesAlphabetically = true;
        }
    } else {
        if (isLabelledNewickInput.get()) {
            if (m_initial.get() != null) {
                labels = m_initial.get().getTaxonset().asStringList();
            } else {
                labels = new ArrayList<>();
                createUnrecognizedTaxa = true;
                sortNodesAlphabetically = true;
            }
        } else {
            if (m_initial.get() != null) {
                // try to pick up taxa from initial tree
                final Tree tree = m_initial.get();
                if (tree.m_taxonset.get() != null) {
                    labels = tree.m_taxonset.get().asStringList();
                } else {
                    // m_sLabels = null;
                }
            } else {
                // m_sLabels = null;
            }
        }
        // m_bIsLabelledNewick = false;
    }

    final String newick = newickInput.get();
    if (newick == null || newick.equals("")) {
        // can happen while initialising Beauti
        final Node dummy = new Node();
        setRoot(dummy);
    } else {
        try {
            setRoot(parseNewick(newickInput.get()));
        } catch (ParseCancellationException e) {
            throw new RuntimeException("TreeParser cannot make sense of the Newick string "
                    + "provided. It gives the following clue:\n" + e.getMessage());
        }
    }

    super.initAndValidate();

    if (sortNodesAlphabetically) {
        // correct for node ordering: ensure order is alphabetical
        for (int i = 0; i < getNodeCount() && i < labels.size(); i++) {
            m_nodes[i].setID(labels.get(i));
        }
        Node[] nodes = new Node[labels.size()];
        System.arraycopy(m_nodes, 0, nodes, 0, labels.size());
        Arrays.sort(nodes, (o1, o2) -> o1.getID().compareTo(o2.getID()));
        for (int i = 0; i < labels.size(); i++) {
            m_nodes[i] = nodes[i];
            nodes[i].setNr(i);
        }
    }

    if (m_initial.get() != null)
        processTraits(m_initial.get().m_traitList.get());
    else
        processTraits(m_traitList.get());

    if (timeTraitSet != null) {
        adjustTreeNodeHeights(root);
    } else if (adjustTipHeightsInput.get()) {
        double treeLength = TreeUtils.getTreeLength(this, getRoot());

        double extraTreeLength = 0.0;
        double maxTipHeight = 0.0;

        // all nodes should be at zero height if no date-trait is available
        for (int i = 0; i < getLeafNodeCount(); i++) {
            double height = getNode(i).getHeight();
            if (maxTipHeight < height) {
                maxTipHeight = height;
            }
            extraTreeLength += height;
            getNode(i).setHeight(0);
        }

        double scaleFactor = (treeLength + extraTreeLength) / treeLength;

        final double SCALE_FACTOR_THRESHOLD = 0.001;

        // if the change in total tree length is more than 0.1% then give the user a warning!
        if (scaleFactor > 1.0 + SCALE_FACTOR_THRESHOLD) {
            DecimalFormat format = new DecimalFormat("#.##");

            Log.info.println("WARNING: Adjust tip heights attribute set to 'true' in "
                    + getClass().getSimpleName());
            Log.info.println("         has resulted in significant (>"
                    + format.format(SCALE_FACTOR_THRESHOLD * 100.0) + "%) change in tree length.");
            Log.info.println("         Use " + adjustTipHeightsInput.getName()
                    + "='false' to override this default.");
            Log.info.printf("  original max tip age = %8.3f\n", maxTipHeight);
            Log.info.printf("       new max tip age = %8.3f\n", 0.0);
            Log.info.printf("  original tree length = %8.3f\n", treeLength);
            Log.info.printf("       new tree length = %8.3f\n", treeLength + extraTreeLength);
            Log.info.printf("       TL scale factor = %8.3f\n", scaleFactor);
        }
    }

    if (m_taxonset.get() == null && labels != null && isLabelledNewickInput.get()) {
        m_taxonset.setValue(new TaxonSet(Taxon.createTaxonList(labels)), this);
    }

    initStateNodes();
} // init
@Override
public void initAndValidate() {
    threadCount = BeastMCMC.m_nThreads;

    if (maxNrOfThreadsInput.get() > 0) {
        threadCount = Math.min(maxNrOfThreadsInput.get(), BeastMCMC.m_nThreads);
    }
    String instanceCount = System.getProperty("beast.instance.count");
    if (instanceCount != null && instanceCount.length() > 0) {
        threadCount = Integer.parseInt(instanceCount);
    }

    logPByThread = new double[threadCount];

    // sanity check: alignment should have same #taxa as tree
    if (dataInput.get().getTaxonCount() != treeInput.get().getLeafNodeCount()) {
        throw new IllegalArgumentException(
                "The number of nodes in the tree does not match the number of sequences");
    }

    treelikelihood = new TreeLikelihood[threadCount];

    if (dataInput.get().isAscertained) {
        Log.warning.println(
                "Note, can only use single thread per alignment because the alignment is ascertained");
        threadCount = 1;
    }

    if (threadCount <= 1) {
        treelikelihood[0] = new TreeLikelihood();
        treelikelihood[0].setID(getID() + "0");
        treelikelihood[0].initByName(
                "data", dataInput.get(),
                "tree", treeInput.get(),
                "siteModel", siteModelInput.get(),
                "branchRateModel", branchRateModelInput.get(),
                "useAmbiguities", useAmbiguitiesInput.get(),
                "scaling", scalingInput.get() + "");
        treelikelihood[0].getOutputs().add(this);
        likelihoodsInput.get().add(treelikelihood[0]);
    } else {
        pool = Executors.newFixedThreadPool(threadCount);

        calcPatternPoints(dataInput.get().getSiteCount());
        for (int i = 0; i < threadCount; i++) {
            Alignment data = dataInput.get();
            String filterSpec = (patternPoints[i] + 1) + "-" + (patternPoints[i + 1]);
            if (data.isAscertained) {
                filterSpec += data.excludefromInput.get() + "-" + data.excludetoInput.get()
                        + "," + filterSpec;
            }

            treelikelihood[i] = new TreeLikelihood();
            treelikelihood[i].setID(getID() + i);
            treelikelihood[i].getOutputs().add(this);
            likelihoodsInput.get().add(treelikelihood[i]);

            FilteredAlignment filter = new FilteredAlignment();
            if (i == 0 && dataInput.get() instanceof FilteredAlignment
                    && ((FilteredAlignment) dataInput.get()).constantSiteWeightsInput.get() != null) {
                filter.initByName(
                        "data", dataInput.get() /*, "userDataType", m_data.get().getDataType()*/,
                        "filter", filterSpec,
                        "constantSiteWeights",
                        ((FilteredAlignment) dataInput.get()).constantSiteWeightsInput.get());
            } else {
                filter.initByName(
                        "data", dataInput.get() /*, "userDataType", m_data.get().getDataType()*/,
                        "filter", filterSpec);
            }
            treelikelihood[i].initByName(
                    "data", filter,
                    "tree", treeInput.get(),
                    "siteModel", duplicate((BEASTInterface) siteModelInput.get(), i),
                    "branchRateModel", duplicate(branchRateModelInput.get(), i),
                    "useAmbiguities", useAmbiguitiesInput.get(),
                    "scaling", scalingInput.get() + "");

            likelihoodCallers.add(new TreeLikelihoodCaller(treelikelihood[i], i));
        }
    }
}
@Override public int getSampleCount() { // Assumes a binary tree! return treeInput.get().getInternalNodeCount(); }
public String getTipText() { return sTipTextInput.get(); }
public InputEditor.ExpandOption forceExpansion() { return forceExpansionInput.get(); }
/** Recalculates all the intervals for the given beast.tree. */
@SuppressWarnings("unchecked")
protected void calculateIntervals() {
    Tree tree = treeInput.get();

    final int nodeCount = tree.getNodeCount();

    times = new double[nodeCount];
    int[] childCounts = new int[nodeCount];

    collectTimes(tree, times, childCounts);

    indices = new int[nodeCount];
    HeapSort.sort(times, indices);

    if (intervals == null || intervals.length != nodeCount) {
        intervals = new double[nodeCount];
        lineageCounts = new int[nodeCount];
        lineagesAdded = new List[nodeCount];
        lineagesRemoved = new List[nodeCount];
        // lineages = new List[nodeCount];

        storedIntervals = new double[nodeCount];
        storedLineageCounts = new int[nodeCount];
    } else {
        for (List<Node> l : lineagesAdded) {
            if (l != null) {
                l.clear();
            }
        }
        for (List<Node> l : lineagesRemoved) {
            if (l != null) {
                l.clear();
            }
        }
    }

    // start is the time of the first tip
    double start = times[indices[0]];
    int numLines = 0;
    int nodeNo = 0;
    intervalCount = 0;
    while (nodeNo < nodeCount) {
        int lineagesRemoved = 0;
        int lineagesAdded = 0;

        double finish = times[indices[nodeNo]];
        double next;

        do {
            final int childIndex = indices[nodeNo];
            final int childCount = childCounts[childIndex];
            // don't use nodeNo from here on in do loop
            nodeNo += 1;
            if (childCount == 0) {
                addLineage(intervalCount, tree.getNode(childIndex));
                lineagesAdded += 1;
            } else {
                lineagesRemoved += (childCount - 1);

                // record removed lineages
                final Node parent = tree.getNode(childIndex);
                // assert childCounts[indices[nodeNo]] == beast.tree.getChildCount(parent);
                // for (int j = 0; j < lineagesRemoved + 1; j++) {
                for (int j = 0; j < childCount; j++) {
                    Node child = j == 0 ? parent.getLeft() : parent.getRight();
                    removeLineage(intervalCount, child);
                }

                // record added lineages
                addLineage(intervalCount, parent);
                // no mix of removed lineages when 0 th
                if (multifurcationLimit == 0.0) {
                    break;
                }
            }

            if (nodeNo < nodeCount) {
                next = times[indices[nodeNo]];
            } else
                break;
        } while (Math.abs(next - finish) <= multifurcationLimit);

        if (lineagesAdded > 0) {
            if (intervalCount > 0 || ((finish - start) > multifurcationLimit)) {
                intervals[intervalCount] = finish - start;
                lineageCounts[intervalCount] = numLines;
                intervalCount += 1;
            }
            start = finish;
        }

        // add sample event
        numLines += lineagesAdded;

        if (lineagesRemoved > 0) {
            intervals[intervalCount] = finish - start;
            lineageCounts[intervalCount] = numLines;
            intervalCount += 1;
            start = finish;
        }
        // coalescent event
        numLines -= lineagesRemoved;
    }

    intervalsKnown = true;
}
/** Find the input associated with this panel based on the path Input. */
@SuppressWarnings("unchecked")
public Input<?> resolveInput(BeautiDoc doc, int iPartition) {
    try {
        // if (parentPlugins != null && parentPlugins.size() > 0 && _input != null)
        //     System.err.println("sync " + parentPlugins.get(iPartition) + "[?] = " + _input.get());

        List<BEASTInterface> plugins;
        if (bHasPartitionsInput.get() == Partition.none) {
            plugins = new ArrayList<BEASTInterface>();
            plugins.add(doc.mcmc.get());
        } else {
            plugins = doc.getPartitions(bHasPartitionsInput.get().toString());
        }
        parentPlugins = new ArrayList<BEASTInterface>();
        parentInputs = new ArrayList<Input<?>>();
        parentPlugins.add(doc);
        parentInputs.add(doc.mcmc);
        type = doc.mcmc.getType();
        bIsList = false;
        for (int i = 0; i < sPathComponents.length; i++) {
            List<BEASTInterface> oldPlugins = plugins;
            plugins = new ArrayList<BEASTInterface>();
            parentPlugins = new ArrayList<BEASTInterface>();
            parentInputs = new ArrayList<Input<?>>();
            for (BEASTInterface plugin : oldPlugins) {
                Input<?> namedInput = plugin.getInput(sPathComponents[i]);
                type = namedInput.getType();
                if (namedInput.get() instanceof List<?>) {
                    bIsList = true;
                    List<?> list = (List<?>) namedInput.get();
                    if (sConditionalAttribute[i] == null) {
                        for (Object o : list) {
                            BEASTInterface plugin2 = (BEASTInterface) o;
                            plugins.add(plugin2);
                            parentPlugins.add(plugin);
                            parentInputs.add(namedInput);
                        }
                        // throw new Exception("Don't know which element to pick from the list. " +
                        //     "List component should come with a condition. " + m_sPathComponents[i]);
                    } else {
                        int nMatches = 0;
                        for (int j = 0; j < list.size(); j++) {
                            BEASTInterface plugin2 = (BEASTInterface) list.get(j);
                            if (matches(plugin2, sConditionalAttribute[i], sConditionalValue[i])) {
                                plugins.add(plugin2);
                                parentPlugins.add(plugin);
                                parentInputs.add(namedInput);
                                nMatches++;
                                break;
                            }
                        }
                        if (nMatches == 0) {
                            parentInputs.add(namedInput);
                            parentPlugins.add(plugin);
                        }
                    }
                } else if (namedInput.get() instanceof BEASTInterface) {
                    bIsList = false;
                    if (sConditionalAttribute[i] == null) {
                        plugins.add((BEASTInterface) namedInput.get());
                        parentPlugins.add(plugin);
                        parentInputs.add(namedInput);
                    } else {
                        if (matches(plugin, sConditionalAttribute[i], sConditionalValue[i])) {
                            // if ((m_sConditionalAttribute[i].equals("id") &&
                            //         plugin.getID().equals(m_sConditionalValue[i])) ||
                            //     (m_sConditionalAttribute[i].equals("type") &&
                            //         plugin.getClass().getName().equals(m_sConditionalValue[i]))) {
                            plugins.add(plugin);
                            parentPlugins.add(plugin);
                            parentInputs.add(namedInput);
                        }
                    }
                } else {
                    throw new Exception("input " + sPathComponents[i] + " is not a plugin or list");
                }
            }
        }
        if (sTypeInput.get() != null) {
            type = Class.forName(sTypeInput.get());
        }
        // sanity check
        if (!bIsList && (bHasPartitionsInput.get() == Partition.none) && plugins.size() > 1) {
            System.err.println("WARNING: multiple plugins match, but hasPartitions=none");
            // this makes sure that all matching plugins are available in one go
            bIsList = true;
            // this suppresses syncing
            parentInputs.clear();
        }
        inputs.clear();
        startInputs.clear();
        for (BEASTInterface plugin : plugins) {
            inputs.add(plugin);
            startInputs.add(plugin);
        }

        if (!bIsList) {
            _input = new FlexibleInput<BEASTInterface>();
        } else {
            _input = new FlexibleInput<ArrayList<BEASTInterface>>(new ArrayList<BEASTInterface>());
        }
        _input.setRule(Validate.REQUIRED);
        syncTo(iPartition);
        // if (parentPlugins != null && parentPlugins.size() > 0)
        //     System.err.println("sync " + parentPlugins.get(iPartition) + "[?] = " + _input.get());

        if (bIsList) {
            checkForDups((List<Object>) _input.get());
        }

        return _input;
    } catch (Exception e) {
        System.err.println("Warning: could not find objects in path " + Arrays.toString(sPathComponents));
    }
    return null;
} // resolveInputs
public String getIcon() { return sIconInput.get(); }
public double split(int index1, int index2, int clusterIndex, int[] initClusterSites) {
    try {
        double logqSplit = 0.0;

        // Create a parameter by sampling from the prior
        // QuietRealParameter newParam = getSample(paramBaseDistr, paramList.getUpper(), paramList.getLower());
        QuietRealParameter newParam = new QuietRealParameter(new Double[5]);
        // logqSplit += proposeNewValue(newParam, paramBaseDistr, paramList.getUpper(), paramList.getLower());
        double[] oldParamValues = new double[5];
        for (int i = 0; i < oldParamValues.length; i++) {
            oldParamValues[i] = paramList.getValue(clusterIndex, i);
        }
        logqSplit += proposeNewValue2(
                newParam, oldParamValues, paramBaseDistr, paramList.getUpper(), paramList.getLower());

        // QuietRealParameter newModel = getSample(modelBaseDistr, modelList.getUpper(), modelList.getLower());
        QuietRealParameter newModel = new QuietRealParameter(new Double[1]);
        logqSplit += proposeDiscreteValue(
                newModel, modelList.getValue(clusterIndex, 0), modelDistrInput.get(),
                modelList.getUpper(), modelList.getLower());

        QuietRealParameter newFreqs = getSample(freqsBaseDistr, freqsList.getUpper(), freqsList.getLower());

        // QuietRealParameter newRates = getSample(ratesBaseDistr, ratesList.getUpper(), ratesList.getLower());
        QuietRealParameter newRates = new QuietRealParameter(new Double[1]);
        logqSplit += proposalValueInLogSpace(
                newRates, ratesList.getValue(clusterIndex, 0), ratesBaseDistr,
                ratesList.getUpper(), ratesList.getLower());

        // Remove index 1 and index 2 from the cluster
        int[] clusterSites = new int[initClusterSites.length - 2];
        int k = 0;
        for (int i = 0; i < initClusterSites.length; i++) {
            if (initClusterSites[i] != index1 && initClusterSites[i] != index2) {
                clusterSites[k++] = initClusterSites[i];
            }
        }

        // Form a new cluster with index 1
        paramPointers.point(index1, newParam);
        modelPointers.point(index1, newModel);
        freqsPointers.point(index1, newFreqs);
        ratesPointers.point(index1, newRates);

        // Shuffle the cluster_-{index_1, index_2} to obtain a random permutation
        Randomizer.shuffle(clusterSites);

        // Create the weight vector of site patterns according to the order of the shuffled index.
        /*int[] tempWeights = new int[tempLikelihood.m_data.get().getPatternCount()];
        int patIndex;
        for (int i = 0; i < clusterSites.length; i++) {
            patIndex = tempLikelihood.m_data.get().getPatternIndex(clusterSites[i]);
            tempWeights[patIndex] = 1;
        }
        tempLikelihood.setPatternWeights(tempWeights);*/
        tempLikelihood.setupPatternWeightsFromSites(clusterSites);

        // Site log likelihoods in the order of the shuffled sites
        double[] logLik1 = tempLikelihood.calculateLogP(newParam, newModel, newFreqs, newRates, clusterSites);

        double[] logLik2 = new double[clusterSites.length];
        for (int i = 0; i < logLik2.length; i++) {
            logLik2[i] = dpTreeLikelihood.getSiteLogLikelihood(clusterIndex, clusterSites[i]);
        }

        double[] lik1 = new double[logLik1.length];
        double[] lik2 = new double[logLik2.length];

        // scale it so it may be more accurate
        double minLog;
        /*for (int i = 0; i < logLik1.length; i++) {
            minLog = Math.min(logLik1[i], logLik2[i]);
            if (minLog == logLik1[i]) {
                lik1[i] = 1.0;
                lik2[i] = Math.exp(logLik2[i] - minLog);
            } else {
                lik1[i] = Math.exp(logLik1[i] - minLog);
                lik2[i] = 1.0;
            }
        }*/

        for (int i = 0; i < logLik1.length; i++) {
            lik1[i] = Math.exp(logLik1[i]);
            lik2[i] = Math.exp(logLik2[i]);
            // System.out.println(lik1[i] + " " + lik2[i]);
        }

        /*for (int i = 0; i < clusterSites.length; i++) {
            System.out.println("clusterSites: " + clusterSites[i]);
        }
        System.out.println("index 1: " + index1 + " index2: " + index2);*/

        int cluster1Count = 1;
        int cluster2Count = 1;

        int[] sitesInCluster1 = new int[initClusterSites.length];
        sitesInCluster1[0] = index1;

        // Assign members of the existing cluster (except for indices 1 and 2) randomly
        // to the existing and the new cluster
        double psi1, psi2, newClusterProb, draw;
        for (int i = 0; i < clusterSites.length; i++) {
            psi1 = cluster1Count * lik1[i];
            psi2 = cluster2Count * lik2[i];
            newClusterProb = psi1 / (psi1 + psi2);
            draw = Randomizer.nextDouble();
            if (draw < newClusterProb) {
                // System.out.println("in new cluster: " + clusterSites[i]);
                sitesInCluster1[cluster1Count] = clusterSites[i];
                // paramPointers.point(clusterSites[i], newParam);
                // modelPointers.point(clusterSites[i], newModel);
                // freqsPointers.point(clusterSites[i], newFreqs);
                // ratesPointers.point(clusterSites[i], newRates);
                logqSplit += Math.log(newClusterProb);
                cluster1Count++;
            } else {
                logqSplit += Math.log(1.0 - newClusterProb);
                cluster2Count++;
            }
        }

        // logqSplit += paramBaseDistr.calcLogP(newParam)
        logqSplit +=
                // modelBaseDistr.calcLogP(newModel)
                + freqsBaseDistr.calcLogP(newFreqs)
                // + ratesBaseDistr.calcLogP(newRates)
                ;

        // Perform a split
        paramList = paramListInput.get(this);
        modelList = modelListInput.get(this);
        freqsList = freqsListInput.get(this);
        ratesList = ratesListInput.get(this);
        paramPointers = paramPointersInput.get(this);
        modelPointers = modelPointersInput.get(this);
        freqsPointers = freqsPointersInput.get(this);
        ratesPointers = ratesPointersInput.get(this);

        paramList.splitParameter(clusterIndex, newParam);
        modelList.splitParameter(clusterIndex, newModel);
        freqsList.splitParameter(clusterIndex, newFreqs);
        ratesList.splitParameter(clusterIndex, newRates);

        // Form a new cluster with index 1
        paramPointers = paramPointersInput.get(this);
        modelPointers = modelPointersInput.get(this);
        freqsPointers = freqsPointersInput.get(this);
        ratesPointers = ratesPointersInput.get(this);
        for (int i = 0; i < cluster1Count; i++) {
            paramPointers.point(sitesInCluster1[i], newParam);
            modelPointers.point(sitesInCluster1[i], newModel);
            freqsPointers.point(sitesInCluster1[i], newFreqs);
            ratesPointers.point(sitesInCluster1[i], newRates);
        }

        return -logqSplit;

    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}