@SuppressWarnings("unchecked") public void sync(int iPartition) { if (parentInputs.size() > 0 && _input.get() != null) { Input<?> input = parentInputs.get(iPartition); if (bIsList) { List<Object> list = (List<Object>) _input.get(); List<Object> targetList = ((List<Object>) input.get()); // targetList.clear(); // only clear former members for (BEASTInterface plugin : startInputs) { targetList.remove(plugin); } targetList.addAll(list); // sync outputs of items in list for (Object o : list) { if (o instanceof BEASTInterface) { ((BEASTInterface) o).getOutputs().add(parentPlugins.get(iPartition)); } } } else { try { // System.err.println("sync " + parentPlugins.get(iPartition) + "[" + input.getName() + "] // = " + _input.get()); input.setValue(_input.get(), parentPlugins.get(iPartition)); } catch (Exception e) { e.printStackTrace(); } } } }
@Override
public void initAndValidate() {
    scaleFactor = scaleFactorInput.get();
    // determine taxon set to choose from
    if (taxonsetInput.get() != null) {
        List<String> taxaNames = new ArrayList<>();
        for (String taxon : treeInput.get().getTaxaNames()) {
            taxaNames.add(taxon);
        }
        List<String> set = taxonsetInput.get().asStringList();
        int nrOfTaxa = set.size();
        taxonIndices = new int[nrOfTaxa];
        int k = 0;
        for (String taxon : set) {
            int taxonIndex = taxaNames.indexOf(taxon);
            if (taxonIndex < 0) {
                throw new IllegalArgumentException("Cannot find taxon " + taxon + " in tree");
            }
            taxonIndices[k++] = taxonIndex;
        }
    } else {
        taxonIndices = new int[treeInput.get().getTaxaNames().length];
        for (int i = 0; i < taxonIndices.length; i++) {
            taxonIndices[i] = i;
        }
    }
}
public void initAndValidate() {
    testCorrect = testCorrectInput.get();
    paramList = paramListInput.get();
    modelList = modelListInput.get();
    freqsList = freqsListInput.get();
    paramPointers = paramPointersInput.get();
    modelPointers = modelPointersInput.get();
    freqsPointers = freqsPointersInput.get();
    pointerCount = paramPointers.getDimension();
    dp = dpInput.get();
    List<ParametricDistribution> distrs = dp.getBaseDistributions();
    paramBaseDistr = distrs.get(0);
    modelBaseDistr = distrs.get(1);
    freqsBaseDistr = distrs.get(2);
    tempLikelihood = tempLikelihoodInput.get();
    dpTreeLikelihood = dpTreeLikelihoodInput.get();

    // model network: for each model index, the model indices reachable from it
    modelNetworkMap.put(1.0, new double[] {3.0});
    modelNetworkMap.put(2.0, new double[] {3.0});
    modelNetworkMap.put(3.0, new double[] {1.0, 2.0, 4.0});
    modelNetworkMap.put(4.0, new double[] {3.0, 5.0});
    modelNetworkMap.put(5.0, new double[] {4.0});
}
private void calcPatternPoints(int nPatterns) {
    patternPoints = new int[threadCount + 1];
    if (proportionsInput.get() == null) {
        // no proportions given: divide patterns evenly over threads
        int range = nPatterns / threadCount;
        for (int i = 0; i < threadCount - 1; i++) {
            patternPoints[i + 1] = range * (i + 1);
        }
        patternPoints[threadCount] = nPatterns;
    } else {
        String[] strs = proportionsInput.get().split("\\s+");
        double[] proportions = new double[threadCount];
        for (int i = 0; i < threadCount; i++) {
            proportions[i] = Double.parseDouble(strs[i % strs.length]);
        }
        // normalise
        double sum = 0;
        for (double d : proportions) {
            sum += d;
        }
        for (int i = 0; i < threadCount; i++) {
            proportions[i] /= sum;
        }
        // cumulative
        for (int i = 1; i < threadCount; i++) {
            proportions[i] += proportions[i - 1];
        }
        // calc ranges
        for (int i = 0; i < threadCount; i++) {
            patternPoints[i + 1] = (int) (proportions[i] * nPatterns + 0.5);
        }
    }
}
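/*
 * Worked example (illustration only, not part of the original class): for
 * threadCount = 3, nPatterns = 100 and proportions "1 2 2", the weights
 * normalise to {0.2, 0.4, 0.4}, accumulate to {0.2, 0.6, 1.0}, and the
 * pattern boundaries become {0, 20, 60, 100}. The hypothetical helper below
 * reproduces just that arithmetic.
 */
class PatternPointsSketch {
    static int[] calc(int nPatterns, double[] weights) {
        int n = weights.length;
        int[] points = new int[n + 1];
        double sum = 0;
        for (double w : weights) sum += w;                 // normalise
        double cum = 0;
        for (int i = 0; i < n; i++) {
            cum += weights[i] / sum;                       // cumulative proportion
            points[i + 1] = (int) (cum * nPatterns + 0.5); // round to pattern index
        }
        return points;
    }

    public static void main(String[] args) {
        // prints [0, 20, 60, 100]
        System.out.println(java.util.Arrays.toString(calc(100, new double[] {1, 2, 2})));
    }
}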
/**
 * Calculate probability of choosing the region affected by the given
 * conversion under the ClonalOrigin model.
 *
 * @param conv conversion the region is associated with
 * @return log probability density
 */
public double getAffectedRegionProb(Conversion conv) {
    double logP = 0.0;

    // Total effective number of possible start sites
    double alpha = acg.getTotalSequenceLength()
            + acg.getLoci().size() * deltaInput.get().getValue();

    // Calculate probability of converted region.
    if (conv.getStartSite() == 0)
        logP += Math.log((deltaInput.get().getValue() + 1) / alpha);
    else
        logP += Math.log(1.0 / alpha);

    // Probability of end site:
    double probEnd = Math.pow(1.0 - 1.0 / deltaInput.get().getValue(),
            conv.getEndSite() - conv.getStartSite())
            / deltaInput.get().getValue();

    // Include probability of going past the end:
    if (conv.getEndSite() == conv.getLocus().getSiteCount() - 1)
        probEnd += Math.pow(1.0 - 1.0 / deltaInput.get().getValue(),
                conv.getLocus().getSiteCount() - conv.getStartSite());

    logP += Math.log(probEnd);

    return logP;
}
/**
 * Choose region to be affected by this conversion.
 *
 * @param conv Conversion object where these sites are stored.
 * @return log probability density of chosen attachment.
 */
public double drawAffectedRegion(Conversion conv) {
    double logP = 0.0;

    // Total effective number of possible start sites
    double alpha = acg.getTotalSequenceLength()
            + acg.getLoci().size() * deltaInput.get().getValue();

    // Draw location of converted region.
    int startSite = -1;
    int endSite;
    Locus locus = null;

    double u = Randomizer.nextDouble() * alpha;
    for (Locus thisLocus : acg.getLoci()) {
        if (u < deltaInput.get().getValue() + thisLocus.getSiteCount()) {
            locus = thisLocus;
            if (u < deltaInput.get().getValue()) {
                startSite = 0;
                logP += Math.log(deltaInput.get().getValue() / alpha);
            } else {
                startSite = (int) (u - deltaInput.get().getValue());
                logP += Math.log(1.0 / alpha);
            }
            break;
        }
        u -= deltaInput.get().getValue() + thisLocus.getSiteCount();
    }

    if (locus == null)
        throw new IllegalStateException("Programmer error: "
                + "loop in drawAffectedRegion() fell through.");

    endSite = startSite + (int) Randomizer.nextGeometric(1.0 / deltaInput.get().getValue());
    endSite = Math.min(endSite, locus.getSiteCount() - 1);

    // Probability of end site:
    double probEnd = Math.pow(1.0 - 1.0 / deltaInput.get().getValue(),
            endSite - startSite)
            / deltaInput.get().getValue();

    // Include probability of going past the end:
    if (endSite == locus.getSiteCount() - 1)
        probEnd += Math.pow(1.0 - 1.0 / deltaInput.get().getValue(),
                locus.getSiteCount() - startSite);

    logP += Math.log(probEnd);

    conv.setLocus(locus);
    conv.setStartSite(startSite);
    conv.setEndSite(endSite);

    return logP;
}
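/*
 * Sanity sketch (standalone, example values assumed): the truncated-geometric
 * end-site distribution above sums to one because the mass of tracts running
 * past the locus end is folded into the final site.
 */
class EndSiteDistributionCheck {
    public static void main(String[] args) {
        double delta = 10.0;   // expected tract length (assumed example value)
        int siteCount = 25;    // locus length
        int startSite = 20;    // chosen start site

        double p = 1.0 / delta;
        double total = 0.0;
        for (int endSite = startSite; endSite < siteCount; endSite++) {
            double probEnd = Math.pow(1.0 - p, endSite - startSite) * p;
            if (endSite == siteCount - 1) {
                // fold in the mass of tracts running past the locus end
                probEnd += Math.pow(1.0 - p, siteCount - startSite);
            }
            total += probEnd;
        }
        System.out.println(total); // prints 1.0 (up to rounding)
    }
}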
@Override
public void initAndValidate() {
    meanRate = meanRateInput.get();
    speciesTreeRatesX = speciesTreeRatesInput.get();
    geneTree = geneTreeInput.get();

    geneNodeCount = geneTree.getNodeCount();
    branchRates = new double[geneNodeCount];
    storedBranchRates = new double[geneNodeCount];
    needsUpdate = true;
}
/**
 * Evaluate rate multiplier expression for the given variable values.
 *
 * @param scalarVarNames Names of scalar variables in expression
 * @param scalarVarVals Values of scalar variables in expression
 * @param vectorVarNames Names of vector variables in expression
 * @param vectorVarVals Values of vector variables in expression
 * @param functions Map from function names to functions usable in the expression
 * @return result of evaluating the expression
 */
public double evaluate(
        List<String> scalarVarNames, int[] scalarVarVals,
        List<String> vectorVarNames, List<Double[]> vectorVarVals,
        Map<String, Function> functions) {

    if (visitor == null) {
        // Parse predicate expression
        ANTLRInputStream input = new ANTLRInputStream(expInput.get());
        MASTERGrammarLexer lexer = new MASTERGrammarLexer(input);
        CommonTokenStream tokens = new CommonTokenStream(lexer);
        MASTERGrammarParser parser = new MASTERGrammarParser(tokens);
        ParseTree parseTree = parser.expression();
        visitor = new ExpressionEvaluator(parseTree, scalarVarNames, functions);
    }

    for (int i = 0; i < vectorVarNames.size(); i++)
        visitor.setVectorVar(vectorVarNames.get(i), vectorVarVals.get(i));

    Double[] res = visitor.evaluate(scalarVarVals);
    if (res.length != 1) {
        throw new IllegalArgumentException("Reaction rate multiplier must be scalar!");
    }

    return res[0];
}
/** Convert plain text string to a JSON string, escaping special characters. */
private String normalise(Input<?> input, String str) {
    // escape backslashes first, so the escapes added below are not doubled up
    str = str.replaceAll("\\\\", "\\\\\\\\");
    str = str.replaceAll("/", "\\\\/");
    str = str.replaceAll("\b", "\\\\b");
    str = str.replaceAll("\f", "\\\\f");
    str = str.replaceAll("\t", "\\\\t");
    str = str.replaceAll("\\r", "\\\\r");
    str = str.replaceAll("\"", "\\\\\"");
    str = str.replaceAll("\n", "\\\\n");
    // only quote values that are not numbers or booleans
    if (input != null
            && !input.getType().equals(Double.class)
            && !input.getType().equals(Integer.class)
            && !input.getType().equals(Boolean.class)) {
        str = "\"" + str + "\"";
    }
    return str;
}
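/*
 * Illustration of the escaping order (standalone sketch reproducing only the
 * replaceAll chain above on a sample string):
 */
class JsonEscapeSketch {
    public static void main(String[] args) {
        String str = "a\\b\t\"c\"\n"; // contains backslash, tab, quotes, newline
        str = str.replaceAll("\\\\", "\\\\\\\\"); // backslash must go first
        str = str.replaceAll("\t", "\\\\t");
        str = str.replaceAll("\"", "\\\\\"");
        str = str.replaceAll("\n", "\\\\n");
        // prints: a\\b\t\"c\"\n
        System.out.println(str);
    }
}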
/**
 * @param newick a string representing a tree in newick format
 * @param adjustTipHeights true if the tip heights should be adjusted to 0 (i.e. contemporaneous)
 *     after reading in tree.
 * @param allowSingleChildNodes true if internal nodes with single children are allowed
 * @param isLabeled true if nodes are labeled with taxa labels
 * @param offset if isLabeled == false and node labeling starts with x then offset should be x.
 *     When isLabeled == true, offset should be 1 (the default).
 * @throws Exception
 */
public TreeParser(
        final String newick,
        final boolean adjustTipHeights,
        final boolean allowSingleChildNodes,
        final boolean isLabeled,
        final int offset)
        throws Exception {
    newickInput.setValue(newick, this);
    isLabelledNewickInput.setValue(isLabeled, this);
    adjustTipHeightsInput.setValue(adjustTipHeights, this);
    allowSingleChildInput.setValue(allowSingleChildNodes, this);
    offsetInput.setValue(offset, this);
    initAndValidate();
}
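/*
 * Minimal usage sketch for this constructor (the Newick string is an
 * arbitrary example; assumes the usual beast.util.TreeParser import):
 */
class TreeParserUsageSketch {
    public static void main(String[] args) throws Exception {
        // labelled, ultrametric three-taxon tree; tip heights left as given
        TreeParser parser = new TreeParser(
                "((A:1.0,B:1.0):1.0,C:2.0);",
                false, // adjustTipHeights
                false, // allowSingleChildNodes
                true,  // isLabeled
                1);    // offset (default for labelled newick)
        System.out.println(parser.getRoot().getHeight()); // expected: 2.0
    }
}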
@Override
public void init(PrintStream out) throws Exception {
    final Tree tree = treeInput.get();
    if (getID() == null || getID().matches("\\s*")) {
        out.print(tree.getID() + ".SAcount\t");
    } else {
        out.print(getID() + "\t");
    }
}
/**
 * Create a tree from the given newick format
 *
 * @param taxaNames a list of taxa names to use, or null. If null then isLabelledNewick will be
 *     set to true
 * @param newick the newick string of the tree
 * @param offset the offset to map node numbers in newick format to indices in taxaNames, so
 *     name(node with nodeNumber) = taxaNames[nodeNumber - offset]
 * @param adjustTipHeightsWhenMissingDateTraits true if tip heights should be adjusted to zero
 * @throws Exception
 */
public TreeParser(
        final List<String> taxaNames,
        final String newick,
        final int offset,
        final boolean adjustTipHeightsWhenMissingDateTraits)
        throws Exception {

    if (taxaNames == null) {
        isLabelledNewickInput.setValue(true, this);
    } else {
        m_taxonset.setValue(new TaxonSet(Taxon.createTaxonList(taxaNames)), this);
    }
    newickInput.setValue(newick, this);
    offsetInput.setValue(offset, this);
    adjustTipHeightsInput.setValue(adjustTipHeightsWhenMissingDateTraits, this);
    labels = taxaNames;
    initAndValidate();
}
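/*
 * Example of the offset mapping (illustrative call, not from the original
 * source; assumes java.util.Arrays/List imports): with offset = 1, node
 * number 1 in the newick maps to taxaNames[0].
 */
class OffsetMappingSketch {
    public static void main(String[] args) throws Exception {
        List<String> taxa = Arrays.asList("human", "chimp", "gorilla");
        // node numbers 1..3 in the newick map to taxa[0..2] because offset = 1
        TreeParser parser = new TreeParser(taxa, "((1:1.0,2:1.0):1.0,3:2.0);", 1, true);
        System.out.println(parser.getNode(0).getID()); // expected: "human"
    }
}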
@Override
public void initAndValidate() {
    interpreter = new Interpreter();
    NamedFunction.evalFunctionInputs(interpreter, functionInputs.get());
    String script = valueInput.get();
    try {
        interpreter.eval(script);
    } catch (EvalError e) {
        throw new RuntimeException(e);
    }
}
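/*
 * The Interpreter/EvalError pair suggests BeanShell (bsh); a standalone
 * sketch of the eval call under that assumption:
 */
class ScriptEvalSketch {
    public static void main(String[] args) {
        bsh.Interpreter interpreter = new bsh.Interpreter();
        try {
            interpreter.eval("x = 3 * 7;");             // run a script fragment
            System.out.println(interpreter.get("x"));   // prints 21
        } catch (bsh.EvalError e) {
            throw new RuntimeException(e);
        }
    }
}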
/**
 * Create new instance of src object, connecting all inputs from src object. Note if an input is
 * a SubstitutionModel, it is duplicated as well.
 *
 * @param src object to be copied
 * @param i index used to extend ID with.
 * @return copy of src object
 */
private Object duplicate(BEASTInterface src, int i) {
    if (src == null) {
        return null;
    }
    BEASTInterface copy;
    try {
        copy = src.getClass().newInstance();
        copy.setID(src.getID() + "_" + i);
    } catch (InstantiationException | IllegalAccessException e) {
        e.printStackTrace();
        throw new RuntimeException(
                "Programmer error: every object in the model should have a default constructor that is publicly accessible: "
                        + src.getClass().getName());
    }
    for (Input<?> input : src.listInputs()) {
        if (input.get() != null) {
            if (input.get() instanceof List) {
                // handle lists
                for (Object o : (List<?>) input.get()) {
                    if (o instanceof BEASTInterface) {
                        // make sure it is not already in the list
                        copy.setInputValue(input.getName(), o);
                    }
                }
            } else if (input.get() instanceof SubstitutionModel) {
                // duplicate subst models
                BEASTInterface substModel =
                        (BEASTInterface) duplicate((BEASTInterface) input.get(), i);
                copy.setInputValue(input.getName(), substModel);
            } else {
                // it is some other value
                copy.setInputValue(input.getName(), input.get());
            }
        }
    }
    copy.initAndValidate();
    return copy;
}
public double proposal() {
    double logq = 0.0;

    // Pick two indices at random
    int index1 = Randomizer.nextInt(pointerCount);
    int index2 = index1;
    while (index2 == index1) {
        index2 = Randomizer.nextInt(pointerCount);
    }

    int clusterIndex1 = paramPointers.indexInList(index1, paramList);
    int clusterIndex2 = paramPointers.indexInList(index2, paramList);

    if (clusterIndex1 == clusterIndex2) {
        // If the randomly drawn sites are from the same cluster, perform a split-move.
        int[] clusterSites = dpValuableInput.get().getClusterSites(clusterIndex1);
        double temp = split(index1, index2, clusterIndex1, clusterSites);
        logq += temp;
    } else {
        // If the two randomly drawn sites are not from the same cluster, perform a merge-move.
        int[] cluster1Sites = dpValuableInput.get().getClusterSites(clusterIndex1);
        int[] cluster2Sites = dpValuableInput.get().getClusterSites(clusterIndex2);
        double temp =
                merge(index1, index2, clusterIndex1, clusterIndex2, cluster1Sites, cluster2Sites);
        logq = temp;
    }
    return logq;
}
@Override
public void initAndValidate() throws Exception {
    sPathComponents = sPathInput.get().split("/");
    if (sPathComponents[0].equals("")) {
        sPathComponents = new String[0];
    }
    sConditionalAttribute = new String[sPathComponents.length];
    sConditionalValue = new String[sPathComponents.length];
    // parse path components of the form name[attribute='value']
    for (int i = 0; i < sPathComponents.length; i++) {
        int j = sPathComponents[i].indexOf('[');
        if (j >= 0) {
            String sConditionalComponents =
                    sPathComponents[i].substring(j + 1, sPathComponents[i].lastIndexOf(']'));
            String[] sStrs = sConditionalComponents.split("=");
            sConditionalAttribute[i] = sStrs[0];
            // strip the quotes around the value
            sConditionalValue[i] = sStrs[1].substring(1, sStrs[1].length() - 1);
            sPathComponents[i] = sPathComponents[i].substring(0, j);
        }
    }
    inputs = new ArrayList<BEASTInterface>();
    startInputs = new ArrayList<BEASTInterface>();
    BEASTObjectPanel.getID(this);
}
@Override
public void initAndValidate() {
    threadCount = BeastMCMC.m_nThreads;

    if (maxNrOfThreadsInput.get() > 0) {
        threadCount = Math.min(maxNrOfThreadsInput.get(), BeastMCMC.m_nThreads);
    }
    String instanceCount = System.getProperty("beast.instance.count");
    if (instanceCount != null && instanceCount.length() > 0) {
        threadCount = Integer.parseInt(instanceCount);
    }

    logPByThread = new double[threadCount];

    // sanity check: alignment should have same #taxa as tree
    if (dataInput.get().getTaxonCount() != treeInput.get().getLeafNodeCount()) {
        throw new IllegalArgumentException(
                "The number of nodes in the tree does not match the number of sequences");
    }

    treelikelihood = new TreeLikelihood[threadCount];

    if (dataInput.get().isAscertained) {
        Log.warning.println(
                "Note, can only use single thread per alignment because the alignment is ascertained");
        threadCount = 1;
    }

    if (threadCount <= 1) {
        treelikelihood[0] = new TreeLikelihood();
        treelikelihood[0].setID(getID() + "0");
        treelikelihood[0].initByName(
                "data", dataInput.get(),
                "tree", treeInput.get(),
                "siteModel", siteModelInput.get(),
                "branchRateModel", branchRateModelInput.get(),
                "useAmbiguities", useAmbiguitiesInput.get(),
                "scaling", scalingInput.get() + "");
        treelikelihood[0].getOutputs().add(this);
        likelihoodsInput.get().add(treelikelihood[0]);
    } else {
        pool = Executors.newFixedThreadPool(threadCount);

        calcPatternPoints(dataInput.get().getSiteCount());
        for (int i = 0; i < threadCount; i++) {
            Alignment data = dataInput.get();
            String filterSpec = (patternPoints[i] + 1) + "-" + (patternPoints[i + 1]);
            if (data.isAscertained) {
                filterSpec += data.excludefromInput.get() + "-" + data.excludetoInput.get()
                        + "," + filterSpec;
            }

            treelikelihood[i] = new TreeLikelihood();
            treelikelihood[i].setID(getID() + i);
            treelikelihood[i].getOutputs().add(this);
            likelihoodsInput.get().add(treelikelihood[i]);

            FilteredAlignment filter = new FilteredAlignment();
            if (i == 0
                    && dataInput.get() instanceof FilteredAlignment
                    && ((FilteredAlignment) dataInput.get()).constantSiteWeightsInput.get() != null) {
                filter.initByName(
                        "data", dataInput.get() /*, "userDataType", m_data.get().getDataType()*/,
                        "filter", filterSpec,
                        "constantSiteWeights",
                        ((FilteredAlignment) dataInput.get()).constantSiteWeightsInput.get());
            } else {
                filter.initByName(
                        "data", dataInput.get() /*, "userDataType", m_data.get().getDataType()*/,
                        "filter", filterSpec);
            }
            treelikelihood[i].initByName(
                    "data", filter,
                    "tree", treeInput.get(),
                    "siteModel", duplicate((BEASTInterface) siteModelInput.get(), i),
                    "branchRateModel", duplicate(branchRateModelInput.get(), i),
                    "useAmbiguities", useAmbiguitiesInput.get(),
                    "scaling", scalingInput.get() + "");

            likelihoodCallers.add(new TreeLikelihoodCaller(treelikelihood[i], i));
        }
    }
}
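/*
 * Illustration (example numbers assumed, not part of the original class):
 * with threadCount = 2 and 1000 sites split evenly, calcPatternPoints yields
 * {0, 500, 1000}, so the per-thread FilteredAlignment filter specs are
 * "1-500" and "501-1000".
 */
class FilterSpecSketch {
    public static void main(String[] args) {
        int[] patternPoints = {0, 500, 1000}; // from calcPatternPoints: 2 threads, 1000 sites
        for (int i = 0; i < 2; i++) {
            String filterSpec = (patternPoints[i] + 1) + "-" + patternPoints[i + 1];
            System.out.println(filterSpec); // prints "1-500" then "501-1000"
        }
    }
}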
public TreeParser(final Alignment alignment, final String newick) throws Exception {
    dataInput.setValue(alignment, this);
    newickInput.setValue(newick, this);
    initAndValidate();
}
public Partition hasPartition() { return bHasPartitionsInput.get(); }
@Override
public double getArrayValue(int iDim) {
    return treeInput.get().getDirectAncestorNodeCount();
}
/** Find the input associated with this panel based on the path Input. */
@SuppressWarnings("unchecked")
public Input<?> resolveInput(BeautiDoc doc, int iPartition) {
    try {
        List<BEASTInterface> plugins;
        if (bHasPartitionsInput.get() == Partition.none) {
            plugins = new ArrayList<BEASTInterface>();
            plugins.add(doc.mcmc.get());
        } else {
            plugins = doc.getPartitions(bHasPartitionsInput.get().toString());
        }
        parentPlugins = new ArrayList<BEASTInterface>();
        parentInputs = new ArrayList<Input<?>>();
        parentPlugins.add(doc);
        parentInputs.add(doc.mcmc);
        type = doc.mcmc.getType();
        bIsList = false;
        // walk the path, component by component
        for (int i = 0; i < sPathComponents.length; i++) {
            List<BEASTInterface> oldPlugins = plugins;
            plugins = new ArrayList<BEASTInterface>();
            parentPlugins = new ArrayList<BEASTInterface>();
            parentInputs = new ArrayList<Input<?>>();
            for (BEASTInterface plugin : oldPlugins) {
                Input<?> namedInput = plugin.getInput(sPathComponents[i]);
                type = namedInput.getType();
                if (namedInput.get() instanceof List<?>) {
                    bIsList = true;
                    List<?> list = (List<?>) namedInput.get();
                    if (sConditionalAttribute[i] == null) {
                        for (Object o : list) {
                            BEASTInterface plugin2 = (BEASTInterface) o;
                            plugins.add(plugin2);
                            parentPlugins.add(plugin);
                            parentInputs.add(namedInput);
                        }
                    } else {
                        int nMatches = 0;
                        for (int j = 0; j < list.size(); j++) {
                            BEASTInterface plugin2 = (BEASTInterface) list.get(j);
                            if (matches(plugin2, sConditionalAttribute[i], sConditionalValue[i])) {
                                plugins.add(plugin2);
                                parentPlugins.add(plugin);
                                parentInputs.add(namedInput);
                                nMatches++;
                                break;
                            }
                        }
                        if (nMatches == 0) {
                            parentInputs.add(namedInput);
                            parentPlugins.add(plugin);
                        }
                    }
                } else if (namedInput.get() instanceof BEASTInterface) {
                    bIsList = false;
                    if (sConditionalAttribute[i] == null) {
                        plugins.add((BEASTInterface) namedInput.get());
                        parentPlugins.add(plugin);
                        parentInputs.add(namedInput);
                    } else {
                        if (matches(plugin, sConditionalAttribute[i], sConditionalValue[i])) {
                            plugins.add(plugin);
                            parentPlugins.add(plugin);
                            parentInputs.add(namedInput);
                        }
                    }
                } else {
                    throw new Exception("input " + sPathComponents[i] + " is not a plugin or list");
                }
            }
        }
        if (sTypeInput.get() != null) {
            type = Class.forName(sTypeInput.get());
        }
        // sanity check
        if (!bIsList && (bHasPartitionsInput.get() == Partition.none) && plugins.size() > 1) {
            System.err.println("WARNING: multiple plugins match, but hasPartitions=none");
            // this makes sure that all matching plugins are available in one go
            bIsList = true;
            // this suppresses syncing
            parentInputs.clear();
        }
        inputs.clear();
        startInputs.clear();
        for (BEASTInterface plugin : plugins) {
            inputs.add(plugin);
            startInputs.add(plugin);
        }

        if (!bIsList) {
            _input = new FlexibleInput<BEASTInterface>();
        } else {
            _input = new FlexibleInput<ArrayList<BEASTInterface>>(new ArrayList<BEASTInterface>());
        }
        _input.setRule(Validate.REQUIRED);
        syncTo(iPartition);
        if (bIsList) {
            checkForDups((List<Object>) _input.get());
        }
        return _input;
    } catch (Exception e) {
        System.err.println(
                "Warning: could not find objects in path " + Arrays.toString(sPathComponents));
    }
    return null;
} // resolveInput
public InputEditor.ExpandOption forceExpansion() { return forceExpansionInput.get(); }
public String getTipText() { return sTipTextInput.get(); }
public String getIcon() { return sIconInput.get(); }
@Override
public void log(int nSample, PrintStream out) {
    final Tree tree = treeInput.get();
    out.print(tree.getDirectAncestorNodeCount() + "\t");
}
@Override
public boolean requiresRecalculation() {
    needsUpdate = geneTreeInput.isDirty()
            || speciesTreeRatesInput.isDirty()
            || meanRateInput.isDirty();
    return needsUpdate;
}
public double merge(
        int index1,
        int index2,
        int clusterIndex1,
        int clusterIndex2,
        int[] cluster1Sites,
        int[] cluster2Sites) {

    double logqMerge = 0.0;

    HashMap<Integer, Integer> siteMap = new HashMap<Integer, Integer>();

    // The value of the merged cluster will have that of cluster 2 before the merge.
    QuietRealParameter mergedParam = paramList.getParameter(clusterIndex2);
    QuietRealParameter mergedModel = modelList.getParameter(clusterIndex2);
    QuietRealParameter mergedFreqs = freqsList.getParameter(clusterIndex2);
    QuietRealParameter mergedRates = ratesList.getParameter(clusterIndex2);

    // Create a vector that combines the site indices of the two clusters,
    // excluding the two seed sites index1 and index2.
    int[] mergedClusterSites = new int[cluster1Sites.length + cluster2Sites.length - 2];

    int k = 0;
    for (int i = 0; i < cluster1Sites.length; i++) {
        if (cluster1Sites[i] != index1) {
            // For all members that are not index 1,
            // record the cluster in which they have been before the merge,
            // and assign them to the combined vector.
            siteMap.put(cluster1Sites[i], clusterIndex1);
            mergedClusterSites[k++] = cluster1Sites[i];
        }
    }

    for (int i = 0; i < cluster2Sites.length; i++) {
        // All members in cluster 2 remain in cluster 2, so no new pointer assignments.
        if (cluster2Sites[i] != index2) {
            // For all members that are not index 2,
            // record the cluster in which they have been before the merge,
            // and assign them to the combined vector.
            siteMap.put(cluster2Sites[i], clusterIndex2);
            mergedClusterSites[k++] = cluster2Sites[i];
        }
    }

    try {
        // Create a weight vector of patterns to inform the temporary tree likelihood
        // which set of pattern likelihoods are to be computed.
        k = 0;
        int[] sCluster1Sites = new int[cluster1Sites.length - 1];
        for (int i = 0; i < cluster1Sites.length; i++) {
            if (cluster1Sites[i] != index1) {
                sCluster1Sites[k++] = cluster1Sites[i];
            }
        }
        tempLikelihood.setupPatternWeightsFromSites(sCluster1Sites);
        double[] cluster1SitesCluster2ParamLogLik = tempLikelihood.calculateLogP(
                mergedParam, mergedModel, mergedFreqs, mergedRates, sCluster1Sites);

        k = 0;
        int[] sCluster2Sites = new int[cluster2Sites.length - 1];
        for (int i = 0; i < cluster2Sites.length; i++) {
            if (cluster2Sites[i] != index2) {
                sCluster2Sites[k++] = cluster2Sites[i];
            }
        }
        tempLikelihood.setupPatternWeightsFromSites(sCluster2Sites);
        RealParameter removedParam = paramList.getParameter(clusterIndex1);
        RealParameter removedModel = modelList.getParameter(clusterIndex1);
        RealParameter removedFreqs = freqsList.getParameter(clusterIndex1);
        RealParameter removedRates = ratesList.getParameter(clusterIndex1);
        double[] cluster2SitesCluster1ParamLogLik = tempLikelihood.calculateLogP(
                removedParam, removedModel, removedFreqs, removedRates, sCluster2Sites);

        // Per-site log likelihoods under cluster 1's parameters
        double[] logLik1 = new double[mergedClusterSites.length];
        for (int i = 0; i < (cluster1Sites.length - 1); i++) {
            logLik1[i] = dpTreeLikelihood.getSiteLogLikelihood(clusterIndex1, mergedClusterSites[i]);
        }
        System.arraycopy(
                cluster2SitesCluster1ParamLogLik, 0,
                logLik1, cluster1Sites.length - 1,
                cluster2SitesCluster1ParamLogLik.length);

        // Per-site log likelihoods under cluster 2's parameters
        double[] logLik2 = new double[mergedClusterSites.length];
        System.arraycopy(
                cluster1SitesCluster2ParamLogLik, 0,
                logLik2, 0,
                cluster1SitesCluster2ParamLogLik.length);
        for (int i = cluster1SitesCluster2ParamLogLik.length; i < logLik2.length; i++) {
            logLik2[i] = dpTreeLikelihood.getSiteLogLikelihood(clusterIndex2, mergedClusterSites[i]);
        }

        double[] lik1 = new double[logLik1.length];
        double[] lik2 = new double[logLik2.length];
        for (int i = 0; i < logLik1.length; i++) {
            lik1[i] = Math.exp(logLik1[i]);
            lik2[i] = Math.exp(logLik2[i]);
        }

        // Create a set of indices for random permutation
        int[] shuffle = new int[mergedClusterSites.length];
        for (int i = 0; i < shuffle.length; i++) {
            shuffle[i] = i;
        }
        Randomizer.shuffle(shuffle);

        // Sequentially score the allocation that would recreate the current
        // two clusters; this gives the reverse (split) proposal probability.
        int cluster1Count = 1;
        int cluster2Count = 1;
        int cluster;
        double psi1, psi2, cluster1Prob;
        for (int i = 0; i < mergedClusterSites.length; i++) {
            cluster = siteMap.get(mergedClusterSites[shuffle[i]]);
            psi1 = cluster1Count * lik1[shuffle[i]];
            psi2 = cluster2Count * lik2[shuffle[i]];
            cluster1Prob = psi1 / (psi1 + psi2);
            if (cluster == clusterIndex1) {
                logqMerge += Math.log(cluster1Prob);
                cluster1Count++;
            } else if (cluster == clusterIndex2) {
                logqMerge += Math.log(1 - cluster1Prob);
                cluster2Count++;
            } else {
                throw new RuntimeException("Something is wrong.");
            }
        }

        logqMerge += mergeValue(removedParam, mergedParam, paramBaseDistr)
                + mergeDiscreteValue(removedModel, mergedModel, modelDistrInput.get())
                + freqsBaseDistr.calcLogP(removedFreqs)
                + mergeValueInLogSpace(removedRates, mergedRates, ratesBaseDistr);

        if (logqMerge > Double.NEGATIVE_INFINITY) {
            paramList.mergeParameter(clusterIndex1, clusterIndex2);
            modelList.mergeParameter(clusterIndex1, clusterIndex2);
            freqsList.mergeParameter(clusterIndex1, clusterIndex2);
            ratesList.mergeParameter(clusterIndex1, clusterIndex2);
            for (int i = 0; i < cluster1Sites.length; i++) {
                // Point every member in cluster 1 to cluster 2
                paramPointers.point(cluster1Sites[i], mergedParam);
                modelPointers.point(cluster1Sites[i], mergedModel);
                freqsPointers.point(cluster1Sites[i], mergedFreqs);
                ratesPointers.point(cluster1Sites[i], mergedRates);
            }
        }
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
    return logqMerge;
}
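/*
 * The allocation loop above performs a sequential allocation: each site joins
 * cluster 1 with probability proportional to (current cluster-1 count) times
 * (its likelihood under cluster 1's parameters), as in split-merge samplers
 * for Dirichlet process mixtures. A toy standalone illustration of the
 * probability bookkeeping (all values assumed):
 */
class SequentialAllocationSketch {
    public static void main(String[] args) {
        double[] lik1 = {0.8, 0.1, 0.5};            // per-site likelihoods, cluster 1
        double[] lik2 = {0.2, 0.9, 0.5};            // per-site likelihoods, cluster 2
        boolean[] inCluster1 = {true, false, true}; // the allocation being scored

        int count1 = 1, count2 = 1;                 // each cluster seeded with one site
        double logq = 0.0;
        for (int i = 0; i < lik1.length; i++) {
            double psi1 = count1 * lik1[i];
            double psi2 = count2 * lik2[i];
            double p1 = psi1 / (psi1 + psi2);
            if (inCluster1[i]) { logq += Math.log(p1);     count1++; }
            else               { logq += Math.log(1 - p1); count2++; }
        }
        System.out.println(logq); // log probability of this particular allocation
    }
}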
@Override
public void initStateNodes() throws Exception {

    typeLabel = typeLabelInput.get();
    // nTypes = nTypesInput.get();

    BeastTreeFromMaster masterTree = masterTreeInput.get();

    TraitSet typeTrait = new TraitSet();
    TraitSet dateTrait = new TraitSet();

    // build comma-separated "taxon=value" trait strings from the master tree tips
    String types = "";
    String dates = "";
    for (Node beastNode : masterTree.getExternalNodes()) {
        dates += beastNode.getID() + "=" + beastNode.getHeight() + ",";
        types += beastNode.getID() + "=" + (int) beastNode.getMetaData("location") + ",";
    }
    dates = dates.substring(0, dates.length() - 1);
    types = types.substring(0, types.length() - 1);

    typeTrait.initByName("value", types, "taxa", m_taxonset.get(), "traitname", "type");
    dateTrait.initByName("value", dates, "taxa", m_taxonset.get(), "traitname", "date-backward");

    SCMigrationModel migModel = new SCMigrationModel();
    Double[] temp = new Double[nTypes.get()];
    Arrays.fill(temp, muInput.get());
    migModel.setInputValue("rateMatrix", new RealParameter(temp));
    Arrays.fill(temp, popSizeInput.get());
    migModel.setInputValue("popSizes", new RealParameter(temp));
    migModel.initAndValidate();

    if (random.get()) {
        tree = new StructuredCoalescentMultiTypeTree();
        tree.setInputValue("migrationModel", migModel);
    } else {
        Node oldRoot = masterTree.getRoot();
        MultiTypeNode newRoot = new MultiTypeNode();
        newRoot.height = oldRoot.height;
        newRoot.nTypeChanges = 0;
        newRoot.changeTimes.addAll(new ArrayList<Double>());
        newRoot.changeTypes.addAll(new ArrayList<Integer>());
        newRoot.nodeType = 0;
        newRoot.labelNr = oldRoot.labelNr;
        newRoot.addChild(copyFromFlatNode(oldRoot.getLeft()));
        newRoot.addChild(copyFromFlatNode(oldRoot.getRight()));
        tree = new MultiTypeTree(newRoot);
    }

    tree.setInputValue("trait", typeTrait);
    tree.setInputValue("trait", dateTrait);
    tree.initAndValidate();

    setInputValue("trait", dateTrait);
    setInputValue("trait", typeTrait);

    assignFromWithoutID(tree);
}
/** Ensure the class behaves properly, even when inputs are not specified. */
@Override
public void initAndValidate() throws Exception {

    boolean sortNodesAlphabetically = false;

    if (dataInput.get() != null) {
        labels = dataInput.get().getTaxaNames();
    } else if (m_taxonset.get() != null) {
        if (labels == null) {
            labels = m_taxonset.get().asStringList();
        } else { // else labels were set by TreeParser c'tor
            sortNodesAlphabetically = true;
        }
    } else {
        if (isLabelledNewickInput.get()) {
            if (m_initial.get() != null) {
                labels = m_initial.get().getTaxonset().asStringList();
            } else {
                labels = new ArrayList<>();
                createUnrecognizedTaxa = true;
                sortNodesAlphabetically = true;
            }
        } else {
            if (m_initial.get() != null) {
                // try to pick up taxa from initial tree
                final Tree tree = m_initial.get();
                if (tree.m_taxonset.get() != null) {
                    labels = tree.m_taxonset.get().asStringList();
                }
            }
        }
    }

    final String newick = newickInput.get();
    if (newick == null || newick.equals("")) {
        // can happen while initialising Beauti
        final Node dummy = new Node();
        setRoot(dummy);
    } else {
        try {
            setRoot(parseNewick(newickInput.get()));
        } catch (ParseCancellationException e) {
            throw new RuntimeException(
                    "TreeParser cannot make sense of the Newick string "
                            + "provided. It gives the following clue:\n"
                            + e.getMessage());
        }
    }

    super.initAndValidate();

    if (sortNodesAlphabetically) {
        // correct for node ordering: ensure order is alphabetical
        for (int i = 0; i < getNodeCount() && i < labels.size(); i++) {
            m_nodes[i].setID(labels.get(i));
        }
        Node[] nodes = new Node[labels.size()];
        System.arraycopy(m_nodes, 0, nodes, 0, labels.size());
        Arrays.sort(nodes, (o1, o2) -> o1.getID().compareTo(o2.getID()));
        for (int i = 0; i < labels.size(); i++) {
            m_nodes[i] = nodes[i];
            nodes[i].setNr(i);
        }
    }

    if (m_initial.get() != null)
        processTraits(m_initial.get().m_traitList.get());
    else
        processTraits(m_traitList.get());

    if (timeTraitSet != null) {
        adjustTreeNodeHeights(root);
    } else if (adjustTipHeightsInput.get()) {
        double treeLength = TreeUtils.getTreeLength(this, getRoot());

        double extraTreeLength = 0.0;
        double maxTipHeight = 0.0;

        // all nodes should be at zero height if no date-trait is available
        for (int i = 0; i < getLeafNodeCount(); i++) {
            double height = getNode(i).getHeight();
            if (maxTipHeight < height) {
                maxTipHeight = height;
            }
            extraTreeLength += height;
            getNode(i).setHeight(0);
        }

        double scaleFactor = (treeLength + extraTreeLength) / treeLength;

        final double SCALE_FACTOR_THRESHOLD = 0.001;

        // if the change in total tree length is more than 0.1% then give the user a warning!
        if (scaleFactor > 1.0 + SCALE_FACTOR_THRESHOLD) {

            DecimalFormat format = new DecimalFormat("#.##");

            Log.info.println(
                    "WARNING: Adjust tip heights attribute set to 'true' in "
                            + getClass().getSimpleName());
            Log.info.println(
                    "         has resulted in significant (>"
                            + format.format(SCALE_FACTOR_THRESHOLD * 100.0)
                            + "%) change in tree length.");
            Log.info.println(
                    "         Use " + adjustTipHeightsInput.getName()
                            + "='false' to override this default.");
            Log.info.printf("  original max tip age = %8.3f\n", maxTipHeight);
            Log.info.printf("       new max tip age = %8.3f\n", 0.0);
            Log.info.printf("  original tree length = %8.3f\n", treeLength);
            Log.info.printf("       new tree length = %8.3f\n", treeLength + extraTreeLength);
            Log.info.printf("       TL scale factor = %8.3f\n", scaleFactor);
        }
    }

    if (m_taxonset.get() == null && labels != null && isLabelledNewickInput.get()) {
        m_taxonset.setValue(new TaxonSet(Taxon.createTaxonList(labels)), this);
    }

    initStateNodes();
} // init
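/*
 * Quick numeric illustration of the warning threshold (example numbers
 * assumed): zeroing tips that together carry 0.05 of extra height on a tree
 * of length 10.0 gives a scale factor of 1.005 > 1.001, so the warning fires.
 */
class TipAdjustWarningSketch {
    public static void main(String[] args) {
        double treeLength = 10.0;      // total branch length before zeroing tips
        double extraTreeLength = 0.05; // summed heights of the tips moved to zero
        double scaleFactor = (treeLength + extraTreeLength) / treeLength; // 1.005
        System.out.println(scaleFactor > 1.0 + 0.001); // true -> warning is printed
    }
}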
/** more elegant getters for resolving Input values */
public String getName() {
    return sNameInput.get();
}