/**
 * @param name             the name of the covarion substitution model
 * @param dataType         the data type
 * @param hiddenClassRates the relative rates of the hidden categories (the first hidden
 *                         category has rate 1.0, so this parameter has dimension one less
 *                         than the number of hidden categories)
 * @param switchingRates   the rates of switching between hidden categories
 * @param freqModel        the equilibrium frequencies
 */
public AbstractCovarionDNAModel(
        String name,
        HiddenNucleotides dataType,
        Parameter hiddenClassRates,
        Parameter switchingRates,
        FrequencyModel freqModel) {

    super(name, dataType, freqModel);

    hiddenClassCount = dataType.getHiddenClassCount();
    this.hiddenClassRates = hiddenClassRates;
    this.switchingRates = switchingRates;

    assert hiddenClassRates.getDimension() == hiddenClassCount - 1;

    int switchingClassCount = hiddenClassCount * (hiddenClassCount - 1) / 2;

    if (switchingRates.getDimension() != switchingClassCount) {
        throw new IllegalArgumentException(
                "switching rate parameter must have "
                        + switchingClassCount
                        + " rates for "
                        + hiddenClassCount
                        + " classes");
    }

    addVariable(switchingRates);
    addVariable(hiddenClassRates);

    constructRateMatrixMap();
}
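// Hedged helper sketch (illustrative only, not part of the BEAST class): for k hidden
// categories the constructor above expects k - 1 relative-rate entries (the first category is
// fixed at rate 1.0) and k * (k - 1) / 2 switching rates, one per unordered pair of categories.
static int expectedSwitchingRateCount(int hiddenClassCount) {
    return hiddenClassCount * (hiddenClassCount - 1) / 2;
}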
protected double calculateLogFieldLikelihood() {

    if (!intervalsKnown) {
        // intervalsKnown -> false when handleModelChanged event occurs in super.
        wrapSetupIntervals();
        setupGMRFWeights();
        intervalsKnown = true;
    }

    double currentLike = 0;
    DenseVector diagonal1 = new DenseVector(fieldLength);
    DenseVector currentGamma = new DenseVector(popSizeParameter.getParameterValues());

    SymmTridiagMatrix currentQ =
            getScaledWeightMatrix(
                    precisionParameter.getParameterValue(0), lambdaParameter.getParameterValue(0));
    currentQ.mult(currentGamma, diagonal1);

    // currentLike += 0.5 * logGeneralizedDeterminant(currentQ) - 0.5 * currentGamma.dot(diagonal1);

    currentLike +=
            0.5 * (fieldLength - 1) * Math.log(precisionParameter.getParameterValue(0))
                    - 0.5 * currentGamma.dot(diagonal1);
    if (lambdaParameter.getParameterValue(0) == 1) {
        currentLike -= (fieldLength - 1) / 2.0 * LOG_TWO_TIMES_PI;
    } else {
        currentLike -= fieldLength / 2.0 * LOG_TWO_TIMES_PI;
    }

    return currentLike;
}
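// A minimal, self-contained sketch of the quantity computed above, under the assumption that
// Q is the intrinsic (random-walk) GMRF precision matrix scaled by the precision tau: the log
// field prior is 0.5 * (n - 1) * log(tau) - 0.5 * gamma' Q gamma, up to the 2*pi constant.
// Plain arrays replace the MTJ vector/matrix classes here; names are illustrative only.
static double gmrfFieldLogDensity(double[] gamma, double[] weights, double tau) {
    int n = gamma.length;
    double quadraticForm = 0.0;
    for (int i = 0; i < n - 1; i++) {
        double d = gamma[i] - gamma[i + 1];
        quadraticForm += tau * weights[i] * d * d; // gamma' Q gamma accumulated pairwise
    }
    return 0.5 * (n - 1) * Math.log(tau) - 0.5 * quadraticForm;
}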
private void printInformtion(Parameter par) {
    StringBuffer sb = new StringBuffer("parameter \n");
    for (int j = 0; j < par.getDimension(); j++) {
        sb.append(par.getParameterValue(j)).append(" ");
    }
    Logger.getLogger("dr.evomodel").info(sb.toString());
}
public DriftedLocationsStatistic(
        MatrixParameter locationsParameter,
        Parameter offsetsParameter,
        Parameter locationDriftParameter) {

    this.locationsParameter = locationsParameter;
    locationsParameter.addParameterListener(this);

    this.offsetsParameter = offsetsParameter;
    offsetsParameter.addParameterListener(this);

    this.locationDriftParameter = locationDriftParameter;
    locationDriftParameter.addParameterListener(this);
}
public double calculateWeightedSSE() {
    double weightedSSE = 0;
    double currentPopSize = popSizeParameter.getParameterValue(0);
    double currentInterval = coalescentIntervals[0];
    for (int j = 1; j < fieldLength; j++) {
        double nextPopSize = popSizeParameter.getParameterValue(j);
        double nextInterval = coalescentIntervals[j];
        double delta = nextPopSize - currentPopSize;
        double weight = (currentInterval + nextInterval) / 2.0;

        weightedSSE += delta * delta / weight;

        currentPopSize = nextPopSize;
        currentInterval = nextInterval;
    }
    return weightedSSE;
}
/** Constructor */
public SampleStateAndCategoryModel(
        Parameter muParameter, Parameter categoriesParameter, Vector substitutionModels) {

    super(SampleStateAndCategoryModelParser.SAMPLE_STATE_AND_CATEGORY_MODEL);

    this.substitutionModels = substitutionModels;

    for (int i = 0; i < substitutionModels.size(); i++) {
        addModel((SubstitutionModel) substitutionModels.elementAt(i));
    }

    this.categoryCount = substitutionModels.size();
    sitesInCategory = new int[categoryCount];

    // stateCount = ((SubstitutionModel) substitutionModels.elementAt(0)).getDataType().getStateCount();

    this.muParameter = muParameter;
    addVariable(muParameter);
    muParameter.addBounds(new Parameter.DefaultBounds(1000.0, 0.0, 1));

    this.categoriesParameter = categoriesParameter;
    addVariable(categoriesParameter);

    if (categoryCount > 1) {
        for (int i = 0; i < categoryCount; i++) {
            Parameter p =
                    (Parameter) ((YangCodonModel) substitutionModels.elementAt(i)).getVariable(0);
            Parameter lower = null;
            Parameter upper = null;

            if (i == 0) {
                upper = (Parameter) ((YangCodonModel) substitutionModels.elementAt(i + 1)).getVariable(0);
                p.addBounds(new omegaBounds(lower, upper));
            } else if (i == (categoryCount - 1)) {
                lower = (Parameter) ((YangCodonModel) substitutionModels.elementAt(i - 1)).getVariable(0);
                p.addBounds(new omegaBounds(lower, upper));
            } else {
                upper = (Parameter) ((YangCodonModel) substitutionModels.elementAt(i + 1)).getVariable(0);
                lower = (Parameter) ((YangCodonModel) substitutionModels.elementAt(i - 1)).getVariable(0);
                p.addBounds(new omegaBounds(lower, upper));
            }
        }
    }
}
/** Calculates the actual rates corresponding to the category indices. */
protected void setupRates() {

    // System.out.println("BRRRTTZZZ " + distributionIndexParameter.getValue(0));
    for (int i = 0; i < tree.getNodeCount(); i++) {
        // rates[i] = distributionModel.quantile(rateCategoryQuantiles.getNodeValue(
        //     rateCategoryQuantiles.getTreeModel(), rateCategoryQuantiles.getTreeModel().getNode(i)));
        if (!tree.isRoot(tree.getNode(i))) {
            if (useQuantilesForRates) {
                // Using quantiles to represent rates.
                rates[tree.getNode(i).getNumber()] =
                        distributionModels[(int) Math.round(distributionIndexParameter.getValue(0))]
                                .quantile(rateCategoryQuantiles.getNodeValue(tree, tree.getNode(i)));
            } else {
                // Not using quantiles to represent rates. This is practically useless for
                // anything other than simulation.
                rates[tree.getNode(i).getNumber()] =
                        rateCategoryQuantiles.getNodeValue(tree, tree.getNode(i));
            }
        }
    }

    /*
    System.out.print(distributionModels[(int) Math.round(distributionIndexParameter.getValue(0))].getClass().getName()
            + "\t" + (int) Math.round(distributionIndexParameter.getValue(0))
            + "\t" + rates[1]
            + "\t" + rateCategoryQuantiles.getNodeValue(tree, tree.getNode(1)));
    if (distributionModels[(int) Math.round(distributionIndexParameter.getValue(0))].getClass().getName()
            .equals("dr.inference.distribution.LogNormalDistributionModel")) {
        LogNormalDistributionModel lndm =
                (LogNormalDistributionModel) distributionModels[(int) Math.round(distributionIndexParameter.getValue(0))];
        System.out.println("\t" + lndm.getS());
    } else if (distributionModels[(int) Math.round(distributionIndexParameter.getValue(0))].getClass().getName()
            .equals("dr.inference.distribution.InverseGaussianDistributionModel")) {
        InverseGaussianDistributionModel lndm =
                (InverseGaussianDistributionModel) distributionModels[(int) Math.round(distributionIndexParameter.getValue(0))];
        System.out.println("\t" + lndm.getS());
    }
    */

    if (normalize) computeFactor();
}
/** @return the lower limit of this hypervolume in the given dimension. */
public Double getLowerLimit(int dimension) {
    if (dimension != 0) {
        throw new RuntimeException("omega parameters have wrong dimension " + dimension);
    }
    if (lowerOmega == null) {
        return OMEGA_MIN_VALUE;
    }
    return lowerOmega.getParameterValue(dimension);
}
public Object parseXMLObject(XMLObject xo) throws XMLParseException {

    // final int overSampling = xo.getAttribute(OVERSAMPLING, 1);
    final boolean normalize = xo.getAttribute(NORMALIZE, false);
    final double normalizeBranchRateTo = xo.getAttribute(NORMALIZE_BRANCH_RATE_TO, Double.NaN);

    TreeModel tree = (TreeModel) xo.getChild(TreeModel.class);
    ParametricDistributionModel distributionModel =
            (ParametricDistributionModel) xo.getElementFirstChild(DISTRIBUTION);

    // Parameter rateCategoryParameter = (Parameter) xo.getElementFirstChild(RATE_CATEGORIES);
    Parameter rateCategoryQuantilesParameter =
            (Parameter) xo.getElementFirstChild(RATE_CATEGORY_QUANTILES);

    Logger.getLogger("dr.evomodel").info("Using continuous relaxed clock model.");
    // Logger.getLogger("dr.evomodel").info(" over sampling = " + overSampling);
    Logger.getLogger("dr.evomodel")
            .info(" parametric model = " + distributionModel.getModelName());
    // Logger.getLogger("dr.evomodel").info(" rate categories = " + rateCategoryParameter.getDimension());
    Logger.getLogger("dr.evomodel")
            .info(" rate categories = " + rateCategoryQuantilesParameter.getDimension());
    if (normalize) {
        Logger.getLogger("dr.evomodel")
                .info(" mean rate is normalized to " + normalizeBranchRateTo);
    }

    if (xo.hasAttribute(SINGLE_ROOT_RATE)) {
        // singleRootRate = xo.getBooleanAttribute(SINGLE_ROOT_RATE);
        Logger.getLogger("dr.evomodel").warning(" WARNING: single root rate is not implemented!");
    }

    /*
    if (xo.hasAttribute(NORMALIZED_MEAN)) {
        dbr.setNormalizedMean(xo.getDoubleAttribute(NORMALIZED_MEAN));
    }
    */

    return new ContinuousBranchRates(
            tree,
            /*rateCategoryParameter, */ rateCategoryQuantilesParameter,
            distributionModel,
            /*overSampling,*/ normalize,
            normalizeBranchRateTo);
}
public int[] connected(int i, Parameter clusteringParameter) {
    int n = clusteringParameter.getDimension();
    int[] visited = new int[n + 1];
    visited[0] = i + 1;
    int tv = 1;

    for (int j = 0; j < n; j++) {
        if (visited[j] != 0) {
            int curr = visited[j] - 1;

            // look forward
            int forward = (int) clusteringParameter.getParameterValue(curr);
            visited[tv] = forward + 1;
            tv++;
            // check to see if it isn't already on the list
            for (int ii = 0; ii < tv - 1; ii++) {
                if (visited[ii] == forward + 1) {
                    tv--;
                    visited[tv] = 0;
                }
            }

            // look back
            for (int jj = 0; jj < n; jj++) {
                if ((int) clusteringParameter.getParameterValue(jj) == curr) {
                    visited[tv] = jj + 1;
                    tv++;
                    for (int ii = 0; ii < tv - 1; ii++) {
                        if (visited[ii] == jj + 1) {
                            tv--;
                            visited[tv] = 0;
                        }
                    }
                }
            }
        }
    }

    return visited;
}
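// Hedged usage sketch (hypothetical helper, not BEAST API): connected() returns 1-based
// element indices of the cluster containing element i, packed at the front of the array with
// unused slots left at 0, so the members can be read off until the first zero.
static java.util.Set<Integer> clusterMembers(int[] visited) {
    java.util.Set<Integer> members = new java.util.LinkedHashSet<Integer>();
    for (int v : visited) {
        if (v == 0) {
            break; // non-zero entries are contiguous; 0 marks the first unused slot
        }
        members.add(v - 1); // convert back to 0-based indices
    }
    return members;
}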
public double getStatisticValue(int dim) {
    double val;

    // x indexes the location, y indexes the dimension within that location
    int x = getColumnIndex(dim);
    int y = getRowIndex(dim);

    Parameter loc = locationsParameter.getParameter(x);

    if (y == 0) {
        val =
                loc.getParameterValue(y)
                        + locationDriftParameter.getParameterValue(0)
                                * offsetsParameter.getParameterValue(x);
    } else {
        val = loc.getParameterValue(y);
    }

    return val;
}
/** Construct demographic model with default settings. */
public ExponentialLogisticModel(
        String name,
        Parameter N0Parameter,
        Parameter logisticGrowthParameter,
        Parameter logisticShapeParameter,
        Parameter exponentialGrowthParameter,
        Parameter transistionTimeParameter,
        double alpha,
        Type units) {

    super(name);

    exponentialLogistic = new ExponentialLogistic(units);

    this.N0Parameter = N0Parameter;
    addVariable(N0Parameter);
    N0Parameter.addBounds(new Parameter.DefaultBounds(Double.POSITIVE_INFINITY, 0.0, 1));

    this.logisticGrowthParameter = logisticGrowthParameter;
    addVariable(logisticGrowthParameter);
    logisticGrowthParameter.addBounds(
            new Parameter.DefaultBounds(Double.POSITIVE_INFINITY, 0.0, 1));

    this.logisticShapeParameter = logisticShapeParameter;
    addVariable(logisticShapeParameter);
    logisticShapeParameter.addBounds(new Parameter.DefaultBounds(Double.POSITIVE_INFINITY, 0.0, 1));

    this.exponentialGrowthParameter = exponentialGrowthParameter;
    addVariable(exponentialGrowthParameter);
    exponentialGrowthParameter.addBounds(
            new Parameter.DefaultBounds(Double.POSITIVE_INFINITY, 0.0, 1));

    this.transistionTimeParameter = transistionTimeParameter;
    addVariable(transistionTimeParameter);
    transistionTimeParameter.addBounds(
            new Parameter.DefaultBounds(Double.POSITIVE_INFINITY, 0.0, 1));

    this.alpha = alpha;

    setUnits(units);
}
public PopsIOSpeciesTreeModel(
        PopsIOSpeciesBindings piosb, Parameter popPriorScale, PriorComponent[] priorComponents) {
    super(PopsIOSpeciesTreeModelParser.PIO_SPECIES_TREE);
    this.piosb = piosb;

    this.popPriorScale = popPriorScale;
    addVariable(popPriorScale);
    popPriorScale.addBounds(new Parameter.DefaultBounds(Double.POSITIVE_INFINITY, 0.0, 1));

    this.priorComponents = priorComponents;

    PopsIOSpeciesBindings.SpInfo[] species = piosb.getSpecies();
    int nTaxa = species.length;
    int nNodes = 2 * nTaxa - 1;

    pionodes = new PopsIONode[nNodes];
    for (int n = 0; n < nNodes; n++) {
        pionodes[n] = new PopsIONode(n);
    }

    ArrayList<Integer> tojoin = new ArrayList<Integer>(nTaxa);
    for (int n = 0; n < nTaxa; n++) {
        pionodes[n].setTaxon(species[n].name);
        pionodes[n].setHeight(0.0);
        pionodes[n].setUnion(piosb.tipUnionFromTaxon(pionodes[n].getTaxon()));
        tojoin.add(n);
    }

    double rate = 1.0;
    double treeheight = 0.0;
    for (int i = 0; i < nTaxa - 1; i++) {
        int numtojoin = tojoin.size();
        int j = MathUtils.nextInt(numtojoin);
        Integer child0 = tojoin.get(j);
        tojoin.remove(j);
        int k = MathUtils.nextInt(numtojoin - 1);
        Integer child1 = tojoin.get(k);
        tojoin.remove(k);
        pionodes[nTaxa + i].addChildren(pionodes[child0], pionodes[child1]);
        pionodes[nTaxa + i].setHeight(treeheight + randomnodeheight(numtojoin * rate));
        treeheight = pionodes[nTaxa + i].getHeight();
        tojoin.add(nTaxa + i);
    }
    rootn = pionodes.length - 1;

    double scale = 0.99 * piosb.initialMinGeneNodeHeight() / pionodes[rootn].height;
    scaleAllHeights(scale);
    pionodes[rootn].fillinUnionsInSubtree(piosb.getSpecies().length);

    stree = makeSimpleTree();

    Logger.getLogger("dr.evomodel.speciation.popsio")
            .info(
                    "\tConstructing a PopsIO Species Tree Model, please cite:\n"
                            + Citable.Utils.getCitationString(this));
}
protected double getRawBranchRate(final Tree tree, final NodeRef node) {

    double rate = 0.0;
    double[] processValues = getProcessValues(tree, node);

    double totalTime = 0;
    if (indicatorParameter != null) {
        double absRate = rateParameter.getParameterValue(0);

        for (int i = 0; i < indicatorParameter.getDimension(); i++) {
            int index = (int) indicatorParameter.getParameterValue(i);
            if (index == 0) {
                rate += absRate * processValues[i];
            } else {
                rate +=
                        (absRate * (1.0 + relativeRatesParameter.getParameterValue(index - 1)))
                                * processValues[i];
            }
            totalTime += processValues[i];
        }
    } else {
        for (int i = 0; i < rateParameter.getDimension(); i++) {
            rate += rateParameter.getParameterValue(i) * processValues[i];
            totalTime += processValues[i];
        }
    }
    rate /= totalTime;

    return rate;
}
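// Minimal sketch (illustrative names, not BEAST API) of the time-weighted average used above:
// each elapsed time timePerEpoch[i] is spent at rate ratePerEpoch[i], and the branch rate is
// the accumulated rate-times-time divided by the total time.
static double weightedMeanRate(double[] ratePerEpoch, double[] timePerEpoch) {
    double rateTimesTime = 0.0;
    double totalTime = 0.0;
    for (int i = 0; i < ratePerEpoch.length; i++) {
        rateTimesTime += ratePerEpoch[i] * timePerEpoch[i];
        totalTime += timePerEpoch[i];
    }
    return rateTimesTime / totalTime;
}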
public DistanceDependentCRPGibbsOperator(
        Parameter links,
        Parameter assignments,
        Parameter chiParameter,
        NPAntigenicLikelihood Likelihood,
        double weight) {

    this.links = links;
    this.assignments = assignments;
    this.modelLikelihood = Likelihood;
    this.chiParameter = chiParameter;
    this.depMatrix = Likelihood.getLogDepMatrix();

    for (int i = 0; i < links.getDimension(); i++) {
        links.setParameterValue(i, i);
    }

    setWeight(weight);

    // double[][] x = modelLikelihood.getData();
    // modelLikelihood.printInformtion(x[0][0]);

    this.m = new double[2];
    m[0] = modelLikelihood.priorMean.getParameterValue(0);
    m[1] = modelLikelihood.priorMean.getParameterValue(1);

    this.v0 = 2;

    this.k0 =
            modelLikelihood.priorPrec.getParameterValue(0)
                    / modelLikelihood.clusterPrec.getParameterValue(0);

    this.T0Inv = new double[2][2];
    T0Inv[0][0] = v0 / modelLikelihood.clusterPrec.getParameterValue(0);
    T0Inv[1][1] = v0 / modelLikelihood.clusterPrec.getParameterValue(0);
    T0Inv[1][0] = 0.0;
    T0Inv[0][1] = 0.0;

    this.logDetT0 = -Math.log(T0Inv[0][0] * T0Inv[1][1]);
}
private void restrictNodePartials(int nodeIndex) {

    Parameter restrictionParameter = partialsMap[nodeIndex];
    if (restrictionParameter == null) {
        return;
    }

    getPartials(nodeIndex, partials);

    double[] restriction = restrictionParameter.getParameterValues();
    final int partialsLengthPerCategory = stateCount * patternCount;
    if (restriction.length == partialsLengthPerCategory) {
        for (int i = 0; i < categoryCount; i++) {
            componentwiseMultiply(
                    partials, partialsLengthPerCategory * i, restriction, 0, partialsLengthPerCategory);
        }
    } else {
        componentwiseMultiply(partials, 0, restriction, 0, partialsLengthPerCategory * categoryCount);
    }

    setPartials(nodeIndex, partials);
}
public Object parseXMLObject(XMLObject xo) throws XMLParseException {

    XMLObject cxo = xo.getChild(COUNTS);
    Parameter counts = (Parameter) cxo.getChild(Parameter.class);

    DirichletDistribution dirichlet = new DirichletDistribution(counts.getParameterValues());

    MultivariateDistributionLikelihood likelihood =
            new MultivariateDistributionLikelihood(dirichlet);

    cxo = xo.getChild(DATA);
    for (int j = 0; j < cxo.getChildCount(); j++) {
        if (cxo.getChild(j) instanceof Parameter) {
            likelihood.addData((Parameter) cxo.getChild(j));
        } else {
            throw new XMLParseException(
                    "illegal element in " + xo.getName() + " element " + cxo.getName());
        }
    }

    return likelihood;
}
protected void getDiagonalRates(double[] diagonalRates) {
    double kappa = substitutionModel.getKappa();
    double[] freq = substitutionModel.getFrequencyModel().getFrequencies();
    double mutationRate = mutationParameter.getParameterValue(0);

    double beta =
            0.5
                    / ((freq[0] + freq[2]) * (freq[1] + freq[3])
                            + kappa * (freq[0] * freq[2] + freq[1] * freq[3]));

    diagonalRates[0] = ((freq[1] + freq[3]) + freq[2] * kappa) * mutationRate * beta;
    diagonalRates[1] = ((freq[0] + freq[2]) + freq[3] * kappa) * mutationRate * beta;
    diagonalRates[2] = ((freq[1] + freq[3]) + freq[0] * kappa) * mutationRate * beta;
    diagonalRates[3] = ((freq[0] + freq[2]) + freq[1] * kappa) * mutationRate * beta;
}
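// Hedged check (plain Java, illustrative only): with beta defined as in the method above, the
// expected total substitution rate sum_i freq[i] * diagonalRates[i] equals mutationRate, which
// is the point of the 0.5 / (...) HKY normalisation. Indices assume A, C, G, T ordering.
static double expectedRate(double[] freq, double kappa, double mutationRate) {
    double beta = 0.5 / ((freq[0] + freq[2]) * (freq[1] + freq[3])
            + kappa * (freq[0] * freq[2] + freq[1] * freq[3]));
    double[] d = new double[4];
    d[0] = ((freq[1] + freq[3]) + freq[2] * kappa) * mutationRate * beta; // A
    d[1] = ((freq[0] + freq[2]) + freq[3] * kappa) * mutationRate * beta; // C
    d[2] = ((freq[1] + freq[3]) + freq[0] * kappa) * mutationRate * beta; // G
    d[3] = ((freq[0] + freq[2]) + freq[1] * kappa) * mutationRate * beta; // T
    return freq[0] * d[0] + freq[1] * d[1] + freq[2] * d[2] + freq[3] * d[3]; // == mutationRate
}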
public Object parseXMLObject(XMLObject xo) throws XMLParseException {

    Parameter kappaParam;
    Parameter switchingRates;
    Parameter hiddenClassRates;
    FrequencyModel freqModel;

    kappaParam = (Parameter) xo.getElementFirstChild(KAPPA);
    switchingRates = (Parameter) xo.getElementFirstChild(AbstractCovarionDNAModel.SWITCHING_RATES);
    hiddenClassRates =
            (Parameter) xo.getElementFirstChild(AbstractCovarionDNAModel.HIDDEN_CLASS_RATES);
    freqModel = (FrequencyModel) xo.getElementFirstChild(AbstractCovarionDNAModel.FREQUENCIES);

    if (!(freqModel.getDataType() instanceof HiddenNucleotides)) {
        throw new IllegalArgumentException("Datatype must be hidden nucleotides!");
    }

    HiddenNucleotides dataType = (HiddenNucleotides) freqModel.getDataType();

    int hiddenStateCount = dataType.getHiddenClassCount();
    int switchingRatesCount = hiddenStateCount * (hiddenStateCount - 1) / 2;

    if (switchingRates.getDimension() != switchingRatesCount) {
        throw new IllegalArgumentException(
                "switching rates parameter must have "
                        + switchingRatesCount
                        + " dimensions, for "
                        + hiddenStateCount
                        + " hidden categories");
    }

    CovarionHKY model =
            new CovarionHKY(dataType, kappaParam, hiddenClassRates, switchingRates, freqModel);
    System.out.println(model);
    return model;
}
/** Construct demographic model with default settings. */
public EmergingEpidemicModel(
        String name,
        Parameter growthRateParameter,
        Parameter generationTimeParameter,
        Parameter generationShapeParameter,
        Parameter offspringDispersionParameter,
        TreeModel treeModel,
        Type units) {

    super(name);

    exponentialGrowth = new ExponentialGrowth(units);

    this.growthRateParameter = growthRateParameter;
    addVariable(growthRateParameter);
    growthRateParameter.addBounds(new Parameter.DefaultBounds(Double.MAX_VALUE, 0.0, 1));

    this.generationTimeParameter = generationTimeParameter;
    addVariable(generationTimeParameter);
    generationTimeParameter.addBounds(new Parameter.DefaultBounds(Double.MAX_VALUE, 0.0, 1));

    this.generationShapeParameter = generationShapeParameter;
    addVariable(generationShapeParameter);
    generationShapeParameter.addBounds(new Parameter.DefaultBounds(Double.MAX_VALUE, 0.0, 1));

    this.offspringDispersionParameter = offspringDispersionParameter;
    addVariable(offspringDispersionParameter);
    offspringDispersionParameter.addBounds(new Parameter.DefaultBounds(Double.MAX_VALUE, 0.0, 1));

    this.treeModel = treeModel;
    addModel(treeModel);

    addStatistic(new N0Statistic("N0"));
    addStatistic(new RStatistic("R"));

    setUnits(units);
}
public MultivariateNormalIndependenceSampler(
        Parameter parameter,
        SelfControlledCaseSeries sccs,
        double setSizeMean,
        double weight,
        double scaleFactor,
        CoercionMode mode) {
    super(mode);
    this.scaleFactor = scaleFactor;
    this.parameter = parameter;
    setWeight(weight);
    dim = parameter.getDimension();
    this.sccs = sccs;
    this.setSizeMean = setSizeMean;
}
public double logLhoodAllGeneTreesInSpeciesTree() {
    double[] llhoodcpts = new double[priorComponents.length];

    double totalweight = 0.0;
    for (int i = 0; i < priorComponents.length; i++) {
        totalweight += priorComponents[i].weight;
    }
    for (int i = 0; i < priorComponents.length; i++) {
        llhoodcpts[i] = Math.log(priorComponents[i].weight / totalweight);
        double sigma = popPriorScale.getParameterValue(0);
        double alpha = priorComponents[i].alpha;
        double beta = sigma * priorComponents[i].beta;
        llhoodcpts[i] += logLhoodAllGeneTreesInSpeciesSubtree(pionodes[rootn], alpha, beta);
    }
    return logsumexp(llhoodcpts);
}
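// Minimal log-sum-exp sketch (assumed semantics of the logsumexp(...) call above, illustrative
// name): returns log(sum_i exp(x[i])) in a numerically stable way by factoring out the maximum,
// which is how a mixture over prior components is combined on the log scale.
static double logSumExp(double[] x) {
    double max = Double.NEGATIVE_INFINITY;
    for (double v : x) {
        max = Math.max(max, v);
    }
    if (Double.isInfinite(max)) {
        return max; // all terms are -Infinity
    }
    double sum = 0.0;
    for (double v : x) {
        sum += Math.exp(v - max);
    }
    return max + Math.log(sum);
}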
/**
 * Calculates the log likelihood of this set of coalescent intervals, given a demographic model.
 *
 * @return coalescent part of density
 */
protected double calculateLogCoalescentLikelihood() {

    if (!intervalsKnown) {
        // intervalsKnown -> false when handleModelChanged event occurs in super.
        wrapSetupIntervals();
        setupGMRFWeights();
        intervalsKnown = true;
    }

    // Matrix operations taken from block update sampler to calculate data likelihood and field prior

    double currentLike = 0;
    double[] currentGamma = popSizeParameter.getParameterValues();

    for (int i = 0; i < fieldLength; i++) {
        currentLike += -currentGamma[i] - sufficientStatistics[i] * Math.exp(-currentGamma[i]);
    }

    return currentLike;
    // + LogNormalDistribution.logPdf(Math.exp(popSizeParameter.getParameterValue(coalescentIntervals.length - 1)), mu, sigma);
}
/** Change the parameter and return the Hastings ratio. */
public double doOperation() throws OperatorFailedException {

    double[] mean = sccs.getMode();
    double[] currentValue = parameter.getParameterValues();
    double[] newValue = new double[dim];

    Set<Integer> updateSet = new HashSet<Integer>();

    if (setSizeMean != -1.0) {
        final int listLength = Poisson.nextPoisson(setSizeMean);
        while (updateSet.size() < listLength) {
            int newInt = MathUtils.nextInt(parameter.getDimension());
            if (!updateSet.contains(newInt)) {
                updateSet.add(newInt);
            }
        }
    } else {
        for (int i = 0; i < dim; ++i) {
            updateSet.add(i);
        }
    }

    double logq = 0;
    for (Integer i : updateSet) {
        newValue[i] = mean[i] + scaleFactor * MathUtils.nextGaussian();
        if (UPDATE_ALL) {
            parameter.setParameterValueQuietly(i, newValue[i]);
        } else {
            parameter.setParameterValue(i, newValue[i]);
        }

        logq +=
                (NormalDistribution.logPdf(currentValue[i], mean[i], scaleFactor)
                        - NormalDistribution.logPdf(newValue[i], mean[i], scaleFactor));
    }

    // for (Integer i : updateSet) {
    //     parameter.setParameterValueQuietly(i, newValue[i]);
    // }

    if (UPDATE_ALL) {
        parameter.setParameterValueNotifyChangedAll(0, parameter.getParameterValue(0));
    }

    return logq;
}
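// Hedged sketch of the Hastings ratio used above: for an independence proposal that draws each
// updated coordinate from Normal(mean[i], scaleFactor) regardless of the current state, the
// ratio is q(old)/q(new), i.e. logPdf(old) - logPdf(new) summed over the updated coordinates.
// Names and the local normalLogPdf helper are illustrative, not BEAST API.
static double independenceHastingsLogRatio(double[] oldValues, double[] newValues,
                                           double[] mean, double sd) {
    double logq = 0.0;
    for (int i = 0; i < oldValues.length; i++) {
        logq += normalLogPdf(oldValues[i], mean[i], sd) - normalLogPdf(newValues[i], mean[i], sd);
    }
    return logq;
}

static double normalLogPdf(double x, double mu, double sd) {
    double z = (x - mu) / sd;
    return -0.5 * z * z - Math.log(sd) - 0.5 * Math.log(2.0 * Math.PI);
}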
public Object parseXMLObject(XMLObject xo) throws XMLParseException {

    final boolean onLogitScale = xo.getAttribute(ON_LOGIT_SCALE, false);

    XMLObject cxo = xo.getChild(COUNTS);
    Parameter countsParam = (Parameter) cxo.getChild(Parameter.class);

    cxo = xo.getChild(PROPORTION);
    Parameter proportionParam = (Parameter) cxo.getChild(Parameter.class);

    if (proportionParam.getDimension() != 1
            && proportionParam.getDimension() != countsParam.getDimension()) {
        throw new XMLParseException(
                "Proportion dimension ("
                        + proportionParam.getDimension()
                        + ") "
                        + "must equal 1 or counts dimension ("
                        + countsParam.getDimension()
                        + ")");
    }

    cxo = xo.getChild(TRIALS);
    Parameter trialsParam;
    if (cxo.hasAttribute(VALUES)) {
        int[] tmp = cxo.getIntegerArrayAttribute(VALUES);
        double[] v = new double[tmp.length];
        for (int i = 0; i < tmp.length; ++i) {
            v[i] = tmp[i];
        }
        trialsParam = new Parameter.Default(v);
    } else {
        trialsParam = (Parameter) cxo.getChild(Parameter.class);
    }

    if (trialsParam.getDimension() != countsParam.getDimension()) {
        throw new XMLParseException(
                "Trials dimension ("
                        + trialsParam.getDimension()
                        + ") must equal counts dimension ("
                        + countsParam.getDimension()
                        + ")");
    }

    return new BinomialLikelihood(trialsParam, proportionParam, countsParam, onLogitScale);
}
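// Hedged sketch (plain Java, illustrative only) of the quantity a binomial likelihood like the
// one constructed above evaluates for a single observation: the log probability of `count`
// successes in `trials` draws with success probability p, with the constant binomial
// coefficient omitted here. When the on-logit-scale flag is set, p would instead be obtained
// from the raw parameter x as 1 / (1 + exp(-x)).
static double binomialLogLikelihood(int count, int trials, double p) {
    return count * Math.log(p) + (trials - count) * Math.log(1.0 - p);
}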
public void initializationReport() {
    System.out.println("Creating a GMRF smoothed skyride model:");
    System.out.println("\tPopulation sizes: " + popSizeParameter.getDimension());
    System.out.println(
            "\tIf you publish results using this model, please reference: "
                    + "Minin, Bloomquist and Suchard (2008) Molecular Biology and Evolution, 25, 1459-1471.");
}
public GMRFSkyrideLikelihood(
        List<Tree> treeList,
        Parameter popParameter,
        Parameter groupParameter,
        Parameter precParameter,
        Parameter lambda,
        Parameter beta,
        MatrixParameter dMatrix,
        boolean timeAwareSmoothing,
        boolean rescaleByRootHeight) {

    super(GMRFSkyrideLikelihoodParser.SKYLINE_LIKELIHOOD);

    this.popSizeParameter = popParameter;
    this.groupSizeParameter = groupParameter;
    this.precisionParameter = precParameter;
    this.lambdaParameter = lambda;
    this.betaParameter = beta;
    this.dMatrix = dMatrix;
    this.timeAwareSmoothing = timeAwareSmoothing;
    this.rescaleByRootHeight = rescaleByRootHeight;

    addVariable(popSizeParameter);
    addVariable(precisionParameter);
    addVariable(lambdaParameter);
    if (betaParameter != null) {
        addVariable(betaParameter);
    }

    setTree(treeList);

    int correctFieldLength = getCorrectFieldLength();

    if (popSizeParameter.getDimension() <= 1) {
        // popSize dimension hasn't been set yet, set it here:
        popSizeParameter.setDimension(correctFieldLength);
    }

    fieldLength = popSizeParameter.getDimension();
    if (correctFieldLength != fieldLength) {
        throw new IllegalArgumentException(
                "Population size parameter should have length " + correctFieldLength);
    }

    // Field length must be set by this point
    wrapSetupIntervals();
    coalescentIntervals = new double[fieldLength];
    storedCoalescentIntervals = new double[fieldLength];
    sufficientStatistics = new double[fieldLength];
    storedSufficientStatistics = new double[fieldLength];

    setupGMRFWeights();

    addStatistic(new DeltaStatistic());

    initializationReport();

    // Force all entries in groupSizeParameter = 1 for compatibility with Tracer
    if (groupSizeParameter != null) {
        for (int i = 0; i < groupSizeParameter.getDimension(); i++) {
            groupSizeParameter.setParameterValue(i, 1.0);
        }
    }
}
public String getOperatorName() {
    return "independentNormalDistribution(" + parameter.getVariableName() + ")";
}
public Object parseXMLObject(XMLObject xo) throws XMLParseException {

    Parameter ratesParameter = null;
    FrequencyModel freqModel = null;

    if (xo.hasChildNamed(FREQUENCIES)) {
        XMLObject cxo = xo.getChild(FREQUENCIES);
        freqModel = (FrequencyModel) cxo.getChild(FrequencyModel.class);
    }

    DataType dataType = DataTypeUtils.getDataType(xo);

    if (dataType == null) dataType = (DataType) xo.getChild(DataType.class);

    // if (xo.hasAttribute(DataType.DATA_TYPE)) {
    //     String dataTypeStr = xo.getStringAttribute(DataType.DATA_TYPE);
    //     if (dataTypeStr.equals(Nucleotides.DESCRIPTION)) {
    //         dataType = Nucleotides.INSTANCE;
    //     } else if (dataTypeStr.equals(AminoAcids.DESCRIPTION)) {
    //         dataType = AminoAcids.INSTANCE;
    //     } else if (dataTypeStr.equals(Codons.DESCRIPTION)) {
    //         dataType = Codons.UNIVERSAL;
    //     } else if (dataTypeStr.equals(TwoStates.DESCRIPTION)) {
    //         dataType = TwoStates.INSTANCE;
    //     }
    // }

    if (dataType == null) dataType = freqModel.getDataType();

    if (dataType != freqModel.getDataType()) {
        throw new XMLParseException(
                "Data type of "
                        + getParserName()
                        + " element does not match that of its frequencyModel.");
    }

    XMLObject cxo = xo.getChild(RATES);
    ratesParameter = (Parameter) cxo.getChild(Parameter.class);

    int states = dataType.getStateCount();
    Logger.getLogger("dr.evomodel")
            .info(" General Substitution Model (stateCount=" + states + ")");

    boolean hasRelativeRates =
            cxo.hasChildNamed(RELATIVE_TO)
                    || (cxo.hasAttribute(RELATIVE_TO) && cxo.getIntegerAttribute(RELATIVE_TO) > 0);

    int nonReversibleRateCount = ((dataType.getStateCount() - 1) * dataType.getStateCount());
    int reversibleRateCount = (nonReversibleRateCount / 2);

    boolean isNonReversible = ratesParameter.getDimension() == nonReversibleRateCount;
    boolean hasIndicator = xo.hasChildNamed(INDICATOR);

    if (!hasRelativeRates) {
        Parameter indicatorParameter = null;

        if (ratesParameter.getDimension() != reversibleRateCount
                && ratesParameter.getDimension() != nonReversibleRateCount) {
            throw new XMLParseException(
                    "Rates parameter in "
                            + getParserName()
                            + " element should have "
                            + reversibleRateCount
                            + " dimensions for reversible model or "
                            + nonReversibleRateCount
                            + " dimensions for non-reversible. "
                            + "However parameter dimension is "
                            + ratesParameter.getDimension());
        }

        if (hasIndicator) { // this is using BSSVS
            cxo = xo.getChild(INDICATOR);
            indicatorParameter = (Parameter) cxo.getChild(Parameter.class);

            if (indicatorParameter.getDimension() != ratesParameter.getDimension()) {
                throw new XMLParseException(
                        "Rates and indicator parameters in "
                                + getParserName()
                                + " element must be the same dimension.");
            }

            boolean randomize =
                    xo.getAttribute(
                            dr.evomodelxml.substmodel.ComplexSubstitutionModelParser.RANDOMIZE, false);
            if (randomize) {
                BayesianStochasticSearchVariableSelection.Utils.randomize(
                        indicatorParameter, dataType.getStateCount(), !isNonReversible);
            }
        }

        if (isNonReversible) {
            // if (xo.hasChildNamed(ROOT_FREQ)) {
            //     cxo = xo.getChild(ROOT_FREQ);
            //     FrequencyModel rootFreq = (FrequencyModel) cxo.getChild(FrequencyModel.class);
            //
            //     if (dataType != rootFreq.getDataType()) {
            //         throw new XMLParseException("Data type of " + getParserName()
            //                 + " element does not match that of its rootFrequencyModel.");
            //     }
            //
            //     Logger.getLogger("dr.evomodel").info(" Using BSSVS Complex Substitution Model");
            //     return new SVSComplexSubstitutionModel(getParserName(), dataType, freqModel,
            //             ratesParameter, indicatorParameter);
            //
            // } else {
            //     throw new XMLParseException("Non-reversible model missing " + ROOT_FREQ + " element");
            // }
            Logger.getLogger("dr.evomodel").info(" Using BSSVS Complex Substitution Model");
            return new SVSComplexSubstitutionModel(
                    getParserName(), dataType, freqModel, ratesParameter, indicatorParameter);
        } else {
            Logger.getLogger("dr.evomodel").info(" Using BSSVS General Substitution Model");
            return new SVSGeneralSubstitutionModel(
                    getParserName(), dataType, freqModel, ratesParameter, indicatorParameter);
        }

    } else {
        // if we have a relativeTo attribute then we use the old GeneralSubstitutionModel

        if (ratesParameter.getDimension() != reversibleRateCount - 1) {
            throw new XMLParseException(
                    "Rates parameter in "
                            + getParserName()
                            + " element should have "
                            + (reversibleRateCount - 1)
                            + " dimensions. However parameter dimension is "
                            + ratesParameter.getDimension());
        }

        int relativeTo = 0;
        if (hasRelativeRates) {
            relativeTo = cxo.getIntegerAttribute(RELATIVE_TO) - 1;
        }

        if (relativeTo < 0 || relativeTo >= reversibleRateCount) {
            throw new XMLParseException(RELATIVE_TO + " must be 1 or greater");
        } else {
            // recover the (row, col) state pair that the flattened upper-triangle index
            // relativeTo refers to, for the log message below
            int t = relativeTo;
            int s = states - 1;
            int row = 0;
            while (t >= s) {
                t -= s;
                s -= 1;
                row += 1;
            }
            int col = t + row + 1;
            Logger.getLogger("dr.evomodel")
                    .info(" Rates relative to " + dataType.getCode(row) + "<->" + dataType.getCode(col));
        }

        if (ratesParameter == null) {
            if (reversibleRateCount == 1) {
                // simplest model for binary traits...
            } else {
                throw new XMLParseException("No rates parameter found in " + getParserName());
            }
        }

        return new GeneralSubstitutionModel(
                getParserName(), dataType, freqModel, ratesParameter, relativeTo);
    }
}
public double getBranchRate(final Tree tree, final NodeRef node) {
    return rateParameter.getParameterValue(0);
}