protected double getRawBranchRate(final Tree tree, final NodeRef node) {
    double rate = 0.0;
    double[] processValues = getProcessValues(tree, node);
    double totalTime = 0;
    if (indicatorParameter != null) {
        double absRate = rateParameter.getParameterValue(0);
        for (int i = 0; i < indicatorParameter.getDimension(); i++) {
            int index = (int) indicatorParameter.getParameterValue(i);
            if (index == 0) {
                rate += absRate * processValues[i];
            } else {
                rate += (absRate * (1.0 + relativeRatesParameter.getParameterValue(index - 1))) * processValues[i];
            }
            totalTime += processValues[i];
        }
    } else {
        for (int i = 0; i < rateParameter.getDimension(); i++) {
            rate += rateParameter.getParameterValue(i) * processValues[i];
            totalTime += processValues[i];
        }
    }
    rate /= totalTime;
    return rate;
}
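Both branches of the conditional compute the same kind of quantity: a time-weighted average of the rates acting along the branch, with processValues[i] playing the role of the time \tau_i spent under rate r_i. Stated compactly, the loop returns
\[
\bar{r} \;=\; \frac{\sum_i r_i \, \tau_i}{\sum_i \tau_i}.
\]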
protected double calculateLogFieldLikelihood() {
    if (!intervalsKnown) {
        // intervalsKnown -> false when handleModelChanged event occurs in super.
        wrapSetupIntervals();
        setupGMRFWeights();
        intervalsKnown = true;
    }

    double currentLike = 0;
    DenseVector diagonal1 = new DenseVector(fieldLength);
    DenseVector currentGamma = new DenseVector(popSizeParameter.getParameterValues());

    SymmTridiagMatrix currentQ = getScaledWeightMatrix(
            precisionParameter.getParameterValue(0), lambdaParameter.getParameterValue(0));
    currentQ.mult(currentGamma, diagonal1);

    // currentLike += 0.5 * logGeneralizedDeterminant(currentQ) - 0.5 * currentGamma.dot(diagonal1);
    currentLike += 0.5 * (fieldLength - 1) * Math.log(precisionParameter.getParameterValue(0))
            - 0.5 * currentGamma.dot(diagonal1);
    if (lambdaParameter.getParameterValue(0) == 1) {
        currentLike -= (fieldLength - 1) / 2.0 * LOG_TWO_TIMES_PI;
    } else {
        currentLike -= fieldLength / 2.0 * LOG_TWO_TIMES_PI;
    }

    return currentLike;
}
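For the lambdaParameter = 1 branch this is, up to the constant generalized determinant of the unscaled weight matrix (the commented-out term), the log density of an intrinsic GMRF of rank n - 1, with n = fieldLength, precision multiplier \tau = precisionParameter and field \gamma = popSizeParameter, assuming getScaledWeightMatrix already folds \tau into Q:
\[
\log p(\gamma \mid \tau) \;=\; \frac{n-1}{2}\log\tau \;-\; \frac{1}{2}\,\gamma^{T} Q(\tau)\,\gamma \;-\; \frac{n-1}{2}\log(2\pi) \;+\; \text{const}.
\]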
public double calculateWeightedSSE() {
    double weightedSSE = 0;
    double currentPopSize = popSizeParameter.getParameterValue(0);
    double currentInterval = coalescentIntervals[0];
    for (int j = 1; j < fieldLength; j++) {
        double nextPopSize = popSizeParameter.getParameterValue(j);
        double nextInterval = coalescentIntervals[j];
        double delta = nextPopSize - currentPopSize;
        double weight = (currentInterval + nextInterval) / 2.0;
        weightedSSE += delta * delta / weight;
        currentPopSize = nextPopSize;
        currentInterval = nextInterval;
    }
    return weightedSSE;
}
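Writing \gamma_j for the j-th popSizeParameter value and \delta_j for the j-th coalescent interval, the loop accumulates
\[
\mathrm{SSE}_w \;=\; \sum_{j=1}^{n-1} \frac{(\gamma_j - \gamma_{j-1})^2}{(\delta_{j-1} + \delta_j)/2},
\]
i.e. squared jumps in the field penalized by the average length of the two adjacent intervals.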
/** @return the lower limit of this hypervolume in the given dimension. */
public Double getLowerLimit(int dimension) {
    if (dimension != 0) {
        throw new RuntimeException("omega parameters have wrong dimension " + dimension);
    }
    if (lowerOmega == null) {
        return OMEGA_MIN_VALUE;
    }
    return lowerOmega.getParameterValue(dimension);
}
private void printInformtion(Parameter par) {
    StringBuffer sb = new StringBuffer("parameter \n");
    for (int j = 0; j < par.getDimension(); j++) {
        // separate values so the logged line stays readable
        sb.append(par.getParameterValue(j)).append(" ");
    }
    Logger.getLogger("dr.evomodel").info(sb.toString());
}
public int[] connected(int i, Parameter clusteringParameter) {
    int n = clusteringParameter.getDimension();
    // visited holds 1-based indices of the connected component, terminated by a 0 entry
    int[] visited = new int[n + 1];
    visited[0] = i + 1;
    int tv = 1;
    for (int j = 0; j < n; j++) {
        if (visited[j] != 0) {
            int curr = visited[j] - 1;
            /* look forward */
            int forward = (int) clusteringParameter.getParameterValue(curr);
            visited[tv] = forward + 1;
            tv++;
            // Check to see if it isn't already on the list
            for (int ii = 0; ii < tv - 1; ii++) {
                if (visited[ii] == forward + 1) {
                    tv--;
                    visited[tv] = 0;
                }
            }
            /* look back */
            for (int jj = 0; jj < n; jj++) {
                if ((int) clusteringParameter.getParameterValue(jj) == curr) {
                    visited[tv] = jj + 1;
                    tv++;
                    for (int ii = 0; ii < tv - 1; ii++) {
                        if (visited[ii] == jj + 1) {
                            tv--;
                            visited[tv] = 0;
                        }
                    }
                }
            }
        }
    }
    return visited;
}
public double getStatisticValue(int dim) {
    double val;
    // x is the location index, y is the dimension within that location
    int x = getColumnIndex(dim);
    int y = getRowIndex(dim);
    Parameter loc = locationsParameter.getParameter(x);
    if (y == 0) {
        val = loc.getParameterValue(y)
                + locationDriftParameter.getParameterValue(0) * offsetsParameter.getParameterValue(x);
    } else {
        val = loc.getParameterValue(y);
    }
    return val;
}
protected void getDiagonalRates(double[] diagonalRates) {
    double kappa = substitutionModel.getKappa();
    double[] freq = substitutionModel.getFrequencyModel().getFrequencies();
    double mutationRate = mutationParameter.getParameterValue(0);

    // HKY normalizer: scales the rate matrix so the expected substitution rate is one
    double beta = 0.5 / ((freq[0] + freq[2]) * (freq[1] + freq[3])
            + kappa * (freq[0] * freq[2] + freq[1] * freq[3]));

    diagonalRates[0] = ((freq[1] + freq[3]) + freq[2] * kappa) * mutationRate * beta;
    diagonalRates[1] = ((freq[0] + freq[2]) + freq[3] * kappa) * mutationRate * beta;
    diagonalRates[2] = ((freq[1] + freq[3]) + freq[0] * kappa) * mutationRate * beta;
    diagonalRates[3] = ((freq[0] + freq[2]) + freq[1] * kappa) * mutationRate * beta;
}
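Assuming the conventional A, C, G, T ordering of the frequency array, beta is the usual HKY normalizer and each diagonalRates entry is the total rate of leaving the corresponding state; for A, for example:
\[
\beta \;=\; \frac{1}{2\big[(\pi_A+\pi_G)(\pi_C+\pi_T) + \kappa(\pi_A\pi_G + \pi_C\pi_T)\big]},
\qquad
-q_{AA} \;=\; \mu\,\beta\,(\pi_C + \pi_T + \kappa\,\pi_G),
\]
with the other three entries following the same pattern.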
public double logLhoodAllGeneTreesInSpeciesTree() {
    double[] llhoodcpts = new double[priorComponents.length];

    double totalweight = 0.0;
    for (int i = 0; i < priorComponents.length; i++) {
        totalweight += priorComponents[i].weight;
    }
    for (int i = 0; i < priorComponents.length; i++) {
        llhoodcpts[i] = Math.log(priorComponents[i].weight / totalweight);
        double sigma = popPriorScale.getParameterValue(0);
        double alpha = priorComponents[i].alpha;
        double beta = sigma * priorComponents[i].beta;
        llhoodcpts[i] += logLhoodAllGeneTreesInSpeciesSubtree(pionodes[rootn], alpha, beta);
    }
    return logsumexp(llhoodcpts);
}
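logsumexp is not shown here; a minimal sketch of the numerically stable helper this call presumably relies on (max-shifted log-sum-exp), under the assumption that it simply returns the log of the summed exponentials of its argument:

private static double logsumexp(double[] x) {
    // Factor out the maximum so the exponentials cannot overflow
    double max = Double.NEGATIVE_INFINITY;
    for (double v : x) {
        if (v > max) {
            max = v;
        }
    }
    if (Double.isInfinite(max)) {
        return max; // every component is -infinity
    }
    double sum = 0.0;
    for (double v : x) {
        sum += Math.exp(v - max);
    }
    return max + Math.log(sum);
}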
/** change the parameter and return the hastings ratio. */
public double doOperation() throws OperatorFailedException {
    double[] mean = sccs.getMode();
    double[] currentValue = parameter.getParameterValues();
    double[] newValue = new double[dim];

    Set<Integer> updateSet = new HashSet<Integer>();
    if (setSizeMean != -1.0) {
        final int listLength = Poisson.nextPoisson(setSizeMean);
        while (updateSet.size() < listLength) {
            int newInt = MathUtils.nextInt(parameter.getDimension());
            if (!updateSet.contains(newInt)) {
                updateSet.add(newInt);
            }
        }
    } else {
        for (int i = 0; i < dim; ++i) {
            updateSet.add(i);
        }
    }

    double logq = 0;
    for (Integer i : updateSet) {
        newValue[i] = mean[i] + scaleFactor * MathUtils.nextGaussian();
        if (UPDATE_ALL) {
            parameter.setParameterValueQuietly(i, newValue[i]);
        } else {
            parameter.setParameterValue(i, newValue[i]);
        }
        logq += (NormalDistribution.logPdf(currentValue[i], mean[i], scaleFactor)
                - NormalDistribution.logPdf(newValue[i], mean[i], scaleFactor));
    }

    // for (Integer i : updateSet) {
    //     parameter.setParameterValueQuietly(i, newValue[i]);
    // }

    if (UPDATE_ALL) {
        parameter.setParameterValueNotifyChangedAll(0, parameter.getParameterValue(0));
    }

    return logq;
}
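Each updated coordinate is proposed independently as x'_i ~ Normal(mean_i, scaleFactor^2), centred on the conditional mode from sccs, so the accumulated logq is the log Hastings ratio of this independence-style proposal over the updated set U:
\[
\log q \;=\; \sum_{i \in U} \Big[ \log\phi\big(x_i \mid \mu_i, \sigma\big) \;-\; \log\phi\big(x'_i \mid \mu_i, \sigma\big) \Big],
\qquad \sigma = \texttt{scaleFactor}.
\]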
public double getBranchRate(final Tree tree, final NodeRef node) {
    return rateParameter.getParameterValue(0);
}
/** change the parameter and return the hastings ratio. */
public final double doOperation() {
    int index = MathUtils.nextInt(links.getDimension());
    int oldGroup = (int) assignments.getParameterValue(index);

    /*
     * Point customer 'index' at itself and move everything connected to it
     * into a new group (the lowest-numbered empty one)
     */
    int minEmp = minEmpty(modelLikelihood.getLogLikelihoodsVector());
    links.setParameterValue(index, index);
    int[] visited = connected(index, links);
    int ii = 0;
    while (visited[ii] != 0) {
        assignments.setParameterValue(visited[ii] - 1, minEmp);
        ii++;
    }

    /*
     * Adjust the likelihood vector for the separated group
     */
    modelLikelihood.setLogLikelihoodsVector(oldGroup, getLogLikGroup(oldGroup));
    modelLikelihood.setLogLikelihoodsVector(minEmp, getLogLikGroup(minEmp));
    int maxFull = maxFull(modelLikelihood.getLogLikelihoodsVector());
    double[] liks = modelLikelihood.getLogLikelihoodsVector();

    /*
     * Compute likelihoods of merged groups
     */
    double[] crossedLiks = new double[maxFull + 1];
    for (int ll = 0; ll < maxFull + 1; ll++) {
        if (ll != minEmp) {
            crossedLiks[ll] = getLogLik2Group(ll, minEmp);
        }
    }

    /*
     * Add logPrior
     */
    double[] logP = new double[links.getDimension()];
    for (int jj = 0; jj < links.getDimension(); jj++) {
        logP[jj] += depMatrix[index][jj];
        int n = (int) assignments.getParameterValue(jj);
        if (n != minEmp) {
            logP[jj] += crossedLiks[n] - liks[n] - liks[minEmp];
        }
    }
    logP[index] = Math.log(chiParameter.getParameterValue(0));

    /*
     * Very small probabilities could optionally be set to zero here
     */

    /*
     * Gibbs sampling
     */
    this.rescale(logP); // Improve numerical stability
    this.exp(logP); // Transform back to probability-scale
    int k = MathUtils.randomChoicePDF(logP);
    links.setParameterValue(index, k);
    int newGroup = (int) assignments.getParameterValue(k);
    ii = 0;
    while (visited[ii] != 0) {
        assignments.setParameterValue(visited[ii] - 1, newGroup);
        ii++;
    }

    /*
     * Update the conditional likelihood vector
     */
    modelLikelihood.setLogLikelihoodsVector(newGroup, getLogLikGroup(newGroup));
    if (newGroup != minEmp) {
        modelLikelihood.setLogLikelihoodsVector(minEmp, 0);
    }
    sampleMeans(maxFull);
    return 0.0;
}
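rescale and exp are operator helpers not shown above; a minimal sketch of what they are assumed to do, shifting by the maximum before exponentiating so randomChoicePDF receives well-scaled, unnormalized probabilities:

private void rescale(double[] logP) {
    // Shift all log-values so that the largest becomes zero
    double max = logP[0];
    for (double v : logP) {
        if (v > max) {
            max = v;
        }
    }
    for (int i = 0; i < logP.length; i++) {
        logP[i] -= max;
    }
}

private void exp(double[] logP) {
    // Exponentiate in place: log-scale values become unnormalized probabilities
    for (int i = 0; i < logP.length; i++) {
        logP[i] = Math.exp(logP[i]);
    }
}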
public void sampleMeans(int maxFull) {

    double[][] means = new double[maxFull + 2][2];

    // sample mean vector for each cluster
    for (int i = 0; i < maxFull + 1; i++) {
        // Find all elements in cluster
        int ngroup = 0;
        for (int ii = 0; ii < assignments.getDimension(); ii++) {
            if ((int) assignments.getParameterValue(ii) == i) {
                ngroup++;
            }
        }
        if (ngroup != 0) {
            double[][] group = new double[ngroup][2];
            double[] groupMean = new double[2];
            int count = 0;
            for (int ii = 0; ii < assignments.getDimension(); ii++) {
                if ((int) assignments.getParameterValue(ii) == i) {
                    group[count][0] = modelLikelihood.getData()[ii][0];
                    group[count][1] = modelLikelihood.getData()[ii][1];
                    groupMean[0] += group[count][0];
                    groupMean[1] += group[count][1];
                    count += 1;
                }
            }
            groupMean[0] /= ngroup;
            groupMean[1] /= ngroup;

            double kn = k0 + ngroup;
            double vn = v0 + ngroup;

            double[][] sumdif = new double[2][2];
            for (int jj = 0; jj < ngroup; jj++) {
                sumdif[0][0] += (group[jj][0] - groupMean[0]) * (group[jj][0] - groupMean[0]);
                sumdif[0][1] += (group[jj][0] - groupMean[0]) * (group[jj][1] - groupMean[1]);
                sumdif[1][0] += (group[jj][0] - groupMean[0]) * (group[jj][1] - groupMean[1]);
                sumdif[1][1] += (group[jj][1] - groupMean[1]) * (group[jj][1] - groupMean[1]);
            }

            double[][] TnInv = new double[2][2];
            TnInv[0][0] = T0Inv[0][0] + ngroup * (k0 / kn) * (groupMean[0] - m[0]) * (groupMean[0] - m[0]) + sumdif[0][0];
            TnInv[0][1] = T0Inv[0][1] + ngroup * (k0 / kn) * (groupMean[1] - m[1]) * (groupMean[0] - m[0]) + sumdif[0][1];
            TnInv[1][0] = T0Inv[1][0] + ngroup * (k0 / kn) * (groupMean[0] - m[0]) * (groupMean[1] - m[1]) + sumdif[1][0];
            TnInv[1][1] = T0Inv[1][1] + ngroup * (k0 / kn) * (groupMean[1] - m[1]) * (groupMean[1] - m[1]) + sumdif[1][1];
            Matrix Tn = new SymmetricMatrix(TnInv).inverse();

            // compute posterior mean
            double[] posteriorMean = new double[2];
            posteriorMean[0] = (k0 * m[0] + ngroup * groupMean[0]) / (k0 + ngroup);
            posteriorMean[1] = (k0 * m[1] + ngroup * groupMean[1]) / (k0 + ngroup);

            // compute posterior precision
            double[][] posteriorPrecision = new WishartDistribution(vn, Tn.toComponents()).nextWishart();
            posteriorPrecision[0][0] *= kn;
            posteriorPrecision[1][0] *= kn;
            posteriorPrecision[0][1] *= kn;
            posteriorPrecision[1][1] *= kn;

            double[] sample = new MultivariateNormalDistribution(posteriorMean, posteriorPrecision)
                    .nextMultivariateNormal();
            means[i][0] = sample[0];
            means[i][1] = sample[1];
        }
    }

    // Fill in cluster means for each observation
    for (int j = 0; j < assignments.getDimension(); j++) {
        double[] group = new double[2];
        group[0] = means[(int) assignments.getParameterValue(j)][0];
        group[1] = means[(int) assignments.getParameterValue(j)][1];
        modelLikelihood.setMeans(j, group);
    }
}
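The per-cluster block is the standard conjugate Normal-inverse-Wishart update for a cluster of n = ngroup bivariate points with sample mean \bar{x}; writing T0Inv = \Lambda_0, m for the prior mean, and k0, v0 for the prior strength and degrees of freedom:
\[
\kappa_n = \kappa_0 + n,\qquad
\nu_n = \nu_0 + n,\qquad
m_n = \frac{\kappa_0 m + n\bar{x}}{\kappa_0 + n},\qquad
\Lambda_n = \Lambda_0 + \sum_{k}(x_k-\bar{x})(x_k-\bar{x})^{T} + \frac{\kappa_0 n}{\kappa_n}(\bar{x}-m)(\bar{x}-m)^{T},
\]
after which the cluster mean is drawn from a normal with mean m_n and precision \kappa_n times a Wishart(\nu_n, \Lambda_n^{-1}) draw, matching the code above.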
public double getLogLik2Group(int group1, int group2) {
    double L = 0.0;

    int ngroup1 = 0;
    for (int i = 0; i < assignments.getDimension(); i++) {
        if ((int) assignments.getParameterValue(i) == group1) {
            ngroup1++;
        }
    }

    int ngroup2 = 0;
    for (int i = 0; i < assignments.getDimension(); i++) {
        if ((int) assignments.getParameterValue(i) == group2) {
            ngroup2++;
        }
    }

    int ngroup = ngroup1 + ngroup2;
    if (ngroup != 0) {
        double[][] group = new double[ngroup][2];
        double[] mean = new double[2];

        int count = 0;
        for (int i = 0; i < assignments.getDimension(); i++) {
            if ((int) assignments.getParameterValue(i) == group1) {
                group[count][0] = modelLikelihood.getData()[i][0];
                group[count][1] = modelLikelihood.getData()[i][1];
                mean[0] += group[count][0];
                mean[1] += group[count][1];
                count += 1;
            }
        }
        for (int i = 0; i < assignments.getDimension(); i++) {
            if ((int) assignments.getParameterValue(i) == group2) {
                group[count][0] = modelLikelihood.getData()[i][0];
                group[count][1] = modelLikelihood.getData()[i][1];
                mean[0] += group[count][0];
                mean[1] += group[count][1];
                count += 1;
            }
        }
        mean[0] /= ngroup;
        mean[1] /= ngroup;

        double kn = k0 + ngroup;
        double vn = v0 + ngroup;

        double[][] sumdif = new double[2][2];
        for (int i = 0; i < ngroup; i++) {
            sumdif[0][0] += (group[i][0] - mean[0]) * (group[i][0] - mean[0]);
            sumdif[0][1] += (group[i][0] - mean[0]) * (group[i][1] - mean[1]);
            sumdif[1][0] += (group[i][0] - mean[0]) * (group[i][1] - mean[1]);
            sumdif[1][1] += (group[i][1] - mean[1]) * (group[i][1] - mean[1]);
        }

        double[][] TnInv = new double[2][2];
        TnInv[0][0] = T0Inv[0][0] + ngroup * (k0 / kn) * (mean[0] - m[0]) * (mean[0] - m[0]) + sumdif[0][0];
        TnInv[0][1] = T0Inv[0][1] + ngroup * (k0 / kn) * (mean[1] - m[1]) * (mean[0] - m[0]) + sumdif[0][1];
        TnInv[1][0] = T0Inv[1][0] + ngroup * (k0 / kn) * (mean[0] - m[0]) * (mean[1] - m[1]) + sumdif[1][0];
        TnInv[1][1] = T0Inv[1][1] + ngroup * (k0 / kn) * (mean[1] - m[1]) * (mean[1] - m[1]) + sumdif[1][1];

        double logDetTn = -Math.log(TnInv[0][0] * TnInv[1][1] - TnInv[0][1] * TnInv[1][0]);

        L += -(ngroup) * Math.log(Math.PI);
        L += Math.log(k0) - Math.log(kn);
        L += (vn / 2) * logDetTn - (v0 / 2) * logDetT0;
        L += GammaFunction.lnGamma(vn / 2) + GammaFunction.lnGamma((vn / 2) - 0.5);
        L += -GammaFunction.lnGamma(v0 / 2) - GammaFunction.lnGamma((v0 / 2) - 0.5);
    }
    return L;
}
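For reference, the terms summed into L match the usual marginal likelihood of n = ngroup bivariate observations under the same Normal-inverse-Wishart prior, assuming logDetT0 holds \log|T_0| = -\log|\Lambda_0| (the \pi^{1/2} factors of the bivariate multivariate gamma function \Gamma_2 cancel in the ratio, leaving the four lnGamma terms):
\[
\log p(D) \;=\; -n\log\pi \;+\; \log\frac{\kappa_0}{\kappa_n}
\;+\; \frac{\nu_0}{2}\log|\Lambda_0| \;-\; \frac{\nu_n}{2}\log|\Lambda_n|
\;+\; \log\frac{\Gamma_2(\nu_n/2)}{\Gamma_2(\nu_0/2)},
\]
with \Lambda_n built exactly as TnInv above and logDetTn = -\log|\Lambda_n|.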