Code example #1
  /**
   * Post-order (tip-to-root) traversal: fills the per-node scalar precision caches,
   * delegates the partial mean computation to cacheHelper, and accumulates the log
   * remainder densities for the integrated likelihood.
   */
  void postOrderTraverse(
      MultivariateTraitTree treeModel,
      NodeRef node,
      double[][] precisionMatrix,
      double logDetPrecisionMatrix,
      boolean cacheOuterProducts) {

    final int thisNumber = node.getNumber();

    if (treeModel.isExternal(node)) {

      // Fill in precision scalar, traitValues already filled in

      if (missingTraits.isCompletelyMissing(thisNumber)) {
        upperPrecisionCache[thisNumber] = 0;
        lowerPrecisionCache[thisNumber] = 0; // Needed in the pre-order traversal
      } else { // not missing tip trait
        upperPrecisionCache[thisNumber] =
            (1.0 / getRescaledBranchLengthForPrecision(node))
                * Math.pow(cacheHelper.getOUFactor(node), 2);
        lowerPrecisionCache[thisNumber] = Double.POSITIVE_INFINITY;
      }
      return;
    }

    final NodeRef childNode0 = treeModel.getChild(node, 0);
    final NodeRef childNode1 = treeModel.getChild(node, 1);

    postOrderTraverse(
        treeModel, childNode0, precisionMatrix, logDetPrecisionMatrix, cacheOuterProducts);
    postOrderTraverse(
        treeModel, childNode1, precisionMatrix, logDetPrecisionMatrix, cacheOuterProducts);

    final int childNumber0 = childNode0.getNumber();
    final int childNumber1 = childNode1.getNumber();
    final int meanOffset0 = dim * childNumber0;
    final int meanOffset1 = dim * childNumber1;
    final int meanThisOffset = dim * thisNumber;

    final double precision0 = upperPrecisionCache[childNumber0];
    final double precision1 = upperPrecisionCache[childNumber1];
    final double totalPrecision = precision0 + precision1;

    lowerPrecisionCache[thisNumber] = totalPrecision;

    // Multiply child0 and child1 densities

    // Delegate this!
    cacheHelper.computeMeanCaches(
        meanThisOffset,
        meanOffset0,
        meanOffset1,
        totalPrecision,
        precision0,
        precision1,
        missingTraits,
        node,
        childNode0,
        childNode1);
    //        if (totalPrecision == 0) {
    //            System.arraycopy(zeroDimVector, 0, meanCache, meanThisOffset, dim);
    //        } else {
    //            // Delegate in case either child is partially missing
    //            // computeCorrectedWeightedAverage
    //            missingTraits.computeWeightedAverage(meanCache,
    //                    meanOffset0, precision0,
    //                    meanOffset1, precision1,
    //                    meanThisOffset, dim);
    //        }
    // In this delegation, you can call
    // getShiftForBranchLength(node);

    if (!treeModel.isRoot(node)) {
      // Integrate out trait value at this node
      double thisPrecision = 1.0 / getRescaledBranchLengthForPrecision(node);
      if (Double.isInfinite(thisPrecision)) {
        upperPrecisionCache[thisNumber] = totalPrecision;
      } else {
        upperPrecisionCache[thisNumber] =
            (totalPrecision * thisPrecision / (totalPrecision + thisPrecision))
                * Math.pow(cacheHelper.getOUFactor(node), 2);
      }
    }

    // Compute logRemainderDensity

    logRemainderDensityCache[thisNumber] = 0;

    if (precision0 != 0 && precision1 != 0) {

      incrementRemainderDensities(
          precisionMatrix,
          logDetPrecisionMatrix,
          thisNumber,
          meanThisOffset,
          meanOffset0,
          meanOffset1,
          precision0,
          precision1,
          cacheHelper.getOUFactor(childNode0),
          cacheHelper.getOUFactor(childNode1),
          cacheOuterProducts);
    }
  }
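
The traversal above is matrix-free: each node carries only a scalar precision. Child messages multiply, so their precisions add at a node, and integrating the node out over its parent branch shrinks the result by the usual product-over-sum combination. Below is a minimal standalone sketch of that arithmetic under those assumptions; the names (ScalarPruningSketch, tipUpperPrecision, and so on) are illustrative and do not appear in the original class.

// Standalone sketch (not part of the original class) of the scalar precision
// bookkeeping performed by postOrderTraverse.
public final class ScalarPruningSketch {

  // Tip contribution: inverse rescaled branch length, scaled by the squared OU factor.
  static double tipUpperPrecision(double rescaledBranchLength, double ouFactor) {
    return (1.0 / rescaledBranchLength) * ouFactor * ouFactor;
  }

  // Internal node: the two child messages multiply, so their precisions add.
  static double lowerPrecision(double childPrecision0, double childPrecision1) {
    return childPrecision0 + childPrecision1;
  }

  // Integrating the node out over its parent branch gives
  // totalPrecision * branchPrecision / (totalPrecision + branchPrecision).
  static double upperPrecision(double totalPrecision, double branchPrecision, double ouFactor) {
    if (Double.isInfinite(branchPrecision)) {
      return totalPrecision; // zero-length branch: nothing extra to integrate out
    }
    return (totalPrecision * branchPrecision / (totalPrecision + branchPrecision))
        * ouFactor * ouFactor;
  }

  public static void main(String[] args) {
    double p0 = tipUpperPrecision(0.5, 1.0); // 2.0
    double p1 = tipUpperPrecision(1.0, 1.0); // 1.0
    double total = lowerPrecision(p0, p1);   // 3.0
    System.out.println(upperPrecision(total, 4.0, 1.0)); // 12/7 ≈ 1.714
  }
}
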
Code example #2
  /**
   * Pre-order (root-to-tip) traversal: draws the root trait from its marginal
   * distribution, then samples each visited non-root node conditionally on its
   * parent's draw and the post-order caches (fully observed values are copied
   * through unchanged).
   */
  private void preOrderTraverseSample(
      MultivariateTraitTree treeModel,
      NodeRef node,
      int parentIndex,
      double[][] treePrecision,
      double[][] treeVariance) {

    final int thisIndex = node.getNumber();

    if (treeModel.isRoot(node)) {
      // draw root

      double[] rootMean = new double[dimTrait];
      final int rootIndex = treeModel.getRoot().getNumber();
      double rootPrecision = lowerPrecisionCache[rootIndex];

      for (int datum = 0; datum < numData; datum++) {
        // System.arraycopy(meanCache, thisIndex * dim + datum * dimTrait, rootMean, 0, dimTrait);
        System.arraycopy(
            cacheHelper.getMeanCache(), thisIndex * dim + datum * dimTrait, rootMean, 0, dimTrait);

        double[][] variance =
            computeMarginalRootMeanAndVariance(
                rootMean, treePrecision, treeVariance, rootPrecision);

        double[] draw =
            MultivariateNormalDistribution.nextMultivariateNormalVariance(rootMean, variance);

        if (DEBUG_PREORDER) {
          Arrays.fill(draw, 1.0);
        }

        System.arraycopy(draw, 0, drawnStates, rootIndex * dim + datum * dimTrait, dimTrait);

        if (DEBUG) {
          System.err.println("Root mean: " + new Vector(rootMean));
          System.err.println("Root var : " + new Matrix(variance));
          System.err.println("Root draw: " + new Vector(draw));
        }
      }
    } else { // draw conditional on parentState

      if (!missingTraits.isCompletelyMissing(thisIndex)
          && !missingTraits.isPartiallyMissing(thisIndex)) {

        // System.arraycopy(meanCache, thisIndex * dim, drawnStates, thisIndex * dim, dim);
        System.arraycopy(
            cacheHelper.getMeanCache(), thisIndex * dim, drawnStates, thisIndex * dim, dim);

      } else {

        if (missingTraits.isPartiallyMissing(thisIndex)) {
          throw new RuntimeException("Partially missing values are not yet implemented");
        }
        // This code should work for sampling a missing tip trait as well, but needs testing

        // parent trait at drawnStates[parentOffset]
        double precisionToParent = 1.0 / getRescaledBranchLengthForPrecision(node);
        double precisionOfNode = lowerPrecisionCache[thisIndex];
        double totalPrecision = precisionOfNode + precisionToParent;

        double[] mean = Ay; // temporary storage
        double[][] var = tmpM; // temporary storage

        for (int datum = 0; datum < numData; datum++) {

          int parentOffset = parentIndex * dim + datum * dimTrait;
          int thisOffset = thisIndex * dim + datum * dimTrait;

          if (DEBUG) {
            double[] parentValue = new double[dimTrait];
            System.arraycopy(drawnStates, parentOffset, parentValue, 0, dimTrait);
            System.err.println("Parent draw: " + new Vector(parentValue));
            if (parentValue[0] != drawnStates[parentOffset]) {
              throw new RuntimeException("Error in setting indices");
            }
          }

          for (int i = 0; i < dimTrait; i++) {
            mean[i] =
                (drawnStates[parentOffset + i] * precisionToParent
                        //  + meanCache[thisOffset + i] * precisionOfNode) / totalPrecision;
                        + cacheHelper.getMeanCache()[thisOffset + i] * precisionOfNode)
                    / totalPrecision;
            for (int j = 0; j < dimTrait; j++) {
              var[i][j] = treeVariance[i][j] / totalPrecision;
            }
          }
          double[] draw = MultivariateNormalDistribution.nextMultivariateNormalVariance(mean, var);
          System.arraycopy(draw, 0, drawnStates, thisOffset, dimTrait);

          if (DEBUG) {
            System.err.println("Int prec: " + totalPrecision);
            System.err.println("Int mean: " + new Vector(mean));
            System.err.println("Int var : " + new Matrix(var));
            System.err.println("Int draw: " + new Vector(draw));
            System.err.println("");
          }
        }
      }
    }

    if (peel() && !treeModel.isExternal(node)) {
      preOrderTraverseSample(
          treeModel, treeModel.getChild(node, 0), thisIndex, treePrecision, treeVariance);
      preOrderTraverseSample(
          treeModel, treeModel.getChild(node, 1), thisIndex, treePrecision, treeVariance);
    }
  }
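
In the non-root branch above, the draw combines two Gaussian sources of information: the parent's already-drawn value (weight precisionToParent) and the node's post-order partial mean (weight precisionOfNode); the conditional variance is the trait variance scaled by 1 / totalPrecision. A minimal standalone sketch of those conditional moments follows. ConditionalDrawSketch and its method names are invented for illustration, and the MultivariateNormalDistribution draw is left out so the example stays self-contained.

// Standalone sketch (not from the original class) of the conditional moments used in
// the non-root branch of preOrderTraverseSample.
import java.util.Arrays;

public final class ConditionalDrawSketch {

  // Precision-weighted average of the parent's drawn value and this node's post-order mean.
  static double[] conditionalMean(double[] parentDraw, double precisionToParent,
                                  double[] nodeMean, double precisionOfNode) {
    final double totalPrecision = precisionToParent + precisionOfNode;
    final double[] mean = new double[parentDraw.length];
    for (int i = 0; i < mean.length; i++) {
      mean[i] = (parentDraw[i] * precisionToParent + nodeMean[i] * precisionOfNode) / totalPrecision;
    }
    return mean;
  }

  // Conditional variance: the shared trait variance scaled by 1 / totalPrecision.
  static double[][] conditionalVariance(double[][] treeVariance, double totalPrecision) {
    final int dim = treeVariance.length;
    final double[][] var = new double[dim][dim];
    for (int i = 0; i < dim; i++) {
      for (int j = 0; j < dim; j++) {
        var[i][j] = treeVariance[i][j] / totalPrecision;
      }
    }
    return var;
  }

  public static void main(String[] args) {
    double[] mean = conditionalMean(new double[] {1.0, 0.0}, 2.0, new double[] {0.0, 1.0}, 1.0);
    double[][] var = conditionalVariance(new double[][] {{1.0, 0.0}, {0.0, 1.0}}, 3.0);
    System.out.println(Arrays.toString(mean));    // [0.666..., 0.333...]
    System.out.println(Arrays.deepToString(var)); // identity scaled by 1/3
  }
}
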
Code example #3
  /**
   * Allocates the flat per-node caches (means, upper/lower precisions, remainder
   * densities, drawn states) and the reusable temporary storage used by the
   * traversals above.
   */
  public IntegratedMultivariateTraitLikelihood(
      String traitName,
      MultivariateTraitTree treeModel,
      MultivariateDiffusionModel diffusionModel,
      CompoundParameter traitParameter,
      Parameter deltaParameter,
      List<Integer> missingIndices,
      boolean cacheBranches,
      boolean scaleByTime,
      boolean useTreeLength,
      BranchRateModel rateModel,
      List<BranchRateModel> optimalValues,
      BranchRateModel strengthOfSelection,
      Model samplingDensity,
      boolean reportAsMultivariate,
      boolean reciprocalRates) {

    super(
        traitName,
        treeModel,
        diffusionModel,
        traitParameter,
        deltaParameter,
        missingIndices,
        cacheBranches,
        scaleByTime,
        useTreeLength,
        rateModel,
        optimalValues,
        strengthOfSelection,
        samplingDensity,
        reportAsMultivariate,
        reciprocalRates);

    // Delegate caches to helper
    meanCache = new double[dim * treeModel.getNodeCount()];

    if (optimalValues != null) {
      cacheHelper =
          new OUCacheHelper(
              dim * treeModel.getNodeCount(), cacheBranches); // new DriftCacheHelper ....
    } else {
      cacheHelper = new CacheHelper(dim * treeModel.getNodeCount(), cacheBranches);
    }

    drawnStates = new double[dim * treeModel.getNodeCount()];
    upperPrecisionCache = new double[treeModel.getNodeCount()];
    lowerPrecisionCache = new double[treeModel.getNodeCount()];
    logRemainderDensityCache = new double[treeModel.getNodeCount()];

    if (cacheBranches) {
      storedMeanCache = new double[dim * treeModel.getNodeCount()];
      storedUpperPrecisionCache = new double[treeModel.getNodeCount()];
      storedLowerPrecisionCache = new double[treeModel.getNodeCount()];
      storedLogRemainderDensityCache = new double[treeModel.getNodeCount()];
    }

    // Set up reusable temporary storage
    Ay = new double[dimTrait];
    tmpM = new double[dimTrait][dimTrait];
    tmp2 = new double[dimTrait];

    zeroDimVector = new double[dim];

    missingTraits = new MissingTraits.CompletelyMissing(treeModel, missingIndices, dim);
    setTipDataValuesForAllNodes();
  }
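
The constructor sizes every per-node cache as a flat array of length dim * nodeCount, and the traversals above index into it with offsets of the form nodeNumber * dim + datum * dimTrait. The sketch below shows that layout in isolation, assuming dim = numData * dimTrait as the offset arithmetic implies; FlatTraitCache and its methods are hypothetical names, not part of the original code.

// Standalone sketch of the flat per-node cache layout allocated in the constructor above.
import java.util.Arrays;

public final class FlatTraitCache {

  private final int numData;
  private final int dimTrait;
  private final double[] meanCache;

  FlatTraitCache(int nodeCount, int numData, int dimTrait) {
    this.numData = numData;
    this.dimTrait = dimTrait;
    this.meanCache = new double[nodeCount * numData * dimTrait]; // dim * nodeCount
  }

  // Start of the dimTrait-length slice holding trait `datum` at node `nodeNumber`.
  int offset(int nodeNumber, int datum) {
    return nodeNumber * numData * dimTrait + datum * dimTrait;
  }

  void set(int nodeNumber, int datum, double[] values) {
    System.arraycopy(values, 0, meanCache, offset(nodeNumber, datum), dimTrait);
  }

  double[] get(int nodeNumber, int datum) {
    final double[] out = new double[dimTrait];
    System.arraycopy(meanCache, offset(nodeNumber, datum), out, 0, dimTrait);
    return out;
  }

  public static void main(String[] args) {
    FlatTraitCache cache = new FlatTraitCache(4, 2, 3); // 4 nodes, 2 data, 3-dimensional trait
    cache.set(1, 1, new double[] {0.1, 0.2, 0.3});
    System.out.println(Arrays.toString(cache.get(1, 1))); // [0.1, 0.2, 0.3]
  }
}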