// Compute the factor that rescales the mean branch rate to normalizeBranchRateTo.
  private void computeFactor() {

    // scale mean rate to 1.0 or separate parameter

    // Sum of (rate * branchLength) and of branchLength over all non-root branches.
    double treeRate = 0.0;
    double treeTime = 0.0;

    // normalizeBranchRateTo = 1.0;
    for (int i = 0; i < treeModel.getNodeCount(); i++) {
      NodeRef node = treeModel.getNode(i);
      if (!treeModel.isRoot(node)) {
        // d                int rateCategory = (int)
        // Math.round(rateCategories.getNodeValue(treeModel, node));
        // d                 treeRate += rates[rateCategory] * treeModel.getBranchLength(node);
        treeTime += treeModel.getBranchLength(node);

        // d              System.out.println("rates and time\t" + rates[rateCategory] + "\t" +
        // treeModel.getBranchLength(node));
      }
    }
    // treeRate /= treeTime;

    // NOTE(review): the treeRate accumulation above is commented out, so treeRate is
    // always 0.0 here and scaleFactor evaluates to Infinity (normalizeBranchRateTo / 0).
    // This looks like work-in-progress; confirm intended behavior before relying on it.
    scaleFactor = normalizeBranchRateTo / (treeRate / treeTime);
    System.out.println("scaleFactor\t\t\t\t\t" + scaleFactor);
  }
// ---- Beispiel #2 (0) — example-separator artifact from the code-sample aggregator ----
  /**
   * Fills the {@code ibdForward} table for every node: the forward identity-by-descent
   * contribution of the subtree below each node, decayed along the branch to its parent.
   *
   * <p>For a tip the entry is exp(-diag[state] * branchTime); for an internal node it is the
   * sum of the children's forward entries, decayed the same way. The root contributes nothing.
   *
   * <p>NOTE(review): internal-node entries read child entries by node number, so this relies on
   * children having lower node ids than their parents — confirm against the TreeModel numbering.
   */
  public void forwardIBD() {
    int nodeTotal = treeModel.getNodeCount();
    int states = substitutionModel.getStateCount();
    getDiagonalRates(diag);
    for (int nodeId = 0; nodeId < nodeTotal; ++nodeId) {
      NodeRef node = treeModel.getNode(nodeId);
      NodeRef parent = treeModel.getParent(node);
      if (parent == null) {
        continue; // root: no branch above it, nothing to propagate
      }
      // Expected substitutions along the branch: rate * height difference.
      double branchTime =
          branchRateModel.getBranchRate(treeModel, node)
              * (treeModel.getNodeHeight(parent) - treeModel.getNodeHeight(node));
      if (treeModel.isExternal(node)) {
        for (int state = 0; state < states; ++state) {
          ibdForward[nodeId][state] = Math.exp(-diag[state] * branchTime);
        }
      } else {
        int kids = treeModel.getChildCount(node);
        for (int state = 0; state < states; ++state) {
          double sum = 0.0;
          for (int c = 0; c < kids; ++c) {
            sum += ibdForward[treeModel.getChild(node, c).getNumber()][state];
          }
          ibdForward[nodeId][state] = sum * Math.exp(-diag[state] * branchTime);
        }
      }
    }
  }
// ---- Beispiel #3 (0) — example-separator artifact from the code-sample aggregator ----
 /**
  * Fills the {@code ibdBackward} table by a pre-order recursion from the root: each child's
  * backward entry combines its parent's backward entry with the forward entries of its
  * siblings, decayed along the child's branch. Call with {@code null} to start at the root.
  *
  * <p>Assumes {@link #forwardIBD()} has already populated {@code ibdForward}.
  *
  * @param node subtree root to process, or {@code null} to begin at the tree root
  */
 public void backwardIBD(NodeRef node) {
   int stateCount = substitutionModel.getStateCount();
   if (node == null) {
     // Root base case: nothing above the root, so its backward entries are zero.
     node = treeModel.getRoot();
     int nodeId = node.getNumber();
     for (int state = 0; state < stateCount; ++state) {
       ibdBackward[nodeId][state] = 0;
     }
   }
   getDiagonalRates(diag);
   int childCount = treeModel.getChildCount(node);
   int nodeId = node.getNumber();
   for (int child = 0; child < childCount; ++child) {
     NodeRef childNode = treeModel.getChild(node, child);
     int childNodeId = childNode.getNumber();
     // Expected substitutions along the child's branch: rate * height difference.
     double branchTime =
         branchRateModel.getBranchRate(treeModel, childNode)
             * (treeModel.getNodeHeight(node) - treeModel.getNodeHeight(childNode));
     for (int state = 0; state < stateCount; ++state) {
       // Parent's backward contribution plus the forward contribution of every sibling,
       // all decayed along this child's branch.
       ibdBackward[childNodeId][state] = ibdBackward[nodeId][state];
       for (int sibling = 0; sibling < childCount; ++sibling) {
         if (sibling != child) {
           int siblingId = treeModel.getChild(node, sibling).getNumber();
           ibdBackward[childNodeId][state] += ibdForward[siblingId][state];
         }
       }
       ibdBackward[childNodeId][state] *= Math.exp(-diag[state] * branchTime);
     }
   }
   // Recurse only after all direct children are filled (children read the parent's entry).
   for (int child = 0; child < childCount; ++child) {
     NodeRef childNode = treeModel.getChild(node, child);
     backwardIBD(childNode);
   }
 }
// ---- Beispiel #4 (0) — example-separator artifact from the code-sample aggregator ----
  /**
   * Computes the per-tip IBD weight: the frequency-weighted sum of the tip's backward IBD
   * entries. Work buffers are allocated lazily on first call, then reused.
   */
  public void expectedIBD() {
    final int stateCount = substitutionModel.getStateCount();
    final int nodeCount = treeModel.getNodeCount();
    // Lazy one-time allocation of the shared work buffers.
    if (ibdweights == null) {
      ibdweights = new double[treeModel.getExternalNodeCount()];
      ibdForward = new double[nodeCount][stateCount];
      ibdBackward = new double[nodeCount][stateCount];
      diag = new double[stateCount];
    }

    double[] freq = substitutionModel.getFrequencyModel().getFrequencies();
    // Two-pass computation: forward sweep up the tree, then backward sweep down.
    forwardIBD();
    backwardIBD(null);
    final int tipCount = treeModel.getExternalNodeCount();
    for (int tip = 0; tip < tipCount; ++tip) {
      double weight = 0.0;
      for (int state = 0; state < stateCount; ++state) {
        weight += ibdBackward[tip][state] * freq[state];
      }
      ibdweights[tip] = weight;
    }
  }
  /**
   * Whether {@code node} may be moved by this operator.
   *
   * <p>Eligible when (a) the node's parent shares a partition with either the grandparent or the
   * parent's other child — so detaching the parent leaves the remaining transmission-tree links
   * intact — and (b) the node and its parent are in different partitions, so the move cannot
   * disconnect anything.
   */
  private boolean eligibleForMove(NodeRef node, TreeModel tree, BranchMapModel branchMap) {
    NodeRef parent = tree.getParent(node);
    NodeRef grandparent = tree.getParent(parent);
    int parentPartition = branchMap.get(parent.getNumber());

    boolean parentMatchesGrandparent =
        grandparent != null && parentPartition == branchMap.get(grandparent.getNumber());
    // Evaluated lazily so getOtherChild is only consulted when the grandparent check fails,
    // matching the original short-circuit.
    boolean parentLinkPreserved =
        parentMatchesGrandparent
            || parentPartition == branchMap.get(getOtherChild(tree, parent, node).getNumber());
    boolean nodeDiffersFromParent = parentPartition != branchMap.get(node.getNumber());

    return parentLinkPreserved && nodeDiffersFromParent;
  }
  /**
   * Builds one GY94 codon substitution model per non-root branch and records each branch's
   * class index in {@code branchAssignmentMap}.
   *
   * <p>Each branch gets its own omega parameter valued at {@code baseOmega * midpointHeight}
   * plus exponential noise with rate {@code rate}; kappa and the frequency model are shared
   * across all branch models.
   */
  private void setup() {

    DataType dataType = baseSubstitutionModel.getDataType();
    FrequencyModel freqModel = baseSubstitutionModel.getFrequencyModel();
    Parameter kappaParameter = new Parameter.Default("kappa", 1, baseSubstitutionModel.getKappa());

    substitutionModels = new LinkedList<SubstitutionModel>();
    branchAssignmentMap = new LinkedHashMap<NodeRef, Integer>();

    int branchClass = 0;
    for (NodeRef node : treeModel.getNodes()) {
      if (treeModel.isRoot(node)) {
        continue; // the root has no branch above it
      }

      // Midpoint height of the branch above this node.
      double midpointTime =
          0.5
              * (treeModel.getNodeHeight(treeModel.getParent(node))
                  + treeModel.getNodeHeight(node));

      // Deterministic part of the branch omega.
      double fixedPart = baseSubstitutionModel.getOmega() * midpointTime;

      // Exponential(rate) noise via inverse-CDF sampling.
      // Math.exp((random.nextGaussian() * stdev + mean));
      double noise = Math.log(1 - random.nextDouble()) / (-rate);

      Parameter omegaParameter = new Parameter.Default("omega", 1, fixedPart + noise);

      GY94CodonModel gy94 =
          new GY94CodonModel((Codons) dataType, omegaParameter, kappaParameter, freqModel);

      substitutionModels.add(gy94);
      branchAssignmentMap.put(node, branchClass++);
    } // END: nodes loop
  } // END: setup
  /**
   * Re-draws the height of every non-root internal node uniformly between its taller child and
   * its parent, then fires a tree-changed event.
   *
   * <p>NOTE(review): reads exactly children 0 and 1, so this assumes a strictly binary tree.
   */
  public static void checkTree(TreeModel treeModel) {

    // todo Should only be run if there exists a zero-length interval

    for (int i = 0; i < treeModel.getInternalNodeCount(); i++) {
      NodeRef node = treeModel.getInternalNode(i);
      if (node == treeModel.getRoot()) {
        continue; // root height is left untouched
      }
      double parentHeight = treeModel.getNodeHeight(treeModel.getParent(node));
      double maxChildHeight =
          Math.max(
              treeModel.getNodeHeight(treeModel.getChild(node, 0)),
              treeModel.getNodeHeight(treeModel.getChild(node, 1)));
      // Uniform draw in (maxChildHeight, parentHeight).
      double newHeight =
          maxChildHeight + MathUtils.nextDouble() * (parentHeight - maxChildHeight);
      treeModel.setNodeHeight(node, newHeight);
    }
    treeModel.pushTreeChangedEvent();
  }
  /**
   * Proposes a subtree-prune-and-regraft style move: detaches the parent of a randomly chosen
   * eligible node and reattaches it above a second randomly chosen node, then re-draws the
   * parent's height and repaints its partition assignment.
   *
   * @throws OperatorFailedException if the draw would involve the root or an invalid topology
   */
  public void proposeTree() throws OperatorFailedException {
    TreeModel tree = c2cLikelihood.getTreeModel();
    BranchMapModel branchMap = c2cLikelihood.getBranchMap();
    NodeRef i;
    double oldMinAge, newMinAge, newRange, oldRange, newAge, q;
    // choose a random node avoiding root, and nodes that are ineligible for this move because they
    // have nowhere to
    // go
    final int nodeCount = tree.getNodeCount();
    do {
      i = tree.getNode(MathUtils.nextInt(nodeCount));
    } while (tree.getRoot() == i || !eligibleForMove(i, tree, branchMap));
    final NodeRef iP = tree.getParent(i);

    // this one can go anywhere

    NodeRef j = tree.getNode(MathUtils.nextInt(tree.getNodeCount()));
    NodeRef k = tree.getParent(j);

    // Resample j until its parent k is strictly above i (or j is the root) and j != i.
    while ((k != null && tree.getNodeHeight(k) <= tree.getNodeHeight(i)) || (i == j)) {
      j = tree.getNode(MathUtils.nextInt(tree.getNodeCount()));
      k = tree.getParent(j);
    }

    if (iP == tree.getRoot() || j == tree.getRoot()) {
      throw new OperatorFailedException("Root changes not allowed!");
    }

    if (k == iP || j == iP || k == i) throw new OperatorFailedException("move failed");

    final NodeRef CiP = getOtherChild(tree, iP, i);
    NodeRef PiP = tree.getParent(iP);

    // Hastings ratio: ratio of the new and old valid height ranges for iP.
    newMinAge = Math.max(tree.getNodeHeight(i), tree.getNodeHeight(j));
    newRange = tree.getNodeHeight(k) - newMinAge;
    newAge = newMinAge + (MathUtils.nextDouble() * newRange);
    oldMinAge = Math.max(tree.getNodeHeight(i), tree.getNodeHeight(CiP));
    oldRange = tree.getNodeHeight(PiP) - oldMinAge;
    q = newRange / Math.abs(oldRange);

    // need to account for the random repainting of iP

    if (branchMap.get(PiP.getNumber()) != branchMap.get(CiP.getNumber())) {
      q *= 0.5;
    }

    if (branchMap.get(k.getNumber()) != branchMap.get(j.getNumber())) {
      q *= 2;
    }

    tree.beginTreeEdit();

    // NOTE(review): the two branches below for j == root and iP == root are unreachable —
    // both cases already throw OperatorFailedException above. Only the final else executes.
    if (j == tree.getRoot()) {

      // 1. remove edges <iP, CiP>
      tree.removeChild(iP, CiP);
      tree.removeChild(PiP, iP);

      // 2. add edges <k, iP>, <iP, j>, <PiP, CiP>
      tree.addChild(iP, j);
      tree.addChild(PiP, CiP);

      // iP is the new root
      tree.setRoot(iP);

    } else if (iP == tree.getRoot()) {

      // 1. remove edges <k, j>, <iP, CiP>, <PiP, iP>
      tree.removeChild(k, j);
      tree.removeChild(iP, CiP);

      // 2. add edges <k, iP>, <iP, j>, <PiP, CiP>
      tree.addChild(iP, j);
      tree.addChild(k, iP);

      // CiP is the new root
      tree.setRoot(CiP);

    } else {
      // 1. remove edges <k, j>, <iP, CiP>, <PiP, iP>
      tree.removeChild(k, j);
      tree.removeChild(iP, CiP);
      tree.removeChild(PiP, iP);

      // 2. add edges <k, iP>, <iP, j>, <PiP, CiP>
      tree.addChild(iP, j);
      tree.addChild(k, iP);
      tree.addChild(PiP, CiP);
    }

    tree.setNodeHeight(iP, newAge);

    tree.endTreeEdit();

    //
    logq = Math.log(q);

    // repaint the parent to match either its new parent or its new child (50% chance of each).

    if (MathUtils.nextInt(2) == 0) {
      branchMap.set(iP.getNumber(), branchMap.get(k.getNumber()), true);
    } else {
      branchMap.set(iP.getNumber(), branchMap.get(j.getNumber()), true);
    }

    if (DEBUG) {
      c2cLikelihood.checkPartitions();
    }
  }
  /**
   * Builds an {@code ALSBeagleTreeLikelihood} for the mutation-death (Acquisition-Loss-Mutation)
   * model from parsed XML: reads the gain-rate integration flag, resolves the death and
   * immigration-rate parameters, and constructs the observation process ("singleTip" or
   * "anyTip") declared in the XML.
   *
   * @throws XMLParseException if a rescaling scheme other than NONE was requested
   */
  protected BeagleTreeLikelihood createTreeLikelihood(
      PatternList patternList, //
      TreeModel treeModel, //
      BranchModel branchModel, //
      GammaSiteRateModel siteRateModel, //
      BranchRateModel branchRateModel, //
      TipStatesModel tipStatesModel, //
      boolean useAmbiguities, //
      PartialsRescalingScheme scalingScheme, //
      Map<
              Set<String>, //
              Parameter>
          partialsRestrictions, //
      XMLObject xo //
      ) throws XMLParseException {

    boolean integrateGainRate = xo.getBooleanAttribute(INTEGRATE_GAIN_RATE);

    useAmbiguities = true; // TODO No effect

    // The mutation-death model does not support partials rescaling.
    if (scalingScheme != PartialsRescalingScheme.NONE) {
      throw new XMLParseException(
          "No rescaling scheme is currently support by the mutation-death model " + xo.getId());
    }

    Parameter mu = ((MutationDeathModel) siteRateModel.getSubstitutionModel()).getDeathParameter();
    Parameter lam;
    if (!integrateGainRate) {
      lam = (Parameter) xo.getElementFirstChild(IMMIGRATION_RATE);
    } else {
      // Gain rate is integrated out; use a bounded placeholder parameter.
      lam = new Parameter.Default("gainRate", 1.0, 0.001, 1.999);
    }
    AbstractObservationProcess observationProcess = null;

    Logger.getLogger("dr.evolution")
        .info("\n ---------------------------------\nCreating a BEAGLE ALSTreeLikelihood model.");
    // Find the observation-process child element and build the matching process.
    for (int i = 0; i < xo.getChildCount(); ++i) {
      Object cxo = xo.getChild(i);
      if (cxo instanceof XMLObject && ((XMLObject) cxo).getName().equals(OBSERVATION_PROCESS)) {
        if (((XMLObject) cxo).getStringAttribute(OBSERVATION_TYPE).equals("singleTip")) {
          // "singleTip": all traits assumed extant in one named taxon.
          String taxonName = ((XMLObject) cxo).getStringAttribute(OBSERVATION_TAXON);
          Taxon taxon = treeModel.getTaxon(treeModel.getTaxonIndex(taxonName));
          observationProcess =
              new SingleTipObservationProcess(
                  treeModel, patternList, siteRateModel, branchRateModel, mu, lam, taxon);
          Logger.getLogger("dr.evolution").info("All traits are assumed extant in " + taxonName);
        } else { // "anyTip" observation process
          observationProcess =
              new AnyTipObservationProcess(
                  ANY_TIP, treeModel, patternList, siteRateModel, branchRateModel, mu, lam);
          Logger.getLogger("dr.evolution")
              .info("Observed traits are assumed to be extant in at least one tip node.");
        }

        observationProcess.setIntegrateGainRate(integrateGainRate);
      }
    }
    Logger.getLogger("dr.evolution")
        .info(
            "\tIf you publish results using Acquisition-Loss-Mutation (ALS) Model likelihood, please reference Alekseyenko, Lee and Suchard (2008) Syst. Biol 57: 772-784.\n---------------------------------\n");

    return new ALSBeagleTreeLikelihood(
        observationProcess,
        patternList,
        treeModel,
        branchModel,
        siteRateModel,
        branchRateModel,
        tipStatesModel,
        useAmbiguities,
        scalingScheme,
        partialsRestrictions);
  }
  /**
   * Branch-rate model that draws each branch's rate from one of several parametric
   * distributions, selected by {@code distributionIndexParameter}; per-branch quantiles are
   * held in {@code rateCategoryQuantilesParameter}.
   *
   * @param tree the tree whose branches are rated
   * @param rateCategoryQuantilesParameter per-branch quantiles, bounded to [0, 1] here
   * @param models candidate rate distributions; one is active at a time
   * @param distributionIndexParameter selects the active distribution
   * @param useQuantilesForRates whether rates are derived from the quantiles
   * @param normalize if true, recompute the scale factor whenever the tree changes
   * @param normalizeBranchRateTo target mean rate used by the normalization
   */
  public MixtureModelBranchRates(
      TreeModel tree,
      Parameter rateCategoryQuantilesParameter,
      ParametricDistributionModel[] models,
      Parameter distributionIndexParameter,
      boolean useQuantilesForRates,
      boolean normalize,
      double normalizeBranchRateTo) {
    super(MixtureModelBranchRatesParser.MIXTURE_MODEL_BRANCH_RATES);

    this.useQuantilesForRates = useQuantilesForRates;

    this.rateCategoryQuantiles =
        new TreeParameterModel(tree, rateCategoryQuantilesParameter, false);

    rates = new double[tree.getNodeCount()];

    this.normalize = normalize;

    this.treeModel = tree;
    this.distributionModels = models;
    this.normalizeBranchRateTo = normalizeBranchRateTo;

    this.tree = new SimpleTree(tree);

    this.distributionIndexParameter = distributionIndexParameter;
    addVariable(this.distributionIndexParameter);

    // Force the boundaries of rateCategoryParameter to match the category count
    // d Parameter.DefaultBounds bound = new Parameter.DefaultBounds(categoryCount - 1, 0,
    // rateCategoryParameter.getDimension());
    // d rateCategoryParameter.addBounds(bound);
    // rateCategoryQuantilesParameter.;

    // Quantiles live in [0, 1].
    Parameter.DefaultBounds bound =
        new Parameter.DefaultBounds(1.0, 0.0, rateCategoryQuantilesParameter.getDimension());
    rateCategoryQuantilesParameter.addBounds(bound);

    // NOTE(review): upper bound of models.length permits an index equal to models.length,
    // one past the last valid array index — confirm whether models.length - 1 was intended.
    Parameter.DefaultBounds bound2 = new Parameter.DefaultBounds(models.length, 0.0, 1);
    distributionIndexParameter.addBounds(bound2);
    distributionIndexParameter.setParameterValue(0, 0);

    // Parameter distributionIndexParameter;

    for (ParametricDistributionModel distributionModel : distributionModels) {
      addModel(distributionModel);
    }
    // AR - commented out: changes to the tree are handled by model changed events fired by
    // rateCategories
    //        addModel(tree);
    // d addModel(rateCategories);

    addModel(rateCategoryQuantiles);

    // addModel(treeModel); // Maybe
    // AR - commented out: changes to rateCategoryParameter are handled by model changed events
    // fired by rateCategories
    //        addVariable(rateCategoryParameter);

    // Keep the scale factor in sync with any change or restore of the tree.
    if (normalize) {
      tree.addModelListener(
          new ModelListener() {

            public void modelChangedEvent(Model model, Object object, int index) {
              computeFactor();
            }

            public void modelRestored(Model model) {
              computeFactor();
            }
          });
    }

    setupRates();
  }
  public NewBeagleTreeLikelihood(
      PatternList patternList,
      TreeModel treeModel,
      BranchModel branchModel,
      SiteModel siteModel,
      BranchRateModel branchRateModel,
      TipStatesModel tipStatesModel,
      boolean useAmbiguities,
      PartialsRescalingScheme rescalingScheme,
      Map<Set<String>, Parameter> partialsRestrictions) {

    super(BeagleTreeLikelihoodParser.TREE_LIKELIHOOD, patternList, treeModel);

    try {
      final Logger logger = Logger.getLogger("dr.evomodel");

      logger.info("Using BEAGLE TreeLikelihood");

      this.siteModel = siteModel;
      addModel(this.siteModel);

      this.branchModel = branchModel;
      addModel(this.branchModel);

      if (branchRateModel != null) {
        this.branchRateModel = branchRateModel;
        logger.info("  Branch rate model used: " + branchRateModel.getModelName());
      } else {
        this.branchRateModel = new DefaultBranchRateModel();
      }
      addModel(this.branchRateModel);

      this.tipStatesModel = tipStatesModel;

      this.categoryCount = this.siteModel.getCategoryCount();

      this.tipCount = treeModel.getExternalNodeCount();

      internalNodeCount = nodeCount - tipCount;

      int compactPartialsCount = tipCount;
      if (useAmbiguities) {
        // if we are using ambiguities then we don't use tip partials
        compactPartialsCount = 0;
      }

      // one partials buffer for each tip and two for each internal node (for store restore)
      partialBufferHelper = new BufferIndexHelper(nodeCount, tipCount);

      // one scaling buffer for each internal node plus an extra for the accumulation, then doubled
      // for store/restore
      scaleBufferHelper = new BufferIndexHelper(getScaleBufferCount(), 0);

      // Attempt to get the resource order from the System Property
      if (resourceOrder == null) {
        resourceOrder = parseSystemPropertyIntegerArray(RESOURCE_ORDER_PROPERTY);
      }
      if (preferredOrder == null) {
        preferredOrder = parseSystemPropertyIntegerArray(PREFERRED_FLAGS_PROPERTY);
      }
      if (requiredOrder == null) {
        requiredOrder = parseSystemPropertyIntegerArray(REQUIRED_FLAGS_PROPERTY);
      }
      if (scalingOrder == null) {
        scalingOrder = parseSystemPropertyStringArray(SCALING_PROPERTY);
      }
      if (extraBufferOrder == null) {
        extraBufferOrder = parseSystemPropertyIntegerArray(EXTRA_BUFFER_COUNT_PROPERTY);
      }

      int extraBufferCount = -1; // default
      if (extraBufferOrder.size() > 0) {
        extraBufferCount = extraBufferOrder.get(instanceCount % extraBufferOrder.size());
      }
      substitutionModelDelegate =
          new SubstitutionModelDelegate(treeModel, branchModel, extraBufferCount);

      // first set the rescaling scheme to use from the parser
      this.rescalingScheme = rescalingScheme;
      int[] resourceList = null;
      long preferenceFlags = 0;
      long requirementFlags = 0;

      if (scalingOrder.size() > 0) {
        this.rescalingScheme =
            PartialsRescalingScheme.parseFromString(
                scalingOrder.get(instanceCount % scalingOrder.size()));
      }

      if (resourceOrder.size() > 0) {
        // added the zero on the end so that a CPU is selected if requested resource fails
        resourceList = new int[] {resourceOrder.get(instanceCount % resourceOrder.size()), 0};
        if (resourceList[0] > 0) {
          preferenceFlags |=
              BeagleFlag.PROCESSOR_GPU.getMask(); // Add preference weight against CPU
        }
      }

      if (preferredOrder.size() > 0) {
        preferenceFlags = preferredOrder.get(instanceCount % preferredOrder.size());
      }

      if (requiredOrder.size() > 0) {
        requirementFlags = requiredOrder.get(instanceCount % requiredOrder.size());
      }

      // Define default behaviour here
      if (this.rescalingScheme == PartialsRescalingScheme.DEFAULT) {
        // if GPU: the default is dynamic scaling in BEAST
        if (resourceList != null && resourceList[0] > 1) {
          this.rescalingScheme = DEFAULT_RESCALING_SCHEME;
        } else { // if CPU: just run as fast as possible
          //                    this.rescalingScheme = PartialsRescalingScheme.NONE;
          // Dynamic should run as fast as none until first underflow
          this.rescalingScheme = DEFAULT_RESCALING_SCHEME;
        }
      }

      if (this.rescalingScheme == PartialsRescalingScheme.AUTO) {
        preferenceFlags |= BeagleFlag.SCALING_AUTO.getMask();
        useAutoScaling = true;
      } else {
        //                preferenceFlags |= BeagleFlag.SCALING_MANUAL.getMask();
      }
      String r = System.getProperty(RESCALE_FREQUENCY_PROPERTY);
      if (r != null) {
        rescalingFrequency = Integer.parseInt(r);
        if (rescalingFrequency < 1) {
          rescalingFrequency = RESCALE_FREQUENCY;
        }
      }

      if (preferenceFlags == 0 && resourceList == null) { // else determine dataset characteristics
        if (stateCount == 4 && patternList.getPatternCount() < 10000) // TODO determine good cut-off
        preferenceFlags |= BeagleFlag.PROCESSOR_CPU.getMask();
      }

      if (BeagleFlag.VECTOR_SSE.isSet(preferenceFlags) && stateCount != 4) {
        // @todo SSE doesn't seem to work for larger state spaces so for now we override the
        // SSE option.
        preferenceFlags &= ~BeagleFlag.VECTOR_SSE.getMask();
        preferenceFlags |= BeagleFlag.VECTOR_NONE.getMask();

        if (stateCount > 4 && this.rescalingScheme == PartialsRescalingScheme.DYNAMIC) {
          this.rescalingScheme = PartialsRescalingScheme.DELAYED;
        }
      }

      if (!BeagleFlag.PRECISION_SINGLE.isSet(preferenceFlags)) {
        // if single precision not explicitly set then prefer double
        preferenceFlags |= BeagleFlag.PRECISION_DOUBLE.getMask();
      }

      if (substitutionModelDelegate.canReturnComplexDiagonalization()) {
        requirementFlags |= BeagleFlag.EIGEN_COMPLEX.getMask();
      }

      instanceCount++;

      beagle =
          BeagleFactory.loadBeagleInstance(
              tipCount,
              partialBufferHelper.getBufferCount(),
              compactPartialsCount,
              stateCount,
              patternCount,
              substitutionModelDelegate.getEigenBufferCount(),
              substitutionModelDelegate.getMatrixBufferCount(),
              categoryCount,
              scaleBufferHelper.getBufferCount(), // Always allocate; they may become necessary
              resourceList,
              preferenceFlags,
              requirementFlags);

      InstanceDetails instanceDetails = beagle.getDetails();
      ResourceDetails resourceDetails = null;

      if (instanceDetails != null) {
        resourceDetails = BeagleFactory.getResourceDetails(instanceDetails.getResourceNumber());
        if (resourceDetails != null) {
          StringBuilder sb = new StringBuilder("  Using BEAGLE resource ");
          sb.append(resourceDetails.getNumber()).append(": ");
          sb.append(resourceDetails.getName()).append("\n");
          if (resourceDetails.getDescription() != null) {
            String[] description = resourceDetails.getDescription().split("\\|");
            for (String desc : description) {
              if (desc.trim().length() > 0) {
                sb.append("    ").append(desc.trim()).append("\n");
              }
            }
          }
          sb.append("    with instance flags: ").append(instanceDetails.toString());
          logger.info(sb.toString());
        } else {
          logger.info(
              "  Error retrieving BEAGLE resource for instance: " + instanceDetails.toString());
        }
      } else {
        logger.info(
            "  No external BEAGLE resources available, or resource list/requirements not met, using Java implementation");
      }
      logger.info(
          "  " + (useAmbiguities ? "Using" : "Ignoring") + " ambiguities in tree likelihood.");
      logger.info("  With " + patternList.getPatternCount() + " unique site patterns.");

      if (tipStatesModel != null) {
        tipStatesModel.setTree(treeModel);

        if (tipStatesModel.getModelType() == TipStatesModel.Type.PARTIALS) {
          tipPartials = new double[patternCount * stateCount];
        } else {
          tipStates = new int[patternCount];
        }

        addModel(tipStatesModel);
      }

      for (int i = 0; i < tipCount; i++) {
        // Find the id of tip i in the patternList
        String id = treeModel.getTaxonId(i);
        int index = patternList.getTaxonIndex(id);

        if (index == -1) {
          throw new TaxonList.MissingTaxonException(
              "Taxon, "
                  + id
                  + ", in tree, "
                  + treeModel.getId()
                  + ", is not found in patternList, "
                  + patternList.getId());
        } else {
          if (tipStatesModel != null) {
            // using a tipPartials model.
            // First set the observed states:
            tipStatesModel.setStates(patternList, index, i, id);

            if (tipStatesModel.getModelType() == TipStatesModel.Type.PARTIALS) {
              // Then set the tip partials as determined by the model:
              setPartials(beagle, tipStatesModel, i);
            } else {
              // or the tip states:
              tipStatesModel.getTipStates(i, tipStates);
              beagle.setTipStates(i, tipStates);
            }

          } else {
            if (useAmbiguities) {
              setPartials(beagle, patternList, index, i);
            } else {
              setStates(beagle, patternList, index, i);
            }
          }
        }
      }

      if (patternList instanceof AscertainedSitePatterns) {
        ascertainedSitePatterns = true;
      }

      this.partialsRestrictions = partialsRestrictions;
      //            hasRestrictedPartials = (partialsRestrictions != null);
      if (hasRestrictedPartials) {
        numRestrictedPartials = partialsRestrictions.size();
        updateRestrictedNodePartials = true;
        partialsMap = new Parameter[treeModel.getNodeCount()];
        partials = new double[stateCount * patternCount * categoryCount];
      } else {
        numRestrictedPartials = 0;
        updateRestrictedNodePartials = false;
      }

      beagle.setPatternWeights(patternWeights);

      String rescaleMessage = "  Using rescaling scheme : " + this.rescalingScheme.getText();
      if (this.rescalingScheme == PartialsRescalingScheme.AUTO
          && resourceDetails != null
          && (resourceDetails.getFlags() & BeagleFlag.SCALING_AUTO.getMask()) == 0) {
        // If auto scaling in BEAGLE is not supported then do it here
        this.rescalingScheme = PartialsRescalingScheme.DYNAMIC;
        rescaleMessage =
            "  Auto rescaling not supported in BEAGLE, using : " + this.rescalingScheme.getText();
      }
      if (this.rescalingScheme == PartialsRescalingScheme.DYNAMIC) {
        rescaleMessage += " (rescaling every " + rescalingFrequency + " evaluations)";
      }
      logger.info(rescaleMessage);

      if (this.rescalingScheme == PartialsRescalingScheme.DYNAMIC) {
        everUnderflowed = false; // If false, BEAST does not rescale until first under-/over-flow.
      }

      updateSubstitutionModel = true;
      updateSiteModel = true;

    } catch (TaxonList.MissingTaxonException mte) {
      throw new RuntimeException(mte.toString());
    }
    this.useAmbiguities = useAmbiguities;
    hasInitialized = true;
  }
  /** Constructor. */
  public CenancestorTreeLikelihood(
      PatternList patternList,
      TreeModel treeModel,
      SiteModel siteModel,
      CenancestorBranchRateModel branchRateModel,
      TipStatesModel tipStatesModel,
      Parameter cenancestorHeight,
      Parameter cenancestorBranch,
      // Parameter asStatistic,
      boolean useAmbiguities,
      boolean allowMissingTaxa,
      boolean storePartials,
      boolean forceJavaCore,
      boolean forceRescaling) {

    super(CenancestorTreeLikelihoodParser.TREE_LIKELIHOOD, patternList, treeModel);

    this.storePartials = storePartials;
    nodeCount = treeModel.getNodeCount() + 1;
    updateNode = new boolean[nodeCount];
    for (int i = 0; i < nodeCount; i++) {
      updateNode[i] = true;
    }

    try {
      this.siteModel = siteModel;
      addModel(siteModel);

      this.frequencyModel = siteModel.getFrequencyModel();
      addModel(frequencyModel);

      this.tipStatesModel = tipStatesModel;

      integrateAcrossCategories = siteModel.integrateAcrossCategories();

      this.categoryCount = siteModel.getCategoryCount();

      this.cenancestorHeight = cenancestorHeight;
      addVariable(cenancestorHeight);
      cenancestorHeight.addBounds(
          new Parameter.DefaultBounds(
              Double.POSITIVE_INFINITY,
              0.0,
              1)); // TODO The lower bound should be the maximum leaf height

      this.cenancestorBranch = cenancestorBranch;
      cenancestorBranch.addBounds(
          new Parameter.DefaultBounds(
              Double.POSITIVE_INFINITY,
              0.0,
              1)); // TODO The upper bound should be the maximum leaf height
      addVariable(cenancestorBranch);

      // if (asStatistic == cenancestorHeight){
      //	this.branchRules=true;
      // }

      //	if (branchRules==true){
      updateCenancestorHeight(); // Trying to avoid improper initial values
      //	}
      // 	else{
      //		updateCenancestorBranch();
      //	}

      final Logger logger = Logger.getLogger("dr.evomodel");
      String coreName = "Java general";

      /** TODO: Check if is worthy to implement other datatypes. */
      final DataType dataType = patternList.getDataType();

      if (dataType instanceof dr.evolution.datatype.TwoStates) {
        coreName = "Java cenancestor binary";
        cenancestorlikelihoodCore =
            new GeneralCenancestorLikelihoodCore(patternList.getStateCount());
      } else if (dataType instanceof dr.evolution.datatype.GeneralDataType) {
        coreName = "Java cenancestor CNV";
        cenancestorlikelihoodCore =
            new GeneralCenancestorLikelihoodCore(patternList.getStateCount());
      }

      /*            if (integrateAcrossCategories) {

          final DataType dataType = patternList.getDataType();

          if (dataType instanceof dr.evolution.datatype.Nucleotides) {

              if (!forceJavaCore && NativeNucleotideLikelihoodCore.isAvailable()) {
                  coreName = "native nucleotide";
                  likelihoodCore = new NativeNucleotideLikelihoodCore();
              } else {
                  coreName = "Java nucleotide";
                  likelihoodCore = new NucleotideLikelihoodCore();
              }

          } else if (dataType instanceof dr.evolution.datatype.AminoAcids) {
              if (!forceJavaCore && NativeAminoAcidLikelihoodCore.isAvailable()) {
                  coreName = "native amino acid";
                  likelihoodCore = new NativeAminoAcidLikelihoodCore();
              } else {
                  coreName = "Java amino acid";
                  likelihoodCore = new AminoAcidLikelihoodCore();
              }

              // The codon core was out of date and did nothing more than the general core...
          } else if (dataType instanceof dr.evolution.datatype.Codons) {
              if (!forceJavaCore && NativeGeneralLikelihoodCore.isAvailable()) {
                  coreName = "native general";
                  likelihoodCore = new NativeGeneralLikelihoodCore(patternList.getStateCount());
              } else {
                  coreName = "Java general";
                  likelihoodCore = new GeneralLikelihoodCore(patternList.getStateCount());
              }
              useAmbiguities = true;
          } else {
              if (!forceJavaCore && NativeGeneralLikelihoodCore.isAvailable()) {
                  coreName = "native general";
                  likelihoodCore = new NativeGeneralLikelihoodCore(patternList.getStateCount());
              } else {
                  	coreName = "Java general";
                  	likelihoodCore = new GeneralLikelihoodCore(patternList.getStateCount());
              }
          }
      } else {
          likelihoodCore = new GeneralLikelihoodCore(patternList.getStateCount());
      }*/
      {
        final String id = getId();
        logger.info(
            "TreeLikelihood("
                + ((id != null) ? id : treeModel.getId())
                + ") using "
                + coreName
                + " likelihood core");

        logger.info(
            "  " + (useAmbiguities ? "Using" : "Ignoring") + " ambiguities in tree likelihood.");
        logger.info("  With " + patternList.getPatternCount() + " unique site patterns.");
      }

      if (branchRateModel != null) {
        this.branchRateModel = branchRateModel;
        logger.info("Branch rate model used: " + branchRateModel.getModelName());
      } else {
        this.branchRateModel = new DefaultCenancestorBranchRateModel();
      }
      addModel(this.branchRateModel);

      probabilities = new double[stateCount * stateCount];

      cenancestorlikelihoodCore.initialize(
          nodeCount, patternCount, categoryCount, integrateAcrossCategories);

      int extNodeCount = treeModel.getExternalNodeCount();
      int intNodeCount = treeModel.getInternalNodeCount();

      if (tipStatesModel != null) {
        tipStatesModel.setTree(treeModel);

        tipPartials = new double[patternCount * stateCount];

        for (int i = 0; i < extNodeCount; i++) {
          // Find the id of tip i in the patternList
          String id = treeModel.getTaxonId(i);
          int index = patternList.getTaxonIndex(id);

          if (index == -1) {
            throw new TaxonList.MissingTaxonException(
                "Taxon, "
                    + id
                    + ", in tree, "
                    + treeModel.getId()
                    + ", is not found in patternList, "
                    + patternList.getId());
          }

          tipStatesModel.setStates(patternList, index, i, id);
          cenancestorlikelihoodCore.createNodePartials(i);
        }

        addModel(tipStatesModel);
      } else {
        for (int i = 0; i < extNodeCount; i++) {
          // Find the id of tip i in the patternList
          String id = treeModel.getTaxonId(i);
          int index = patternList.getTaxonIndex(id);

          if (index == -1) {
            if (!allowMissingTaxa) {
              throw new TaxonList.MissingTaxonException(
                  "Taxon, "
                      + id
                      + ", in tree, "
                      + treeModel.getId()
                      + ", is not found in patternList, "
                      + patternList.getId());
            }
            if (useAmbiguities) {
              setMissingPartials((LikelihoodCore) cenancestorlikelihoodCore, i);
            } else {
              setMissingStates((LikelihoodCore) cenancestorlikelihoodCore, i);
            }
          } else {
            if (useAmbiguities) {
              setPartials(
                  (LikelihoodCore) cenancestorlikelihoodCore, patternList, categoryCount, index, i);
            } else {
              setStates((LikelihoodCore) cenancestorlikelihoodCore, patternList, index, i);
            }
          }
        }
      }
      for (int i = 0; i <= intNodeCount; i++) { // Added one step for the cenancestor
        cenancestorlikelihoodCore.createNodePartials(extNodeCount + i);
      }

      if (forceRescaling) {
        cenancestorlikelihoodCore.setUseScaling(true);
        logger.info("  Forcing use of partials rescaling.");
      }

    } catch (TaxonList.MissingTaxonException mte) {
      throw new RuntimeException(mte.toString());
    }

    addStatistic(new SiteLikelihoodsStatistic());
  }