Example #1
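  /**
   * Write the additional rate matrix log block for discrete trait partitions.
   *
   * @param writer XMLWriter
   * @param branchRatesModelGenerator BranchRatesModelGenerator
   * @param substitutionModelGenerator SubstitutionModelGenerator
   */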
  public void writeAdditionalLogToFile(
      XMLWriter writer,
      BranchRatesModelGenerator branchRatesModelGenerator,
      SubstitutionModelGenerator substitutionModelGenerator) {
    if (options.hasDiscreteIntegerTraitsExcludeSpecies()) {
      writer.writeComment("write rate matrix log to file");
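      // the rate matrix log goes to a separate file whose name is derived from the main
      // log file name (this assumes options.logFileName contains ".log")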

      String fileName =
          options.logFileName.substring(0, options.logFileName.indexOf(".log")) + "_rateMatrix.log";
      writer.writeOpenTag(
          LoggerParser.LOG,
          new Attribute[] {
            new Attribute.Default<String>(XMLParser.ID, "rateMatrixLog"),
            new Attribute.Default<String>(LoggerParser.LOG_EVERY, options.logEvery + ""),
            new Attribute.Default<String>(LoggerParser.FILE_NAME, fileName)
          });

      for (PartitionSubstitutionModel model : options.getPartitionTraitsSubstitutionModels()) {
        substitutionModelGenerator.writeRateLog(model, writer);
      }

      for (PartitionClockModel model : options.getPartitionTraitsClockModels()) {
        branchRatesModelGenerator.writeLog(model, writer);
      }

      writer.writeCloseTag(LoggerParser.LOG);
    }
  }
  /**
   * Write the marginalLikelihoodEstimator, pathSamplingAnalysis, steppingStoneSamplingAnalysis
   * and generalizedSteppingStoneSamplingAnalysis blocks.
   *
   * @param writer XMLWriter
   * @param options MarginalLikelihoodEstimationOptions
   */
  public void writeMLE(XMLWriter writer, MarginalLikelihoodEstimationOptions options) {

    if (options.performMLE) {

      writer.writeComment("Define marginal likelihood estimator (PS/SS) settings");

      List<Attribute> attributes = new ArrayList<Attribute>();
      // attributes.add(new Attribute.Default<String>(XMLParser.ID, "mcmc"));
      attributes.add(
          new Attribute.Default<Integer>(
              MarginalLikelihoodEstimator.CHAIN_LENGTH, options.mleChainLength));
      attributes.add(
          new Attribute.Default<Integer>(
              MarginalLikelihoodEstimator.PATH_STEPS, options.pathSteps));
      attributes.add(
          new Attribute.Default<String>(
              MarginalLikelihoodEstimator.PATH_SCHEME, options.pathScheme));
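      // the alpha parameter is only written for non-linear path schemes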
      if (!options.pathScheme.equals(MarginalLikelihoodEstimator.LINEAR)) {
        attributes.add(
            new Attribute.Default<Double>(
                MarginalLikelihoodEstimator.ALPHA, options.schemeParameter));
      }

      writer.writeOpenTag(MarginalLikelihoodEstimator.MARGINAL_LIKELIHOOD_ESTIMATOR, attributes);

      writer.writeOpenTag("samplers");
      writer.writeIDref("mcmc", "mcmc");
      writer.writeCloseTag("samplers");

      attributes = new ArrayList<Attribute>();
      attributes.add(new Attribute.Default<String>(XMLParser.ID, "pathLikelihood"));
      writer.writeOpenTag(PathLikelihood.PATH_LIKELIHOOD, attributes);
      writer.writeOpenTag(PathLikelihood.SOURCE);
      writer.writeIDref(CompoundLikelihoodParser.POSTERIOR, CompoundLikelihoodParser.POSTERIOR);
      writer.writeCloseTag(PathLikelihood.SOURCE);
      writer.writeOpenTag(PathLikelihood.DESTINATION);
      writer.writeIDref(CompoundLikelihoodParser.PRIOR, CompoundLikelihoodParser.PRIOR);
      writer.writeCloseTag(PathLikelihood.DESTINATION);
      writer.writeCloseTag(PathLikelihood.PATH_LIKELIHOOD);

      attributes = new ArrayList<Attribute>();
      attributes.add(new Attribute.Default<String>(XMLParser.ID, "MLELog"));
      attributes.add(new Attribute.Default<Integer>("logEvery", options.mleLogEvery));
      attributes.add(new Attribute.Default<String>("fileName", options.mleFileName));
      writer.writeOpenTag("log", attributes);
      writer.writeIDref("pathLikelihood", "pathLikelihood");
      writer.writeCloseTag("log");

      writer.writeCloseTag(MarginalLikelihoodEstimator.MARGINAL_LIKELIHOOD_ESTIMATOR);

      writer.writeComment("Path sampling estimator from collected samples");
      attributes = new ArrayList<Attribute>();
      attributes.add(new Attribute.Default<String>("fileName", options.mleFileName));
      writer.writeOpenTag(PathSamplingAnalysis.PATH_SAMPLING_ANALYSIS, attributes);
      writer.writeTag(
          "likelihoodColumn", new Attribute.Default<String>("name", "pathLikelihood.delta"), true);
      writer.writeTag(
          "thetaColumn", new Attribute.Default<String>("name", "pathLikelihood.theta"), true);
      writer.writeCloseTag(PathSamplingAnalysis.PATH_SAMPLING_ANALYSIS);

      writer.writeComment("Stepping-stone sampling estimator from collected samples");
      attributes = new ArrayList<Attribute>();
      attributes.add(new Attribute.Default<String>("fileName", options.mleFileName));
      writer.writeOpenTag(
          SteppingStoneSamplingAnalysis.STEPPING_STONE_SAMPLING_ANALYSIS, attributes);
      writer.writeTag(
          "likelihoodColumn", new Attribute.Default<String>("name", "pathLikelihood.delta"), true);
      writer.writeTag(
          "thetaColumn", new Attribute.Default<String>("name", "pathLikelihood.theta"), true);
      writer.writeCloseTag(SteppingStoneSamplingAnalysis.STEPPING_STONE_SAMPLING_ANALYSIS);

    } else if (options.performMLEGSS) {

      // First define necessary components for the tree working prior
      if (options.choiceTreeWorkingPrior.equals("Product of exponential distributions")) {
        // a more general product of exponentials needs to be constructed

        if (DEBUG) {
          System.err.println("productOfExponentials selected: " + options.choiceTreeWorkingPrior);
        }

        List<Attribute> attributes = new ArrayList<Attribute>();
        attributes.add(new Attribute.Default<String>(XMLParser.ID, "exponentials"));
        attributes.add(new Attribute.Default<String>("fileName", beautiOptions.logFileName));
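        // burn-in for estimating the working prior: 10% of the main MCMC chain length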
        attributes.add(
            new Attribute.Default<String>("burnin", "" + beautiOptions.chainLength * 0.10));
        attributes.add(
            new Attribute.Default<String>("parameterColumn", "coalescentEventsStatistic"));
        attributes.add(
            new Attribute.Default<String>(
                "dimension", "" + (beautiOptions.taxonList.getTaxonCount() - 1)));

        writer.writeOpenTag(
            TreeWorkingPriorParsers.PRODUCT_OF_EXPONENTIALS_POSTERIOR_MEANS_LOESS, attributes);
        writer.writeTag(
            TreeModel.TREE_MODEL,
            new Attribute.Default<String>(XMLParser.ID, TreeModel.TREE_MODEL),
            true);
        writer.writeCloseTag(TreeWorkingPriorParsers.PRODUCT_OF_EXPONENTIALS_POSTERIOR_MEANS_LOESS);

      } else {
        // a matching coalescent model has to be constructed
        // retrieve the coalescent model currently selected as the tree prior
        if (DEBUG) {
          System.err.println(
              "matching coalescent model selected: " + options.choiceTreeWorkingPrior);
          System.err.println(beautiOptions.getPartitionTreePriors().get(0).getNodeHeightPrior());
        }
        /*for (PartitionTreePrior prior : options.getPartitionTreePriors()) {
            treePriorGenerator.writeTreePriorModel(prior, writer);
            writer.writeText("");
        }*/
        // TODO: extend for more than 1 coalescent model?
        TreePriorType nodeHeightPrior =
            beautiOptions.getPartitionTreePriors().get(0).getNodeHeightPrior();

        switch (nodeHeightPrior) {
          case CONSTANT:
            writer.writeComment("A working prior for the constant population size model.");
            writer.writeOpenTag(
                ConstantPopulationModelParser.CONSTANT_POPULATION_MODEL,
                new Attribute[] {
                  new Attribute.Default<String>(XMLParser.ID, modelPrefix + "constantReference"),
                  new Attribute.Default<String>(
                      "units", Units.Utils.getDefaultUnitName(beautiOptions.units))
                });

            writer.writeOpenTag(ConstantPopulationModelParser.POPULATION_SIZE);
            writeParameter(
                "constantReference.popSize",
                "constant.popSize",
                beautiOptions.logFileName,
                (int) (options.mleChainLength * 0.10),
                writer);
            writer.writeCloseTag(ConstantPopulationModelParser.POPULATION_SIZE);
            writer.writeCloseTag(ConstantPopulationModelParser.CONSTANT_POPULATION_MODEL);

            writer.writeComment("A working prior for the coalescent.");
            writer.writeOpenTag(
                CoalescentLikelihoodParser.COALESCENT_LIKELIHOOD,
                new Attribute[] {
                  new Attribute.Default<String>(XMLParser.ID, modelPrefix + "coalescentReference")
                });
            writer.writeOpenTag(CoalescentLikelihoodParser.MODEL);
            writer.writeIDref(
                ConstantPopulationModelParser.CONSTANT_POPULATION_MODEL,
                beautiOptions.getPartitionTreePriors().get(0).getPrefix() + "constantReference");
            writer.writeCloseTag(CoalescentLikelihoodParser.MODEL);
            writer.writeOpenTag(CoalescentLikelihoodParser.POPULATION_TREE);
            writer.writeIDref(TreeModel.TREE_MODEL, modelPrefix + TreeModel.TREE_MODEL);
            writer.writeCloseTag(CoalescentLikelihoodParser.POPULATION_TREE);
            writer.writeCloseTag(CoalescentLikelihoodParser.COALESCENT_LIKELIHOOD);

            break;

          case EXPONENTIAL:
            writer.writeComment("A working prior for the exponential growth model.");
            writer.writeOpenTag(
                ExponentialGrowthModelParser.EXPONENTIAL_GROWTH_MODEL,
                new Attribute[] {
                  new Attribute.Default<String>(XMLParser.ID, modelPrefix + "exponentialReference"),
                  new Attribute.Default<String>(
                      "units", Units.Utils.getDefaultUnitName(beautiOptions.units))
                });

            writer.writeOpenTag(ExponentialGrowthModelParser.POPULATION_SIZE);
            writeParameter(
                "exponentialReference.popSize",
                "exponential.popSize",
                beautiOptions.logFileName,
                (int) (options.mleChainLength * 0.10),
                writer);
            writer.writeCloseTag(ExponentialGrowthModelParser.POPULATION_SIZE);
            writer.writeOpenTag(ExponentialGrowthModelParser.GROWTH_RATE);
            writeParameter(
                "exponentialReference.growthRate",
                "exponential.growthRate",
                beautiOptions.logFileName,
                (int) (options.mleChainLength * 0.10),
                writer);
            writer.writeCloseTag(ExponentialGrowthModelParser.GROWTH_RATE);
            writer.writeCloseTag(ExponentialGrowthModelParser.EXPONENTIAL_GROWTH_MODEL);

            writer.writeComment("A working prior for the coalescent.");
            writer.writeOpenTag(
                CoalescentLikelihoodParser.COALESCENT_LIKELIHOOD,
                new Attribute[] {
                  new Attribute.Default<String>(XMLParser.ID, modelPrefix + "coalescentReference")
                });
            writer.writeOpenTag(CoalescentLikelihoodParser.MODEL);
            writer.writeIDref(
                ExponentialGrowthModelParser.EXPONENTIAL_GROWTH_MODEL,
                beautiOptions.getPartitionTreePriors().get(0).getPrefix() + "exponentialReference");
            writer.writeCloseTag(CoalescentLikelihoodParser.MODEL);
            writer.writeOpenTag(CoalescentLikelihoodParser.POPULATION_TREE);
            writer.writeIDref(TreeModel.TREE_MODEL, modelPrefix + TreeModel.TREE_MODEL);
            writer.writeCloseTag(CoalescentLikelihoodParser.POPULATION_TREE);
            writer.writeCloseTag(CoalescentLikelihoodParser.COALESCENT_LIKELIHOOD);

            break;

          case LOGISTIC:
            writer.writeComment("A working prior for the logistic growth model.");
            writer.writeOpenTag(
                LogisticGrowthModelParser.LOGISTIC_GROWTH_MODEL,
                new Attribute[] {
                  new Attribute.Default<String>(XMLParser.ID, modelPrefix + "logisticReference"),
                  new Attribute.Default<String>(
                      "units", Units.Utils.getDefaultUnitName(beautiOptions.units))
                });

            writer.writeOpenTag(LogisticGrowthModelParser.POPULATION_SIZE);
            writeParameter(
                "logisticReference.popSize",
                "logistic.popSize",
                beautiOptions.logFileName,
                (int) (options.mleChainLength * 0.10),
                writer);
            writer.writeCloseTag(LogisticGrowthModelParser.POPULATION_SIZE);
            writer.writeOpenTag(LogisticGrowthModelParser.GROWTH_RATE);
            writeParameter(
                "logisticReference.growthRate",
                "logistic.growthRate",
                beautiOptions.logFileName,
                (int) (options.mleChainLength * 0.10),
                writer);
            writer.writeCloseTag(LogisticGrowthModelParser.GROWTH_RATE);
            writer.writeOpenTag(LogisticGrowthModelParser.TIME_50);
            writeParameter(
                "logisticReference.t50",
                "logistic.t50",
                beautiOptions.logFileName,
                (int) (options.mleChainLength * 0.10),
                writer);
            writer.writeCloseTag(LogisticGrowthModelParser.TIME_50);
            writer.writeCloseTag(LogisticGrowthModelParser.LOGISTIC_GROWTH_MODEL);

            writer.writeComment("A working prior for the coalescent.");
            writer.writeOpenTag(
                CoalescentLikelihoodParser.COALESCENT_LIKELIHOOD,
                new Attribute[] {
                  new Attribute.Default<String>(XMLParser.ID, modelPrefix + "coalescentReference")
                });
            writer.writeOpenTag(CoalescentLikelihoodParser.MODEL);
            writer.writeIDref(
                LogisticGrowthModelParser.LOGISTIC_GROWTH_MODEL,
                beautiOptions.getPartitionTreePriors().get(0).getPrefix() + "logisticReference");
            writer.writeCloseTag(CoalescentLikelihoodParser.MODEL);
            writer.writeOpenTag(CoalescentLikelihoodParser.POPULATION_TREE);
            writer.writeIDref(TreeModel.TREE_MODEL, modelPrefix + TreeModel.TREE_MODEL);
            writer.writeCloseTag(CoalescentLikelihoodParser.POPULATION_TREE);
            writer.writeCloseTag(CoalescentLikelihoodParser.COALESCENT_LIKELIHOOD);

            break;

          case EXPANSION:
            writer.writeComment("A working prior for the expansion growth model.");
            writer.writeOpenTag(
                ExpansionModelParser.EXPANSION_MODEL,
                new Attribute[] {
                  new Attribute.Default<String>(XMLParser.ID, modelPrefix + "expansionReference"),
                  new Attribute.Default<String>(
                      "units", Units.Utils.getDefaultUnitName(beautiOptions.units))
                });

            writer.writeOpenTag(ExpansionModelParser.POPULATION_SIZE);
            writeParameter(
                "expansionReference.popSize",
                "expansion.popSize",
                beautiOptions.logFileName,
                (int) (options.mleChainLength * 0.10),
                writer);
            writer.writeCloseTag(ExpansionModelParser.POPULATION_SIZE);
            writer.writeOpenTag(ExpansionModelParser.GROWTH_RATE);
            writeParameter(
                "expansionReference.growthRate",
                "expansion.growthRate",
                beautiOptions.logFileName,
                (int) (options.mleChainLength * 0.10),
                writer);
            writer.writeCloseTag(ExpansionModelParser.GROWTH_RATE);
            writer.writeOpenTag(ExpansionModelParser.ANCESTRAL_POPULATION_PROPORTION);
            writeParameter(
                "expansionReference.ancestralProportion",
                "expansion.ancestralProportion",
                beautiOptions.logFileName,
                (int) (options.mleChainLength * 0.10),
                writer);
            writer.writeCloseTag(ExpansionModelParser.ANCESTRAL_POPULATION_PROPORTION);
            writer.writeCloseTag(ExpansionModelParser.EXPANSION_MODEL);

            writer.writeComment("A working prior for the coalescent.");
            writer.writeOpenTag(
                CoalescentLikelihoodParser.COALESCENT_LIKELIHOOD,
                new Attribute[] {
                  new Attribute.Default<String>(XMLParser.ID, modelPrefix + "coalescentReference")
                });
            writer.writeOpenTag(CoalescentLikelihoodParser.MODEL);
            writer.writeIDref(
                ExpansionModelParser.EXPANSION_MODEL,
                beautiOptions.getPartitionTreePriors().get(0).getPrefix() + "expansionReference");
            writer.writeCloseTag(CoalescentLikelihoodParser.MODEL);
            writer.writeOpenTag(CoalescentLikelihoodParser.POPULATION_TREE);
            writer.writeIDref(TreeModel.TREE_MODEL, modelPrefix + TreeModel.TREE_MODEL);
            writer.writeCloseTag(CoalescentLikelihoodParser.POPULATION_TREE);
            writer.writeCloseTag(CoalescentLikelihoodParser.COALESCENT_LIKELIHOOD);

            break;

          default:

            // Do not switch to the product of exponentials here, as the
            // coalescentEventsStatistic has not been logged.
            // TODO: show a dialog explaining the mismatch between the prior and the working prior?
            // TODO: but show it when the MCM option is wrongfully selected; do nothing here.

        }
      }

      writer.writeComment("Define marginal likelihood estimator (GSS) settings");

      List<Attribute> attributes = new ArrayList<Attribute>();
      attributes.add(
          new Attribute.Default<Integer>(
              MarginalLikelihoodEstimator.CHAIN_LENGTH, options.mleChainLength));
      attributes.add(
          new Attribute.Default<Integer>(
              MarginalLikelihoodEstimator.PATH_STEPS, options.pathSteps));
      attributes.add(
          new Attribute.Default<String>(
              MarginalLikelihoodEstimator.PATH_SCHEME, options.pathScheme));
      if (!options.pathScheme.equals(MarginalLikelihoodEstimator.LINEAR)) {
        attributes.add(
            new Attribute.Default<Double>(
                MarginalLikelihoodEstimator.ALPHA, options.schemeParameter));
      }

      writer.writeOpenTag(MarginalLikelihoodEstimator.MARGINAL_LIKELIHOOD_ESTIMATOR, attributes);

      writer.writeOpenTag("samplers");
      writer.writeIDref("mcmc", "mcmc");
      writer.writeCloseTag("samplers");

      attributes = new ArrayList<Attribute>();
      attributes.add(new Attribute.Default<String>(XMLParser.ID, "pathLikelihood"));
      writer.writeOpenTag(PathLikelihood.PATH_LIKELIHOOD, attributes);
      writer.writeOpenTag(PathLikelihood.SOURCE);
      writer.writeIDref(CompoundLikelihoodParser.POSTERIOR, CompoundLikelihoodParser.POSTERIOR);
      writer.writeCloseTag(PathLikelihood.SOURCE);
      writer.writeOpenTag(PathLikelihood.DESTINATION);
      writer.writeOpenTag(CompoundLikelihoodParser.WORKING_PRIOR);

      ArrayList<Parameter> parameters = beautiOptions.selectParameters();
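      // build a normal reference (working) prior for each sampled parameter,
      // estimated from the main MCMC log file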

      for (Parameter param : parameters) {
        if (DEBUG) {
          System.err.println(param.toString() + "   " + param.priorType.toString());
        }
        // should leave out those parameters set by the coalescent
        if (param.priorType != PriorType.NONE_TREE_PRIOR) {
          // TODO: the frequencies parameter is multidimensional; is that handled automatically?
          writer.writeOpenTag(
              WorkingPriorParsers.NORMAL_REFERENCE_PRIOR,
              new Attribute[] {
                new Attribute.Default<String>("fileName", beautiOptions.logFileName),
                new Attribute.Default<String>("parameterColumn", param.getName()),
                new Attribute.Default<String>("burnin", "" + beautiOptions.chainLength * 0.10)
              });
          writeParameterIdref(writer, param);
          writer.writeCloseTag(WorkingPriorParsers.NORMAL_REFERENCE_PRIOR);
        }
      }

      if (options.choiceTreeWorkingPrior.equals("Product of exponential distributions")) {
        writer.writeIDref("productOfExponentialsPosteriorMeansLoess", "exponentials");
      } else {
        writer.writeIDref(CoalescentLikelihoodParser.COALESCENT_LIKELIHOOD, "coalescentReference");
      }

      writer.writeCloseTag(CompoundLikelihoodParser.WORKING_PRIOR);
      writer.writeCloseTag(PathLikelihood.DESTINATION);
      writer.writeCloseTag(PathLikelihood.PATH_LIKELIHOOD);

      attributes = new ArrayList<Attribute>();
      attributes.add(new Attribute.Default<String>(XMLParser.ID, "MLELog"));
      attributes.add(new Attribute.Default<Integer>("logEvery", options.mleLogEvery));
      attributes.add(new Attribute.Default<String>("fileName", options.mleFileName));
      writer.writeOpenTag("log", attributes);
      writer.writeIDref("pathLikelihood", "pathLikelihood");
      writer.writeCloseTag("log");

      writer.writeCloseTag(MarginalLikelihoodEstimator.MARGINAL_LIKELIHOOD_ESTIMATOR);

      writer.writeComment("Generalized stepping-stone sampling estimator from collected samples");
      attributes = new ArrayList<Attribute>();
      attributes.add(new Attribute.Default<String>("fileName", options.mleFileName));
      writer.writeOpenTag(
          GeneralizedSteppingStoneSamplingAnalysis.GENERALIZED_STEPPING_STONE_SAMPLING_ANALYSIS,
          attributes);
      writer.writeTag(
          "sourceColumn", new Attribute.Default<String>("name", "pathLikelihood.source"), true);
      writer.writeTag(
          "destinationColumn",
          new Attribute.Default<String>("name", "pathLikelihood.destination"),
          true);
      writer.writeTag(
          "thetaColumn", new Attribute.Default<String>("name", "pathLikelihood.theta"), true);
      writer.writeCloseTag(
          GeneralizedSteppingStoneSamplingAnalysis.GENERALIZED_STEPPING_STONE_SAMPLING_ANALYSIS);
    }
  }
Example #3
  /**
   * Write the screen log block.
   *
   * @param writer XMLWriter
   * @param branchRatesModelGenerator BranchRatesModelGenerator
   * @param substitutionModelGenerator SubstitutionModelGenerator
   */
  public void writeLogToScreen(
      XMLWriter writer,
      BranchRatesModelGenerator branchRatesModelGenerator,
      SubstitutionModelGenerator substitutionModelGenerator) {
    writer.writeComment("write log to screen");

    writer.writeOpenTag(
        LoggerParser.LOG,
        new Attribute[] {
          new Attribute.Default<String>(XMLParser.ID, "screenLog"),
          new Attribute.Default<String>(LoggerParser.LOG_EVERY, options.echoEvery + "")
        });

    if (options.hasData()) {
      writer.writeOpenTag(
          ColumnsParser.COLUMN,
          new Attribute[] {
            new Attribute.Default<String>(ColumnsParser.LABEL, "Posterior"),
            new Attribute.Default<String>(ColumnsParser.DECIMAL_PLACES, "4"),
            new Attribute.Default<String>(ColumnsParser.WIDTH, "12")
          });
      writer.writeIDref(CompoundLikelihoodParser.POSTERIOR, "posterior");
      writer.writeCloseTag(ColumnsParser.COLUMN);
    }

    writer.writeOpenTag(
        ColumnsParser.COLUMN,
        new Attribute[] {
          new Attribute.Default<String>(ColumnsParser.LABEL, "Prior"),
          new Attribute.Default<String>(ColumnsParser.DECIMAL_PLACES, "4"),
          new Attribute.Default<String>(ColumnsParser.WIDTH, "12")
        });
    writer.writeIDref(CompoundLikelihoodParser.PRIOR, "prior");
    writer.writeCloseTag(ColumnsParser.COLUMN);

    if (options.hasData()) {
      writer.writeOpenTag(
          ColumnsParser.COLUMN,
          new Attribute[] {
            new Attribute.Default<String>(ColumnsParser.LABEL, "Likelihood"),
            new Attribute.Default<String>(ColumnsParser.DECIMAL_PLACES, "4"),
            new Attribute.Default<String>(ColumnsParser.WIDTH, "12")
          });
      writer.writeIDref(CompoundLikelihoodParser.LIKELIHOOD, "likelihood");
      writer.writeCloseTag(ColumnsParser.COLUMN);
    }

    if (options.useStarBEAST) { // species
      writer.writeOpenTag(
          ColumnsParser.COLUMN,
          new Attribute[] {
            new Attribute.Default<String>(ColumnsParser.LABEL, "PopMean"),
            new Attribute.Default<String>(ColumnsParser.DECIMAL_PLACES, "4"),
            new Attribute.Default<String>(ColumnsParser.WIDTH, "12")
          });
      writer.writeIDref(
          ParameterParser.PARAMETER,
          TraitData.TRAIT_SPECIES + "." + options.starBEASTOptions.POP_MEAN);
      writer.writeCloseTag(ColumnsParser.COLUMN);
    }

    for (PartitionTreeModel model : options.getPartitionTreeModels()) {
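      // one screen-log column per partition tree model, reporting its root height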
      writer.writeOpenTag(
          ColumnsParser.COLUMN,
          new Attribute[] {
            new Attribute.Default<String>(
                ColumnsParser.LABEL, model.getPrefix() + TreeModelParser.ROOT_HEIGHT),
            new Attribute.Default<String>(ColumnsParser.SIGNIFICANT_FIGURES, "6"),
            new Attribute.Default<String>(ColumnsParser.WIDTH, "12")
          });

      writer.writeIDref(
          ParameterParser.PARAMETER,
          model.getPrefix() + TreeModel.TREE_MODEL + "." + TreeModelParser.ROOT_HEIGHT);

      writer.writeCloseTag(ColumnsParser.COLUMN);
    }

    for (PartitionClockModel model : options.getPartitionClockModels()) {
      writer.writeOpenTag(
          ColumnsParser.COLUMN,
          new Attribute[] {
            new Attribute.Default<String>(
                ColumnsParser.LABEL, branchRatesModelGenerator.getClockRateString(model)),
            new Attribute.Default<String>(ColumnsParser.SIGNIFICANT_FIGURES, "6"),
            new Attribute.Default<String>(ColumnsParser.WIDTH, "12")
          });

      branchRatesModelGenerator.writeAllClockRateRefs(model, writer);
      //        if (options.clockModelOptions.getRateOptionClockModel() == FixRateType.FIX_MEAN) {
      //            writer.writeIDref(ParameterParser.PARAMETER, "allClockRates");
      //            for (PartitionClockModel model : options.getPartitionClockModels()) {
      //                if (model.getClockType() == ClockType.UNCORRELATED_LOGNORMAL)
      //                    writer.writeIDref(ParameterParser.PARAMETER, model.getPrefix() +
      // ClockType.UCLD_STDEV);
      //            }
      //        } else {
      //            for (PartitionClockModel model : options.getPartitionClockModels()) {
      //                branchRatesModelGenerator.writeAllClockRateRefs(model, writer);
      //            }
      //        }
      writer.writeCloseTag(ColumnsParser.COLUMN);
    }

    if (options.hasDiscreteIntegerTraitsExcludeSpecies()) {
      for (PartitionSubstitutionModel model : options.getPartitionTraitsSubstitutionModels()) {
        substitutionModelGenerator.writeStatisticLog(model, writer);
      }
    }

    generateInsertionPoint(ComponentGenerator.InsertionPoint.IN_SCREEN_LOG, writer);

    writer.writeCloseTag(LoggerParser.LOG);

    generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_SCREEN_LOG, writer);
  }
Example #4
  /**
   * Write the tree log block(s) to file.
   *
   * @param writer XMLWriter
   */
  public void writeTreeLogToFile(XMLWriter writer) {
    writer.writeComment("write tree log to file");

    if (options.useStarBEAST) { // species
      // species tree log
      writer.writeOpenTag(
          TreeLoggerParser.LOG_TREE,
          new Attribute[] {
            new Attribute.Default<String>(
                XMLParser.ID, TraitData.TRAIT_SPECIES + "." + TREE_FILE_LOG), // speciesTreeFileLog
            new Attribute.Default<String>(TreeLoggerParser.LOG_EVERY, options.logEvery + ""),
            new Attribute.Default<String>(TreeLoggerParser.NEXUS_FORMAT, "true"),
            new Attribute.Default<String>(
                TreeLoggerParser.FILE_NAME,
                options.fileNameStem + "." + options.starBEASTOptions.SPECIES_TREE_FILE_NAME),
            new Attribute.Default<String>(TreeLoggerParser.SORT_TRANSLATION_TABLE, "true")
          });

      writer.writeIDref(SpeciesTreeModelParser.SPECIES_TREE, SP_TREE);

      if (options.hasData()) {
        // we have data...
        writer.writeIDref("posterior", "posterior");
      }
      writer.writeCloseTag(TreeLoggerParser.LOG_TREE);
    }

    // gene tree log
    // TODO make code consistent with MCMCPanel
    for (PartitionTreeModel tree : options.getPartitionTreeModels()) {
      String treeFileName;
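      // the time-tree file name gets a "(time)" infix when substitution trees are logged as well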
      if (options.substTreeLog) {
        treeFileName =
            options.fileNameStem
                + "."
                + tree.getPrefix()
                + "(time)."
                + STARBEASTOptions.TREE_FILE_NAME;
      } else {
        treeFileName =
            options.fileNameStem
                + "."
                + tree.getPrefix()
                + STARBEASTOptions.TREE_FILE_NAME; // stem.partitionName.tree
      }

      if (options.treeFileName.get(0).endsWith(".txt")) {
        treeFileName += ".txt";
      }

      List<Attribute> attributes = new ArrayList<Attribute>();

      attributes.add(
          new Attribute.Default<String>(
              XMLParser.ID, tree.getPrefix() + TREE_FILE_LOG)); // partitionName.treeFileLog
      attributes.add(
          new Attribute.Default<String>(TreeLoggerParser.LOG_EVERY, options.logEvery + ""));
      attributes.add(new Attribute.Default<String>(TreeLoggerParser.NEXUS_FORMAT, "true"));
      attributes.add(new Attribute.Default<String>(TreeLoggerParser.FILE_NAME, treeFileName));
      attributes.add(
          new Attribute.Default<String>(TreeLoggerParser.SORT_TRANSLATION_TABLE, "true"));

      // if (options.clockModelOptions.getRateOptionClockModel() == FixRateType.RElATIVE_TO &&
      // tree.containsUncorrelatedRelaxClock()) { //TODO: Sibon's discretized branch length stuff
      //    double aveFixedRate =
      // options.clockModelOptions.getSelectedRate(options.getPartitionClockModels());
      //    attributes.add(new Attribute.Default<String>(TreeLoggerParser.NORMALISE_MEAN_RATE_TO,
      // Double.toString(aveFixedRate)));
      // }

      // generate <logTree>
      writer.writeOpenTag(TreeLoggerParser.LOG_TREE, attributes);

      //            writer.writeOpenTag(TreeLoggerParser.LOG_TREE,
      //                    new Attribute[]{
      //                            new Attribute.Default<String>(XMLParser.ID, tree.getPrefix() +
      // TREE_FILE_LOG), // partionName.treeFileLog
      //                            new Attribute.Default<String>(TreeLoggerParser.LOG_EVERY,
      // options.logEvery + ""),
      //                            new Attribute.Default<String>(TreeLoggerParser.NEXUS_FORMAT,
      // "true"),
      //                            new Attribute.Default<String>(TreeLoggerParser.FILE_NAME,
      // treeFileName),
      //                            new
      // Attribute.Default<String>(TreeLoggerParser.SORT_TRANSLATION_TABLE, "true")
      //                    });

      writer.writeIDref(TreeModel.TREE_MODEL, tree.getPrefix() + TreeModel.TREE_MODEL);

      for (PartitionClockModel model :
          options.getPartitionClockModels(options.getAllPartitionData(tree))) {
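        // only sequence partitions (no trait type) reference a branch-rate model here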
        if (options.getAllPartitionData(model).get(0).getTraitType() == null) {
          switch (model.getClockType()) {
            case STRICT_CLOCK:
              writer.writeIDref(
                  StrictClockBranchRatesParser.STRICT_CLOCK_BRANCH_RATES,
                  model.getPrefix() + BranchRateModel.BRANCH_RATES);
              break;

            case UNCORRELATED_EXPONENTIAL:
            case UNCORRELATED_LOGNORMAL:
              writer.writeIDref(
                  DiscretizedBranchRatesParser.DISCRETIZED_BRANCH_RATES,
                  options.noDuplicatedPrefix(model.getPrefix(), tree.getPrefix())
                      + BranchRateModel.BRANCH_RATES);
              break;

            case RANDOM_LOCAL_CLOCK:
              writer.writeIDref(
                  RandomLocalClockModelParser.LOCAL_BRANCH_RATES,
                  model.getPrefix() + BranchRateModel.BRANCH_RATES);
              break;

            case AUTOCORRELATED_LOGNORMAL:
              writer.writeIDref(
                  ACLikelihoodParser.AC_LIKELIHOOD,
                  options.noDuplicatedPrefix(model.getPrefix(), tree.getPrefix())
                      + BranchRateModel.BRANCH_RATES);
              break;

            default:
              throw new IllegalArgumentException("Unknown clock model");
          }
        }
      }

      if (options.hasData()) {
        // we have data...
        writer.writeIDref("posterior", "posterior");
      }

      if (options.hasDiscreteIntegerTraitsExcludeSpecies()) {
        for (PartitionData partitionData :
            options.getAllPartitionData(
                tree)) { // each trait partition except Species has one AncestralTreeLikelihood
          if (partitionData.getTraitType() != null
              && (!partitionData.getName().equalsIgnoreCase(TraitData.TRAIT_SPECIES.toString())))
            writer.writeIDref(
                AncestralStateTreeLikelihoodParser.RECONSTRUCTING_TREE_LIKELIHOOD,
                partitionData.getPrefix() + TreeLikelihoodParser.TREE_LIKELIHOOD);
        }
      }

      writer.writeCloseTag(TreeLoggerParser.LOG_TREE);
    } // end For loop

    generateInsertionPoint(ComponentGenerator.InsertionPoint.IN_TREES_LOG, writer);

    if (options.substTreeLog) {
      if (options.useStarBEAST) { // species
        // TODO: species sub tree
      }

      // gene tree
      for (PartitionTreeModel tree : options.getPartitionTreeModels()) {
        // write tree log to file
        writer.writeOpenTag(
            TreeLoggerParser.LOG_TREE,
            new Attribute[] {
              new Attribute.Default<String>(XMLParser.ID, tree.getPrefix() + SUB_TREE_FILE_LOG),
              new Attribute.Default<String>(TreeLoggerParser.LOG_EVERY, options.logEvery + ""),
              new Attribute.Default<String>(TreeLoggerParser.NEXUS_FORMAT, "true"),
              new Attribute.Default<String>(
                  TreeLoggerParser.FILE_NAME,
                  options.fileNameStem
                      + "."
                      + tree.getPrefix()
                      + "(subst)."
                      + STARBEASTOptions.TREE_FILE_NAME),
              new Attribute.Default<String>(
                  TreeLoggerParser.BRANCH_LENGTHS, TreeLoggerParser.SUBSTITUTIONS)
            });
        writer.writeIDref(TreeModel.TREE_MODEL, tree.getPrefix() + TreeModel.TREE_MODEL);

        for (PartitionClockModel model :
            options.getPartitionClockModels(options.getAllPartitionData(tree))) {
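          // as above, only sequence partitions reference a branch-rate model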
          if (options.getAllPartitionData(model).get(0).getTraitType() == null) {
            switch (model.getClockType()) {
              case STRICT_CLOCK:
                writer.writeIDref(
                    StrictClockBranchRatesParser.STRICT_CLOCK_BRANCH_RATES,
                    model.getPrefix() + BranchRateModel.BRANCH_RATES);
                break;

              case UNCORRELATED_EXPONENTIAL:
              case UNCORRELATED_LOGNORMAL:
                writer.writeIDref(
                    DiscretizedBranchRatesParser.DISCRETIZED_BRANCH_RATES,
                    options.noDuplicatedPrefix(model.getPrefix(), tree.getPrefix())
                        + BranchRateModel.BRANCH_RATES);
                break;

              case RANDOM_LOCAL_CLOCK:
                writer.writeIDref(
                    RandomLocalClockModelParser.LOCAL_BRANCH_RATES,
                    model.getPrefix() + BranchRateModel.BRANCH_RATES);
                break;

              case AUTOCORRELATED_LOGNORMAL:
                writer.writeIDref(
                    ACLikelihoodParser.AC_LIKELIHOOD,
                    options.noDuplicatedPrefix(model.getPrefix(), tree.getPrefix())
                        + BranchRateModel.BRANCH_RATES);
                break;

              default:
                throw new IllegalArgumentException("Unknown clock model");
            }
          }
        }

        writer.writeCloseTag(TreeLoggerParser.LOG_TREE);
      }
    }

    generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_TREES_LOG, writer);
  }
Example #5
  /**
   * Write the file log block.
   *
   * @param writer XMLWriter
   * @param treePriorGenerator TreePriorGenerator
   * @param branchRatesModelGenerator BranchRatesModelGenerator
   * @param substitutionModelGenerator SubstitutionModelGenerator
   * @param treeLikelihoodGenerator TreeLikelihoodGenerator
   * @param generalTraitGenerator GeneralTraitGenerator
   */
  public void writeLogToFile(
      XMLWriter writer,
      TreePriorGenerator treePriorGenerator,
      BranchRatesModelGenerator branchRatesModelGenerator,
      SubstitutionModelGenerator substitutionModelGenerator,
      TreeLikelihoodGenerator treeLikelihoodGenerator,
      GeneralTraitGenerator generalTraitGenerator) {
    writer.writeComment("write log to file");

    if (options.logFileName == null) {
      options.logFileName = options.fileNameStem + ".log";
    }
    writer.writeOpenTag(
        LoggerParser.LOG,
        new Attribute[] {
          new Attribute.Default<String>(XMLParser.ID, "fileLog"),
          new Attribute.Default<String>(LoggerParser.LOG_EVERY, options.logEvery + ""),
          new Attribute.Default<String>(LoggerParser.FILE_NAME, options.logFileName),
          new Attribute.Default<Boolean>(
              LoggerParser.ALLOW_OVERWRITE_LOG, options.allowOverwriteLog)
        });

    if (options.hasData()) {
      writer.writeIDref(CompoundLikelihoodParser.POSTERIOR, "posterior");
    }
    writer.writeIDref(CompoundLikelihoodParser.PRIOR, "prior");
    if (options.hasData()) {
      writer.writeIDref(CompoundLikelihoodParser.LIKELIHOOD, "likelihood");
    }

    if (options.useStarBEAST) { // species
      // coalescent prior
      writer.writeIDref(
          MultiSpeciesCoalescentParser.SPECIES_COALESCENT,
          TraitData.TRAIT_SPECIES + "." + COALESCENT);
      // prior on population sizes
      //            if (options.speciesTreePrior == TreePriorType.SPECIES_YULE) {
      writer.writeIDref(MixedDistributionLikelihoodParser.DISTRIBUTION_LIKELIHOOD, SPOPS);
      //            } else {
      //                writer.writeIDref(SpeciesTreeBMPrior.STPRIOR, STP);
      //            }
      // prior on species tree
      writer.writeIDref(SpeciationLikelihoodParser.SPECIATION_LIKELIHOOD, SPECIATION_LIKE);

      writer.writeIDref(
          ParameterParser.PARAMETER,
          TraitData.TRAIT_SPECIES + "." + options.starBEASTOptions.POP_MEAN);
      writer.writeIDref(
          ParameterParser.PARAMETER, SpeciesTreeModelParser.SPECIES_TREE + "." + SPLIT_POPS);

      if (options.getPartitionTreePriors().get(0).getNodeHeightPrior()
          == TreePriorType.SPECIES_BIRTH_DEATH) {
        writer.writeIDref(
            ParameterParser.PARAMETER,
            TraitData.TRAIT_SPECIES + "." + BirthDeathModelParser.MEAN_GROWTH_RATE_PARAM_NAME);
        writer.writeIDref(
            ParameterParser.PARAMETER,
            TraitData.TRAIT_SPECIES + "." + BirthDeathModelParser.RELATIVE_DEATH_RATE_PARAM_NAME);
      } else if (options.getPartitionTreePriors().get(0).getNodeHeightPrior()
          == TreePriorType.SPECIES_YULE) {
        writer.writeIDref(
            ParameterParser.PARAMETER,
            TraitData.TRAIT_SPECIES
                + "."
                + YuleModelParser.YULE
                + "."
                + YuleModelParser.BIRTH_RATE);
      } else {
        throw new IllegalArgumentException(
            "Unexpected species tree prior when using *BEAST: "
                + options.getPartitionTreePriors().get(0).getNodeHeightPrior().toString());
      }

      // Species Tree: tmrcaStatistic
      writer.writeIDref(
          TMRCAStatisticParser.TMRCA_STATISTIC,
          SpeciesTreeModelParser.SPECIES_TREE + "." + TreeModelParser.ROOT_HEIGHT);
    }

    for (PartitionTreeModel model : options.getPartitionTreeModels()) {
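      // log the root height of each partition's tree model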
      writer.writeIDref(
          ParameterParser.PARAMETER,
          model.getPrefix() + TreeModel.TREE_MODEL + "." + TreeModelParser.ROOT_HEIGHT);
    }

    for (Taxa taxa : options.taxonSets) {
      // make tmrca(tree.name) easy to read in the log for Tracer
      writer.writeIDref(
          TMRCAStatisticParser.TMRCA_STATISTIC,
          "tmrca(" + taxa.getTreeModel().getPrefix() + taxa.getId() + ")");
    }

    //        if ( options.shareSameTreePrior ) { // Share Same Tree Prior
    //	        treePriorGenerator.setModelPrefix("");
    //        	treePriorGenerator.writeParameterLog(options.activedSameTreePrior, writer);
    //        } else { // no species
    for (PartitionTreePrior prior : options.getPartitionTreePriors()) {
      //	        	treePriorGenerator.setModelPrefix(prior.getPrefix()); // priorName.treeModel
      treePriorGenerator.writeParameterLog(prior, writer);
    }
    //	    }

    for (PartitionSubstitutionModel model : options.getPartitionSubstitutionModels()) {
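      // per-partition substitution model parameters; codon-partitioned models also log
      // the compound "allMus" parameter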
      substitutionModelGenerator.writeLog(writer, model);
      if (model.hasCodon()) {
        writer.writeIDref(CompoundParameterParser.COMPOUND_PARAMETER, model.getPrefix() + "allMus");
      }
    }

    if (options.clockModelOptions.getRateOptionClockModel() == FixRateType.FIX_MEAN) {
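      // rates fixed to a common mean: log the shared allClockRates parameter, plus the
      // ucld.stdev parameter for any uncorrelated lognormal relaxed clocks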
      writer.writeIDref(ParameterParser.PARAMETER, "allClockRates");
      for (PartitionClockModel model : options.getPartitionClockModels()) {
        if (model.getClockType() == ClockType.UNCORRELATED_LOGNORMAL)
          writer.writeIDref(ParameterParser.PARAMETER, model.getPrefix() + ClockType.UCLD_STDEV);
      }
    } else {
      for (PartitionClockModel model : options.getPartitionClockModels()) {
        branchRatesModelGenerator.writeLog(model, writer);
      }
    }

    for (PartitionClockModel model : options.getPartitionClockModels()) {
      branchRatesModelGenerator.writeLogStatistic(model, writer);
    }

    generateInsertionPoint(ComponentGenerator.InsertionPoint.IN_FILE_LOG_PARAMETERS, writer);

    if (options.hasData()) {
      treeLikelihoodGenerator.writeTreeLikelihoodReferences(writer);
    }

    generateInsertionPoint(ComponentGenerator.InsertionPoint.IN_FILE_LOG_LIKELIHOODS, writer);

    // coalescentLikelihood
    for (PartitionTreeModel model : options.getPartitionTreeModels()) {
      PartitionTreePrior prior = model.getPartitionTreePrior();
      treePriorGenerator.writePriorLikelihoodReferenceLog(prior, model, writer);
      writer.writeText("");
    }

    for (PartitionTreePrior prior : options.getPartitionTreePriors()) {
      if (prior.getNodeHeightPrior() == TreePriorType.EXTENDED_SKYLINE)
        writer.writeIDref(
            CoalescentLikelihoodParser.COALESCENT_LIKELIHOOD,
            prior.getPrefix() + COALESCENT); // only 1 coalescent
    }

    if (options.hasDiscreteIntegerTraitsExcludeSpecies()) {
      generalTraitGenerator.writeAncestralTreeLikelihoodReferences(writer);
    }

    writer.writeCloseTag(LoggerParser.LOG);

    generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_FILE_LOG, writer);
  }