/**
 * Sets the partials from a sequence in an alignment.
 *
 * @param beagle        the BEAGLE instance to write the partials into
 * @param patternList   the site patterns of the alignment
 * @param sequenceIndex the index of the sequence (taxon) in the pattern list
 * @param nodeIndex     the index of the tip node to set
 */
protected final void setPartials(
        Beagle beagle, PatternList patternList, int sequenceIndex, int nodeIndex) {
    double[] partials = new double[patternCount * stateCount * categoryCount];

    boolean[] stateSet;

    int v = 0;
    for (int i = 0; i < patternCount; i++) {

        int state = patternList.getPatternState(sequenceIndex, i);
        stateSet = dataType.getStateSet(state);

        for (int j = 0; j < stateCount; j++) {
            if (stateSet[j]) {
                partials[v] = 1.0;
            } else {
                partials[v] = 0.0;
            }
            v++;
        }
    }

    // if there is more than one category then replicate the partials for each
    int n = patternCount * stateCount;
    int k = n;
    for (int i = 1; i < categoryCount; i++) {
        System.arraycopy(partials, 0, partials, k, n);
        k += n;
    }

    beagle.setPartials(nodeIndex, partials);
}
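// Example (assuming the usual A,C,G,T nucleotide state ordering): with stateCount = 4,
// an unambiguous 'A' at a pattern yields the partials {1, 0, 0, 0}, while the ambiguity
// code 'R' (A or G) yields {1, 0, 1, 0}, as determined by dataType.getStateSet(state).
// The resulting patternCount * stateCount block is then copied once per additional rate
// category, so every category starts from the same tip partials.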
/** Stores the additional state other than model components */
protected void storeState() {
    partialBufferHelper.storeState();
    substitutionModelDelegate.storeState();

    if (useScaleFactors || useAutoScaling) { // Only store when actually used
        scaleBufferHelper.storeState();
        System.arraycopy(
                scaleBufferIndices, 0, storedScaleBufferIndices, 0, scaleBufferIndices.length);
        // storedRescalingCount = rescalingCount;
    }

    super.storeState();
}
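// Note: this pairs with a corresponding restoreState() (not shown in this fragment),
// which is assumed to swap back the stored buffer and scale-buffer indices so that a
// rejected MCMC proposal leaves the BEAGLE buffers untouched.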
/** Sets the tip partials at a node as provided by a TipStatesModel. */
protected final void setPartials(Beagle beagle, TipStatesModel tipStatesModel, int nodeIndex) {
    double[] partials = new double[patternCount * stateCount * categoryCount];

    tipStatesModel.getTipPartials(nodeIndex, partials);

    // if there is more than one category then replicate the partials for each
    int n = patternCount * stateCount;
    int k = n;
    for (int i = 1; i < categoryCount; i++) {
        System.arraycopy(partials, 0, partials, k, n);
        k += n;
    }

    beagle.setPartials(nodeIndex, partials);
}
private static List<Integer> parseSystemPropertyIntegerArray(String propertyName) {
    List<Integer> order = new ArrayList<Integer>();
    String r = System.getProperty(propertyName);
    if (r != null) {
        String[] parts = r.split(",");
        for (String part : parts) {
            try {
                int n = Integer.parseInt(part.trim());
                order.add(n);
            } catch (NumberFormatException nfe) {
                System.err.println("Invalid entry '" + part + "' in " + propertyName);
            }
        }
    }
    return order;
}
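// Example (a sketch; the concrete property key is whatever RESOURCE_ORDER_PROPERTY holds):
// launching the JVM with -D<that key>=1,0 makes this method return [1, 0], while a
// malformed list such as "1,x,0" prints a warning for "x" and still returns [1, 0].
// The constructor below indexes these lists with instanceCount % list.size(), so
// successive likelihood instances cycle through the configured resources and flags.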
public NewBeagleTreeLikelihood(
        PatternList patternList,
        TreeModel treeModel,
        BranchModel branchModel,
        SiteModel siteModel,
        BranchRateModel branchRateModel,
        TipStatesModel tipStatesModel,
        boolean useAmbiguities,
        PartialsRescalingScheme rescalingScheme,
        Map<Set<String>, Parameter> partialsRestrictions) {

    super(BeagleTreeLikelihoodParser.TREE_LIKELIHOOD, patternList, treeModel);

    try {
        final Logger logger = Logger.getLogger("dr.evomodel");

        logger.info("Using BEAGLE TreeLikelihood");

        this.siteModel = siteModel;
        addModel(this.siteModel);

        this.branchModel = branchModel;
        addModel(this.branchModel);

        if (branchRateModel != null) {
            this.branchRateModel = branchRateModel;
            logger.info(" Branch rate model used: " + branchRateModel.getModelName());
        } else {
            this.branchRateModel = new DefaultBranchRateModel();
        }
        addModel(this.branchRateModel);

        this.tipStatesModel = tipStatesModel;

        this.categoryCount = this.siteModel.getCategoryCount();

        this.tipCount = treeModel.getExternalNodeCount();

        internalNodeCount = nodeCount - tipCount;

        int compactPartialsCount = tipCount;
        if (useAmbiguities) {
            // if we are using ambiguities then we don't use tip partials
            compactPartialsCount = 0;
        }

        // one partials buffer for each tip and two for each internal node (for store restore)
        partialBufferHelper = new BufferIndexHelper(nodeCount, tipCount);

        // one scaling buffer for each internal node plus an extra for the accumulation,
        // then doubled for store/restore
        scaleBufferHelper = new BufferIndexHelper(getScaleBufferCount(), 0);

        // Attempt to get the resource order from the System Property
        if (resourceOrder == null) {
            resourceOrder = parseSystemPropertyIntegerArray(RESOURCE_ORDER_PROPERTY);
        }
        if (preferredOrder == null) {
            preferredOrder = parseSystemPropertyIntegerArray(PREFERRED_FLAGS_PROPERTY);
        }
        if (requiredOrder == null) {
            requiredOrder = parseSystemPropertyIntegerArray(REQUIRED_FLAGS_PROPERTY);
        }
        if (scalingOrder == null) {
            scalingOrder = parseSystemPropertyStringArray(SCALING_PROPERTY);
        }
        if (extraBufferOrder == null) {
            extraBufferOrder = parseSystemPropertyIntegerArray(EXTRA_BUFFER_COUNT_PROPERTY);
        }

        int extraBufferCount = -1; // default
        if (extraBufferOrder.size() > 0) {
            extraBufferCount = extraBufferOrder.get(instanceCount % extraBufferOrder.size());
        }
        substitutionModelDelegate =
                new SubstitutionModelDelegate(treeModel, branchModel, extraBufferCount);

        // first set the rescaling scheme to use from the parser
        this.rescalingScheme = rescalingScheme;

        int[] resourceList = null;
        long preferenceFlags = 0;
        long requirementFlags = 0;

        if (scalingOrder.size() > 0) {
            this.rescalingScheme =
                    PartialsRescalingScheme.parseFromString(
                            scalingOrder.get(instanceCount % scalingOrder.size()));
        }

        if (resourceOrder.size() > 0) {
            // added the zero on the end so that a CPU is selected if requested resource fails
            resourceList = new int[] {resourceOrder.get(instanceCount % resourceOrder.size()), 0};
            if (resourceList[0] > 0) {
                preferenceFlags |= BeagleFlag.PROCESSOR_GPU.getMask(); // Add preference weight against CPU
            }
        }

        if (preferredOrder.size() > 0) {
            preferenceFlags = preferredOrder.get(instanceCount % preferredOrder.size());
        }

        if (requiredOrder.size() > 0) {
            requirementFlags = requiredOrder.get(instanceCount % requiredOrder.size());
        }

        // Define default behaviour here
        if (this.rescalingScheme == PartialsRescalingScheme.DEFAULT) {
            // if GPU: the default is dynamic scaling in BEAST
            if (resourceList != null && resourceList[0] > 1) {
                this.rescalingScheme = DEFAULT_RESCALING_SCHEME;
            } else {
                // if CPU: just run as fast as possible
                // this.rescalingScheme = PartialsRescalingScheme.NONE;
                // Dynamic should run as fast as none until first underflow
                this.rescalingScheme = DEFAULT_RESCALING_SCHEME;
            }
        }

        if (this.rescalingScheme == PartialsRescalingScheme.AUTO) {
            preferenceFlags |= BeagleFlag.SCALING_AUTO.getMask();
            useAutoScaling = true;
        } else {
            // preferenceFlags |= BeagleFlag.SCALING_MANUAL.getMask();
        }

        String r = System.getProperty(RESCALE_FREQUENCY_PROPERTY);
        if (r != null) {
            rescalingFrequency = Integer.parseInt(r);
            if (rescalingFrequency < 1) {
                rescalingFrequency = RESCALE_FREQUENCY;
            }
        }

        if (preferenceFlags == 0 && resourceList == null) { // else determine dataset characteristics
            if (stateCount == 4 && patternList.getPatternCount() < 10000) // TODO determine good cut-off
                preferenceFlags |= BeagleFlag.PROCESSOR_CPU.getMask();
        }

        if (BeagleFlag.VECTOR_SSE.isSet(preferenceFlags) && stateCount != 4) {
            // @todo SSE doesn't seem to work for larger state spaces so for now we override the
            // SSE option.
            preferenceFlags &= ~BeagleFlag.VECTOR_SSE.getMask();
            preferenceFlags |= BeagleFlag.VECTOR_NONE.getMask();

            if (stateCount > 4 && this.rescalingScheme == PartialsRescalingScheme.DYNAMIC) {
                this.rescalingScheme = PartialsRescalingScheme.DELAYED;
            }
        }

        if (!BeagleFlag.PRECISION_SINGLE.isSet(preferenceFlags)) {
            // if single precision not explicitly set then prefer double
            preferenceFlags |= BeagleFlag.PRECISION_DOUBLE.getMask();
        }

        if (substitutionModelDelegate.canReturnComplexDiagonalization()) {
            requirementFlags |= BeagleFlag.EIGEN_COMPLEX.getMask();
        }

        instanceCount++;

        beagle =
                BeagleFactory.loadBeagleInstance(
                        tipCount,
                        partialBufferHelper.getBufferCount(),
                        compactPartialsCount,
                        stateCount,
                        patternCount,
                        substitutionModelDelegate.getEigenBufferCount(),
                        substitutionModelDelegate.getMatrixBufferCount(),
                        categoryCount,
                        scaleBufferHelper.getBufferCount(), // Always allocate; they may become necessary
                        resourceList,
                        preferenceFlags,
                        requirementFlags);

        InstanceDetails instanceDetails = beagle.getDetails();
        ResourceDetails resourceDetails = null;

        if (instanceDetails != null) {
            resourceDetails = BeagleFactory.getResourceDetails(instanceDetails.getResourceNumber());
            if (resourceDetails != null) {
                StringBuilder sb = new StringBuilder(" Using BEAGLE resource ");
                sb.append(resourceDetails.getNumber()).append(": ");
                sb.append(resourceDetails.getName()).append("\n");
                if (resourceDetails.getDescription() != null) {
                    String[] description = resourceDetails.getDescription().split("\\|");
                    for (String desc : description) {
                        if (desc.trim().length() > 0) {
                            sb.append(" ").append(desc.trim()).append("\n");
                        }
                    }
                }
                sb.append(" with instance flags: ").append(instanceDetails.toString());
                logger.info(sb.toString());
            } else {
                logger.info(
                        " Error retrieving BEAGLE resource for instance: " + instanceDetails.toString());
            }
        } else {
            logger.info(
                    " No external BEAGLE resources available, or resource list/requirements not met, using Java implementation");
        }
        logger.info(
                " " + (useAmbiguities ? "Using" : "Ignoring") + " ambiguities in tree likelihood.");
        logger.info(" With " + patternList.getPatternCount() + " unique site patterns.");

        if (tipStatesModel != null) {
            tipStatesModel.setTree(treeModel);

            if (tipStatesModel.getModelType() == TipStatesModel.Type.PARTIALS) {
                tipPartials = new double[patternCount * stateCount];
            } else {
                tipStates = new int[patternCount];
            }

            addModel(tipStatesModel);
        }

        for (int i = 0; i < tipCount; i++) {
            // Find the id of tip i in the patternList
            String id = treeModel.getTaxonId(i);
            int index = patternList.getTaxonIndex(id);

            if (index == -1) {
                throw new TaxonList.MissingTaxonException(
                        "Taxon, " + id + ", in tree, " + treeModel.getId()
                                + ", is not found in patternList, " + patternList.getId());
            } else {
                if (tipStatesModel != null) {
                    // using a tipPartials model.
                    // First set the observed states:
                    tipStatesModel.setStates(patternList, index, i, id);

                    if (tipStatesModel.getModelType() == TipStatesModel.Type.PARTIALS) {
                        // Then set the tip partials as determined by the model:
                        setPartials(beagle, tipStatesModel, i);
                    } else {
                        // or the tip states:
                        tipStatesModel.getTipStates(i, tipStates);
                        beagle.setTipStates(i, tipStates);
                    }
                } else {
                    if (useAmbiguities) {
                        setPartials(beagle, patternList, index, i);
                    } else {
                        setStates(beagle, patternList, index, i);
                    }
                }
            }
        }

        if (patternList instanceof AscertainedSitePatterns) {
            ascertainedSitePatterns = true;
        }

        this.partialsRestrictions = partialsRestrictions;
        // hasRestrictedPartials = (partialsRestrictions != null);
        if (hasRestrictedPartials) {
            numRestrictedPartials = partialsRestrictions.size();
            updateRestrictedNodePartials = true;
            partialsMap = new Parameter[treeModel.getNodeCount()];
            partials = new double[stateCount * patternCount * categoryCount];
        } else {
            numRestrictedPartials = 0;
            updateRestrictedNodePartials = false;
        }

        beagle.setPatternWeights(patternWeights);

        String rescaleMessage = " Using rescaling scheme : " + this.rescalingScheme.getText();
        if (this.rescalingScheme == PartialsRescalingScheme.AUTO
                && resourceDetails != null
                && (resourceDetails.getFlags() & BeagleFlag.SCALING_AUTO.getMask()) == 0) {
            // If auto scaling in BEAGLE is not supported then do it here
            this.rescalingScheme = PartialsRescalingScheme.DYNAMIC;
            rescaleMessage =
                    " Auto rescaling not supported in BEAGLE, using : " + this.rescalingScheme.getText();
        }
        if (this.rescalingScheme == PartialsRescalingScheme.DYNAMIC) {
            rescaleMessage += " (rescaling every " + rescalingFrequency + " evaluations)";
        }
        logger.info(rescaleMessage);

        if (this.rescalingScheme == PartialsRescalingScheme.DYNAMIC) {
            everUnderflowed = false; // If false, BEAST does not rescale until first under-/over-flow.
        }

        updateSubstitutionModel = true;
        updateSiteModel = true;

    } catch (TaxonList.MissingTaxonException mte) {
        throw new RuntimeException(mte.toString());
    }

    this.useAmbiguities = useAmbiguities;
    hasInitialized = true;
}