/**
 * Adds a request parameter, routing it into the body when this request
 * carries body content and into the query string otherwise.
 *
 * @param key parameter name
 * @param value parameter value
 */
public void addParameter(String key, String value) {
  // Body-capable requests keep their parameters in the payload.
  ParameterList target = hasBodyContent() ? bodyParams : querystringParams;
  target.add(key, value);
}
/**
 * Resolves the method-call context of an element that sits inside a call's
 * parameter list.
 *
 * @param psiElement element expected to be a direct child of a parameter list
 * @param wantIndex required argument position, or a negative value to accept any position
 * @return bag of parameter list, method reference and argument index, or {@code null}
 *     when the element is not a method argument at the wanted position
 */
@Nullable
public static MethodReferenceBag getMethodParameterReferenceBag(
    PsiElement psiElement, int wantIndex) {
  PsiElement listCandidate = psiElement.getContext();
  if (!(listCandidate instanceof ParameterList)) {
    return null;
  }
  ParameterList parameterList = (ParameterList) listCandidate;
  PsiElement owner = parameterList.getContext();
  if (!(owner instanceof MethodReference)) {
    return null;
  }
  ParameterBag bag = PsiElementUtils.getCurrentParameterIndex(psiElement);
  if (bag == null) {
    return null;
  }
  // A negative wantIndex means "any position is acceptable".
  if (wantIndex >= 0 && bag.getIndex() != wantIndex) {
    return null;
  }
  return new MethodReferenceBag(parameterList, (MethodReference) owner, bag);
}
/**
 * Checks whether the element is an argument of one of the given functions,
 * optionally at a specific argument position.
 *
 * @param psiElement element expected to sit inside a function call's parameter list
 * @param wantIndex required argument position, or negative to accept any position
 * @param funcName accepted function names
 * @return true when the element is an argument of a matching function call
 */
public static boolean isFunctionReference(
    PsiElement psiElement, int wantIndex, String... funcName) {
  PsiElement listCandidate = psiElement.getContext();
  if (!(listCandidate instanceof ParameterList)) {
    return false;
  }
  PsiElement owner = ((ParameterList) listCandidate).getContext();
  if (!(owner instanceof FunctionReference)) {
    return false;
  }
  String name = ((FunctionReference) owner).getName();
  if (name == null) {
    return false;
  }
  boolean nameMatches = false;
  for (String candidate : funcName) {
    if (name.equals(candidate)) {
      nameMatches = true;
      break;
    }
  }
  if (!nameMatches) {
    return false;
  }
  ParameterBag bag = getCurrentParameterIndex(psiElement);
  if (bag == null) {
    return false;
  }
  // Negative wantIndex accepts any argument position.
  return wantIndex < 0 || bag.getIndex() == wantIndex;
}
/**
 * Builds the full set of query string parameters: those embedded in the
 * request URL plus those added explicitly to this request.
 *
 * @return a {@link ParameterList} containing the query string parameters.
 * @throws OAuthException if the request URL is not valid.
 */
public ParameterList getQueryStringParams() {
  final String queryString;
  try {
    // Only the URL constructor can throw here; parse first, combine after.
    queryString = new URL(url).getQuery();
  } catch (MalformedURLException mue) {
    throw new OAuthException("Malformed URL", mue);
  }
  final ParameterList result = new ParameterList();
  result.addQuerystring(queryString);
  result.addAll(querystringParams);
  return result;
}
/**
 * Determines the argument position of the given element within its enclosing
 * parameter list, provided that list belongs to a parameter-list owner
 * (a function/method call).
 *
 * @param psiElement element expected to be a direct child of a parameter list
 * @return the parameter bag for the element's position, or {@code null}
 */
@Nullable
public static ParameterBag getCurrentParameterIndex(PsiElement psiElement) {
  PsiElement listCandidate = psiElement.getContext();
  if (!(listCandidate instanceof ParameterList)) {
    return null;
  }
  ParameterList parameterList = (ParameterList) listCandidate;
  if (!(parameterList.getContext() instanceof ParameterListOwner)) {
    return null;
  }
  return getCurrentParameterIndex(parameterList.getParameters(), psiElement);
}
private String getRealm(ParameterList requestp) { final String realm = requestp.getParameterValue("openid.realm"); final String returnTo = requestp.getParameterValue("openid.return_to"); if (realm == null && returnTo != null) try { return new URL(returnTo).getHost(); } catch (MalformedURLException e) { // Fall back return returnTo; } return realm; }
/**
 * Parses the query string of this message's URI into a {@link ParameterList}.
 *
 * <p>Fix: the original assumed every pair contained '=' and that the first
 * '=' found belonged to the current pair. A valueless parameter (e.g.
 * {@code ?flag&a=b}) either threw {@code StringIndexOutOfBoundsException}
 * (when no '=' followed at all) or stole the '=' of a later pair. Such
 * tokens are now parsed as a name with an empty value.
 *
 * @return the parsed parameters; empty when the URI is absent or has no query part
 */
public ParameterList getParameterList() {
  ParameterList paramList = new ParameterList();
  String uri = getURI();
  if (uri == null) {
    return paramList;
  }
  int paramIdx = uri.indexOf('?');
  if (paramIdx < 0) {
    return paramList;
  }
  // paramIdx points at the separator ('?' or '&') preceding each pair.
  while (0 < paramIdx) {
    int nextParamIdx = uri.indexOf('&', paramIdx + 1);
    int pairEnd = (0 < nextParamIdx) ? nextParamIdx : uri.length();
    int eqIdx = uri.indexOf('=', paramIdx + 1);
    String name;
    String value;
    if (eqIdx < 0 || eqIdx >= pairEnd) {
      // No '=' within this pair: treat the whole token as a valueless name.
      name = uri.substring(paramIdx + 1, pairEnd);
      value = "";
    } else {
      name = uri.substring(paramIdx + 1, eqIdx);
      value = uri.substring(eqIdx + 1, pairEnd);
    }
    paramList.add(new Parameter(name, value));
    paramIdx = nextParamIdx;
  }
  return paramList;
}
/**
 * Interleaves the template fragments with rendered parameter values.
 *
 * <p>When {@code parameters} is null each gap between fragments is rendered
 * as a literal "?" placeholder; otherwise {@code parameters.toString(i)}
 * supplies the value for gap {@code i}.
 *
 * @param parameters values to splice between fragments; may be null
 * @return the assembled string
 */
public String toString(ParameterList parameters) {
  // StringBuilder replaces StringBuffer: no synchronization needed for a local.
  StringBuilder sbuf = new StringBuilder(fragments[0]);
  for (int i = 1; i < fragments.length; ++i) {
    sbuf.append(parameters == null ? "?" : parameters.toString(i));
    sbuf.append(fragments[i]);
  }
  return sbuf.toString();
}
/**
 * Resolves the named parameter as a {@code java.net.URL}.
 *
 * @throws BadInputException if the parameter's URI cannot be converted to a URL
 */
@Override
Object get(GraphDatabaseAPI graphDb, ParameterList parameters, String name)
    throws BadInputException {
  try {
    // The parameter is fetched as a URI; toURL() validates convertibility.
    return parameters.getUri(name).toURL();
  } catch (MalformedURLException e) {
    throw new BadInputException(e);
  }
}
/**
 * Returns the string contents of the argument at {@code index}, or
 * {@code null} when the list is shorter than requested.
 *
 * @param parameterList the call's parameter list
 * @param index zero-based argument position
 * @return the extracted string value, or {@code null}
 */
@Nullable
public static String getMethodParameterAt(@NotNull ParameterList parameterList, int index) {
  PsiElement[] parameters = parameterList.getParameters();
  if (index >= parameters.length) {
    return null;
  }
  return getMethodParameter(parameters[index]);
}
/**
 * Returns the request body as bytes. Precedence: an explicit byte payload
 * wins; otherwise the string payload; otherwise the form-url-encoded body
 * parameters.
 *
 * @return the body bytes
 * @throws OAuthException if the configured charset is not supported
 */
byte[] getByteBodyContents() {
  if (bytePayload != null) {
    return bytePayload;
  }
  String body = payload;
  if (body == null) {
    body = bodyParams.asFormUrlEncodedString();
  }
  try {
    return body.getBytes(getCharset());
  } catch (UnsupportedEncodingException uee) {
    throw new OAuthException("Unsupported Charset: " + getCharset(), uee);
  }
}
/**
 * Debug helper: recomputes the single-site likelihoods under both clusters'
 * current parameters and compares them (exact {@code ==} on doubles) against
 * the cached values in {@code lik1}/{@code lik2}; dumps state and throws on
 * any mismatch.
 *
 * @param i position in the shuffled order currently being checked
 * @param cluster cluster id the site was assigned to (printed on mismatch)
 * @param clusterIndex1 index of the first cluster
 * @param clusterIndex2 index of the second cluster
 * @param shuffle permutation over the merged site positions
 * @param mergedClusterSites combined site indices of both clusters
 * @param lik1 cached likelihoods under cluster 1's parameters
 * @param lik2 cached likelihoods under cluster 2's parameters
 * @throws Exception propagated from the likelihood computation
 */
public void testCorrectness(
    int i,
    int cluster,
    int clusterIndex1,
    int clusterIndex2,
    int[] shuffle,
    int[] mergedClusterSites,
    double[] lik1,
    double[] lik2)
    throws Exception {
  // System.out.println("Hi!");
  // Weight only the single pattern belonging to the site under test.
  int[] tempWeights = new int[tempLikelihood.dataInput.get().getPatternCount()];
  tempWeights[tempLikelihood.dataInput.get().getPatternIndex(mergedClusterSites[shuffle[i]])] = 1;
  tempLikelihood.setPatternWeights(tempWeights);
  // Likelihood of the site under cluster 1's current parameter values.
  double temp1 =
      Math.exp(
          tempLikelihood
              .calculateLogP(
                  alphaList.getParameter(clusterIndex1).getValue(),
                  invPrList.getParameter(clusterIndex1).getValue(),
                  ratesList.getParameter(clusterIndex1).getValue(),
                  siteModelList.getParameter(clusterIndex1).getValue(),
                  new int[] {mergedClusterSites[shuffle[i]]})[0]);
  // Likelihood of the same site under cluster 2's current parameter values.
  double temp2 =
      Math.exp(
          tempLikelihood
              .calculateLogP(
                  alphaList.getParameter(clusterIndex2).getValue(),
                  invPrList.getParameter(clusterIndex2).getValue(),
                  ratesList.getParameter(clusterIndex2).getValue(),
                  siteModelList.getParameter(clusterIndex2).getValue(),
                  new int[] {mergedClusterSites[shuffle[i]]})[0]);
  // Exact floating-point comparison: any drift from the cached values is an error.
  if (temp1 != lik1[shuffle[i]] || temp2 != lik2[shuffle[i]]) {
    System.out.println("temp1");
    System.out.println("shuffle_i: " + shuffle[i]);
    System.out.println("mergedClusterSites[shuffle]: " + mergedClusterSites[shuffle[i]]);
    System.out.println("cluster: " + cluster);
    System.out.println(+mergedClusterSites.length + " " + lik1.length);
    for (int j = 0; j < lik1.length; j++) {
      System.out.println("merged lik1: " + mergedClusterSites[j] + " " + lik1[j]);
    }
    for (int j = 0; j < lik2.length; j++) {
      System.out.println("merged lik2: " + mergedClusterSites[j] + " " + lik2[j]);
    }
    throw new RuntimeException(
        temp1 + " " + lik1[shuffle[i]] + " " + temp2 + " " + lik2[shuffle[i]]);
  }
}
public HttpResponse doEndpoint(StaplerRequest request) throws IOException { final ParameterList requestp = new ParameterList(request.getParameterMap()); final String mode = requestp.getParameterValue("openid.mode"); final String realm = getRealm(requestp); if ("associate".equals(mode)) { // --- process an association extend --- return new MessageResponse(manager.associationResponse(requestp)); } else if ("checkid_setup".equals(mode) || "checkid_immediate".equals(mode)) { // No need to redirect to a page with an HTML form // Skip the authentication step String identity = getUserIdentity(); Message rsp = manager.authResponse(requestp, identity, identity, true, false); try { respondToExtensions(requestp, rsp); } catch (MessageException ex) { throw new OperationFailure(ex.getMessage()); } // Need to sign after because SReg extension parameters are signed by openid4java if (rsp instanceof AuthSuccess) { try { manager.sign((AuthSuccess) rsp); } catch (ServerException e) { throw new OperationFailure(e.getMessage()); } catch (AssociationException e) { throw new OperationFailure(e.getMessage()); } } return new HttpRedirect(rsp.getDestinationUrl(true)); } else if ("check_authentication".equals(mode)) { return new MessageResponse(manager.verify(requestp)); } else { throw new OperationFailure("Unknown extend: " + mode); } }
/**
 * Resolves the enclosing method reference for an element that sits inside a
 * parameter list as (part of) a string literal.
 *
 * @param psiElement the element to inspect
 * @return the enclosing {@link MethodReference}, or {@code null} when the
 *     element does not match the expected PHP call shape
 */
@Nullable
public static MethodReference getMethodReferenceWithFirstStringParameter(PsiElement psiElement) {
  // Must be a string literal child somewhere inside a PHP parameter list.
  boolean shapeMatches =
      PlatformPatterns.psiElement()
          .withParent(StringLiteralExpression.class)
          .inside(ParameterList.class)
          .withLanguage(PhpLanguage.INSTANCE)
          .accepts(psiElement);
  if (!shapeMatches) {
    return null;
  }
  ParameterList parameterList = PsiTreeUtil.getParentOfType(psiElement, ParameterList.class);
  if (parameterList == null) {
    return null;
  }
  PsiElement owner = parameterList.getContext();
  return owner instanceof MethodReference ? (MethodReference) owner : null;
}
/** * Retrieve a RFC2045 style string representation of this Content-Type. Returns <code>null</code> * if the conversion failed. * * @return RFC2045 style string */ public String toString() { if (primaryType == null || subType == null) // need both return null; StringBuffer sb = new StringBuffer(); sb.append(primaryType).append('/').append(subType); if (list != null) // Http Binding section of the "SOAP with attachments" specification says, // "SOAP message senders should send Content-Type headers on a single long line." // (http://www.w3.org/TR/SOAP-attachments#HTTPBinding) sb.append(list.toString()); return sb.toString(); }
/**
 * Converts the named URI-list parameter into an array of URLs.
 *
 * @throws BadInputException if any URI cannot be converted to a URL
 */
@Override
Object[] getList(GraphDatabaseAPI graphDb, ParameterList parameters, String name)
    throws BadInputException {
  URI[] uris = parameters.getUriList(name);
  URL[] urls = new URL[uris.length];
  try {
    int i = 0;
    for (URI uri : uris) {
      urls[i++] = uri.toURL();
    }
  } catch (MalformedURLException e) {
    throw new BadInputException(e);
  }
  return urls;
}
/**
 * Extract type hint from method parameter
 *
 * <p>function foo(\FooClass $class)
 *
 * @param method the method whose first parameter is inspected
 * @return the FQN of the first parameter's class type hint, or {@code null}
 */
@Nullable
public static String getMethodParameterTypeHint(@NotNull Method method) {
  ParameterList parameterList = PsiTreeUtil.getChildOfType(method, ParameterList.class);
  if (parameterList == null) {
    return null;
  }
  PsiElement[] parameters = parameterList.getParameters();
  if (parameters.length == 0) {
    return null;
  }
  // Only the first parameter's class reference is considered.
  ClassReference reference = PsiTreeUtil.getChildOfType(parameters[0], ClassReference.class);
  if (reference == null) {
    return null;
  }
  return reference.getFQN();
}
/**
 * Checks whether the element is the argument at {@code parameterIndex} of a
 * call to {@code className::methodName}.
 *
 * <p>Fix: removed a dead null check ({@code parameterList} cannot be null
 * right after a successful {@code instanceof} cast) and collapsed the final
 * {@code if (...) return false; return true;} into a direct boolean result.
 *
 * @param psiElement element expected to sit inside the call's parameter list
 * @param className fully qualified class to match
 * @param methodName method name to match
 * @param parameterIndex required zero-based argument position
 * @return true when all conditions hold
 */
public static boolean isCallToWithParameter(
    PsiElement psiElement, String className, String methodName, int parameterIndex) {
  PsiElement listCandidate = psiElement.getContext();
  if (!(listCandidate instanceof ParameterList)) {
    return false;
  }
  ParameterList parameterList = (ParameterList) listCandidate;
  if (!(parameterList.getContext() instanceof MethodReference)) {
    return false;
  }
  MethodReference method = (MethodReference) parameterList.getContext();
  if (!new Symfony2InterfacesUtil().isCallTo(method, className, methodName)) {
    return false;
  }
  ParameterBag currentIndex = PsiElementUtils.getCurrentParameterIndex(psiElement);
  return currentIndex != null && currentIndex.getIndex() == parameterIndex;
}
/**
 * Returns the PSI element of the argument at {@code index}, or {@code null}
 * when the list is null or shorter than requested.
 *
 * @param parameterList the call's parameter list, may be null
 * @param index zero-based argument position
 * @return the argument element, or {@code null}
 */
@Nullable
public static PsiElement getMethodParameterPsiElementAt(
    @Nullable ParameterList parameterList, int index) {
  if (parameterList == null) {
    return null;
  }
  PsiElement[] parameters = parameterList.getParameters();
  return index < parameters.length ? parameters[index] : null;
}
/**
 * Returns the complete url (host + resource + encoded querystring parameters).
 *
 * @return the complete url.
 */
public String getCompleteUrl() {
  // The parameter list knows how to encode and append itself to the base url.
  return querystringParams.appendTo(url);
}
/**
 * Accepts a visitor: children first (the type, then the parameter list),
 * then this node itself. The order is part of the traversal contract.
 */
@Override
public void accept(NodeVisitor visitor) {
  type.accept(visitor);
  parameterList.accept(visitor);
  visitor.visit(this);
}
/**
 * Returns the named parameter as a list of strings — no conversion needed,
 * so this delegates straight to the parameter list.
 */
@Override
Object[] getList(GraphDatabaseAPI graphDb, ParameterList parameters, String name)
    throws BadInputException {
  return parameters.getStringList(name);
}
/**
 * Convenience lookup: parses this message's query string and returns the
 * value of the named parameter.
 *
 * @param name parameter name
 * @return the parameter's value as resolved by the parsed parameter list
 */
public String getParameterValue(String name) {
  return getParameterList().getValue(name);
}
/**
 * Return the specified parameter value. Returns <code>null</code> if this
 * parameter is absent (or the backing list was never created).
 *
 * @param name parameter name
 * @return parameter value
 */
public String getParameter(String name) {
  // The backing list is created lazily, so it may legitimately be null.
  return list == null ? null : list.get(name);
}
// Reads: key operator value
/**
 * Parses one comparison predicate ("key operator value") from the token
 * stream. Supports: a "[c]" suffix on the operator for case-insensitive
 * comparison; "?"-prefixed values bound from {@code parameters} (optionally
 * "?index/path" to pick a positional parameter and navigate into it); and
 * the literal keywords true/false/null/missing.
 */
private Predicate readComparison(Queue<String> tokens, ParameterList parameters) {
  String key = tokens.poll();
  if (key == null) {
    return null;
  }
  // Normalize the key to its canonical form when one exists.
  String realKey = Query.Static.getCanonicalKey(key);
  if (realKey != null) {
    key = realKey;
  }
  String operator = tokens.poll();
  boolean isIgnoreCase = false;
  if (operator == null) {
    throw new IllegalArgumentException(String.format("No operator after [%s] key!", key));
  } else {
    operator = operator.toLowerCase(Locale.ENGLISH);
    // A "[c]" suffix marks the comparison as case-insensitive.
    if (operator.endsWith("[c]")) {
      operator = operator.substring(0, operator.length() - 3);
      isIgnoreCase = true;
    }
  }
  Object value = readValue(tokens);
  if (value == null) {
    throw new IllegalArgumentException(
        String.format("No value after [%s] key and [%s] operator!", key, operator));
  } else if (value instanceof String) {
    String valueString = (String) value;
    if (valueString.startsWith("?")) {
      if (valueString.length() == 1) {
        // Bare "?": consume the next positional parameter.
        value = parameters.poll();
      } else {
        // "?index" or "?index/path": pick a positional parameter by index,
        // then optionally navigate into it by path.
        String path = valueString.substring(1);
        int slashAt = path.indexOf('/');
        String splitIndex;
        String splitPath;
        if (slashAt > -1) {
          splitIndex = path.substring(0, slashAt);
          splitPath = path.substring(slashAt + 1);
        } else {
          splitIndex = path;
          splitPath = "";
        }
        Integer index = ObjectUtils.to(Integer.class, splitIndex);
        if (index == null) {
          // Not numeric: the whole text after '?' is a path on parameter 0.
          index = 0;
        } else {
          path = splitPath;
        }
        value = index < parameters.size() ? parameters.get(index) : null;
        if (value != null && path.length() > 0) {
          if (value instanceof Recordable) {
            value = ((Recordable) value).getState().getByPath(path);
          } else {
            value = CollectionUtils.getByPath(value, path);
          }
        }
      }
    } else if ("true".equalsIgnoreCase(valueString)) {
      value = Boolean.TRUE;
    } else if ("false".equalsIgnoreCase(valueString)) {
      value = Boolean.FALSE;
    } else if ("null".equalsIgnoreCase(valueString)) {
      value = null;
    } else if ("missing".equalsIgnoreCase(valueString)) {
      value = Query.MISSING_VALUE;
    }
  }
  String comparisonOperator = getComparisonOperators().get(operator);
  if (comparisonOperator != null) {
    return new ComparisonPredicate(
        comparisonOperator, isIgnoreCase, key, ObjectUtils.to(Iterable.class, value));
  } else {
    throw new IllegalArgumentException(
        String.format("[%s] isn't a valid comparison operator!", operator));
  }
}
/**
 * Set the specified parameter. If this parameter already exists, it is
 * replaced by this new value.
 *
 * @param name parameter name
 * @param value parameter value
 */
public void setParameter(String name, String value) {
  if (list == null) {
    // Lazily create the backing list on first use.
    list = new ParameterList();
  }
  list.set(name, value);
}
/**
 * Merge proposal: merges cluster {@code clusterIndex1} into cluster
 * {@code clusterIndex2} (the merged cluster keeps cluster 2's values) and
 * returns the accumulated log proposal density, computed by a sequential
 * allocation scan over the shuffled member sites.
 *
 * <p>NOTE(review): this looks like a split-merge MCMC move (Jain–Neal
 * style) — confirm against the sampler's documentation.
 *
 * @param index1 anchor site of cluster 1 (excluded from the scan)
 * @param index2 anchor site of cluster 2 (excluded from the scan)
 * @param clusterIndex1 index of the cluster removed by the merge
 * @param clusterIndex2 index of the surviving (merged) cluster
 * @param cluster1Sites site indices currently in cluster 1
 * @param cluster2Sites site indices currently in cluster 2
 * @return the log proposal density contribution of this merge
 */
public double merge(
    int index1,
    int index2,
    int clusterIndex1,
    int clusterIndex2,
    int[] cluster1Sites,
    int[] cluster2Sites) {

  double logqMerge = 0.0;

  // Maps each non-anchor site to the cluster it belonged to before the merge.
  HashMap<Integer, Integer> siteMap = new HashMap<Integer, Integer>();

  // The value of the merged cluster will have that of cluster 2 before the merge.
  QuietRealParameter mergedParam = paramList.getParameter(clusterIndex2);
  QuietRealParameter mergedModel = modelList.getParameter(clusterIndex2);
  QuietRealParameter mergedFreqs = freqsList.getParameter(clusterIndex2);
  QuietRealParameter mergedRates = ratesList.getParameter(clusterIndex2);

  // Create a vector that combines the site indices of the two clusters
  // (minus the two anchor sites).
  int[] mergedClusterSites = new int[cluster1Sites.length + cluster2Sites.length - 2];

  int k = 0;
  for (int i = 0; i < cluster1Sites.length; i++) {
    // Point every member in cluster 1 to cluster 2
    // paramPointers.point(cluster1Sites[i],mergedParam);
    // modelPointers.point(cluster1Sites[i],mergedModel);
    // freqsPointers.point(cluster1Sites[i],mergedFreqs);
    // ratesPointers.point(cluster1Sites[i],mergedRates);
    if (cluster1Sites[i] != index1) {
      // For all members that are not index 1,
      // record the cluster in which they have been before the merge,
      // and assign them to the combined vector.
      siteMap.put(cluster1Sites[i], clusterIndex1);
      mergedClusterSites[k++] = cluster1Sites[i];
    }
  }

  for (int i = 0; i < cluster2Sites.length; i++) {
    // All members in cluster 2 remains in cluster2 so no new pointer assignments
    if (cluster2Sites[i] != index2) {
      // For all members that are not index 2,
      // record the cluster in which they have been before the merge,
      // and assign them to the combined vector.
      siteMap.put(cluster2Sites[i], clusterIndex2);
      mergedClusterSites[k++] = cluster2Sites[i];
    }
  }

  try {
    // Create a weight vector of patterns to inform the temporary tree likelihood
    // which set of pattern likelihoods are to be computed.
    // int[] tempWeights = dpTreeLikelihood.getClusterWeights(clusterIndex1);
    /*int[] tempWeights = new int[tempLikelihood.m_data.get().getPatternCount()];
    for(int i = 0; i < cluster1Sites.length; i++){
        int patIndex = tempLikelihood.m_data.get().getPatternIndex(cluster1Sites[i]);
        tempWeights[patIndex] = 1;
    }
    tempLikelihood.setPatternWeights(tempWeights);
    double[] cluster1SitesCluster2ParamLogLik = tempLikelihood.calculateLogP(
            mergedParam,
            mergedModel,
            mergedFreqs,
            mergedRates,
            cluster1Sites,
            index1
    );*/

    // Likelihood of cluster-1 sites (minus the anchor) under the merged
    // (cluster 2) parameter values.
    k = 0;
    int[] sCluster1Sites = new int[cluster1Sites.length - 1];
    for (int i = 0; i < cluster1Sites.length; i++) {
      if (cluster1Sites[i] != index1) {
        sCluster1Sites[k++] = cluster1Sites[i];
      }
    }
    tempLikelihood.setupPatternWeightsFromSites(sCluster1Sites);
    double[] cluster1SitesCluster2ParamLogLik =
        tempLikelihood.calculateLogP(
            mergedParam, mergedModel, mergedFreqs, mergedRates, sCluster1Sites);

    // tempWeights = dpTreeLikelihood.getClusterWeights(clusterIndex2);
    /*tempWeights = new int[tempLikelihood.m_data.get().getPatternCount()];
    for(int i = 0; i < cluster2Sites.length; i++){
        int patIndex = tempLikelihood.m_data.get().getPatternIndex(cluster2Sites[i]);
        tempWeights[patIndex] = 1;
    }
    tempLikelihood.setPatternWeights(tempWeights);
    RealParameter removedParam = paramList.getParameter(clusterIndex1);
    RealParameter removedModel = modelList.getParameter(clusterIndex1);
    RealParameter removedFreqs = freqsList.getParameter(clusterIndex1);
    RealParameter removedRates = ratesList.getParameter(clusterIndex1);
    double[] cluster2SitesCluster1ParamLogLik = tempLikelihood.calculateLogP(
            removedParam,
            removedModel,
            removedFreqs,
            removedRates,
            cluster2Sites,
            index2
    );*/

    // Likelihood of cluster-2 sites (minus the anchor) under the removed
    // (cluster 1) parameter values.
    k = 0;
    int[] sCluster2Sites = new int[cluster2Sites.length - 1];
    for (int i = 0; i < cluster2Sites.length; i++) {
      if (cluster2Sites[i] != index2) {
        sCluster2Sites[k++] = cluster2Sites[i];
      }
    }
    tempLikelihood.setupPatternWeightsFromSites(sCluster2Sites);
    RealParameter removedParam = paramList.getParameter(clusterIndex1);
    RealParameter removedModel = modelList.getParameter(clusterIndex1);
    RealParameter removedFreqs = freqsList.getParameter(clusterIndex1);
    RealParameter removedRates = ratesList.getParameter(clusterIndex1);
    double[] cluster2SitesCluster1ParamLogLik =
        tempLikelihood.calculateLogP(
            removedParam, removedModel, removedFreqs, removedRates, sCluster2Sites);

    // logLik1[i]: log-likelihood of merged site i under cluster 1's parameters.
    // System.out.println("populate logLik1:");
    double[] logLik1 = new double[mergedClusterSites.length];
    for (int i = 0; i < (cluster1Sites.length - 1); i++) {
      // System.out.println(clusterIndex1+" "+mergedClusterSites[i]);
      logLik1[i] = dpTreeLikelihood.getSiteLogLikelihood(clusterIndex1, mergedClusterSites[i]);
    }
    System.arraycopy(
        cluster2SitesCluster1ParamLogLik,
        0,
        logLik1,
        cluster1Sites.length - 1,
        cluster2SitesCluster1ParamLogLik.length);

    // logLik2[i]: log-likelihood of merged site i under cluster 2's parameters.
    double[] logLik2 = new double[mergedClusterSites.length];
    System.arraycopy(
        cluster1SitesCluster2ParamLogLik, 0, logLik2, 0, cluster1SitesCluster2ParamLogLik.length);
    // System.out.println("populate logLik2:");
    for (int i = cluster1SitesCluster2ParamLogLik.length; i < logLik2.length; i++) {
      // System.out.println(clusterIndex2+"
      // "+mergedClusterSites[i-cluster1SitesCluster2ParamLogLik.length]);
      logLik2[i] = dpTreeLikelihood.getSiteLogLikelihood(clusterIndex2, mergedClusterSites[i]);
    }

    double[] lik1 = new double[logLik1.length];
    double[] lik2 = new double[logLik2.length];

    // scale it so it may be more accuate
    // NOTE(review): the rescaling below is disabled; plain exp() can
    // underflow for very negative log-likelihoods — confirm this is intended.
    double minLog;
    /*for(int i = 0; i < logLik1.length; i++){
        minLog = Math.min(logLik1[i],logLik2[i]);
        if(minLog == logLik1[i]){
            lik1[i] = 1.0;
            lik2[i] = Math.exp(logLik2[i] - minLog);
        }else{
            lik1[i] = Math.exp(logLik1[i] - minLog);
            lik2[i] = 1.0;
        }
    }*/
    for (int i = 0; i < logLik1.length; i++) {
      lik1[i] = Math.exp(logLik1[i]);
      lik2[i] = Math.exp(logLik2[i]);
      // System.out.println(lik1[i]+" "+lik2[i]);
    }

    // Create a set of indices for random permutation
    int[] shuffle = new int[mergedClusterSites.length];
    for (int i = 0; i < shuffle.length; i++) {
      shuffle[i] = i;
    }
    Randomizer.shuffle(shuffle);

    // Sequential-allocation scan: accumulate the log-probability of
    // re-creating the original two-cluster assignment in shuffled order.
    int cluster1Count = 1;
    int cluster2Count = 1;
    int cluster;
    double psi1, psi2, cluster1Prob;
    for (int i = 0; i < mergedClusterSites.length; i++) {
      cluster = siteMap.get(mergedClusterSites[shuffle[i]]);
      psi1 = cluster1Count * lik1[shuffle[i]];
      psi2 = cluster2Count * lik2[shuffle[i]];
      /*testCorrectness(i,cluster, clusterIndex1,clusterIndex2,shuffle, mergedClusterSites, lik1,lik2);*/
      cluster1Prob = psi1 / (psi1 + psi2);
      // System.out.println(cluster1Prob);
      if (cluster == clusterIndex1) {
        logqMerge += Math.log(cluster1Prob);
        cluster1Count++;
      } else if (cluster == clusterIndex2) {
        logqMerge += Math.log(1 - cluster1Prob);
        cluster2Count++;
      } else {
        throw new RuntimeException("Something is wrong.");
      }
    }

    // Density terms for proposing the removed values from the merged ones.
    logqMerge +=
        // paramBaseDistr.calcLogP(removedParam)
        mergeValue(removedParam, mergedParam, paramBaseDistr)
            // + modelBaseDistr.calcLogP(removedModel)
            + mergeDiscreteValue(removedModel, mergedModel, modelDistrInput.get())
            + freqsBaseDistr.calcLogP(removedFreqs)
            // + ratesBaseDistr.calcLogP(removedRates);
            + mergeValueInLogSpace(removedRates, mergedRates, ratesBaseDistr);

    if (logqMerge > Double.NEGATIVE_INFINITY) {
      // Proposal is viable: actually merge the parameter lists and repoint
      // every cluster-1 site at the merged (cluster 2) values.
      paramList.mergeParameter(clusterIndex1, clusterIndex2);
      modelList.mergeParameter(clusterIndex1, clusterIndex2);
      freqsList.mergeParameter(clusterIndex1, clusterIndex2);
      ratesList.mergeParameter(clusterIndex1, clusterIndex2);
      for (int i = 0; i < cluster1Sites.length; i++) {
        // Point every member in cluster 1 to cluster 2
        paramPointers.point(cluster1Sites[i], mergedParam);
        modelPointers.point(cluster1Sites[i], mergedModel);
        freqsPointers.point(cluster1Sites[i], mergedFreqs);
        ratesPointers.point(cluster1Sites[i], mergedRates);
      }
    }
  } catch (Exception e) {
    throw new RuntimeException(e);
  }
  return logqMerge;
}
/**
 * Split proposal: carves a new cluster out of cluster {@code clusterIndex},
 * anchored at {@code index1} (goes to the new cluster) and {@code index2}
 * (stays in the old one); the remaining member sites are assigned by
 * sequential allocation in shuffled order.
 *
 * <p>NOTE(review): appears to be the reverse move of {@code merge} — confirm
 * against the sampler's documentation.
 *
 * @param index1 anchor site of the new cluster
 * @param index2 anchor site that stays in the existing cluster
 * @param clusterIndex index of the cluster being split
 * @param initClusterSites all current member sites of that cluster
 * @return the negated log proposal density of the performed split
 */
public double split(int index1, int index2, int clusterIndex, int[] initClusterSites) {
  try {
    double logqSplit = 0.0;

    // Create a parameter by sampling from the prior
    // QuietRealParameter newParam = getSample(paramBaseDistr, paramList.getUpper(),
    // paramList.getLower());
    QuietRealParameter newParam = new QuietRealParameter(new Double[5]);
    // logqSplit += proposeNewValue(newParam, paramBaseDistr, paramList.getUpper(),
    // paramList.getLower());
    // Propose the new parameter vector around the current cluster's values.
    double[] oldParamValues = new double[5];
    for (int i = 0; i < oldParamValues.length; i++) {
      oldParamValues[i] = paramList.getValue(clusterIndex, i);
    }
    logqSplit +=
        proposeNewValue2(
            newParam, oldParamValues, paramBaseDistr, paramList.getUpper(), paramList.getLower());

    // QuietRealParameter newModel = getSample(modelBaseDistr, modelList.getUpper(),
    // modelList.getLower());
    QuietRealParameter newModel = new QuietRealParameter(new Double[1]);
    logqSplit +=
        proposeDiscreteValue(
            newModel,
            modelList.getValue(clusterIndex, 0),
            modelDistrInput.get(),
            modelList.getUpper(),
            modelList.getLower());

    QuietRealParameter newFreqs =
        getSample(freqsBaseDistr, freqsList.getUpper(), freqsList.getLower());

    // QuietRealParameter newRates = getSample(ratesBaseDistr, ratesList.getUpper(),
    // ratesList.getLower());
    QuietRealParameter newRates = new QuietRealParameter(new Double[1]);
    logqSplit +=
        proposalValueInLogSpace(
            newRates,
            ratesList.getValue(clusterIndex, 0),
            ratesBaseDistr,
            ratesList.getUpper(),
            ratesList.getLower());

    // Remove the index 1 and index 2 from the cluster
    int[] clusterSites = new int[initClusterSites.length - 2];
    int k = 0;
    for (int i = 0; i < initClusterSites.length; i++) {
      if (initClusterSites[i] != index1 && initClusterSites[i] != index2) {
        clusterSites[k++] = initClusterSites[i];
      }
    }

    // Form a new cluster with index 1
    paramPointers.point(index1, newParam);
    modelPointers.point(index1, newModel);
    freqsPointers.point(index1, newFreqs);
    ratesPointers.point(index1, newRates);

    // Shuffle the cluster_-{index_1,index_2} to obtain a random permutation
    Randomizer.shuffle(clusterSites);

    // Create the weight vector of site patterns according to the order of the shuffled index.
    /*int[] tempWeights = new int[tempLikelihood.m_data.get().getPatternCount()];
    int patIndex;
    for(int i = 0; i < clusterSites.length; i++){
        patIndex = tempLikelihood.m_data.get().getPatternIndex(clusterSites[i]);
        tempWeights[patIndex] = 1;
    }
    tempLikelihood.setPatternWeights(tempWeights);*/
    tempLikelihood.setupPatternWeightsFromSites(clusterSites);

    // Site log likelihoods in the order of the shuffled sites
    double[] logLik1 =
        tempLikelihood.calculateLogP(newParam, newModel, newFreqs, newRates, clusterSites);

    double[] logLik2 = new double[clusterSites.length];
    for (int i = 0; i < logLik2.length; i++) {
      logLik2[i] = dpTreeLikelihood.getSiteLogLikelihood(clusterIndex, clusterSites[i]);
    }

    double[] lik1 = new double[logLik1.length];
    double[] lik2 = new double[logLik2.length];
    double minLog;
    // scale it so it may be more accurate
    // NOTE(review): rescaling disabled; plain exp() may underflow — confirm intended.
    /*for(int i = 0; i < logLik1.length; i++){
        minLog = Math.min(logLik1[i],logLik2[i]);
        if(minLog == logLik1[i]){
            lik1[i] = 1.0;
            lik2[i] = Math.exp(logLik2[i] - minLog);
        }else{
            lik1[i] = Math.exp(logLik1[i] - minLog);
            lik2[i] = 1.0;
        }
    }*/
    for (int i = 0; i < logLik1.length; i++) {
      lik1[i] = Math.exp(logLik1[i]);
      lik2[i] = Math.exp(logLik2[i]);
      // System.out.println(lik1[i]+" "+lik2[i]);
    }

    /*for(int i = 0; i < clusterSites.length;i++){
        System.out.println("clusterSites: "+clusterSites[i]);
    }
    System.out.println("index 1: "+index1+" index2: "+index2);*/

    int cluster1Count = 1;
    int cluster2Count = 1;
    int[] sitesInCluster1 = new int[initClusterSites.length];
    sitesInCluster1[0] = index1;

    // Assign members of the existing cluster (except for indice 1 and 2) randomly
    // to the existing and the new cluster
    double psi1, psi2, newClusterProb, draw;
    for (int i = 0; i < clusterSites.length; i++) {
      psi1 = cluster1Count * lik1[i];
      psi2 = cluster2Count * lik2[i];
      newClusterProb = psi1 / (psi1 + psi2);
      draw = Randomizer.nextDouble();
      if (draw < newClusterProb) {
        // System.out.println("in new cluster: "+clusterSites[i]);
        sitesInCluster1[cluster1Count] = clusterSites[i];
        // paramPointers.point(clusterSites[i],newParam);
        // modelPointers.point(clusterSites[i],newModel);
        // freqsPointers.point(clusterSites[i],newFreqs);
        // ratesPointers.point(clusterSites[i],newRates);
        logqSplit += Math.log(newClusterProb);
        cluster1Count++;
      } else {
        logqSplit += Math.log(1.0 - newClusterProb);
        cluster2Count++;
      }
    }

    // Only the frequency prior contributes here; the other terms are disabled.
    // logqSplit += paramBaseDistr.calcLogP(newParam)
    logqSplit +=
        // modelBaseDistr.calcLogP(newModel)
        +freqsBaseDistr.calcLogP(newFreqs)
    // + ratesBaseDistr.calcLogP(newRates)
    ;

    // Perform a split
    paramList = paramListInput.get(this);
    modelList = modelListInput.get(this);
    freqsList = freqsListInput.get(this);
    ratesList = ratesListInput.get(this);
    paramPointers = paramPointersInput.get(this);
    modelPointers = modelPointersInput.get(this);
    freqsPointers = freqsPointersInput.get(this);
    ratesPointers = ratesPointersInput.get(this);
    paramList.splitParameter(clusterIndex, newParam);
    modelList.splitParameter(clusterIndex, newModel);
    freqsList.splitParameter(clusterIndex, newFreqs);
    ratesList.splitParameter(clusterIndex, newRates);

    // Form a new cluster with index 1
    paramPointers = paramPointersInput.get(this);
    modelPointers = modelPointersInput.get(this);
    freqsPointers = freqsPointersInput.get(this);
    ratesPointers = ratesPointersInput.get(this);
    for (int i = 0; i < cluster1Count; i++) {
      paramPointers.point(sitesInCluster1[i], newParam);
      modelPointers.point(sitesInCluster1[i], newModel);
      freqsPointers.point(sitesInCluster1[i], newFreqs);
      ratesPointers.point(sitesInCluster1[i], newRates);
    }
    return -logqSplit;
  } catch (Exception e) {
    throw new RuntimeException(e);
  }
}
/**
 * Split proposal (variant without a rates parameter): samples new parameter,
 * model and frequency values from their base distributions, then assigns the
 * cluster's remaining member sites between the old and new cluster by
 * sequential allocation in shuffled order.
 *
 * @param index1 anchor site of the new cluster
 * @param index2 anchor site that stays in the existing cluster
 * @param clusterIndex index of the cluster being split
 * @param initClusterSites all current member sites of that cluster
 * @return the negated log proposal density of the performed split
 */
public double split(int index1, int index2, int clusterIndex, int[] initClusterSites) {
  try {
    double logqSplit = 0.0;

    // Create a parameter by sampling from the prior
    QuietRealParameter newParam =
        getSample(paramBaseDistr, paramList.getUpper(), paramList.getLower());
    QuietRealParameter newModel =
        getSample(modelBaseDistr, modelList.getUpper(), modelList.getLower());
    QuietRealParameter newFreqs =
        getSample(freqsBaseDistr, freqsList.getUpper(), freqsList.getLower());

    // Perform a split
    // paramList.splitParameter(clusterIndex,newParam);
    // modelList.splitParameter(clusterIndex,newModel);
    // freqsList.splitParameter(clusterIndex,newFreqs);

    // Remove the index 1 and index 2 from the cluster
    int[] clusterSites = new int[initClusterSites.length - 2];
    int k = 0;
    for (int i = 0; i < initClusterSites.length; i++) {
      if (initClusterSites[i] != index1 && initClusterSites[i] != index2) {
        clusterSites[k++] = initClusterSites[i];
      }
    }

    // Form a new cluster with index 1
    // paramPointers.point(index1,newParam);
    // modelPointers.point(index1,newModel);
    // freqsPointers.point(index1,newFreqs);

    // Shuffle the cluster_-{index_1,index_2} to obtain a random permutation
    Randomizer.shuffle(clusterSites);

    // Create the weight vector of site patterns according to the order of the shuffled index.
    /*int[] tempWeights = new int[tempLikelihood.m_data.get().getPatternCount()];
    int patIndex;
    for(int i = 0; i < clusterSites.length; i++){
        patIndex = tempLikelihood.m_data.get().getPatternIndex(clusterSites[i]);
        tempWeights[patIndex] = 1;
    }*/
    tempLikelihood.setupPatternWeightsFromSites(clusterSites);

    // Site log likelihoods in the order of the shuffled sites
    double[] logLik1 = tempLikelihood.calculateLogP(newParam, newModel, newFreqs, clusterSites);

    double[] logLik2 = new double[clusterSites.length];
    for (int i = 0; i < logLik2.length; i++) {
      // logLik2[i] = dpTreeLikelihood.getSiteLogLikelihood(clusterIndex,clusterSites[i]);
      logLik2[i] =
          getSiteLogLikelihood(
              paramList.getParameter(clusterIndex).getIDNumber(), clusterIndex, clusterSites[i]);
    }

    double[] lik1 = new double[logLik1.length];
    double[] lik2 = new double[logLik2.length];
    double maxLog;
    // scale it so it may be more accurate
    for (int i = 0; i < logLik1.length; i++) {
      maxLog = Math.max(logLik1[i], logLik2[i]);
      if (Math.exp(maxLog) < 1e-100) {
        // Both likelihoods would underflow; rescale relative to the larger one.
        if (maxLog == logLik1[i]) {
          lik1[i] = 1.0;
          lik2[i] = Math.exp(logLik2[i] - maxLog);
        } else {
          lik1[i] = Math.exp(logLik1[i] - maxLog);
          lik2[i] = 1.0;
        }
      } else {
        lik1[i] = Math.exp(logLik1[i]);
        lik2[i] = Math.exp(logLik2[i]);
      }
    }

    /*boolean ohCrap = false;
    for(int i = 0; i < logLik1.length; i++){
        if(Double.isNaN(logLik1[i])){
            return Double.NEGATIVE_INFINITY;
            //ohCrap = true;
            //System.out.println("logLik1: "+logLik1);
            //logLik1[i] = Double.NEGATIVE_INFINITY;
        }
        if(Double.isNaN(logLik2[i])){
            return Double.NEGATIVE_INFINITY;
            //ohCrap = true;
            //System.out.println("logLik1: "+logLik2);
            //logLik2[i] = Double.NEGATIVE_INFINITY;
        }
        lik1[i] = Math.exp(logLik1[i]);
        lik2[i] = Math.exp(logLik2[i]);
        //System.out.println(lik1[i]+" "+lik2[i]);
    }
    if(ohCrap){
        for(int i = 0; i < newParam.getDimension();i++){
            System.out.print(newParam.getValue(i)+" ");
        }
        System.out.println();
    }*/
    /*for(int i = 0; i < clusterSites.length;i++){
        System.out.println("clusterSites: "+clusterSites[i]);
    }
    System.out.println("index 1: "+index1+" index2: "+index2);*/

    int cluster1Count = 1;
    int cluster2Count = 1;

    // Assign members of the existing cluster (except for indice 1 and 2) randomly
    // to the existing and the new cluster
    double psi1, psi2, newClusterProb, draw;
    int[] newAssignment = new int[clusterSites.length];
    for (int i = 0; i < clusterSites.length; i++) {
      psi1 = cluster1Count * lik1[i];
      psi2 = cluster2Count * lik2[i];
      newClusterProb = psi1 / (psi1 + psi2);
      draw = Randomizer.nextDouble();
      if (draw < newClusterProb) {
        // System.out.println("in new cluster: "+clusterSites[i]);
        // paramPointers.point(clusterSites[i],newParam);
        // modelPointers.point(clusterSites[i],newModel);
        // freqsPointers.point(clusterSites[i],newFreqs);
        newAssignment[cluster1Count - 1] = clusterSites[i];
        logqSplit += Math.log(newClusterProb);
        cluster1Count++;
      } else {
        logqSplit += Math.log(1.0 - newClusterProb);
        cluster2Count++;
      }
    }

    // System.out.println("halfway: "+logqSplit);
    // Prior densities of the freshly sampled values.
    logqSplit +=
        paramBaseDistr.calcLogP(newParam)
            + modelBaseDistr.calcLogP(newModel)
            + freqsBaseDistr.calcLogP(newFreqs);

    if (-logqSplit > Double.NEGATIVE_INFINITY) {
      // Proposal viable: refresh the stored lists/pointers and apply the split.
      paramList = paramListInput.get(this);
      modelList = modelListInput.get(this);
      freqsList = freqsListInput.get(this);
      paramPointers = paramPointersInput.get(this);
      modelPointers = modelPointersInput.get(this);
      freqsPointers = freqsPointersInput.get(this);

      // Perform a split
      paramList.splitParameter(clusterIndex, newParam);
      modelList.splitParameter(clusterIndex, newModel);
      freqsList.splitParameter(clusterIndex, newFreqs);

      // Form a new cluster with index 1
      paramPointers.point(index1, newParam);
      modelPointers.point(index1, newModel);
      freqsPointers.point(index1, newFreqs);
      for (int i = 0; i < (cluster1Count - 1); i++) {
        paramPointers.point(newAssignment[i], newParam);
        modelPointers.point(newAssignment[i], newModel);
        freqsPointers.point(newAssignment[i], newFreqs);
      }
    }
    return -logqSplit;
  } catch (Exception e) {
    // freqsBaseDistr.printDetails();
    throw new RuntimeException(e);
  }
}
/**
 * Creates a copy of this Content-Type with an independent parameter list.
 *
 * <p>Fix: {@code list} is created lazily (see {@code setParameter}) and is
 * treated as nullable everywhere else ({@code getParameter}, {@code toString}),
 * so {@code list.copy()} could throw a NullPointerException on an instance
 * whose parameters were never set. Guard before copying.
 *
 * @return an independent copy of this Content-Type
 */
public ContentType copy() {
  return new ContentType(primaryType, subType, list == null ? null : list.copy());
}