/** descendingKeySet().toArray() contains all keys */
public void testDescendingDescendingKeySetToArray() {
  ConcurrentNavigableMap map = dmap5();
  Set s = map.descendingKeySet();
  Object[] ar = s.toArray();
  assertEquals(5, ar.length);
  assertTrue(s.containsAll(Arrays.asList(ar)));
  ar[0] = m10;
  assertFalse(s.containsAll(Arrays.asList(ar)));
}
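The test above relies on a general round-trip property of Set.toArray: a set always containsAll of its own snapshot, and mutating the snapshot breaks that. A minimal self-contained sketch of the property (hypothetical class name; run with -ea so the asserts fire):

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

class ToArrayRoundTrip {
  public static void main(String[] args) {
    Set<Integer> s = new HashSet<Integer>(Arrays.asList(1, 2, 3, 4, 5));
    Object[] ar = s.toArray();
    assert s.containsAll(Arrays.asList(ar)); // a set always contains its own snapshot
    ar[0] = 99; // mutate the snapshot, not the set
    assert !s.containsAll(Arrays.asList(ar)); // 99 was never in the set
  }
}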
public void testMdlToGraph() {
  List models = TestInference.createTestModels();
  for (Iterator mdlIt = models.iterator(); mdlIt.hasNext(); ) {
    UndirectedModel mdl = (UndirectedModel) mdlIt.next();
    UndirectedGraph g = Graphs.mdlToGraph(mdl);
    Set vertices = g.vertexSet();

    // check the number of vertices
    assertEquals(mdl.numVariables(), vertices.size());

    // check the number of edges
    int numEdgePtls = 0;
    for (Iterator factorIt = mdl.factors().iterator(); factorIt.hasNext(); ) {
      Factor factor = (Factor) factorIt.next();
      if (factor.varSet().size() == 2) numEdgePtls++;
    }
    assertEquals(numEdgePtls, g.edgeSet().size());

    // check that the neighbors of each vertex contain at least some of what they're supposed to
    Iterator it = vertices.iterator();
    while (it.hasNext()) {
      Variable var = (Variable) it.next();
      assertTrue(vertices.contains(var));
      Set neighborsInG = new HashSet(GraphHelper.neighborListOf(g, var));
      neighborsInG.add(var);
      Iterator factorIt = mdl.allFactorsContaining(var).iterator();
      while (factorIt.hasNext()) {
        Factor factor = (Factor) factorIt.next();
        assertTrue(neighborsInG.containsAll(factor.varSet()));
      }
    }
  }
}
private static List<? extends Library> getNotAddedLibraries(
    @NotNull final ArtifactEditorContext context,
    @NotNull Artifact artifact,
    List<Library> librariesList) {
  final Set<VirtualFile> roots = new HashSet<VirtualFile>();
  ArtifactUtil.processPackagingElements(
      artifact,
      PackagingElementFactoryImpl.FILE_COPY_ELEMENT_TYPE,
      new Processor<FileCopyPackagingElement>() {
        public boolean process(FileCopyPackagingElement fileCopyPackagingElement) {
          final VirtualFile root = fileCopyPackagingElement.getLibraryRoot();
          if (root != null) {
            roots.add(root);
          }
          return true;
        }
      },
      context,
      true);
  final List<Library> result = new ArrayList<Library>();
  for (Library library : librariesList) {
    if (!roots.containsAll(Arrays.asList(library.getFiles(OrderRootType.CLASSES)))) {
      result.add(library);
    }
  }
  return result;
}
public static Set<Artifact> getArtifactsToBuild(
    final Project project,
    final CompileScope compileScope,
    final boolean addIncludedArtifactsWithOutputPathsOnly) {
  final Artifact[] artifactsFromScope = getArtifacts(compileScope);
  final ArtifactManager artifactManager = ArtifactManager.getInstance(project);
  PackagingElementResolvingContext context = artifactManager.getResolvingContext();
  if (artifactsFromScope != null) {
    return addIncludedArtifacts(
        Arrays.asList(artifactsFromScope), context, addIncludedArtifactsWithOutputPathsOnly);
  }

  final Set<Artifact> cached = compileScope.getUserData(CACHED_ARTIFACTS_KEY);
  if (cached != null) {
    return cached;
  }

  Set<Artifact> artifacts = new HashSet<Artifact>();
  final Set<Module> modules =
      new HashSet<Module>(Arrays.asList(compileScope.getAffectedModules()));
  final List<Module> allModules = Arrays.asList(ModuleManager.getInstance(project).getModules());
  for (Artifact artifact : artifactManager.getArtifacts()) {
    if (artifact.isBuildOnMake()) {
      if (modules.containsAll(allModules) || containsModuleOutput(artifact, modules, context)) {
        artifacts.add(artifact);
      }
    }
  }

  Set<Artifact> result =
      addIncludedArtifacts(artifacts, context, addIncludedArtifactsWithOutputPathsOnly);
  compileScope.putUserData(CACHED_ARTIFACTS_KEY, result);
  return result;
}
@Test
public void testFlatMapMaxConcurrent() {
  final int m = 4;
  final AtomicInteger subscriptionCount = new AtomicInteger();
  Observable<Integer> source =
      Observable.range(1, 10)
          .flatMap(
              new Func1<Integer, Observable<Integer>>() {
                @Override
                public Observable<Integer> call(Integer t1) {
                  return compose(Observable.range(t1 * 10, 2), subscriptionCount, m)
                      .subscribeOn(Schedulers.computation());
                }
              },
              m);

  TestSubscriber<Integer> ts = new TestSubscriber<Integer>();
  source.subscribe(ts);
  ts.awaitTerminalEvent();
  ts.assertNoErrors();

  Set<Integer> expected =
      new HashSet<Integer>(
          Arrays.asList(
              10, 11, 20, 21, 30, 31, 40, 41, 50, 51, 60, 61, 70, 71, 80, 81, 90, 91, 100, 101));
  Assert.assertEquals(expected.size(), ts.getOnNextEvents().size());
  Assert.assertTrue(expected.containsAll(ts.getOnNextEvents()));
}
@Test
public void testCompactionLog() throws Exception {
  SystemKeyspace.discardCompactionsInProgress();

  String cf = "Standard4";
  ColumnFamilyStore cfs = Keyspace.open(KEYSPACE1).getColumnFamilyStore(cf);
  SchemaLoader.insertData(KEYSPACE1, cf, 0, 1);
  cfs.forceBlockingFlush();

  Collection<SSTableReader> sstables = cfs.getSSTables();
  assertFalse(sstables.isEmpty());
  Set<Integer> generations =
      Sets.newHashSet(
          Iterables.transform(
              sstables,
              new Function<SSTableReader, Integer>() {
                public Integer apply(SSTableReader sstable) {
                  return sstable.descriptor.generation;
                }
              }));
  UUID taskId = SystemKeyspace.startCompaction(cfs, sstables);
  Map<Pair<String, String>, Map<Integer, UUID>> compactionLogs =
      SystemKeyspace.getUnfinishedCompactions();
  Set<Integer> unfinishedCompactions = compactionLogs.get(Pair.create(KEYSPACE1, cf)).keySet();
  assertTrue(unfinishedCompactions.containsAll(generations));

  SystemKeyspace.finishCompaction(taskId);
  compactionLogs = SystemKeyspace.getUnfinishedCompactions();
  assertFalse(compactionLogs.containsKey(Pair.create(KEYSPACE1, cf)));
}
@Nullable
private VirtualFile findLibraryRootInfo(@NotNull List<VirtualFile> hierarchy, boolean source) {
  Set<Library> librariesToIgnore = ContainerUtil.newHashSet();
  for (VirtualFile root : hierarchy) {
    librariesToIgnore.addAll(excludedFromLibraries.get(root));
    if (source
        && libraryOrSdkSources.contains(root)
        && (!sourceOfLibraries.containsKey(root)
            || !librariesToIgnore.containsAll(sourceOfLibraries.get(root)))) {
      return root;
    } else if (!source
        && libraryOrSdkClasses.contains(root)
        && (!classOfLibraries.containsKey(root)
            || !librariesToIgnore.containsAll(classOfLibraries.get(root)))) {
      return root;
    }
  }
  return null;
}
/** Tests whether two canonical regions could be merged into one non-canonical region. */
private boolean isNonCanonicalRegion(RPST rpst, Region region1, Region region2) {
  if (!region1.getExit().equals(region2.getEntry())) {
    return false;
  }
  if (rpst.getChildCount(region2) == 0) {
    return false;
  }
  // basic blocks of merged region
  Set<BasicBlock> basicBlocks = new HashSet<>();
  basicBlocks.addAll(rpst.getBasicBlocks(region1));
  basicBlocks.addAll(rpst.getBasicBlocks(region2));
  basicBlocks.add(region2.getExit());
  if (!basicBlocks.containsAll(cfg.getSuccessorsOf(region1.getExit()))) {
    return false;
  }
  if (!basicBlocks.containsAll(cfg.getPredecessorsOf(region1.getExit()))) {
    return false;
  }
  return true;
}
private boolean equalsAccessPoints(HashMap<String, AccessPoint> accessPoints) {
  if (this.accessPoints == null && accessPoints == null) {
    return true;
  } else if (this.accessPoints == null
      || accessPoints == null
      || this.accessPoints.size() != accessPoints.size()) {
    return false;
  }
  Set<String> localAccessPointsIds = this.accessPoints.keySet();
  Set<String> otherAccessPointsIds = accessPoints.keySet();
  return localAccessPointsIds.containsAll(otherAccessPointsIds);
}
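Since the sizes were already checked above, testing containment in only one direction is enough: for equal-sized finite sets, one-way containsAll implies equality. A hedged sketch of that invariant (hypothetical helper name, not part of the original class):

// Sketch only: for equal-sized sets, one-way containsAll is equivalent to Set.equals.
static <T> boolean sameKeys(java.util.Set<T> a, java.util.Set<T> b) {
  return a.size() == b.size() && a.containsAll(b);
}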
/**
 * @param other the heap to compare against
 * @return true if both heaps have the same key set and isomorphic values for every key
 */
private boolean checkHeapObjectEquality(Heap other) {
  Set<Location> this_keyset = this.getKeySet();
  Set<Location> other_keyset = other.getKeySet();
  if (this_keyset != null && other_keyset != null) {
    if (this_keyset.containsAll(other_keyset) && other_keyset.containsAll(this_keyset)) {
      // Both keysets have the same keys; compare the value sets.
      boolean retval = true;
      for (Location key : this_keyset) {
        JSObject this_value = this.get(key);
        JSObject other_value = other.get(key);
        retval = retval && (this_value.isomorphic(other_value));
      }
      return retval;
    } else {
      // The maps don't match.
      return false;
    }
  } else {
    return this_keyset == null && other_keyset == null;
  }
}
/**
 * Prune candidate fact sets with respect to available candidate facts.
 *
 * <p>Prunes a candidate set if any of its facts is missing.
 *
 * @param pruneCause the reason recorded for the pruned sets
 */
public void pruneCandidateFactSet(CandidateTablePruneCode pruneCause) {
  // remove candidate fact sets that have missing facts
  for (Iterator<Set<CandidateFact>> i = candidateFactSets.iterator(); i.hasNext(); ) {
    Set<CandidateFact> cfacts = i.next();
    if (!candidateFacts.containsAll(cfacts)) {
      log.info(
          "Not considering fact table set:{} as they have non candidate tables and facts missing because of {}",
          cfacts,
          pruneCause);
      i.remove();
    }
  }
  // prune candidate facts
  pruneCandidateFactWithCandidateSet(CandidateTablePruneCode.ELEMENT_IN_SET_PRUNED);
}
@Test
public void testTransformR() {
  System.out.println("transformR");
  Random rand = new Random(13);
  int t0 = 1, t1 = 5, t2 = 8;
  Set<Integer> shouldHave = new HashSet<Integer>();
  shouldHave.addAll(Arrays.asList(t0, t1, t2));

  RegressionDataSet rds = SFSTest.generate3DimIn10R(rand, t0, t1, t2);
  BDS bds = new BDS(3, rds, new MultipleLinearRegression(), 5);
  Set<Integer> found = bds.getSelectedNumerical();

  assertEquals(shouldHave.size(), found.size());
  assertTrue(shouldHave.containsAll(found));
}
/** Test of transform method, of class BDS. */
@Test
public void testTransformC() {
  System.out.println("transformC");
  Random rand = new Random(13);
  int t0 = 1, t1 = 5, t2 = 8;
  Set<Integer> shouldHave = new HashSet<Integer>();
  shouldHave.addAll(Arrays.asList(t0, t1, t2));

  ClassificationDataSet cds = SFSTest.generate3DimIn10(rand, t0, t1, t2);
  BDS bds = new BDS(3, cds, new NearestNeighbour(7), 5);
  Set<Integer> found = bds.getSelectedNumerical();

  assertEquals(shouldHave.size(), found.size());
  assertTrue(shouldHave.containsAll(found));
}
/**
 * Loops through all known nucleotides and attempts to find which are equivalent to each other.
 * Also takes into account lower casing nucleotides as well as upper-cased ones.
 */
@SuppressWarnings("unchecked")
protected void calculateIndirectAmbiguities() {
  Map<NucleotideCompound, List<NucleotideCompound>> equivalentsMap =
      new HashMap<NucleotideCompound, List<NucleotideCompound>>();
  List<NucleotideCompound> ambiguousCompounds = new ArrayList<NucleotideCompound>();
  for (NucleotideCompound compound : getAllCompounds()) {
    if (!compound.isAmbiguous()) {
      continue;
    }
    ambiguousCompounds.add(compound);
  }

  for (NucleotideCompound sourceCompound : ambiguousCompounds) {
    Set<NucleotideCompound> compoundConstituents = sourceCompound.getConstituents();
    for (NucleotideCompound targetCompound : ambiguousCompounds) {
      Set<NucleotideCompound> targetConstituents = targetCompound.getConstituents();
      if (targetConstituents.containsAll(compoundConstituents)) {
        NucleotideCompound lcSourceCompound = toLowerCase(sourceCompound);
        NucleotideCompound lcTargetCompound = toLowerCase(targetCompound);
        // equivalentsMap.put(sourceCompound, targetCompound);
        // equivalentsMap.put(sourceCompound, lcTargetCompound);
        checkAdd(equivalentsMap, sourceCompound, targetCompound);
        checkAdd(equivalentsMap, sourceCompound, lcTargetCompound);
        checkAdd(equivalentsMap, targetCompound, sourceCompound);
        checkAdd(equivalentsMap, lcTargetCompound, sourceCompound);
        checkAdd(equivalentsMap, lcSourceCompound, targetCompound);
        checkAdd(equivalentsMap, lcSourceCompound, lcTargetCompound);
      }
    }
  }

  // And once it's all done start adding them to the equivalents map
  for (NucleotideCompound key : equivalentsMap.keySet()) {
    List<NucleotideCompound> vals = equivalentsMap.get(key);
    for (NucleotideCompound value : vals) {
      addEquivalent((C) key, (C) value);
      addEquivalent((C) value, (C) key);
    }
  }
}
@Test
public void shouldGetRightsForAUserOnHomeFacilityAndProgram() throws Exception {
  Long userId = 1L;
  Facility facility = new Facility(2L);
  Program program = new Program(3L);
  List<Right> expected = asList(CREATE_REQUISITION);

  when(facilityService.getHomeFacility(userId)).thenReturn(facility);
  when(roleRightsRepository.getRightsForUserOnHomeFacilityAndProgram(userId, program))
      .thenReturn(expected);

  Set<Right> result =
      roleRightsService.getRightsForUserAndFacilityProgram(userId, facility, program);

  assertThat(result.containsAll(expected), is(true));
  verify(roleRightsRepository).getRightsForUserOnHomeFacilityAndProgram(userId, program);
}
@Ignore
@Test
public void testNormalSimulatedSession()
    throws SolverException, InconsistentTheoryException, NoConflictException,
        OWLOntologyCreationException {
  logger.info("NormalSimulatedSession");
  TreeSearch<FormulaSet<OWLLogicalAxiom>, OWLLogicalAxiom> search =
      testSetup("ontologies/ecai2010.owl");
  Set<? extends FormulaSet<OWLLogicalAxiom>> diagnoses = getDiagnoses(search);

  Map<QSSType, DurationStat> nTimes = new HashMap<QSSType, DurationStat>();
  Map<QSSType, List<Double>> nQueries = new HashMap<QSSType, List<Double>>();

  for (QSSType type : QSSType.values()) { // run for each scoring function
    logger.info("QSSType: " + type);
    nTimes.put(type, new DurationStat());
    nQueries.put(type, new LinkedList<Double>());

    for (FormulaSet<OWLLogicalAxiom> targetDiagnosis : diagnoses) {
      // run for each possible target diagnosis
      logger.info("targetD: " + CalculateDiagnoses.renderAxioms(targetDiagnosis));
      long completeTime = System.currentTimeMillis();
      computeHS(search, ctTheory, targetDiagnosis, nQueries.get(type), type);
      ctTheory.getKnowledgeBase().removeFormulas(targetDiagnosis);
      completeTime = System.currentTimeMillis() - completeTime;
      nTimes.get(type).add(completeTime);
      foundDiagnoses.addAll(targetDiagnosis);
      ctTheory.getReasoner().addFormulasToCache(ctTheory.getKnowledgeBase().getFaultyFormulas());
      assertTrue(ctTheory.verifyConsistency());
      ctTheory.reset();
      resetTheoryTests(ctTheory);
      search.reset();
    }
    logStatistics(nQueries, nTimes, type, "normal");
    logger.info("found Diagnoses: " + foundDiagnoses.size());
    logger.info("found Diagnosis: " + CalculateDiagnoses.renderAxioms(foundDiagnoses));
    logger.info(
        "found all target diagnoses: "
            + (foundDiagnoses.size() > 0 && foundDiagnoses.containsAll(diagnoses)));
  }
}
/**
 * Utility method which builds an abbreviated representation of the elements in a passed {@link
 * Set} of exception types.
 *
 * @param s The exceptions to print.
 * @param connector The character to insert between exceptions.
 * @return An abbreviated representation of the exceptions.
 */
private String toAbbreviatedString(Set s, char connector) {
  final String JAVA_LANG = "java.lang.";
  final int JAVA_LANG_LENGTH = JAVA_LANG.length();
  final String EXCEPTION = "Exception";
  final int EXCEPTION_LENGTH = EXCEPTION.length();

  Collection vmErrorThrowables = ThrowableSet.Manager.v().VM_ERRORS.exceptionsIncluded;
  boolean containsAllVmErrors = s.containsAll(vmErrorThrowables);
  StringBuffer buf = new StringBuffer();

  if (containsAllVmErrors) {
    buf.append(connector);
    buf.append("vmErrors");
  }

  for (Iterator it = sortedThrowableIterator(s); it.hasNext(); ) {
    RefLikeType reflikeType = (RefLikeType) it.next();
    RefType baseType = null;
    if (reflikeType instanceof RefType) {
      baseType = (RefType) reflikeType;
      if (vmErrorThrowables.contains(baseType) && containsAllVmErrors) {
        continue; // Already accounted for vmErrors.
      } else {
        buf.append(connector);
      }
    } else if (reflikeType instanceof AnySubType) {
      buf.append(connector);
      buf.append('(');
      baseType = ((AnySubType) reflikeType).getBase();
    }
    String typeName = baseType.toString();
    if (typeName.startsWith(JAVA_LANG)) {
      typeName = typeName.substring(JAVA_LANG_LENGTH);
    }
    if (typeName.length() > EXCEPTION_LENGTH && typeName.endsWith(EXCEPTION)) {
      typeName = typeName.substring(0, typeName.length() - EXCEPTION_LENGTH);
    }
    buf.append(typeName);
    if (reflikeType instanceof AnySubType) {
      buf.append(')');
    }
  }
  return buf.toString();
}
/** if there is a hint to use, use it */
private void _lookForHints() {
  if (_hint != null) // if someone set a hint, then don't do this
    return;

  if (_collection._hintFields == null) return;

  Set<String> mykeys = _query.keySet();

  for (DBObject o : _collection._hintFields) {
    Set<String> hintKeys = o.keySet();

    if (!mykeys.containsAll(hintKeys)) continue;

    hint(o);
    return;
  }
}
private boolean compare_Collection(int operation, Collection collection, Object value2) {
  if (operation == SUBSET || operation == SUPERSET) {
    Set set = new HashSet();
    if (value2 != null) {
      StringTokenizer st = new StringTokenizer(value2.toString(), ",");
      while (st.hasMoreTokens()) {
        set.add(st.nextToken().trim());
      }
    }
    if (operation == SUBSET) {
      return set.containsAll(collection);
    } else {
      return collection.containsAll(set);
    }
  }
  for (Iterator iterator = collection.iterator(); iterator.hasNext(); ) {
    if (compare(operation, iterator.next(), value2)) {
      return true;
    }
  }
  return false;
}
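The SUBSET and SUPERSET branches above differ only in which side receives the containsAll call. A small standalone illustration of the two directions (hypothetical names, not from the filter implementation):

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

class SubsetSupersetDemo {
  public static void main(String[] args) {
    Set<String> parsed = new HashSet<String>(Arrays.asList("a", "b"));
    List<String> attribute = Arrays.asList("a", "b", "c");
    System.out.println(parsed.containsAll(attribute)); // false: attribute is not a subset of parsed
    System.out.println(attribute.containsAll(parsed)); // true: attribute is a superset of parsed
  }
}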
private void processUnsuccessfulSelections(
    final Object[] toSelect, Function<Object, Object> restore, Set<Object> originallySelected) {
  final Set<Object> selected = myUi.getSelectedElements();

  boolean wasFullyRejected = false;
  if (toSelect.length > 0 && !selected.isEmpty() && !originallySelected.containsAll(selected)) {
    final Set<Object> successfulSelections = new HashSet<Object>();
    ContainerUtil.addAll(successfulSelections, toSelect);
    successfulSelections.retainAll(selected);
    wasFullyRejected = successfulSelections.isEmpty();
  } else if (selected.isEmpty() && originallySelected.isEmpty()) {
    wasFullyRejected = true;
  }

  if (wasFullyRejected && !selected.isEmpty()) return;

  for (Object eachToSelect : toSelect) {
    if (!selected.contains(eachToSelect)) {
      restore.fun(eachToSelect);
    }
  }
}
@Test
public void shouldGetRightsForAUserOnSupervisedFacilityAndProgram() throws Exception {
  Long userId = 1L;
  Facility facility = new Facility(2L);
  Program program = new Program(3L);
  List<Right> expected = asList(CREATE_REQUISITION);
  SupervisoryNode supervisoryNode = new SupervisoryNode(4L);
  List<SupervisoryNode> supervisoryNodes = asList(supervisoryNode);

  when(supervisoryNodeService.getFor(facility, program)).thenReturn(supervisoryNode);
  when(supervisoryNodeService.getAllParentSupervisoryNodesInHierarchy(supervisoryNode))
      .thenReturn(supervisoryNodes);
  when(roleRightsRepository.getRightsForUserOnSupervisoryNodeAndProgram(
          userId, supervisoryNodes, program))
      .thenReturn(expected);

  Set<Right> result =
      roleRightsService.getRightsForUserAndFacilityProgram(userId, facility, program);

  verify(roleRightsRepository)
      .getRightsForUserOnSupervisoryNodeAndProgram(userId, supervisoryNodes, program);
  assertThat(result.containsAll(expected), is(true));
}
/*
 * Check and apply implicit and explicit lexical precedence rules. Display
 * errors and infos for the human user during the process.
 *
 * @param automaton
 *            is the automaton to check. In order to have the explicit
 *            priorities applied, it is required that the automaton is
 *            tagged with the acceptation of the LexerExpression.
 * @return a new automaton where only the right acceptation tags remain.
 */
public Automaton checkAndApplyLexerPrecedence(
    Automaton automaton, Trace trace, Strictness strictness) {
  automaton = automaton.minimal();
  Map<State, String> words = automaton.collectShortestWords();
  Map<Acceptation, Set<State>> accepts = automaton.collectAcceptationStates();

  // Associate each acceptation with the ones it shares at least one common state with.
  Map<Acceptation, Set<Acceptation>> conflicts = new HashMap<Acceptation, Set<Acceptation>>();
  // Associate each acceptation with the ones it supersedes.
  Map<Acceptation, Set<Acceptation>> priorities = new HashMap<Acceptation, Set<Acceptation>>();

  // Fill the priorities structure with the implicit inclusion rule
  for (Acceptation acc1 : automaton.getAcceptations()) {
    if (acc1 == Acceptation.ACCEPT) {
      continue;
    }
    // FIXME: empty LexerExpressions are not detected here since
    // their acceptation tag is not in the automaton.

    // Collect all the conflicts
    Set<State> set1 = accepts.get(acc1);
    Set<Acceptation> confs = new TreeSet<Acceptation>();
    for (State s : set1) {
      confs.addAll(s.getAcceptations());
    }
    conflicts.put(acc1, confs);

    // Check for implicit priority for each conflict
    for (Acceptation acc2 : confs) {
      if (acc2 == Acceptation.ACCEPT) {
        continue;
      }
      if (acc1 == acc2) {
        continue;
      }
      Set<State> set2 = accepts.get(acc2);
      if (set2.equals(set1)) {
        if (!conflicts.containsKey(acc2)) {
          throw SemanticException.genericError(
              "The " + acc1.getName() + " and " + acc2.getName() + " tokens are equivalent.");
        }
      } else if (set2.containsAll(set1)) {
        addPriority(priorities, acc1, acc2);
        State example = null;
        for (State s : set2) {
          if (!set1.contains(s)) {
            example = s;
            break;
          }
        }
        // Note: Since set1 is strictly included in set2, example cannot be null
        trace.verboseln(
            "    The "
                + acc1.getName()
                + " token is included in the "
                + acc2.getName()
                + " token. (Example of divergence: '"
                + words.get(example)
                + "'.)");
      }
    }
  }

  // Collect new acceptation states and see if a conflict still exists
  Map<State, Acceptation> newAccepts = new HashMap<State, Acceptation>();
  for (State s : automaton.getStates()) {
    if (s.getAcceptations().isEmpty()) {
      continue;
    }
    Acceptation candidate = s.getAcceptations().first();
    for (Acceptation challenger : s.getAcceptations()) {
      if (candidate == challenger) {
        continue;
      }
      if (hasPriority(priorities, candidate, challenger)) {
        // nothing. keep the candidate
      } else if (hasPriority(priorities, challenger, candidate)) {
        candidate = challenger;
      } else {
        throw SemanticException.genericError(
            "The "
                + candidate.getName()
                + " token and the "
                + challenger.getName()
                + " token conflict on the string '"
                + words.get(s)
                + "'. You should specify a precedence between them.");
      }
    }
    newAccepts.put(s, candidate);
  }

  // Ask for a new automaton with the correct acceptation states.
  return automaton.resetAcceptations(newAccepts);
}
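The implicit-priority rule above hinges on distinguishing equal acceptation-state sets (an equivalence error) from strict inclusion (an implicit priority). A hedged mini-sketch of that distinction (hypothetical helper name):

// Sketch only: containsAll plus an equals check separates "same state set"
// from strict inclusion.
static <T> boolean strictlyIncludes(java.util.Set<T> set2, java.util.Set<T> set1) {
  return set2.containsAll(set1) && !set2.equals(set1);
}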
private Set<Set<Integer>> finishESeeds(Set<Set<Integer>> ESeeds) {
  log("Growing Effect Seeds.", true);
  Set<Set<Integer>> grown = new HashSet<Set<Integer>>();

  List<Integer> _variables = new ArrayList<Integer>();
  for (int i = 0; i < variables.size(); i++) _variables.add(i);

  // Lax grow phase without speedup.
  if (algType == AlgType.lax) {
    Set<Integer> t = new HashSet<Integer>();
    int count = 0;
    int total = ESeeds.size();

    do {
      if (!ESeeds.iterator().hasNext()) {
        break;
      }

      Set<Integer> cluster = ESeeds.iterator().next();
      Set<Integer> _cluster = new HashSet<Integer>(cluster);

      if (extraShuffle) {
        Collections.shuffle(_variables);
      }

      for (int o : _variables) {
        if (_cluster.contains(o)) continue;

        List<Integer> _cluster2 = new ArrayList<Integer>(_cluster);

        int rejected = 0;
        int accepted = 0;

        ChoiceGenerator gen = new ChoiceGenerator(_cluster2.size(), 2);
        int[] choice;

        while ((choice = gen.next()) != null) {
          int n1 = _cluster2.get(choice[0]);
          int n2 = _cluster2.get(choice[1]);

          t.clear();
          t.add(n1);
          t.add(n2);
          t.add(o);

          if (!ESeeds.contains(t)) {
            rejected++;
          } else {
            accepted++;
          }
        }

        if (rejected > accepted) {
          continue;
        }

        _cluster.add(o);

        // if (!(avgSumLnP(new ArrayList<Integer>(_cluster)) > -10)) {
        //   _cluster.remove(o);
        // }
      }

      // This takes out all pure clusters that are subsets of _cluster.
      ChoiceGenerator gen2 = new ChoiceGenerator(_cluster.size(), 3);
      int[] choice2;
      List<Integer> _cluster3 = new ArrayList<Integer>(_cluster);

      while ((choice2 = gen2.next()) != null) {
        int n1 = _cluster3.get(choice2[0]);
        int n2 = _cluster3.get(choice2[1]);
        int n3 = _cluster3.get(choice2[2]);

        t.clear();
        t.add(n1);
        t.add(n2);
        t.add(n3);

        ESeeds.remove(t);
      }

      if (verbose) {
        System.out.println(
            "Grown "
                + (++count)
                + " of "
                + total
                + ": "
                + variablesForIndices(new ArrayList<Integer>(_cluster)));
      }
      grown.add(_cluster);
    } while (!ESeeds.isEmpty());
  }

  // Lax grow phase with speedup.
  if (algType == AlgType.laxWithSpeedup) {
    int count = 0;
    int total = ESeeds.size();

    // Optimized lax version of grow phase.
    for (Set<Integer> cluster : new HashSet<Set<Integer>>(ESeeds)) {
      Set<Integer> _cluster = new HashSet<Integer>(cluster);

      if (extraShuffle) {
        Collections.shuffle(_variables);
      }

      for (int o : _variables) {
        if (_cluster.contains(o)) continue;

        List<Integer> _cluster2 = new ArrayList<Integer>(_cluster);

        int rejected = 0;
        int accepted = 0;

        ChoiceGenerator gen = new ChoiceGenerator(_cluster2.size(), 2);
        int[] choice;

        while ((choice = gen.next()) != null) {
          int n1 = _cluster2.get(choice[0]);
          int n2 = _cluster2.get(choice[1]);

          Set<Integer> triple = triple(n1, n2, o);

          if (!ESeeds.contains(triple)) {
            rejected++;
          } else {
            accepted++;
          }
        }

        if (rejected > accepted) {
          continue;
        }

        // System.out.println("Adding " + o + " to " + cluster);
        _cluster.add(o);
      }

      for (Set<Integer> c : new HashSet<Set<Integer>>(ESeeds)) {
        if (_cluster.containsAll(c)) {
          ESeeds.remove(c);
        }
      }

      if (verbose) {
        System.out.println("Grown " + (++count) + " of " + total + ": " + _cluster);
      }

      grown.add(_cluster);
    }
  }

  // Strict grow phase.
  if (algType == AlgType.strict) {
    Set<Integer> t = new HashSet<Integer>();
    int count = 0;
    int total = ESeeds.size();

    do {
      if (!ESeeds.iterator().hasNext()) {
        break;
      }

      Set<Integer> cluster = ESeeds.iterator().next();
      Set<Integer> _cluster = new HashSet<Integer>(cluster);

      if (extraShuffle) {
        Collections.shuffle(_variables);
      }

      VARIABLES:
      for (int o : _variables) {
        if (_cluster.contains(o)) continue;

        List<Integer> _cluster2 = new ArrayList<Integer>(_cluster);

        ChoiceGenerator gen = new ChoiceGenerator(_cluster2.size(), 2);
        int[] choice;

        while ((choice = gen.next()) != null) {
          int n1 = _cluster2.get(choice[0]);
          int n2 = _cluster2.get(choice[1]);

          t.clear();
          t.add(n1);
          t.add(n2);
          t.add(o);

          if (!ESeeds.contains(t)) {
            continue VARIABLES;
          }

          // if (avgSumLnP(new ArrayList<Integer>(t)) < -10) continue
          // CLUSTER;
        }

        _cluster.add(o);
      }

      // This takes out all pure clusters that are subsets of _cluster.
      ChoiceGenerator gen2 = new ChoiceGenerator(_cluster.size(), 3);
      int[] choice2;
      List<Integer> _cluster3 = new ArrayList<Integer>(_cluster);

      while ((choice2 = gen2.next()) != null) {
        int n1 = _cluster3.get(choice2[0]);
        int n2 = _cluster3.get(choice2[1]);
        int n3 = _cluster3.get(choice2[2]);

        t.clear();
        t.add(n1);
        t.add(n2);
        t.add(n3);

        ESeeds.remove(t);
      }

      if (verbose) {
        System.out.println("Grown " + (++count) + " of " + total + ": " + _cluster);
      }
      grown.add(_cluster);
    } while (!ESeeds.isEmpty());
  }

  // Optimized pick phase.
  log("Choosing among grown Effect Clusters.", true);

  for (Set<Integer> l : grown) {
    ArrayList<Integer> _l = new ArrayList<Integer>(l);
    Collections.sort(_l);
    if (verbose) {
      log("Grown: " + variablesForIndices(_l), false);
    }
  }

  Set<Set<Integer>> out = new HashSet<Set<Integer>>();

  List<Set<Integer>> list = new ArrayList<Set<Integer>>(grown);

  // final Map<Set<Integer>, Double> pValues = new HashMap<Set<Integer>, Double>();
  //
  // for (Set<Integer> o : grown) {
  //   pValues.put(o, getP(new ArrayList<Integer>(o)));
  // }

  Collections.sort(
      list,
      new Comparator<Set<Integer>>() {
        @Override
        public int compare(Set<Integer> o1, Set<Integer> o2) {
          // if (o1.size() == o2.size()) {
          //   double chisq1 = pValues.get(o1);
          //   double chisq2 = pValues.get(o2);
          //   return Double.compare(chisq2, chisq1);
          // }
          return o2.size() - o1.size();
        }
      });

  // for (Set<Integer> o : list) {
  //   if (pValues.get(o) < alpha) continue;
  //   System.out.println(variablesForIndices(new ArrayList<Integer>(o)) + " p = " + pValues.get(o));
  // }

  Set<Integer> all = new HashSet<Integer>();

  CLUSTER:
  for (Set<Integer> cluster : list) {
    // if (pValues.get(cluster) < alpha) continue;

    for (Integer i : cluster) {
      if (all.contains(i)) continue CLUSTER;
    }

    out.add(cluster);

    // if (getPMulticluster(out) < alpha) {
    //   out.remove(cluster);
    //   continue;
    // }

    all.addAll(cluster);
  }

  return out;
}
@Override
public boolean containsAll(Collection<?> c) {
  return s.containsAll(c);
}
private void compareMaps(Map<AMQPString, AMQPType> m1, Map<AMQPType, AMQPType> m2) {
  Set e1 = m1.entrySet();
  Set e2 = m2.entrySet();
  assertTrue(e1.containsAll(e2));
  assertTrue(e2.containsAll(e1));
}
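For java.util.Set, mutual containsAll coincides with equals (Set.equals is specified as same size plus containment), so the two assertions above amount to checking e1.equals(e2). A hedged one-liner making that explicit (hypothetical helper name):

// Sketch only: mutual containsAll on sets is set equality.
static boolean sameEntries(java.util.Set<?> e1, java.util.Set<?> e2) {
  return e1.containsAll(e2) && e2.containsAll(e1); // equivalent to e1.equals(e2)
}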
/** Initialize the stratifications, evaluations, evaluation contexts, and reporting object */
public void initialize() {
  // Just list the modules, and exit quickly.
  if (LIST) {
    variantEvalUtils.listModulesAndExit();
  }

  // maintain the full list of comps
  comps.addAll(compsProvided);
  if (dbsnp.dbsnp.isBound()) {
    comps.add(dbsnp.dbsnp);
    knowns.add(dbsnp.dbsnp);
  }

  // Add a dummy comp track if none exists
  if (comps.size() == 0)
    comps.add(
        new RodBinding<VariantContext>(VariantContext.class, "none", "UNBOUND", "", new Tags()));

  // Set up set of additional knowns
  for (RodBinding<VariantContext> compRod : comps) {
    if (KNOWN_NAMES.contains(compRod.getName())) knowns.add(compRod);
  }

  // Now that we have all the rods categorized, determine the sample list from the eval rods.
  Map<String, VCFHeader> vcfRods = GATKVCFUtils.getVCFHeadersFromRods(getToolkit(), evals);
  Set<String> vcfSamples =
      SampleUtils.getSampleList(vcfRods, GATKVariantContextUtils.GenotypeMergeType.REQUIRE_UNIQUE);

  // Load the sample list, using an intermediate tree set to sort the samples
  final Set<String> allSampleNames = SampleUtils.getSamplesFromCommandLineInput(vcfSamples);
  sampleNamesForEvaluation.addAll(
      new TreeSet<String>(
          SampleUtils.getSamplesFromCommandLineInput(vcfSamples, SAMPLE_EXPRESSIONS)));
  isSubsettingSamples = !sampleNamesForEvaluation.containsAll(allSampleNames);

  if (Arrays.asList(STRATIFICATIONS_TO_USE).contains("Sample")) {
    sampleNamesForStratification.addAll(sampleNamesForEvaluation);
  }
  sampleNamesForStratification.add(ALL_SAMPLE_NAME);

  // Initialize select expressions
  for (VariantContextUtils.JexlVCMatchExp jexl :
      VariantContextUtils.initializeMatchExps(SELECT_NAMES, SELECT_EXPS)) {
    SortableJexlVCMatchExp sjexl = new SortableJexlVCMatchExp(jexl.name, jexl.exp);
    jexlExpressions.add(sjexl);
  }

  // Initialize the set of stratifications and evaluations to use
  // The list of stratifiers and evaluators to use
  final List<VariantStratifier> stratificationObjects =
      variantEvalUtils.initializeStratificationObjects(
          NO_STANDARD_STRATIFICATIONS, STRATIFICATIONS_TO_USE);
  final Set<Class<? extends VariantEvaluator>> evaluationClasses =
      variantEvalUtils.initializeEvaluationObjects(NO_STANDARD_MODULES, MODULES_TO_USE);

  checkForIncompatibleEvaluatorsAndStratifiers(stratificationObjects, evaluationClasses);

  for (VariantStratifier vs : stratificationObjects) {
    if (vs.getName().equals("Filter")) byFilterIsEnabled = true;
    else if (vs.getName().equals("Sample")) perSampleIsEnabled = true;
  }

  if (intervalsFile != null) {
    boolean fail = true;
    for (final VariantStratifier vs : stratificationObjects) {
      if (vs.getClass().equals(IntervalStratification.class)) fail = false;
    }
    if (fail)
      throw new UserException.BadArgumentValue(
          "ST", "stratIntervals argument provided but -ST IntervalStratification not provided");
  }

  // Initialize the evaluation contexts
  createStratificationStates(stratificationObjects, evaluationClasses);

  // Load ancestral alignments
  if (ancestralAlignmentsFile != null) {
    try {
      ancestralAlignments = new IndexedFastaSequenceFile(ancestralAlignmentsFile);
    } catch (FileNotFoundException e) {
      throw new ReviewedStingException(
          String.format(
              "The ancestral alignments file, '%s', could not be found",
              ancestralAlignmentsFile.getAbsolutePath()));
    }
  }

  // initialize CNVs
  if (knownCNVsFile != null) {
    knownCNVsByContig = createIntervalTreeByContig(knownCNVsFile);
  }
}
public static void main(String[] args) {
  final Set<String> show =
      new HashSet<String>(Arrays.asList("point$ triangles acres neighbors sectors".split(" ")));
  // final Set<String> show = new HashSet<String>(Arrays.asList("global-sectors sectors".split(" ")));
  final Map<Long, Acre> acresByID = new HashMap<Long, Acre>();
  Consumer<Acre> acreConsumer =
      new Consumer<Acre>() {
        public void consume(Acre acre) {
          assert !acresByID.containsKey(acre.id);
          acresByID.put(acre.id, acre);
        }
      };
  Point[] points = {
    new Point(1.0, -1000.0, Math.sqrt(3) * -0.5),
    new Point(-1.0, -1000.0, Math.sqrt(3) * -0.5),
    new Point(0.0, -1000.0, Math.sqrt(3) * 0.5)
  };
  GeoPoint[] geoPoints = {
    GeoPoint.fromPoint(points[0]), GeoPoint.fromPoint(points[1]), GeoPoint.fromPoint(points[2])
  };
  for (int i = 0; i < 3; i++) {
    points[i] = geoPoints[i].toPoint(1000.0);
  }
  final OutputGraph out = new OutputGraph("Acres", new Dimension(2048, 2048), 0.0, 0.0, 1000.0);
  out.onClose(
      new Runnable() {
        public void run() {
          System.exit(0);
        }
      });
  Sector.MAP_ACRES_BY_CENTER_POINT.set(true);
  final GlobalSector gs = new GlobalSector(0, Globe.INSTANCE, geoPoints);
  final AtomicReference<Sector[]> sectors = new AtomicReference<Sector[]>();
  gs.setInit(
      gs.new Initializer(5, points[0], points[1], points[2], false, null) {
        @Override
        protected Sector[] getChildren(int length) {
          sectors.set(super.getChildren(length));
          return sectors.get();
        }

        @Override
        protected Sector buildChild(
            int index,
            GeoPoint a,
            GeoPoint b,
            GeoPoint c,
            Point i,
            Point j,
            Point k,
            boolean inverted) {
          TestSector sector = new TestSector(index, inverted, gs, new GeoPoint[] {a, b, c});
          sector.setInit(sector.new TestInitializer(new Point[] {i, j, k}, show, out));
          sectors.get()[index] = sector;
          return sector;
        }
      });
  gs.getInit().run();
  for (Sector s : sectors.get()) {
    s.getInit().run();
    s.edgesBuilt = new AtomicInteger();
  }
  if (show.contains("global-sectors")) {
    Point a = gs.points[0].toPoint(1000.0);
    Point b = gs.points[1].toPoint(1000.0);
    Point c = gs.points[2].toPoint(1000.0);
    double x = (a.x + b.x + c.x) / 3.0;
    double y = (a.z + b.z + c.z) / 3.0;
    if (show.contains("labels")) {
      out.addLabel(Color.orange, gs.getIDString(), x, y + 0.075);
    }
    x *= -0.02;
    y *= -0.02;
    out.addLine(
        Color.orange,
        a.x * 1.02 + x, a.z * 1.02 + y,
        b.x * 1.02 + x, b.z * 1.02 + y,
        c.x * 1.02 + x, c.z * 1.02 + y,
        a.x * 1.02 + x, a.z * 1.02 + y);
  }
  if (show.contains("sectors")) {
    for (Sector s : sectors.get()) {
      Point a = s.points[0].toPoint(1000.0);
      Point b = s.points[1].toPoint(1000.0);
      Point c = s.points[2].toPoint(1000.0);
      out.addLine(Color.magenta, a.x, a.z, b.x, b.z, c.x, c.z, a.x, a.z);
      if (show.contains("labels")) {
        double x = (a.x + b.x + c.x) / 3.0;
        double y = (a.z + b.z + c.z) / 3.0;
        out.addLabel(Color.magenta, s.getIDString(), x, y);
      }
    }
  }
  GeoFactory.combinePoints(Arrays.<GeoPointBasedElement>asList(sectors.get()), Globe.INSTANCE);
  Map<GeoPoint, Sector[]> sectorMap = new HashMap<GeoPoint, Sector[]>();
  for (Sector s : sectors.get()) {
    GeoFactory.addSectorByPoint(sectorMap, s, s.points[0]);
    GeoFactory.addSectorByPoint(sectorMap, s, s.points[1]);
    GeoFactory.addSectorByPoint(sectorMap, s, s.points[2]);
  }
  Collection<AcreBuilder> builders = new ArrayList<AcreBuilder>();
  for (final Sector s : sectors.get()) {
    builders.add(
        new AcreBuilder(GeoSpec.SECTOR_DIVISIONS.iGet(), false, s, sectorMap, null, acreConsumer) {
          @Override
          protected int[] findNearCornerAcres() {
            int[] nearCornerAcres = super.findNearCornerAcres();
            if (show.contains("near-edge-acres")) {
              System.out.println("Corner A: " + nearCornerAcres[0]);
              System.out.println("Corner B: " + nearCornerAcres[1]);
              System.out.println("Corner C: " + nearCornerAcres[2]);
            }
            return nearCornerAcres;
          }

          @Override
          protected Map<Edge, int[]> findNearEdgeAcres() {
            Map<Edge, int[]> nearEdgeAcres = super.findNearEdgeAcres();
            if (show.contains("near-edge-acres")) {
              System.out.println("Edge AB: " + Arrays.toString(nearEdgeAcres.get(Edge.AB)));
              System.out.println("Edge BC: " + Arrays.toString(nearEdgeAcres.get(Edge.BC)));
              System.out.println("Edge CA: " + Arrays.toString(nearEdgeAcres.get(Edge.CA)));
            }
            return nearEdgeAcres;
          }
        });
  }
  for (AcreBuilder builder : builders) {
    builder.run(0);
  }
  for (AcreBuilder builder : builders) {
    builder.secondRun().run();
  }
  for (Acre a : acresByID.values()) {
    int c = 0;
    Point p = a.center.toPoint(1000.0);
    for (Sector s : sectors.get()) {
      Acre acre = s.acres.get(p);
      if (acre != null) {
        assert acre == a;
        c++;
      }
    }
    assert a.flavor == Acre.Flavor.MULTI_SECTOR
        ? c >= 5
        : (a.flavor == Acre.Flavor.DUAL_SECTOR ? c == 2 : c == 1);
  }
  if (show.contains("acres")) {
    Set<Long> visitedAcreIds = new HashSet<Long>();
    for (Sector s : sectors.get()) {
      Acre[] acres = s.getInnerAcres();
      for (Acre acre : acres) {
        debugAcre(acre, acre.getIDString(), visitedAcreIds, show, out);
      }
      acres = s.getSharedAcres();
      for (Acre acre : acres) {
        if (acre != null) {
          debugAcre(acre, acre.getIDString(), visitedAcreIds, show, out);
        }
      }
    }
    assert visitedAcreIds.containsAll(acresByID.keySet());
  }
  if (show.contains("neighbors")) {
    double v = 0.7, w = 1.0 - v;
    for (Acre a : acresByID.values()) {
      for (long neighborID : a.neighbors) {
        Acre n = acresByID.get(neighborID);
        if (n != null) {
          Point s = a.center.toPoint(1000.0);
          Point t = n.center.toPoint(1000.0);
          out.addArrow(
              Color.white,
              s.x * v + t.x * w,
              s.z * v + t.z * w,
              s.x * w + t.x * v,
              s.z * w + t.z * v);
        }
      }
    }
  }
}
/**
 * Recursive function to check if the JSObjects in both the heaps match.
 *
 * @param other the heap to compare against
 * @param this_obj location of the object in this heap
 * @param other_obj location of the corresponding object in the other heap
 * @return true if the two objects (and, recursively, their contents) match
 */
private boolean checkContentEquality(Heap other, Location this_obj, Location other_obj) {
  JSObject curr_obj = this.get(this_obj);
  JSObject test_obj = other.get(other_obj);
  if (curr_obj == null && test_obj == null) return true;
  if ((curr_obj != null && test_obj == null) || (curr_obj == null && test_obj != null))
    return false;

  SecurityType curr_taint = (SecurityType) curr_obj.get("at_Taint");
  SecurityType test_taint = (SecurityType) test_obj.get("at_Taint");

  // Proceed further only if the JSObject taints match
  if (curr_taint.equals(test_taint)) {
    // If the location is a dummy object, it suffices to just check the taints
    if (other.isDummy(other_obj) && this.isDummy(this_obj)) {
      return true;
    }
    Set<String> this_keyset = curr_obj.getKeySet();
    Set<String> other_keyset = test_obj.getKeySet();

    // Proceed further only if the keys in both the JSObjects match
    if (this_keyset.containsAll(other_keyset) && other_keyset.containsAll(this_keyset)) {
      // For each key compare the object values
      boolean retval = true;
      for (String key : this_keyset) {
        // we don't want to go into libraryProperties as they will lead to the parents
        // creating a circular check and infinite recursion
        if (ProgramAnalyzerC.libraryProperties.contains(key)) continue;
        if (key.equals("at_Taint")) // we already compared taints
          continue;

        ObjectValue this_val = curr_obj.get(key);
        ObjectValue other_val = test_obj.get(key);
        if (this_val == null && other_val == null) {
          continue;
        }
        if ((this_val == null && other_val != null) || (this_val != null && other_val == null)) {
          return false;
        }
        if (this_val.getClass() != other_val.getClass()) {
          // The class types don't match.
          return false;
        }
        if (this_val instanceof Location)
          retval =
              retval && checkContentEquality(other, (Location) this_val, (Location) other_val);
        if (this_val instanceof FunctionValue)
          retval = retval && ((FunctionValue) this_val).equals((FunctionValue) other_val);
        if (this_val instanceof ObjectValue) retval = retval && this_val.equals(other_val);

        if (!retval) return false;
      }
      return retval;
    }
  }
  return false;
}
@Ignore
@Test
public void testCompareSearchMethods()
    throws SolverException, InconsistentTheoryException, NoConflictException,
        OWLOntologyCreationException {
  logger.info("NormalSimulatedSession compared to ConflictTreeSimulatedSession");
  TreeSearch<FormulaSet<OWLLogicalAxiom>, OWLLogicalAxiom> search =
      testSetup("ontologies/ecai2010.owl");
  Set<? extends FormulaSet<OWLLogicalAxiom>> diagnoses = getDiagnoses(search);

  // setup second search
  TreeSearch<FormulaSet<OWLLogicalAxiom>, OWLLogicalAxiom> ctSearch =
      new HsTreeSearch<FormulaSet<OWLLogicalAxiom>, OWLLogicalAxiom>();
  ctSearch.setSearchStrategy(new UniformCostSearchStrategy<OWLLogicalAxiom>());
  ctSearch.setSearcher(new QuickXplain<OWLLogicalAxiom>());
  ctSearch.setSearchable(ctTheory);
  ctSearch.setCostsEstimator(new OWLAxiomKeywordCostsEstimator(ctTheory));

  Map<QSSType, DurationStat> nTimes = new HashMap<QSSType, DurationStat>();
  Map<QSSType, List<Double>> nQueries = new HashMap<QSSType, List<Double>>();
  Map<QSSType, DurationStat> ctTimes = new HashMap<QSSType, DurationStat>();
  Map<QSSType, List<Double>> ctQueries = new HashMap<QSSType, List<Double>>();

  for (QSSType type : QSSType.values()) { // run for each scoring function
    logger.info("QSSType: " + type);

    // run normal simulated session
    logger.info("NormalSimulatedSession");
    nTimes.put(type, new DurationStat());
    nQueries.put(type, new LinkedList<Double>());
    for (FormulaSet<OWLLogicalAxiom> targetDiagnosis : diagnoses) {
      // run for each possible target diagnosis
      long completeTime = System.currentTimeMillis();
      computeHS(search, ctTheory, targetDiagnosis, nQueries.get(type), type);
      ctTheory.getKnowledgeBase().removeFormulas(targetDiagnosis);
      completeTime = System.currentTimeMillis() - completeTime;
      nTimes.get(type).add(completeTime);
      foundDiagnoses.addAll(targetDiagnosis);
      ctTheory.getReasoner().addFormulasToCache(ctTheory.getKnowledgeBase().getFaultyFormulas());
      assertTrue(ctTheory.verifyConsistency());
      ctTheory.reset();
      resetTheoryTests(ctTheory);
      search.reset();
    }
    logger.info("found Diagnoses: " + foundDiagnoses.size());
    logger.info("found Diagnosis: " + CalculateDiagnoses.renderAxioms(foundDiagnoses));
    logger.info(
        "found all target diagnoses: "
            + (foundDiagnoses.size() > 0 && foundDiagnoses.containsAll(diagnoses)));
    // end (run normal simulated session)

    foundDiagnoses.clear();

    // run conflict tree simulated session
    logger.info("ConflictTreeSimulatedSession");
    ctTimes.put(type, new DurationStat());
    ctQueries.put(type, new LinkedList<Double>());
    for (FormulaSet<OWLLogicalAxiom> targetDiagnosis : diagnoses) {
      // run for each possible target diagnosis
      logger.info("targetD: " + CalculateDiagnoses.renderAxioms(targetDiagnosis));
      ConflictTreeSession conflictTreeSearch = new ConflictTreeSession(this, ctTheory, ctSearch);
      long completeTime = conflictTreeSearch.search(targetDiagnosis, ctQueries, type);
      ctTimes.get(type).add(completeTime);
      foundDiagnoses.addAll(conflictTreeSearch.getDiagnosis());
      ctTheory.getReasoner().addFormulasToCache(ctTheory.getKnowledgeBase().getFaultyFormulas());
      assertTrue(ctTheory.verifyConsistency());
      ctTheory.reset();
      resetTheoryTests(ctTheory);
      ctSearch.reset();
    }
    logger.info("found Diagnoses: " + foundDiagnoses.size());
    logger.info("found Diagnosis: " + CalculateDiagnoses.renderAxioms(foundDiagnoses));
    logger.info(
        "found all target diagnoses: "
            + (foundDiagnoses.size() > 0 && foundDiagnoses.containsAll(diagnoses)));
    // end (run conflict tree simulated session)

    // print time statistics
    logStatistics(nQueries, nTimes, type, "normal");
    logStatistics(ctQueries, ctTimes, type, "treeSearch");

    foundDiagnoses.clear();
  }
}
public void run() throws Exception {
  // Selection of files to be compiled
  File absJar = createJar(new File("abs.jar").getAbsoluteFile(), "j.A");
  File relJar = createJar(new File("rel.jar"), "j.R");
  File absDir = createDir(new File("abs.dir").getAbsoluteFile(), "d.A");
  File relDir = createDir(new File("rel.dir"), "d.R");
  File absTestFile =
      writeFile(new File("AbsTest.java").getAbsoluteFile(), "class AbsTest { class Inner { } }");
  File relTestFile = writeFile(new File("RelTest.java"), "class RelTest { class Inner { } }");
  File relTest2File =
      writeFile(new File("p/RelTest2.java"), "package p; class RelTest2 { class Inner { } }");
  // This next class references other classes that will be found on the source path
  // and which will therefore need to be compiled as well.
  File mainFile =
      writeFile(new File("Main.java"), "class Main { j.A ja; j.R jr; d.A da; d.R dr; }");

  String sourcePath = createPath(absJar, relJar, absDir, relDir);
  File outDir = new File("classes");
  outDir.mkdirs();

  String[] args = {
    "-sourcepath", sourcePath,
    "-d", outDir.getPath(),
    absTestFile.getPath(),
    relTestFile.getPath(),
    relTest2File.getPath(),
    mainFile.getPath(),
  };
  System.err.println("compile: " + Arrays.asList(args));

  StringWriter sw = new StringWriter();
  PrintWriter pw = new PrintWriter(sw);
  int rc = com.sun.tools.javac.Main.compile(args, pw);
  pw.close();
  if (rc != 0) {
    System.err.println(sw.toString());
    throw new Exception("unexpected exit from javac: " + rc);
  }

  Set<File> expect =
      getFiles(
          outDir,
          "d/A.class", "d/A$Inner.class",
          "d/R.class", "d/R$Inner.class",
          "j/A.class", "j/A$Inner.class",
          "j/R.class", "j/R$Inner.class",
          "AbsTest.class", "AbsTest$Inner.class",
          "RelTest.class", "RelTest$Inner.class",
          "p/RelTest2.class", "p/RelTest2$Inner.class",
          "Main.class");

  Set<File> found = findFiles(outDir);

  if (!found.equals(expect)) {
    if (found.containsAll(expect))
      throw new Exception("unexpected files found: " + diff(found, expect));
    else if (expect.containsAll(found))
      throw new Exception("expected files not found: " + diff(expect, found));
  }

  for (File f : found) verifySourceFileAttribute(f);

  if (errors > 0) throw new Exception(errors + " errors occurred");
}
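The diff(...) helper used above is not shown; a plausible sketch (an assumption, not the original test's implementation) computes the asymmetric difference with removeAll:

// Hypothetical reconstruction of the diff helper: elements of a not in b.
static java.util.Set<java.io.File> diff(java.util.Set<java.io.File> a, java.util.Set<java.io.File> b) {
  java.util.Set<java.io.File> d = new java.util.HashSet<java.io.File>(a);
  d.removeAll(b);
  return d;
}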