public void testOverrideMethods() throws Exception { HashSet<String> methodsThatShouldNotBeOverridden = new HashSet<String>(); methodsThatShouldNotBeOverridden.add("reopen"); methodsThatShouldNotBeOverridden.add("doOpenIfChanged"); methodsThatShouldNotBeOverridden.add("clone"); boolean fail = false; for (Method m : FilterIndexReader.class.getMethods()) { int mods = m.getModifiers(); if (Modifier.isStatic(mods) || Modifier.isFinal(mods) || m.isSynthetic()) { continue; } Class<?> declaringClass = m.getDeclaringClass(); String name = m.getName(); if (declaringClass != FilterIndexReader.class && declaringClass != Object.class && !methodsThatShouldNotBeOverridden.contains(name)) { System.err.println("method is not overridden by FilterIndexReader: " + name); fail = true; } else if (declaringClass == FilterIndexReader.class && methodsThatShouldNotBeOverridden.contains(name)) { System.err.println("method should not be overridden by FilterIndexReader: " + name); fail = true; } } assertFalse( "FilterIndexReader overrides (or not) some problematic methods; see log above", fail); // some more inner classes: checkOverrideMethods(FilterIndexReader.FilterTermEnum.class); checkOverrideMethods(FilterIndexReader.FilterTermDocs.class); // TODO: FilterTermPositions should extend correctly, this is borken, // but for backwards compatibility we let it be: // checkOverrideMethods(FilterIndexReader.FilterTermPositions.class); }
// One-time capability probe, executed at class-load time. Sets the
// JAIAvailable / TiffAvailable flags and the TIFF reader/writer SPIs.
static { // check if we have JAI and or ImageIO
  // if these classes are here, then the runtine environment has
  // access to JAI and the JAI ImageI/O toolbox.
  boolean available = true;
  try {
    // Probe for the core JAI class; mere presence on the classpath is enough.
    Class.forName("javax.media.jai.JAI");
  } catch (Throwable e) {
    // Throwable (not just ClassNotFoundException) so linkage errors are caught too.
    if (LOGGER.isLoggable(Level.FINE)) LOGGER.log(Level.FINE, e.getLocalizedMessage(), e);
    available = false;
  }
  JAIAvailable = available;
  available = true;
  try {
    // Probe for the ImageIO TIFF reader/writer SPIs and instantiate them reflectively.
    Class<?> clazz = Class.forName("com.sun.media.imageioimpl.plugins.tiff.TIFFImageReaderSpi");
    readerSpi = (ImageReaderSpi) clazz.newInstance();
    Class<?> clazz1 = Class.forName("com.sun.media.imageioimpl.plugins.tiff.TIFFImageWriterSpi");
    writerSpi = (ImageWriterSpi) clazz1.newInstance();
  } catch (Throwable e) {
    if (LOGGER.isLoggable(Level.FINE)) LOGGER.log(Level.FINE, e.getLocalizedMessage(), e);
    // Null out both SPIs so a half-initialized pair is never observed.
    readerSpi = null;
    writerSpi = null;
    available = false;
  }
  TiffAvailable = available;
  // Recognized world-file extensions accompanying TIFF coverages.
  final HashSet<String> tempSet = new HashSet<String>(2);
  tempSet.add(".tfw");
  tempSet.add(".tiffw");
  tempSet.add(".wld");
  TIFF_WORLD_FILE_EXT = Collections.unmodifiableSet(tempSet);
}
/**
 * Returns the dependencies declared in this module's descriptor, plus the
 * solutions exported by every devkit the descriptor uses (those are added as
 * non-reexported dependencies with default scope).
 *
 * @return the declared dependencies; an empty list when there is no descriptor
 * @throws IllegalStateException when the module has no repository attached
 */
@Override
public Iterable<SDependency> getDeclaredDependencies() {
  assertCanRead();
  ModuleDescriptor descriptor = getModuleDescriptor();
  if (descriptor == null) {
    return Collections.emptyList();
  }
  HashSet<SDependency> result = new HashSet<SDependency>();
  final SRepository repo = getRepository();
  if (repo == null) {
    throw new IllegalStateException(
        "It is not possible to resolve all declared dependencies with a null repository : module "
            + this);
  }
  // add declared dependencies
  for (Dependency d : descriptor.getDependencies()) {
    result.add(new SDependencyImpl(d.getModuleRef(), repo, d.getScope(), d.isReexport()));
  }
  // add dependencies provided by devkits as nonreexport dependencies
  for (SModuleReference usedDevkit : descriptor.getUsedDevkits()) {
    final SModule devkit = usedDevkit.resolve(repo);
    // isInstance also rejects the null returned when the devkit cannot be resolved.
    if (DevKit.class.isInstance(devkit)) {
      for (Solution solution : ((DevKit) devkit).getAllExportedSolutions()) {
        result.add(
            new SDependencyImpl(
                solution.getModuleReference(), repo, SDependencyScope.DEFAULT, false));
      }
    }
  }
  return result;
}
private static double jaccardSimilarity(String similar1, String similar2) { HashSet<String> h1 = new HashSet<String>(); HashSet<String> h2 = new HashSet<String>(); for (String s : similar1.split("\\s+")) { h1.add(s); } for (String s : similar2.split("\\s+")) { h2.add(s); } int sizeh1 = h1.size(); // Retains all elements in h3 that are contained in h2 ie intersection h1.retainAll(h2); // h1 now contains the intersection of h1 and h2 h2.removeAll(h1); // h2 now contains unique elements // Union int union = sizeh1 + h2.size(); int intersection = h1.size(); return (double) intersection / union; }
/** * Get all the acceptable values for a option for a board The outcome of this method can be used * to fill a combobox * * @param menu the name of a menu not the ide * @param boardName the name of a board not the ide * @return */ public String[] getMenuItemNames(String menuLabel, String boardName) { String menuID = null; String boardID = getBoardIDFromName(boardName); HashSet<String> ret = new HashSet<String>(); Map<String, String> menuInfo = mArduinoSupportedBoards.get("menu"); for (Entry<String, String> e2 : menuInfo.entrySet()) { if (e2.getValue().equals(menuLabel)) menuID = e2.getKey(); } String SearchKey = menuID + "." + boardID + "."; for (Entry<String, String> e2 : menuInfo.entrySet()) { int numsubkeys = e2.getKey().split("\\.").length; boolean startOk = e2.getKey().startsWith(SearchKey); if ((numsubkeys == 3) && (startOk)) ret.add(e2.getValue()); } // from Arduino IDE 1.5.4 menu is subset of the board. The previous code will not return a // result Map<String, String> boardInfo = mArduinoSupportedBoards.get(boardID); if (boardInfo != null) { SearchKey = "menu." + menuID + "."; for (Entry<String, String> e2 : boardInfo.entrySet()) { int numsubkeys = e2.getKey().split("\\.").length; boolean startOk = e2.getKey().startsWith(SearchKey); if ((numsubkeys == 3) && (startOk)) ret.add(e2.getValue()); } } return ret.toArray(new String[ret.size()]); }
/** * Creates attribute meta data that represents the attribute that will be generated for the * provided arguments. * * @return the {@link AttributeMetaData} for the provided arguments */ public static AttributeMetaData generateAttributeMetaData( ExampleSet exampleSet, String name, ExpressionType expressionType) { AttributeMetaData newAttribute = null; Attribute existingAtt = exampleSet.getAttributes().get(name); int ontology = expressionType.getAttributeType(); if (ontology == Ontology.BINOMINAL) { newAttribute = new AttributeMetaData(name, Ontology.BINOMINAL); HashSet<String> values = new HashSet<>(); values.add("false"); values.add("true"); newAttribute.setValueSet(values, SetRelation.EQUAL); } else { newAttribute = new AttributeMetaData(name, ontology); } // restore role if attribute existed already if (existingAtt != null) { newAttribute.setRole(exampleSet.getAttributes().getRole(existingAtt).getSpecialName()); } return newAttribute; }
/**
 * Trains a Hidden Markov Model of order {@code n} from word/tag pairs:
 * collects the vocabulary and tag set, counts tag n-grams (and (n-1)-grams),
 * and gathers word/tag emission counts.
 *
 * @param n the n-gram order used for the tag transition counts
 * @param pairs the training corpus as (word, tag) pairs
 */
public HMM(int n, List<Pair> pairs) {
  long time = System.currentTimeMillis(); // start of the elapsed-time measurement below
  this.n = n;
  this.pairs = pairs;
  System.out.println("Counting states");
  // Find States and Labels
  for (Pair pair : pairs) {
    words.add(pair.word);
    tags.add(pair.tag);
  }
  System.out.println("Finding transition probabilities");
  System.out.println(pairs.size());
  // Find tag transition probabilities
  System.out.println("Counting ngrams");
  countGrams(n, nGramCount, false);
  if (n > 1) {
    System.out.println("Counting n-1 grams");
    // (n-1)-gram counts — presumably used to normalize the n-gram transition
    // estimates; confirm against countGrams/the decoder.
    countGrams(n - 1, nMinusOneGramCount, false);
  }
  // Find emission probabilities
  System.out.println("Finding emission probabilities");
  for (Pair pair : pairs) {
    Util.tagWordCount(tagsMap, pair.word, pair.tag);
    Util.wordTagCount(wordsMap, pair.word, pair.tag);
    Util.incrementMap(wordCount, pair.word);
    Util.incrementMap(tagCount, pair.tag);
  }
  // Precompute most-common-tag lookups (typical fallback for unseen words).
  Util.mostCommonTagMap(wordsMap, mostCommonTagMap);
  mostCommonTag = Util.mostCommonTag(tagCount);
  System.out.println((System.currentTimeMillis() - time) / 1000.0); // seconds elapsed
}
public static Set<String> getAlignmentSet( String test_base, String test_foreign, EMTrainerIBM2Maps em, boolean order) { // generate alignments in test sets List<Instance> testData = PreProcessor.read(test_base, test_foreign); HashSet<String> result = new HashSet<String>(); int k = 1; for (Instance i : testData) { int findex = 1; for (String f : i.foreign) { int l = i.base.size(); int m = i.foreign.size(); double tmax = em.qt(0, findex, l, m, f, EMTrainerIBM1._NULL); int imax = 0; int index = 1; for (String e : i.base) { double temp = em.qt(index, findex, l, m, f, e); if (temp > tmax) { tmax = temp; imax = index; } index++; } if (order) { result.add(k + " " + imax + " " + findex); } else { result.add(k + " " + findex + " " + imax); } findex++; } k++; } return result; }
DefaultAsyncFile( final VertxInternal vertx, final String path, String perms, final boolean read, final boolean write, final boolean createNew, final boolean flush, final Context context) throws Exception { if (!read && !write) { throw new FileSystemException("Cannot open file for neither reading nor writing"); } this.vertx = vertx; Path file = Paths.get(path); HashSet<OpenOption> options = new HashSet<>(); if (read) options.add(StandardOpenOption.READ); if (write) options.add(StandardOpenOption.WRITE); if (createNew) options.add(StandardOpenOption.CREATE); if (flush) options.add(StandardOpenOption.DSYNC); if (perms != null) { FileAttribute<?> attrs = PosixFilePermissions.asFileAttribute(PosixFilePermissions.fromString(perms)); ch = AsynchronousFileChannel.open(file, options, vertx.getBackgroundPool(), attrs); } else { ch = AsynchronousFileChannel.open(file, options, vertx.getBackgroundPool()); } this.context = context; }
private static Set<Dependency> collectFromDependencyMonitor( ThreadMXBean bean, Serializable locality, Map<Long, ThreadInfo> threadInfos) { HashSet<Dependency> results = new HashSet<Dependency>(); // Convert the held resources into serializable dependencies Set<Dependency<Serializable, Thread>> heldResources = DependencyMonitorManager.getHeldResources(); for (Dependency<Serializable, Thread> dep : heldResources) { Thread thread = dep.getDependsOn(); Serializable resource = dep.getDepender(); ThreadInfo info = threadInfos.get(thread.getId()); if (info == null) { info = bean.getThreadInfo(thread.getId()); } if (info != null) { results.add(new Dependency(resource, new LocalThread(locality, info))); } } Set<Dependency<Thread, Serializable>> blockedThreads = DependencyMonitorManager.getBlockedThreads(); // Convert the blocked threads into serializable dependencies for (Dependency<Thread, Serializable> dep : blockedThreads) { Thread thread = dep.getDepender(); ThreadInfo info = threadInfos.get(thread.getId()); if (info == null) { info = bean.getThreadInfo(thread.getId()); } final Serializable resource = dep.getDependsOn(); results.add(new Dependency(new LocalThread(locality, info), resource)); } return results; }
/** Handler for the 'scoreboard teams join' command. */
protected void joinTeam(ICommandSender par1ICommandSender, String[] par2ArrayOfStr, int par3) {
  Scoreboard scoreboard = this.getScoreboardFromWorldServer();
  ScorePlayerTeam scoreplayerteam = scoreboard.func_96508_e(par2ArrayOfStr[par3++]);
  // FIX: parameterized (was a raw HashSet) — holds the names of all players
  // successfully added, for the success notification below.
  HashSet<String> hashset = new HashSet<String>();
  String s;

  if (par1ICommandSender instanceof EntityPlayer && par3 == par2ArrayOfStr.length) {
    // No explicit player arguments: the sending player joins the team themselves.
    s = getCommandSenderAsPlayer(par1ICommandSender).getEntityName();
    scoreboard.addPlayerToTeam(s, scoreplayerteam);
    hashset.add(s);
  } else {
    // Resolve and add every remaining argument as a player name.
    while (par3 < par2ArrayOfStr.length) {
      s = func_96332_d(par1ICommandSender, par2ArrayOfStr[par3++]);
      scoreboard.addPlayerToTeam(s, scoreplayerteam);
      hashset.add(s);
    }
  }

  if (!hashset.isEmpty()) {
    notifyAdmins(
        par1ICommandSender,
        "commands.scoreboard.teams.join.success",
        new Object[] {
          Integer.valueOf(hashset.size()),
          scoreplayerteam.func_96661_b(),
          joinNiceString(hashset.toArray(new String[0]))
        });
  }
}
@SuppressWarnings({"rawtypes"}) @Test public void testGetAttachemts() throws ParseException { SimpleDateFormat formatter = new SimpleDateFormat("MM/dd/yyyy"); // Date validAtDate = formatter.parse("8/26/2011"); Date validAtDate = new Date(); Date asOfDate = new Date(); System.out.println("TemporalReferenceGetAttachmentsTest.testGetAttachemts()"); System.out.println("--------------------------------------------------------"); portfolioBiz.setReference(true); AttachmentSet<String> as = new AttachmentSetFactory().createAttachmentSet(); HashSet<String> set = new HashSet<String>(); set.add("a"); set.add("ab"); as.setAttachments(set); long startTime = System.currentTimeMillis(); as.setGridPath("portfolio"); List<KeyMap> list = portfolioBiz.getAttachments(as, validAtDate.getTime()); long delta = System.currentTimeMillis() - startTime; for (KeyMap keyMap : list) { JsonLite jl = (JsonLite) keyMap; System.out.println(jl.toString(2, true, false)); } System.out.println(); System.out.println("Elapsed time (msec): " + delta); System.out.println(); }
/** * Add to the currentList Strings that are one character deletion away from the input string. * * @param word The misspelled word * @param numSuggestions is the maximum number of suggestions to return * @return the list of spelling suggestions */ @Override public List<String> suggestions(String word, int numSuggestions) { // initial variables List<String> queue = new LinkedList<String>(); // String to explore HashSet<String> visited = new HashSet< String>(); // to avoid exploring the same // string multiple times List<String> retList = new LinkedList<String>(); // words to return // insert first node queue.add(word); visited.add(word); // TODO: Implement the remainder of this method, see assignment for algorithm while ((!queue.isEmpty()) & (retList.size() != numSuggestions)) { String current = ((LinkedList<String>) queue).removeFirst(); List<String> muta = distanceOne(current, true); for (String m : muta) { if (!visited.contains(m)) { visited.add(m); queue.add(m); if (dict.isWord(m)) retList.add(m); } } } return retList; }
/** * Get all vertical and horizontal moves * * @param starting - starting coordinate * @return all valid vertical and horizontal moves */ public HashSet<Coordinate> VHMoves(Coordinate start) { HashSet<Coordinate> moves = new HashSet<Coordinate>(); int x = start.getXCoordinate(); int y = start.getYCoordinate(); // get north while (y >= 0) { moves.add(new Coordinate(start.getXCoordinate(), y)); y--; } x = start.getXCoordinate(); y = start.getYCoordinate(); // get south while (y < this.y) { moves.add(new Coordinate(start.getXCoordinate(), y)); y++; } x = start.getXCoordinate(); y = start.getYCoordinate(); // get east while (x < this.x) { moves.add(new Coordinate(x, start.getYCoordinate())); x++; } x = start.getXCoordinate(); y = start.getYCoordinate(); // get west while (x >= 0) { moves.add(new Coordinate(x, start.getYCoordinate())); x--; } moves.remove(start); // remove current location return moves; }
/**
 * Merges the children of a menu contribution into {@code menuModel} at the
 * position the contribution requests. Already-present menus/separators (by
 * element id) are skipped; everything added is recorded so it can later be
 * removed again.
 *
 * @param menuModel target menu receiving the copies
 * @param menuContributionsToRemove out-list of the copies actually inserted
 * @param menuContribution source of the elements to merge
 * @param existingMenuIds ids of menus already present (updated as copies are added)
 * @param existingSeparatorNames ids of separators already present (updated too)
 * @return false when the contribution's anchor position cannot be resolved
 */
public static boolean processAddition(
    final MMenu menuModel,
    final ArrayList<MMenuElement> menuContributionsToRemove,
    MMenuContribution menuContribution,
    final HashSet<String> existingMenuIds,
    HashSet<String> existingSeparatorNames) {
  int idx = getIndex(menuModel, menuContribution.getPositionInParent());
  if (idx == -1) {
    return false;
  }
  for (MMenuElement item : menuContribution.getChildren()) {
    if (item instanceof MMenu && existingMenuIds.contains(item.getElementId())) {
      // skip this, it's already there
      continue;
    } else if (item instanceof MMenuSeparator
        && existingSeparatorNames.contains(item.getElementId())) {
      // skip this, it's already there
      continue;
    }
    // Deep-copy so the contribution template itself is never reparented.
    MMenuElement copy = (MMenuElement) EcoreUtil.copy((EObject) item);
    if (DEBUG) {
      trace("addMenuContribution " + copy, menuModel.getWidget(), menuModel); // $NON-NLS-1$
    }
    menuContributionsToRemove.add(copy);
    menuModel.getChildren().add(idx++, copy); // idx advances so order is preserved
    // Track the ids so later contributions do not insert duplicates.
    if (copy instanceof MMenu && copy.getElementId() != null) {
      existingMenuIds.add(copy.getElementId());
    } else if (copy instanceof MMenuSeparator && copy.getElementId() != null) {
      existingSeparatorNames.add(copy.getElementId());
    }
  }
  return true;
}
/**
 * Flags peptide hits as indeterminate: a hit is indeterminate when its scan
 * produced hits with more than one distinct peptide sequence.
 */
public void computeIndeterminates() {
  // Group all hits by their scan tuple.
  HashMap<String, HashSet<PeptideHit>> scan2pep = new HashMap<String, HashSet<PeptideHit>>();
  for (PeptideHit hit : peptideHits) {
    String scan = hit.getScanTuple();
    HashSet<PeptideHit> group = scan2pep.get(scan);
    if (group == null) {
      group = new HashSet<PeptideHit>();
      scan2pep.put(scan, group);
    }
    group.add(hit);
  }

  // A scan is indeterminate when its hits span more than one distinct sequence.
  for (HashSet<PeptideHit> group : scan2pep.values()) {
    boolean indeterminate = false;
    if (group.size() > 1) {
      HashSet<String> sequences = new HashSet<String>();
      for (PeptideHit hit : group) {
        sequences.add(hit.getSequence());
      }
      indeterminate = sequences.size() > 1;
    }
    for (PeptideHit hit : group) {
      hit.setIndeterminate(indeterminate);
    }
  }
}
// Deserializes a ks.c from a SafeParcel stream (decompiled Play Services code).
// The hashset records which field numbers were actually present in the parcel.
public ks.c bL(Parcel parcel) {
  // B() reads the object header; j is the absolute end position of this object
  // (compared against dataPosition() below and in the "Overread" error).
  int j = com.google.android.gms.common.internal.safeparcel.a.B(parcel);
  HashSet hashset = new HashSet();
  int i = 0;
  String s = null;
  do {
    if (parcel.dataPosition() < j) {
      // A() reads the next field header; ar() extracts the field number from it.
      int k = com.google.android.gms.common.internal.safeparcel.a.A(parcel);
      switch (com.google.android.gms.common.internal.safeparcel.a.ar(k)) {
        default:
          // Unknown field: b() skips over its payload.
          com.google.android.gms.common.internal.safeparcel.a.b(parcel, k);
          break;
        case 1: // '\001'
          // Field 1: int value.
          i = com.google.android.gms.common.internal.safeparcel.a.g(parcel, k);
          hashset.add(Integer.valueOf(1));
          break;
        case 2: // '\002'
          // Field 2: String value.
          s = com.google.android.gms.common.internal.safeparcel.a.o(parcel, k);
          hashset.add(Integer.valueOf(2));
          break;
      }
    } else {
      if (parcel.dataPosition() != j) {
        // Cursor ran past the declared object size: the parcel is corrupt.
        throw new com.google.android.gms.common.internal.safeparcel.a.a(
            (new StringBuilder()).append("Overread allowed size end=").append(j).toString(),
            parcel);
      }
      return new ks.c(hashset, i, s);
    }
  } while (true);
}
/**
 * Builds a tree over the given proteins in which a protein that has
 * equivalents becomes a "Group" node containing itself and all equivalents;
 * proteins without equivalents become direct leaves. The root label carries
 * the number of direct children.
 *
 * @param proList proteins to arrange
 * @param name label prefix for the root node
 * @return root node wrapping a PeptideProteinNameSet for the whole list
 */
private DefaultMutableTreeNode ProListToEquivTree(ArrayList<Protein> proList, String name) {
  DefaultMutableTreeNode root, child;
  int size = 0; // number of direct children added under the root
  PeptideProteinNameSet pps = ProteinListToPPNSet(proList);
  root = new DefaultMutableTreeNode(pps);
  // Proteins already placed somewhere in the tree (avoids duplicates when a
  // protein appears both on its own and as another protein's equivalent).
  HashSet<String> usedProteins = new HashSet<String>();
  for (Protein p : proList) {
    if (!usedProteins.contains(p.getName())) {
      if (p.getEquivalent().size() > 0) {
        // Protein with equivalents: group node holding it plus all equivalents.
        ArrayList<Protein> equivList = new ArrayList<Protein>();
        equivList.add(p);
        equivList.addAll(p.getEquivalent());
        PeptideProteinNameSet ppsEquiv = ProteinListToPPNSet(equivList);
        ppsEquiv.setName(p.getName() + " Group (" + equivList.size() + ")");
        child = new DefaultMutableTreeNode(ppsEquiv);
        size++;
        for (Protein ps : equivList) {
          child.add(new DefaultMutableTreeNode(ps));
          usedProteins.add(ps.getName());
        }
        root.add(child);
      } else {
        // Stand-alone protein: add it directly under the root.
        root.add(new DefaultMutableTreeNode(p));
        usedProteins.add(p.getName());
        size++;
      }
    }
  }
  pps.setName(name + " (" + size + ")");
  return root;
}
/**
 * Returns the set of texture-atlas image paths.
 *
 * <p>FIX: uses a parameterized {@code HashSet<String>} instead of the original
 * raw {@code HashSet} (the raw return type {@code Set} is kept for callers).
 */
private static Set makeAtlasNames() {
  HashSet<String> names = new HashSet<String>();
  names.add("/terrain.png");
  names.add("/gui/items.png");
  names.add("/ctm.png");
  return names;
}
/** Builds the set of recognized app signature hashes (release, instant, lite). */
private static HashSet<String> buildAppSignatureHashes() {
  HashSet<String> hashes = new HashSet<String>();
  hashes.add(FBR_HASH);
  hashes.add(FBI_HASH);
  hashes.add(FBL_HASH);
  return hashes;
}
// @Override public List<String> suggestions(String word, int numSuggestions) { // initial variables List<String> queue = new LinkedList<String>(); // String to explore HashSet<String> visited = new HashSet<String>(); // to avoid exploring the same // string multiple times List<String> retList = new LinkedList<String>(); // words to return // insert first node queue.add(word); visited.add(word); while (queue != null && numSuggestions > 0) { String curr = queue.remove(0); for (String text : distanceOne(curr, true)) { if (!visited.contains(text)) { visited.add(text); queue.add(text); // build the suggested list retList.add(text); numSuggestions--; if (numSuggestions == 0) return retList; } } } return retList; }
public static void validateItemLevelAquired(Item item) { // This method should be called: // 1) When an item gets obtained (Item.collect) // 2) When an item gets upgraded (ScrollOfUpgrade, ScrollOfWeaponUpgrade, ShortSword, // WandOfMagicMissile) // 3) When an item gets identified if (!item.levelKnown) { return; } Badge badge = null; if (!local.contains(Badge.ITEM_LEVEL_1) && item.level() >= 3) { badge = Badge.ITEM_LEVEL_1; local.add(badge); } if (!local.contains(Badge.ITEM_LEVEL_2) && item.level() >= 6) { badge = Badge.ITEM_LEVEL_2; local.add(badge); } if (!local.contains(Badge.ITEM_LEVEL_3) && item.level() >= 9) { badge = Badge.ITEM_LEVEL_3; local.add(badge); } if (!local.contains(Badge.ITEM_LEVEL_4) && item.level() >= 12) { badge = Badge.ITEM_LEVEL_4; local.add(badge); } displayBadge(badge); }
/**
 * The safest bike lane should have a safety weight no lower than the time weight of a flat
 * street. This method divides the safety lengths by the length ratio of the safest street,
 * ensuring this property.
 *
 * @param graph the graph whose street edges are rescaled
 */
private void applyBikeSafetyFactor(Graph graph) {
  _log.info(
      GraphBuilderAnnotation.register(
          graph,
          Variety.GRAPHWIDE,
          "Multiplying all bike safety values by " + (1 / bestBikeSafety)));
  // An edge is reachable both as outgoing of one vertex and incoming of
  // another; track rescaled edges so none is divided twice.
  HashSet<Edge> seenEdges = new HashSet<Edge>();
  for (Vertex vertex : graph.getVertices()) {
    // FIX: the outgoing and incoming loops were verbatim duplicates; both now
    // share one helper.
    rescaleStreetEdges(vertex.getOutgoing(), seenEdges);
    rescaleStreetEdges(vertex.getIncoming(), seenEdges);
  }
}

/** Divides the bicycle safety length of each unseen PlainStreetEdge by bestBikeSafety. */
private void rescaleStreetEdges(Iterable<Edge> edges, HashSet<Edge> seenEdges) {
  for (Edge e : edges) {
    if (!(e instanceof PlainStreetEdge)) {
      continue;
    }
    if (seenEdges.add(e)) { // add() returns false when the edge was already rescaled
      PlainStreetEdge pse = (PlainStreetEdge) e;
      pse.setBicycleSafetyEffectiveLength(
          pse.getBicycleSafetyEffectiveLength() / bestBikeSafety);
    }
  }
}
/**
 * Awards the badge matching the purchased bag (unless that bag type is free in
 * this build) and, once all four bag badges are owned, the combined
 * ALL_BAGS_BOUGHT badge.
 */
public static void validateAllBagsBought(Item bag) {
  Badge badge = null;
  if (bag instanceof SeedPouch && !PixelDungeon.freeSeedPouch()) {
    badge = Badge.BAG_BOUGHT_SEED_POUCH;
  } else if (bag instanceof ScrollHolder && !PixelDungeon.freeScrollHolder()) {
    badge = Badge.BAG_BOUGHT_SCROLL_HOLDER;
  } else if (bag instanceof WandHolster && !PixelDungeon.freeWandHolster()) {
    badge = Badge.BAG_BOUGHT_WAND_HOLSTER;
  } else if (bag instanceof PotionBag && !PixelDungeon.freePotionBag()) {
    badge = Badge.BAG_BOUGHT_POTION_BAG;
  }
  if (badge == null) {
    return;
  }

  local.add(badge);

  // All four individual bag badges owned => award the combined badge once.
  boolean allBought =
      local.contains(Badge.BAG_BOUGHT_SCROLL_HOLDER)
          && local.contains(Badge.BAG_BOUGHT_SEED_POUCH)
          && local.contains(Badge.BAG_BOUGHT_WAND_HOLSTER)
          && local.contains(Badge.BAG_BOUGHT_POTION_BAG);
  if (allBought && !local.contains(Badge.ALL_BAGS_BOUGHT)) {
    badge = Badge.ALL_BAGS_BOUGHT;
    local.add(badge);
    displayBadge(badge);
  }
}
/**
 * Returns the distinct values common to both arrays.
 *
 * @param nums1 first array
 * @param nums2 second array
 * @return the intersection as an array of unique values; null when either input is null
 */
public int[] intersection(int[] nums1, int[] nums2) {
  if (nums1 == null || nums2 == null) {
    return null;
  }
  // Values present in nums1.
  HashSet<Integer> seen = new HashSet<>();
  for (int value : nums1) {
    seen.add(value);
  }
  // Values from nums2 that were also in nums1.
  HashSet<Integer> common = new HashSet<>();
  for (int value : nums2) {
    if (seen.contains(value)) {
      common.add(value);
    }
  }
  int[] result = new int[common.size()];
  int i = 0;
  for (int value : common) {
    result[i++] = value;
  }
  return result;
}
/**
 * Handles victory badges: shows the generic VICTORY badge, records the
 * per-class variant locally and globally, and shows VICTORY_ALL_CLASSES once
 * every class has a recorded win.
 */
public static void validateVictory() {
  displayBadge(Badge.VICTORY);

  // Per-class victory badge (falls back to the generic one if no case matches).
  Badge badge = Badge.VICTORY;
  switch (Dungeon.hero.heroClass) {
    case WARRIOR:
      badge = Badge.VICTORY_WARRIOR;
      break;
    case MAGE:
      badge = Badge.VICTORY_MAGE;
      break;
    case ROGUE:
      badge = Badge.VICTORY_ROGUE;
      break;
    case HUNTRESS:
      badge = Badge.VICTORY_HUNTRESS;
      break;
  }
  local.add(badge);
  if (!global.contains(badge)) {
    global.add(badge);
    saveNeeded = true;
  }

  // Combined badge once every class has won at least once.
  if (global.contains(Badge.VICTORY_WARRIOR)
      && global.contains(Badge.VICTORY_MAGE)
      && global.contains(Badge.VICTORY_ROGUE)
      && global.contains(Badge.VICTORY_HUNTRESS)) {
    badge = Badge.VICTORY_ALL_CLASSES;
    displayBadge(badge);
  }
}
/**
 * Lazily rebuilds and returns the schedule pieces. A "piece" is the set of sc
 * indices active during one time span, produced by a sweep over the
 * time-sorted schedule records; rebuilding happens only while the
 * isScheduleUpdated dirty flag is set.
 */
public ArrayList<SchedulePiece> toSchedulePieces() {
  if (isScheduleUpdated) {
    // schedulePieces = null;
    schedulePieces = new ArrayList<SchedulePiece>();
    isScheduleUpdated = false; // consume the dirty flag
    // generate initial or additional schedule pieces
    Collections.sort(scheduleRecords);
    // NOTE(review): assumes scheduleRecords is non-empty — get(0) throws otherwise.
    int previousTime = scheduleRecords.get(0).time;
    // Active sc indices for the piece currently being built.
    HashSet<Integer> hset = new HashSet<Integer>();
    hset.add(scheduleRecords.get(0).scIndex);
    for (int index = 1; index < scheduleRecords.size(); index++) {
      ScheduleRecord r = scheduleRecords.get(index);
      // found the new piece?
      if (r.time != previousTime) {
        // finish and save the old piece
        // schedulePieces.add(new SchedulePiece(r.time, (Integer[]) hset.toArray()));
        schedulePieces.add(new SchedulePiece(previousTime, hset.toArray()));
        previousTime = r.time;
      }
      // Start records activate an index; end records retire it.
      if (r.isStart) hset.add(r.scIndex);
      else hset.remove(r.scIndex);
    }
    // last piece
    schedulePieces.add(new SchedulePiece(previousTime, hset.toArray()));
  }
  return schedulePieces;
}
// Populates the 'profs' set (declared elsewhere in this class) with French
// nouns referring to teachers.
static {
  profs = new HashSet<String>();
  profs.add("profs");
  profs.add("instituteurs");
  profs.add("professeurs");
  profs.add("enseignants");
}
/** * Sample and calculate the probability of hitting each type of marker (marker.class). Creates * 'numReads' reads of size 'readLen' and count how many of them hit each marker type. */ CountByType randomSampling(int readLen, int numReads) { CountByType countReads = new CountByType(); RandMarker randMarker = new RandMarker(snpEffectPredictor.getGenome()); for (int i = 0; i < numReads; i++) { // Random read Marker read = randMarker.rand(readLen); // Where does it hit? Markers regions = snpEffectPredictor.queryDeep(read); HashSet<String> doneRegion = new HashSet<String>(); for (Marker m : regions) { String mtype = markerTypes.getType(m); String msubtype = markerTypes.getSubType(m); if (!doneRegion.contains(mtype)) { countReads.inc(mtype); // Count reads doneRegion.add(mtype); // Do not count twice } if ((msubtype != null) && !doneRegion.contains(msubtype)) { countReads.inc(msubtype); // Count reads doneRegion.add(msubtype); // Do not count twice } } } return countReads; }
/** Tests all expected tables were created. */ @Test public void expectedTablesShouldBeCreated_ReturnsTrue() { final HashSet<String> tableNameHashSet = new HashSet<>(); tableNameHashSet.add(ArticleDatabase.Tables.ARTICLES); tableNameHashSet.add(ArticleDatabase.Tables.LINKED_ARTICLES); tableNameHashSet.add(ArticleDatabase.Tables.REFERENCE_ARTICLES); tableNameHashSet.add(ArticleDatabase.Tables.CATEGORIES); tableNameHashSet.add(ArticleDatabase.Tables.PHOTOS); tableNameHashSet.add(ArticleDatabase.Tables.VIDEOS); cursor = db.rawQuery("SELECT name FROM sqlite_master WHERE type='table'", null); assertTrue("Error: The database has not been created correctly", cursor.moveToFirst()); // Verify that the tables have been created do { tableNameHashSet.remove(cursor.getString(0)); } while (cursor.moveToNext()); assertTrue( "Error: The database was created but it did not create all expected tables", tableNameHashSet.isEmpty()); cursor.close(); }