/**
 * Test coding scheme restriction search.
 *
 * <p>Verifies that a metadata value search ("English" via Lucene query) finds the thesaurus
 * scheme, and that the same search still finds it after restricting to that coding scheme.
 *
 * @throws Exception the exception
 */
public void testCodingSchemeRestrictionSearch() throws Exception {
    LexBIGServiceMetadata md = ServiceHolder.instance().getLexBIGService().getServiceMetadata();
    md = md.restrictToValue("English", "LuceneQuery");
    MetadataProperty[] result = md.resolve().getMetadataProperty();
    // Collect the unique URI:version pairs of every scheme the search matched.
    HashSet<String> temp = new HashSet<String>();
    for (int i = 0; i < result.length; i++) {
        temp.add(result[i].getCodingSchemeURI() + ":" + result[i].getCodingSchemeVersion());
    }
    // should contain this
    assertTrue(temp.contains(THES_URN + ":" + THES_METADATA_VERSION));
    // now do the restriction, and retest.
    md = ServiceHolder.instance().getLexBIGService().getServiceMetadata();
    md = md.restrictToValue("English", "LuceneQuery");
    md = md.restrictToCodingScheme(
        Constructors.createAbsoluteCodingSchemeVersionReference(
            THES_URN, THES_METADATA_VERSION));
    result = md.resolve().getMetadataProperty();
    temp = new HashSet<String>();
    for (int i = 0; i < result.length; i++) {
        temp.add(result[i].getCodingSchemeURI() + ":" + result[i].getCodingSchemeVersion());
    }
    // should be at least 1 unique code system (note: the assertion checks >= 1, not > 1)
    assertTrue(temp.size() >= 1);
    // should contain this
    assertTrue(temp.contains(THES_URN + ":" + THES_METADATA_VERSION));
}
public int longestConsecutive(int[] num) { // Start typing your Java solution below // DO NOT write main() function HashSet<Integer> set = new HashSet<Integer>(); int max = 0; for (int i = 0; i < num.length; i++) { set.add(num[i]); } for (int i = 0; i < num.length; i++) { if (set.contains(num[i])) { int count1 = 0, count2 = 0; while (set.contains(num[i] + 1 + count1)) { set.remove(num[i] + 1 + count1); count1++; } while (set.contains(num[i] - 1 - count2)) { set.remove(num[i] - 1 - count2); count2++; } max = Math.max(max, count1 + count2 + 1); set.remove(num[i]); } } return max; }
/**
 * Returns a refreshed {@code Authentication} for the current user when that user has been
 * flagged for a granted-authorities refresh; otherwise returns the given authentication
 * unchanged. Also prunes refresh flags for users who have already logged off.
 */
private Authentication fresh(Authentication authentication, ServletRequest req) {
    HttpServletRequest request = (HttpServletRequest) req;
    HttpSession session = request.getSession(false);
    if (session != null) {
        SessionRegistry sessionRegistry =
            (SessionRegistry) SpringBeanUtil.getBeanByName("sessionRegistry");
        SessionInformation info = sessionRegistry.getSessionInformation(session.getId());
        if (info != null) {
            // Non-expired - update last request date/time
            Object principal = info.getPrincipal();
            if (principal instanceof org.springframework.security.core.userdetails.User) {
                org.springframework.security.core.userdetails.User userRefresh =
                    (org.springframework.security.core.userdetails.User) principal;
                // NOTE(review): sc appears unused in this method.
                ServletContext sc = session.getServletContext();
                HashSet<String> unrgas =
                    springSecurityService.getUsersNeedRefreshGrantedAuthorities();
                if (unrgas.size() > 0) {
                    // Collect the usernames of everyone currently logged in.
                    HashSet<String> loginedUsernames = new HashSet<String>();
                    List<Object> loggedUsers = sessionRegistry.getAllPrincipals();
                    for (Object lUser : loggedUsers) {
                        if (lUser instanceof org.springframework.security.core.userdetails.User) {
                            org.springframework.security.core.userdetails.User u =
                                (org.springframework.security.core.userdetails.User) lUser;
                            loginedUsernames.add(u.getUsername());
                        }
                    }
                    // Drop refresh-flagged usernames that are no longer logged in.
                    for (Iterator iterator = unrgas.iterator(); iterator.hasNext(); ) {
                        String unrgs = (String) iterator.next();
                        if (!loginedUsernames.contains(unrgs)) {
                            iterator.remove();
                        }
                    }
                    if (unrgas.contains(userRefresh.getUsername())) {
                        // Current user is flagged: rebuild the authentication with fresh
                        // authorities loaded from the user-details manager.
                        // FIXME: duplicates logic in springSecurityServiceImpl; refactor this
                        // method together with springSecurityServiceImpl.
                        MyJdbcUserDetailsManager mdudm =
                            (MyJdbcUserDetailsManager)
                                SpringBeanUtil.getBeanByType(MyJdbcUserDetailsManager.class);
                        SecurityContextHolder.getContext()
                            .setAuthentication(
                                new UsernamePasswordAuthenticationToken(
                                    userRefresh,
                                    userRefresh.getPassword(),
                                    mdudm.getUserAuthorities(userRefresh.getUsername())));
                        // Persist the refreshed context in the session so subsequent requests
                        // pick it up.
                        session.setAttribute(
                            HttpSessionSecurityContextRepository.SPRING_SECURITY_CONTEXT_KEY,
                            SecurityContextHolder.getContext());
                        unrgas.remove(userRefresh.getUsername());
                        return SecurityContextHolder.getContext().getAuthentication();
                    }
                }
            }
        }
    }
    return authentication;
}
/**
 * Records the purchase badge for the given bag (unless that bag type is free) and, once all
 * four bag badges are held, awards and displays the ALL_BAGS_BOUGHT meta badge.
 */
public static void validateAllBagsBought(Item bag) {
    // Map each purchasable bag type to its badge, skipping bags handed out for free.
    Badge earned = null;
    if (bag instanceof SeedPouch && !PixelDungeon.freeSeedPouch()) {
        earned = Badge.BAG_BOUGHT_SEED_POUCH;
    } else if (bag instanceof ScrollHolder && !PixelDungeon.freeScrollHolder()) {
        earned = Badge.BAG_BOUGHT_SCROLL_HOLDER;
    } else if (bag instanceof WandHolster && !PixelDungeon.freeWandHolster()) {
        earned = Badge.BAG_BOUGHT_WAND_HOLSTER;
    } else if (bag instanceof PotionBag && !PixelDungeon.freePotionBag()) {
        earned = Badge.BAG_BOUGHT_POTION_BAG;
    }
    if (earned == null) {
        return;
    }
    local.add(earned);
    // Award the meta badge exactly once, when all four individual badges are collected.
    boolean allFourHeld = local.contains(Badge.BAG_BOUGHT_SCROLL_HOLDER)
            && local.contains(Badge.BAG_BOUGHT_SEED_POUCH)
            && local.contains(Badge.BAG_BOUGHT_WAND_HOLSTER)
            && local.contains(Badge.BAG_BOUGHT_POTION_BAG);
    if (allFourHeld && !local.contains(Badge.ALL_BAGS_BOUGHT)) {
        Badge meta = Badge.ALL_BAGS_BOUGHT;
        local.add(meta);
        displayBadge(meta);
    }
}
/**
 * Displays the generic victory badge, records the class-specific victory badge (locally and
 * globally), and displays VICTORY_ALL_CLASSES once every class has won.
 */
public static void validateVictory() {
    // The plain victory badge is always shown.
    displayBadge(Badge.VICTORY);
    // Resolve the class-specific victory badge for the current hero.
    Badge badge = Badge.VICTORY;
    switch (Dungeon.hero.heroClass) {
        case WARRIOR:
            badge = Badge.VICTORY_WARRIOR;
            break;
        case MAGE:
            badge = Badge.VICTORY_MAGE;
            break;
        case ROGUE:
            badge = Badge.VICTORY_ROGUE;
            break;
        case HUNTRESS:
            badge = Badge.VICTORY_HUNTRESS;
            break;
    }
    local.add(badge);
    if (!global.contains(badge)) {
        global.add(badge);
        saveNeeded = true;
    }
    // Winning with every class unlocks the combined badge.
    boolean wonWithAllClasses = global.contains(Badge.VICTORY_WARRIOR)
            && global.contains(Badge.VICTORY_MAGE)
            && global.contains(Badge.VICTORY_ROGUE)
            && global.contains(Badge.VICTORY_HUNTRESS);
    if (wonWithAllClasses) {
        displayBadge(Badge.VICTORY_ALL_CLASSES);
    }
}
public void testOverrideMethods() throws Exception { HashSet<String> methodsThatShouldNotBeOverridden = new HashSet<String>(); methodsThatShouldNotBeOverridden.add("reopen"); methodsThatShouldNotBeOverridden.add("doOpenIfChanged"); methodsThatShouldNotBeOverridden.add("clone"); boolean fail = false; for (Method m : FilterIndexReader.class.getMethods()) { int mods = m.getModifiers(); if (Modifier.isStatic(mods) || Modifier.isFinal(mods) || m.isSynthetic()) { continue; } Class<?> declaringClass = m.getDeclaringClass(); String name = m.getName(); if (declaringClass != FilterIndexReader.class && declaringClass != Object.class && !methodsThatShouldNotBeOverridden.contains(name)) { System.err.println("method is not overridden by FilterIndexReader: " + name); fail = true; } else if (declaringClass == FilterIndexReader.class && methodsThatShouldNotBeOverridden.contains(name)) { System.err.println("method should not be overridden by FilterIndexReader: " + name); fail = true; } } assertFalse( "FilterIndexReader overrides (or not) some problematic methods; see log above", fail); // some more inner classes: checkOverrideMethods(FilterIndexReader.FilterTermEnum.class); checkOverrideMethods(FilterIndexReader.FilterTermDocs.class); // TODO: FilterTermPositions should extend correctly, this is borken, // but for backwards compatibility we let it be: // checkOverrideMethods(FilterIndexReader.FilterTermPositions.class); }
/** {@inheritDoc} */
public void rewriteAST(CompilationUnitRewrite cuRewrite, List textEditGroups)
        throws CoreException {
    TextEditGroup group =
        createTextEditGroup(FixMessages.ExpressionsFix_removeUnnecessaryParenthesis_description);
    textEditGroups.add(group);
    ASTRewrite rewrite = cuRewrite.getASTRewrite();
    // Drain the worklist of parenthesized expressions. Runs of nested parentheses
    // are collapsed into a single replacement so every queued node is consumed once.
    while (fExpressions.size() > 0) {
        ParenthesizedExpression parenthesizedExpression =
            (ParenthesizedExpression) fExpressions.iterator().next();
        fExpressions.remove(parenthesizedExpression);
        // Walk inward while the directly wrapped expression is also queued,
        // consuming each inner wrapper from the worklist.
        ParenthesizedExpression down = parenthesizedExpression;
        while (fExpressions.contains(down.getExpression())) {
            down = (ParenthesizedExpression) down.getExpression();
            fExpressions.remove(down);
        }
        // The innermost wrapped expression survives; mark it to be moved.
        ASTNode move = rewrite.createMoveTarget(down.getExpression());
        // Walk outward while the parent is also a queued wrapper, consuming those too.
        ParenthesizedExpression top = parenthesizedExpression;
        while (fExpressions.contains(top.getParent())) {
            top = (ParenthesizedExpression) top.getParent();
            fExpressions.remove(top);
        }
        // Replace the outermost queued wrapper with the innermost expression.
        rewrite.replace(top, move, group);
    }
}
@Override @Nullable public LeakReport handleRequest(String request) { ArrayList<LeakInstance> leaks = new ArrayList<>(); // TODO: import contact Matcher phone = phone_pattern.matcher(request); while (phone.find()) { String phoneNumber = phone.group(0).replaceAll("\\D+", ""); if (phoneList.contains(phoneNumber)) { leaks.add(new LeakInstance("Phone Number", phoneNumber)); } } Matcher email = email_pattern.matcher(request); while (email.find()) { String emailAddress = email.group(0); if (emailList.contains(emailAddress)) { leaks.add(new LeakInstance("Email", emailAddress)); } } if (leaks.isEmpty()) { return null; } LeakReport rpt = new LeakReport(LeakReport.LeakCategory.CONTACT); rpt.addLeaks(leaks); return rpt; }
/**
 * Returns the symmetric difference of two int arrays: first every element of {@code A} absent
 * from {@code B} (in A's order, duplicates kept), then every element of {@code B} absent from
 * {@code A}. Returns an empty list when either array is {@code null}.
 */
public static ArrayList<Integer> symmetricDiff(int[] A, int[] B) {
    ArrayList<Integer> diff = new ArrayList<Integer>();
    if (A == null || B == null) {
        return diff;
    }
    appendMissing(A, B, diff);
    appendMissing(B, A, diff);
    return diff;
}

/** Appends to {@code out} every value of {@code src} that does not occur in {@code other}. */
private static void appendMissing(int[] src, int[] other, ArrayList<Integer> out) {
    HashSet<Integer> lookup = new HashSet<Integer>();
    for (int v : other) {
        lookup.add(v);
    }
    for (int v : src) {
        if (!lookup.contains(v)) {
            out.add(v);
        }
    }
}
/* * Strictly for analysis, look for "disagreements." The top guess from * each source is examined; if these meet the threshold and disagree, then * we log the information -- useful for testing or generating training data * for a better heuristic. */ private void findDisagreements(String url, List<EncodingClue> newClues) { HashSet<String> valsSeen = new HashSet<String>(); HashSet<String> sourcesSeen = new HashSet<String>(); boolean disagreement = false; for (int i = 0; i < newClues.size(); i++) { EncodingClue clue = newClues.get(i); if (!clue.isEmpty() && !sourcesSeen.contains(clue.source)) { if (valsSeen.size() > 0 && !valsSeen.contains(clue.value) && clue.meetsThreshold()) { disagreement = true; } if (clue.meetsThreshold()) { valsSeen.add(clue.value); } sourcesSeen.add(clue.source); } } if (disagreement) { // dump all values in case of disagreement StringBuffer sb = new StringBuffer(); sb.append("Disagreement: " + url + "; "); for (int i = 0; i < newClues.size(); i++) { if (i > 0) { sb.append(", "); } sb.append(newClues.get(i)); } LOG.trace(sb.toString()); } }
// NOTE(review): this is decompiled bytecode -- the control flow below is the
// decompiler's rendering (empty `if` statement, `return` inside a for-header
// loop) and must not be "cleaned up" without the original source. Apparent
// intent: return paramList minus any messages already present (and flagged by
// u()) in paramMessagesCollection, matching on Message.t() -- presumably an
// id; confirm against the original source.
public ImmutableList<Message> a(
    MessagesCollection paramMessagesCollection, List<Message> paramList) {
  // Decompiler artifact: the empty statement stands in for a conditional jump.
  if (paramList.isEmpty())
    ;
  ImmutableList.Builder localBuilder;
  // Decompiler artifact: the for-header doubles as the method's return path --
  // the empty list on the short-circuit branch, localBuilder.b() otherwise.
  for (ImmutableList localImmutableList = ImmutableList.d(); ; localImmutableList = localBuilder.b()) {
    return localImmutableList;
    // Collect the t() keys of all candidate messages.
    HashSet localHashSet = Sets.a();
    Iterator localIterator1 = paramList.iterator();
    while (localIterator1.hasNext())
      localHashSet.add(((Message) localIterator1.next()).t());
    // Remove keys already present in the collection (dedupe), logging each hit.
    Iterator localIterator2 = paramMessagesCollection.b().iterator();
    while (localIterator2.hasNext()) {
      Message localMessage2 = (Message) localIterator2.next();
      if ((!localMessage2.u()) || (!localHashSet.contains(localMessage2.t())))
        continue;
      BLog.a(a, "Deduped message %s", new Object[] {localMessage2});
      localHashSet.remove(localMessage2.t());
    }
    // Keep only the messages whose keys survived deduplication, in order.
    localBuilder = ImmutableList.e();
    Iterator localIterator3 = paramList.iterator();
    while (localIterator3.hasNext()) {
      Message localMessage1 = (Message) localIterator3.next();
      if (!localHashSet.contains(localMessage1.t()))
        continue;
      localBuilder.b(localMessage1);
    }
  }
}
// generate random numbers and insert them into docIdSets (another more efficient version is // generateRandomDataNew) private void getRandomDataSetsBatch( ArrayList<OpenBitSet> obs, ArrayList<DocIdSet> docs, int maxDoc, int listNum) throws Exception { Random rand = new Random(System.currentTimeMillis()); int numdocs; for (int i = 0; i < listNum; ++i) { numdocs = maxDoc; ArrayList<Integer> nums = new ArrayList<Integer>(); HashSet<Integer> seen = new HashSet<Integer>(); for (int j = 0; j < numdocs; j++) { int nextDoc = rand.nextInt(maxDoc); if (seen.contains(nextDoc)) { while (seen.contains(nextDoc)) { nextDoc = rand.nextInt(maxDoc); } } nums.add(nextDoc); seen.add(nextDoc); } Collections.sort(nums); // printList(nums, 0, nums.size()-1); obs.add(createObs(nums, maxDoc)); docs.add(createDocSetBatch(nums)); } }
public boolean isValidSudoku(char[][] board) { HashSet<Character> set = new HashSet<Character>(); // Check for each row for (int i = 0; i < 9; i++) { for (int j = 0; j < 9; j++) { if (board[i][j] == '.') continue; if (!set.contains(board[i][j])) set.add(board[i][j]); else return false; } set.clear(); // ÿѭ����һ����Ҫ��set�����,��Ȼ�� } // Check for each column for (int j = 0; j < 9; j++) { for (int i = 0; i < 9; i++) { if (board[i][j] == '.') continue; if (!set.contains(board[i][j])) set.add(board[i][j]); else return false; } set.clear(); } // Check for each sub-grid for (int k = 0; k < 9; k++) { // ���˫��forѭ���ж�һ��С�����Ƿ�Ϸ� for (int i = k / 3 * 3; i < k / 3 * 3 + 3; i++) { for (int j = (k % 3) * 3; j < (k % 3) * 3 + 3; j++) { if (board[i][j] == '.') continue; if (!set.contains(board[i][j])) set.add(board[i][j]); else return false; } } set.clear(); } return true; }
public Node commonAncestor(Node one, Node two) { // Implementation here HashSet<Node> pathHash = new HashSet<>(); Node currentOne = one; if (one == null || two == null) { return null; } while (!currentOne.isRoot()) { pathHash.add(currentOne); currentOne = currentOne.parent; } pathHash.add(currentOne); Node currentTwo = two; while (!currentTwo.isRoot()) { if (pathHash.contains(currentTwo)) { return currentTwo; } currentTwo = currentTwo.parent; } if (pathHash.contains(currentTwo)) { return currentTwo; } return null; }
/**
 * Returns whether {@code move} is legal for a sliding (non-pawn) piece in {@code position}:
 * a movable own piece at the start square, no capture of own color, no promotion, a move
 * vector this piece can make, and an unobstructed path between start and end squares.
 */
public boolean isLegal(Position position, Move move) {
    final HashSet<MoveVector> possibleVectors = getPossibleVectors();
    final MoveVector moveVector = new MoveVector(move);
    final Square s1 = move.getStartSquare();
    final Square s2 = move.getEndSquare();
    final PieceType pieceType = position.getPieceAt(move.getStartSquare()).getType();
    // Pawns have their own movement rules; this check never applies to them.
    if (pieceType.equals(PieceType.PAWN)) {
        return false;
    }
    if (!position.isMovablePieceAtSquare(s1)) {
        return false;
    }
    if (position.isCaptureOfOwnColor(move)) {
        return false;
    }
    // Only pawn moves may carry a promotion piece type.
    if (!move.getPromotionPieceType().equals(PieceType.NONE)) {
        return false;
    }
    // Fix: the original performed this exact containment check twice in a row;
    // the duplicate was dead code and has been removed.
    if (!possibleVectors.contains(moveVector)) {
        return false;
    }
    // Every square strictly between start and end must be empty.
    for (final Square square : Chess.getSquaresBetween(s1, s2, false)) {
        if (!position.getPieceAt(square).equals(Piece.NONE)) {
            return false;
        }
    }
    return true;
}
/**
 * Renders a table cell: columns 3 and 4 are centered, others left-aligned; rows flagged in
 * greyOnes_ are drawn in a grey foreground (lighter when selected).
 */
public Component getTableCellRendererComponent(
        JTable table,
        Object value,
        boolean isSelected,
        boolean hasFocus,
        int viewRow,
        int column) {
    super.getTableCellRendererComponent(table, value, isSelected, hasFocus, viewRow, column);
    // The view may be sorted; map back to the model row before the grey lookup.
    int modelRow = sorter_.convertRowIndexToModel(viewRow);
    setHorizontalAlignment(
        column == 3 || column == 4 ? SwingConstants.CENTER : SwingConstants.LEFT);
    boolean greyedOut = greyOnes_.contains(modelRow);
    if (isSelected) {
        setForeground(greyedOut ? Color.lightGray : Color.white);
    } else {
        setForeground(greyedOut ? Color.gray : Color.black);
    }
    setText((String) value);
    return this;
}
public void map( LongWritable key, Text value, OutputCollector<IntWritable, HITSNode> output, Reporter reporter) throws IOException { ArrayListOfIntsWritable links = new ArrayListOfIntsWritable(); String line = ((Text) value).toString(); StringTokenizer itr = new StringTokenizer(line); if (itr.hasMoreTokens()) { int curr = Integer.parseInt(itr.nextToken()); if (stopList.contains(curr)) { return; } valOut.setAdjacencyList(links); valOut.setHARank((float) 1.0); valOut.setType(HITSNode.TYPE_AUTH_COMPLETE); } while (itr.hasMoreTokens()) { keyOut.set(Integer.parseInt(itr.nextToken())); valOut.setNodeId(keyOut.get()); // System.out.println(keyOut.toString() + ", " + // valOut.toString()); if (!(stopList.contains(keyOut.get()))) { output.collect(keyOut, valOut); } } // emit mentioned mentioner -> mentioned (mentioners) in links // emit mentioner mentioned -> mentioner (mentions) outlinks // emit mentioned a // emit mentioner 1 }
/**
 * Auxiliary method for cycle detection. Performs a depth-first traversal with vertex
 * markings: reaching a temporarily marked vertex means a back edge, i.e. a cycle. Once all
 * children of a vertex have been traversed it cannot be part of another cycle and is
 * permanently marked.
 *
 * @param jv current job vertex to check
 * @param temporarilyMarked set of temporarily marked nodes
 * @param permanentlyMarked set of permanently marked nodes
 * @return <code>true</code> if there is a cycle, <code>false</code> otherwise
 */
private boolean detectCycle(
        final AbstractJobVertex jv,
        final HashSet<JobVertexID> temporarilyMarked,
        final HashSet<JobVertexID> permanentlyMarked) {
    final JobVertexID vertexID = jv.getID();
    // Fully explored vertices cannot start a new cycle.
    if (permanentlyMarked.contains(vertexID)) {
        return false;
    }
    // A vertex still on the current DFS path means a back edge: cycle found.
    if (temporarilyMarked.contains(vertexID)) {
        return true;
    }
    temporarilyMarked.add(vertexID);
    for (int i = 0; i < jv.getNumberOfForwardConnections(); i++) {
        final AbstractJobVertex child = jv.getForwardConnection(i).getConnectedVertex();
        if (detectCycle(child, temporarilyMarked, permanentlyMarked)) {
            return true;
        }
    }
    permanentlyMarked.add(vertexID);
    return false;
}
/**
 * Merges activity records into {@code updatedBuilder}, capped at {@code topNum}: newly
 * combined records first (deduplicated by item), then historical records not already merged.
 */
private void mergeToHeap(
        ActionCombinerValue newValList,
        UserActiveHistory oldVal,
        UserActiveHistory.Builder updatedBuilder) {
    HashSet<String> mergedItems = new HashSet<String>();
    // New records take priority, up to the cap.
    for (String item : newValList.getActRecodeMap().keySet()) {
        if (updatedBuilder.getActRecordsCount() >= topNum) {
            break;
        }
        // add() returns false for an item already merged, skipping duplicates.
        if (mergedItems.add(item)) {
            updatedBuilder.addActRecords(newValList.getActRecodeMap().get(item));
        }
    }
    // Backfill with history not already covered by the new records.
    if (oldVal == null) {
        return;
    }
    for (Recommend.UserActiveHistory.ActiveRecord record : oldVal.getActRecordsList()) {
        if (updatedBuilder.getActRecordsCount() >= topNum) {
            break;
        }
        if (!mergedItems.contains(record.getItem())) {
            updatedBuilder.addActRecords(record);
        }
    }
}
private boolean areSpouses(PelicanPerson person1, PelicanPerson person2) { if (person1 == null || person2 == null) return (false); // construct unique set entry from the id strings concatenated with a space if (matingList.contains(person1.id + " " + person2.id) || matingList.contains(person2.id + " " + person1.id)) return (true); return (false); }
public static void validateItemLevelAquired(Item item) { // This method should be called: // 1) When an item gets obtained (Item.collect) // 2) When an item gets upgraded (ScrollOfUpgrade, ScrollOfWeaponUpgrade, ShortSword, // WandOfMagicMissile) // 3) When an item gets identified if (!item.levelKnown) { return; } Badge badge = null; if (!local.contains(Badge.ITEM_LEVEL_1) && item.level() >= 3) { badge = Badge.ITEM_LEVEL_1; local.add(badge); } if (!local.contains(Badge.ITEM_LEVEL_2) && item.level() >= 6) { badge = Badge.ITEM_LEVEL_2; local.add(badge); } if (!local.contains(Badge.ITEM_LEVEL_3) && item.level() >= 9) { badge = Badge.ITEM_LEVEL_3; local.add(badge); } if (!local.contains(Badge.ITEM_LEVEL_4) && item.level() >= 12) { badge = Badge.ITEM_LEVEL_4; local.add(badge); } displayBadge(badge); }
/**
 * Checks which declarations of overridden/inherited operations are visible on a wildcard
 * reference to Concrete: overridden members resolve only to Concrete's own declaration, and
 * inherited members only to the most concrete declaring supertype.
 */
@Test
public void testSimple() throws Exception {
    JvmTypeReference reference =
        typeRefs
            .typeReference(Concrete.class.getName())
            .wildCardExtends("java.lang.CharSequence")
            .create();
    HashSet<JvmFeature> features = Sets.newHashSet(service.getAllJvmFeatures(reference));
    // overriddenByAll(T): only Concrete's own declaration should be present.
    assertFalse(
        features.contains(
            findOperation(
                "org.eclipse.xtext.common.types.testSetups.Interface", "overriddenByAll(T)")));
    assertFalse(
        features.contains(
            findOperation(
                "org.eclipse.xtext.common.types.testSetups.Abstract", "overriddenByAll(T)")));
    assertTrue(
        features.contains(
            findOperation(
                "org.eclipse.xtext.common.types.testSetups.Concrete", "overriddenByAll(T)")));
    // inherited(): declared in Abstract, not re-declared by Interface's version.
    assertFalse(
        features.contains(
            findOperation("org.eclipse.xtext.common.types.testSetups.Interface", "inherited()")));
    assertTrue(
        features.contains(
            findOperation("org.eclipse.xtext.common.types.testSetups.Abstract", "inherited()")));
}
/**
 * Records the rare-mob badge matching the given mob (globally, marking a save as needed when
 * new) and displays the RARE meta badge once all five rare badges are held.
 */
public static void validateRare(Mob mob) {
    Badge badge = null;
    if (mob instanceof Albino) {
        badge = Badge.RARE_ALBINO;
    } else if (mob instanceof Bandit) {
        badge = Badge.RARE_BANDIT;
    } else if (mob instanceof Shielded) {
        badge = Badge.RARE_SHIELDED;
    } else if (mob instanceof Senior) {
        badge = Badge.RARE_SENIOR;
    } else if (mob instanceof Acidic) {
        badge = Badge.RARE_ACIDIC;
    }
    // Fix: bail out for non-rare mobs. Previously a null badge fell through,
    // inserting null into the global badge set and forcing an unnecessary save
    // (compare validateAllBagsBought, which guards against null).
    if (badge == null) {
        return;
    }
    if (!global.contains(badge)) {
        global.add(badge);
        saveNeeded = true;
    }
    // Holding all five rare badges unlocks the combined RARE badge.
    if (global.contains(Badge.RARE_ALBINO)
            && global.contains(Badge.RARE_BANDIT)
            && global.contains(Badge.RARE_SHIELDED)
            && global.contains(Badge.RARE_SENIOR)
            && global.contains(Badge.RARE_ACIDIC)) {
        badge = Badge.RARE;
        displayBadge(badge);
    }
}
/**
 * Reads the resource description of the datasources subsystem and verifies the value-type of
 * the installed-drivers attribute exposes the expected driver keys.
 */
@Test
public void testReadResourceResources() throws Exception {
    ModelNode dsAddress = new ModelNode();
    dsAddress.add("subsystem", "datasources");
    dsAddress.protect();

    ModelNode readDescription = new ModelNode();
    readDescription.get(OP).set("read-resource-description");
    readDescription.get(OP_ADDR).set(dsAddress);

    ModelNode result = executeOperation(readDescription);
    Map<String, ModelNode> children =
        getChildren(result.get("attributes").get("installed-drivers").get("value-type"));
    Assert.assertFalse(children.isEmpty());

    // Every child must have a non-null key; collect them for the containment checks.
    HashSet<String> attributeNames = new HashSet<String>();
    for (Entry<String, ModelNode> child : children.entrySet()) {
        Assert.assertTrue(child.getKey() != null);
        attributeNames.add(child.getKey());
    }
    Assert.assertTrue(attributeNames.contains("driver-xa-datasource-class-name"));
    Assert.assertTrue(attributeNames.contains("module-slot"));
    Assert.assertTrue(attributeNames.contains("driver-name"));
}
/**
 * Inserts the children of a menu contribution into {@code menuModel} at the contribution's
 * declared position, skipping menus/separators whose ids already exist. Inserted copies are
 * recorded in {@code menuContributionsToRemove} and their ids registered in the tracking sets.
 *
 * @return <code>false</code> when the contribution's position cannot be resolved, otherwise
 *     <code>true</code>
 */
public static boolean processAddition(
    final MMenu menuModel,
    final ArrayList<MMenuElement> menuContributionsToRemove,
    MMenuContribution menuContribution,
    final HashSet<String> existingMenuIds,
    HashSet<String> existingSeparatorNames) {
    // Resolve the insertion index from the contribution's position specifier.
    int idx = getIndex(menuModel, menuContribution.getPositionInParent());
    if (idx == -1) {
        return false;
    }
    for (MMenuElement item : menuContribution.getChildren()) {
        if (item instanceof MMenu && existingMenuIds.contains(item.getElementId())) {
            // skip this, it's already there
            continue;
        } else if (item instanceof MMenuSeparator
            && existingSeparatorNames.contains(item.getElementId())) {
            // skip this, it's already there
            continue;
        }
        // Copy so the contribution template itself is never added to (or mutated by) the model.
        MMenuElement copy = (MMenuElement) EcoreUtil.copy((EObject) item);
        if (DEBUG) {
            trace("addMenuContribution " + copy, menuModel.getWidget(), menuModel); // $NON-NLS-1$
        }
        // Track the copy for later removal and insert at the running index.
        menuContributionsToRemove.add(copy);
        menuModel.getChildren().add(idx++, copy);
        // Register the new id so later contributions do not duplicate it.
        if (copy instanceof MMenu && copy.getElementId() != null) {
            existingMenuIds.add(copy.getElementId());
        } else if (copy instanceof MMenuSeparator && copy.getElementId() != null) {
            existingSeparatorNames.add(copy.getElementId());
        }
    }
    return true;
}
/**
 * Builds a name unique both in {@code map} and in the in-flight {@code temporaryUniqueNames}
 * set. Strategy: append the shortest possible prefix of the upper-cased hex hash of the
 * object's name to {@code prefix}; if every prefix collides, fall back to
 * {@code prefix + fullHash + "_" + counter}. The chosen name is reserved in
 * {@code temporaryUniqueNames} before returning so concurrent callers cannot pick it.
 */
private String getUniqueName(
    DbObject obj, HashMap<String, ? extends SchemaObject> map, String prefix) {
    String hash = Integer.toHexString(obj.getName().hashCode()).toUpperCase();
    String name = null;
    synchronized (temporaryUniqueNames) {
        // Try prefix + first i hex digits, growing the prefix until no collision.
        for (int i = 1, len = hash.length(); i < len; i++) {
            name = prefix + hash.substring(0, i);
            if (!map.containsKey(name) && !temporaryUniqueNames.contains(name)) {
                break;
            }
            name = null; // collision: null signals "not found yet"
        }
        if (name == null) {
            // All hash prefixes taken: use the full hash plus an increasing counter.
            prefix = prefix + hash + "_";
            for (int i = 0; ; i++) {
                name = prefix + i;
                if (!map.containsKey(name) && !temporaryUniqueNames.contains(name)) {
                    break;
                }
            }
        }
        // Reserve the chosen name while still holding the lock.
        temporaryUniqueNames.add(name);
    }
    return name;
}
/**
 * The safest bike lane should have a safety weight no lower than the time weight of a flat
 * street. This method divides the safety lengths by the length ratio of the safest street,
 * ensuring this property.
 *
 * @param graph the graph whose street edges are rescaled in place
 */
private void applyBikeSafetyFactor(Graph graph) {
    _log.info(
        GraphBuilderAnnotation.register(
            graph,
            Variety.GRAPHWIDE,
            "Multiplying all bike safety values by " + (1 / bestBikeSafety)));
    // An edge can appear as outgoing of one vertex and incoming of another;
    // the seen-set guarantees each edge is rescaled exactly once.
    HashSet<Edge> seenEdges = new HashSet<Edge>();
    for (Vertex vertex : graph.getVertices()) {
        // The original duplicated this loop body for outgoing and incoming edges;
        // extracted into a single helper.
        rescaleBikeSafety(vertex.getOutgoing(), seenEdges);
        rescaleBikeSafety(vertex.getIncoming(), seenEdges);
    }
}

/** Divides the bike-safety effective length of each not-yet-seen PlainStreetEdge. */
private void rescaleBikeSafety(Iterable<Edge> edges, HashSet<Edge> seenEdges) {
    for (Edge e : edges) {
        if (!(e instanceof PlainStreetEdge)) {
            continue;
        }
        // add() returns false when the edge was already processed.
        if (seenEdges.add(e)) {
            PlainStreetEdge pse = (PlainStreetEdge) e;
            pse.setBicycleSafetyEffectiveLength(
                pse.getBicycleSafetyEffectiveLength() / bestBikeSafety);
        }
    }
}
/**
 * Add given Job to list of kept jobs. Records the finish timestamp for the job (and, when
 * present and not yet kept, its parent), then notifies listeners outside the lock.
 */
private void add(JobInfo info) {
    boolean fire = false;
    synchronized (keptjobinfos) {
        if (!keptjobinfos.contains(info)) {
            keptjobinfos.add(info);
            long now = System.currentTimeMillis();
            // Long.valueOf replaces the deprecated new Long(now) boxing constructor.
            finishedTime.put(info, Long.valueOf(now));
            Object parent = info.getParent();
            if (parent != null && !keptjobinfos.contains(parent)) {
                keptjobinfos.add(parent);
                finishedTime.put(parent, Long.valueOf(now));
            }
            fire = true;
        }
    }
    // Notify outside the synchronized block so listener code never runs under the lock.
    if (fire) {
        Object l[] = getListeners();
        for (int i = 0; i < l.length; i++) {
            KeptJobsListener jv = (KeptJobsListener) l[i];
            jv.finished(info);
        }
    }
}
/** * Sample and calculate the probability of hitting each type of marker (marker.class). Creates * 'numReads' reads of size 'readLen' and count how many of them hit each marker type. */ CountByType randomSampling(int readLen, int numReads) { CountByType countReads = new CountByType(); RandMarker randMarker = new RandMarker(snpEffectPredictor.getGenome()); for (int i = 0; i < numReads; i++) { // Random read Marker read = randMarker.rand(readLen); // Where does it hit? Markers regions = snpEffectPredictor.queryDeep(read); HashSet<String> doneRegion = new HashSet<String>(); for (Marker m : regions) { String mtype = markerTypes.getType(m); String msubtype = markerTypes.getSubType(m); if (!doneRegion.contains(mtype)) { countReads.inc(mtype); // Count reads doneRegion.add(mtype); // Do not count twice } if ((msubtype != null) && !doneRegion.contains(msubtype)) { countReads.inc(msubtype); // Count reads doneRegion.add(msubtype); // Do not count twice } } } return countReads; }
/**
 * Extracts candidate named entities from a tweet: capitalized non-stop-word tokens plus
 * noun phrases from the NP extractor, both filtered against the stop-word list.
 */
public HashSet<String> getEntitiesinTweet(String tweet) {
    HashSet<String> entities = new HashSet<String>();
    TwitterTokenizer tweetTokenizer = new TwitterTokenizer();
    // Perf fix: the pattern is constant, so compile it once per call instead of once per
    // token inside the loop (the original re-compiled it for every token).
    Pattern capitalized = Pattern.compile("^[A-Z]+.*");
    for (String token : tweetTokenizer.tokenize(tweet)) {
        token = token.trim();
        // Collapse stray punctuation surrounded by spaces into a single space.
        token =
            token.replaceAll("( [^a-zA-Z0-9\\.]) | ( [^a-zA-Z0-9\\.] ) | ([^a-zA-Z0-9\\.] )", " ");
        try {
            String[] split = token.split("\\s+");
            for (String s : split) {
                s = s.trim();
                // Capitalized tokens that are not stop words are candidate entities.
                if (capitalized.matcher(s).matches() && !stopWords.contains(s.toLowerCase())) {
                    entities.add(s);
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        // Noun phrases from the extractor, filtered against stop words.
        for (String np : npe.extract(token)) {
            if (!stopWords.contains(np.trim().toLowerCase())) {
                entities.add(np.trim());
            }
        }
    }
    return entities;
}