// --------------------------------------------------------------------------- private void printDependencies() throws TablesawException { m_printedDependencies = new HashSet<String>(); try { PrintWriter pw = new PrintWriter(new FileWriter("dependency.txt")); pw.println("Targets marked with a * have already been printed"); // Create a reduced set of stuff to print Set<String> ruleNames = new HashSet<String>(); for (String name : m_nameRuleMap.keySet()) ruleNames.add(name); for (String name : m_nameRuleMap.keySet()) { Rule rule = m_nameRuleMap.get(name); for (String dep : rule.getDependNames()) ruleNames.remove(dep); for (Rule dep : rule.getDependRules()) { if (dep.getName() != null) ruleNames.remove(dep.getName()); } } for (String name : ruleNames) { if (!name.startsWith(NAMED_RULE_PREFIX)) printDependencies(name, pw, 0); } pw.close(); } catch (IOException ioe) { throw new TablesawException("Cannot write to file dependency.txt", -1); } }
/**
 * Computes the length of the shortest transformation sequence from
 * {@code start} to {@code end}, changing one letter at a time, where every
 * intermediate word must appear in {@code dict}.
 *
 * <p>Breadth-first search over one-letter mutations. NOTE: the supplied
 * dictionary is consumed — visited words are removed from it.
 *
 * @param start the starting word
 * @param end the target word
 * @param dict the set of allowed intermediate words; mutated by this call
 * @return the number of words on the shortest ladder (both endpoints
 *     included), or 0 if no ladder exists
 */
public int ladderLength(String start, String end, Set<String> dict) {
  Queue<String> frontier = new LinkedList<String>();
  dict.remove(start); // never revisit the start word
  frontier.offer(start);

  int steps = 1;
  while (!frontier.isEmpty()) {
    // Drain one full BFS level per pass so 'steps' counts ladder rungs.
    for (int remaining = frontier.size(); remaining > 0; remaining--) {
      char[] letters = frontier.poll().toCharArray();
      for (int pos = 0; pos < letters.length; pos++) {
        char original = letters[pos];
        for (char candidate = 'a'; candidate <= 'z'; candidate++) {
          if (candidate == original) {
            continue;
          }
          letters[pos] = candidate;
          String mutated = String.valueOf(letters);
          // The end word does not need to be in the dictionary.
          if (mutated.equals(end)) {
            return steps + 1;
          }
          // remove() doubles as the contains() check plus visited marking.
          if (dict.remove(mutated)) {
            frontier.offer(mutated);
          }
        }
        letters[pos] = original; // restore before mutating the next position
      }
    }
    steps++;
  }
  return 0;
}
/**
 * Removes the given object from the backing set.
 *
 * <p>Strings are transparently wrapped in a {@link Tag}, since the set stores
 * tags rather than raw strings.
 *
 * @param obj the element (or tag name) to remove
 * @return {@code true} if the set changed as a result of the call
 */
@Override
public boolean remove(Object obj) {
  if (obj instanceof String) {
    // Fix: the original discarded this result and then fell through to
    // set.remove(obj), which can never match a raw String in a set of Tags,
    // so removals of existing tags by name reported false.
    return set.remove(new Tag((String) obj));
  }
  return set.remove(obj);
}
/**
 * Constructs a NumericShaper for the given default context and enabled script
 * ranges.
 *
 * <p>Mutually exclusive ranges are reconciled here: EASTERN_ARABIC wins over
 * ARABIC, and TAI_THAM_THAM wins over TAI_THAM_HORA. The surviving ranges are
 * kept both as an EnumSet and as an array; the array is sorted by base code
 * point whenever it is large enough to be binary-searched later.
 *
 * @param defaultContext the range used as the default shaping context
 * @param ranges the ranges to enable; must not be null
 * @throws NullPointerException if {@code ranges} is null
 */
private NumericShaper(Range defaultContext, Set<Range> ranges) {
  shapingRange = defaultContext;
  rangeSet = EnumSet.copyOf(ranges); // throws NPE if ranges is null.

  // Give precedence to EASTERN_ARABIC if both ARABIC and
  // EASTERN_ARABIC are specified.
  if (rangeSet.contains(Range.EASTERN_ARABIC) && rangeSet.contains(Range.ARABIC)) {
    rangeSet.remove(Range.ARABIC);
  }

  // As well as the above case, give precedence to TAI_THAM_THAM if both
  // TAI_THAM_HORA and TAI_THAM_THAM are specified.
  if (rangeSet.contains(Range.TAI_THAM_THAM) && rangeSet.contains(Range.TAI_THAM_HORA)) {
    rangeSet.remove(Range.TAI_THAM_HORA);
  }

  rangeArray = rangeSet.toArray(new Range[rangeSet.size()]);
  if (rangeArray.length > BSEARCH_THRESHOLD) {
    // sort rangeArray for binary search
    Arrays.sort(
        rangeArray,
        new Comparator<Range>() {
          public int compare(Range s1, Range s2) {
            return s1.base > s2.base ? 1 : s1.base == s2.base ? 0 : -1;
          }
        });
  }
}
public boolean isModelBeanDefined(String beanType) { // boolean flag = definedModelBeans.contains(beanType); // if (flag) { // try { // Class.forName(pack+"."+beanType); // return true; // } catch (ClassNotFoundException cnfe) { // definedModelBeans.remove(beanType); // return false; // } // } else { // return false; // } boolean flag = definedModelBeans.contains(beanType); if (resolver == null) { try { Class.forName(pack + "." + beanType); return true; } catch (ClassNotFoundException cnfe) { definedModelBeans.remove(beanType); return false; } } else if (flag) { try { resolver.resolveType(pack + "." + beanType); return true; } catch (ClassNotFoundException cnfe) { definedModelBeans.remove(beanType); return false; } } else { return false; } }
/**
 * Asserts that basic completion at the fixture's caret offers every one of
 * the expected variants.
 *
 * <p>Method variants may be written as {@code "name()"}, variable variants as
 * {@code "@name"}, and named-argument variants as {@code "name:"}; the plain
 * lookup string always matches as well.
 *
 * @param fixture the test fixture positioned at the completion point
 * @param expectedVariants lookup strings that must all be offered
 */
public static void checkCompletionContains(
    JavaCodeInsightTestFixture fixture, String... expectedVariants) {
  LookupElement[] lookupElements = fixture.completeBasic();
  Assert.assertNotNull(lookupElements);

  Set<String> missedVariants = CollectionFactory.hashSet(expectedVariants);

  for (LookupElement lookupElement : lookupElements) {
    String lookupString = lookupElement.getLookupString();
    missedVariants.remove(lookupString);

    Object object = lookupElement.getObject();
    if (object instanceof ResolveResult) {
      object = ((ResolveResult) object).getElement();
    }

    if (object instanceof PsiMethod) {
      missedVariants.remove(lookupString + "()");
    } else if (object instanceof PsiVariable) {
      missedVariants.remove('@' + lookupString);
    } else if (object instanceof NamedArgumentDescriptor) {
      missedVariants.remove(lookupString + ':');
    }
  }

  // Fail with the list of variants that never showed up. Assert.fail replaces
  // the original assertTrue(message, false) anti-pattern.
  if (!missedVariants.isEmpty()) {
    Assert.fail("Some completion variants are missed " + missedVariants);
  }
}
private Node composeNode(Node parent) { recursiveNodes.add(parent); if (parser.checkEvent(Event.ID.Alias)) { AliasEvent event = (AliasEvent) parser.getEvent(); String anchor = event.getAnchor(); if (!anchors.containsKey(anchor)) { throw new ComposerException( null, null, "found undefined alias " + anchor, event.getStartMark()); } Node result = anchors.get(anchor); if (recursiveNodes.remove(result)) { result.setTwoStepsConstruction(true); } return result; } NodeEvent event = (NodeEvent) parser.peekEvent(); String anchor = null; anchor = event.getAnchor(); // the check for duplicate anchors has been removed (issue 174) Node node = null; if (parser.checkEvent(Event.ID.Scalar)) { node = composeScalarNode(anchor); } else if (parser.checkEvent(Event.ID.SequenceStart)) { node = composeSequenceNode(anchor); } else { node = composeMappingNode(anchor); } recursiveNodes.remove(parent); return node; }
/**
 * Verifies that unchecking individual data types only takes effect once the
 * sync customization fragment is closed.
 */
@SmallTest
@Feature({"Sync"})
public void testSettingDataTypes() throws Exception {
  setUpTestAccountAndSignIn();
  SyncTestUtil.waitForSyncActive();

  SyncCustomizationFragment fragment = startSyncCustomizationFragment();
  SwitchPreference syncEverything = getSyncEverything(fragment);
  Map<Integer, CheckBoxPreference> dataTypes = getDataTypes(fragment);
  assertDefaultSyncOnState(fragment);
  // Turning "sync everything" off should leave every type checked and
  // individually editable.
  togglePreference(syncEverything);
  for (CheckBoxPreference dataType : dataTypes.values()) {
    assertTrue(dataType.isChecked());
    assertTrue(dataType.isEnabled());
  }
  // The preferences types are synced implicitly on top of the visible ones.
  Set<Integer> expectedTypes = new HashSet<Integer>(dataTypes.keySet());
  expectedTypes.add(ModelType.PREFERENCES);
  expectedTypes.add(ModelType.PRIORITY_PREFERENCES);
  assertDataTypesAre(expectedTypes);
  togglePreference(dataTypes.get(ModelType.AUTOFILL));
  togglePreference(dataTypes.get(ModelType.PASSWORDS));
  // Nothing should have changed before the fragment closes.
  assertDataTypesAre(expectedTypes);
  // Closing the fragment commits the two unchecked types.
  closeFragment(fragment);
  expectedTypes.remove(ModelType.AUTOFILL);
  expectedTypes.remove(ModelType.PASSWORDS);
  assertDataTypesAre(expectedTypes);
}
/**
 * Validates this element's in/out edge references, pruning any whose target
 * document no longer exists in the database.
 *
 * <p>Side effects: dangling ids are removed from the live edge sets and the
 * matching dirty flag is raised so the change gets persisted.
 *
 * @return a report of pruned ids ("IN: id," / "OUT: id," entries), empty when
 *     every edge is valid
 */
public String validateEdges() {
  StringBuilder report = new StringBuilder();

  Set<String> inIds = getInEdges();
  // Snapshot to an array so removals don't disturb the iteration.
  for (String unid : inIds.toArray(new String[0])) {
    if (getParent().getRawDatabase().getDocumentByUNID(unid) == null) {
      inIds.remove(unid);
      inDirty_ = true;
      report.append("IN: ").append(unid).append(",");
    }
  }

  Set<String> outIds = getOutEdges();
  for (String unid : outIds.toArray(new String[0])) {
    if (getParent().getRawDatabase().getDocumentByUNID(unid) == null) {
      outIds.remove(unid);
      outDirty_ = true;
      report.append("OUT: ").append(unid).append(",");
    }
  }

  return report.toString();
}
/*
 * BFS word ladder. The problem constrains words to lower-case letters, so
 * each word is expanded by substituting all 26 characters at every position;
 * this is O(26 * L * N) overall, which beats building an explicit word
 * adjacency graph (O(n^2) construction gets Time Limit Exceeded).
 */
public static int ladderLength(String beginWord, String endWord, Set<String> wordDict) {
  Queue<String> queue = new LinkedList<String>();
  queue.add(beginWord);
  wordDict.remove(beginWord); // never revisit the start word

  int length = 1;
  while (!queue.isEmpty()) {
    // Drain exactly one BFS level per pass so 'length' counts ladder rungs.
    int levelSize = queue.size();
    while (levelSize-- > 0) {
      char[] chars = queue.poll().toCharArray();
      for (int pos = 0; pos < chars.length; ++pos) {
        char saved = chars[pos];
        for (char substitute = 'a'; substitute <= 'z'; ++substitute) {
          if (substitute == saved) continue;
          chars[pos] = substitute;
          String candidate = new String(chars);
          // The end word does not need to be in the dictionary.
          if (candidate.equals(endWord)) return length + 1;
          // remove() doubles as the contains() check plus visited marking.
          if (wordDict.remove(candidate)) {
            queue.add(candidate);
          }
        }
        chars[pos] = saved; // restore before mutating the next position
      }
    }
    ++length;
  }
  return 0;
}
/**
 * Applies selection handling when a tag child view is tapped.
 *
 * <p>Only active when {@code mAutoSelectEffect} is on. Enforces the
 * configured maximum selection count, with radio-button-like behavior when
 * the max is 1 (the previous selection is swapped for the new one). Notifies
 * {@code mOnSelectListener} with a defensive copy of the selected positions.
 *
 * @param child the tapped tag view
 * @param position the tapped view's index within this group
 */
private void doSelect(TagView child, int position) {
  if (mAutoSelectEffect) {
    if (!child.isChecked()) {
      // Special case: max_select == 1 behaves like a radio group — deselect
      // the previously selected tag and select the new one.
      if (mSelectedMax == 1 && mSelectedView.size() == 1) {
        Iterator<Integer> iterator = mSelectedView.iterator();
        Integer preIndex = iterator.next();
        TagView pre = (TagView) getChildAt(preIndex);
        pre.setChecked(false);
        child.setChecked(true);
        mSelectedView.remove(preIndex);
        mSelectedView.add(position);
      } else {
        // Refuse further selections once the cap is reached.
        if (mSelectedMax > 0 && mSelectedView.size() >= mSelectedMax) return;
        child.setChecked(true);
        mSelectedView.add(position);
      }
    } else {
      // Tapping a selected tag toggles it off.
      child.setChecked(false);
      mSelectedView.remove(position);
    }
    if (mOnSelectListener != null) {
      // Hand out a copy so listeners cannot mutate internal selection state.
      mOnSelectListener.onSelected(new HashSet<Integer>(mSelectedView));
    }
  }
}
/**
 * NOTE: this returns an unmodifiable copy of the keySet, so removing from here won't have an
 * effect, and calling a remove while iterating through the set will not cause a concurrent
 * modification exception. This behavior is necessary for now for the persisted cache feature.
 */
public Set<? extends K> getCacheLineKeys() {
  // note that this must be a HashSet and not a FastSet in order to have a null value
  Set<Object> keys;
  if (fileTable != null) {
    // Disk-backed cache: collect the file-table keys under the instance lock.
    keys = new HashSet<Object>();
    try {
      synchronized (this) {
        addAllFileTableKeys(keys);
      }
    } catch (IOException e) {
      Debug.logError(e, module);
    }
    // ObjectType.NULL is the storage sentinel for a null key; translate it
    // back to an actual null for callers.
    if (keys.remove(ObjectType.NULL)) {
      keys.add(null);
    }
  } else {
    if (memoryTable.containsKey(ObjectType.NULL)) {
      // Copy so we can swap the sentinel for a real null key.
      keys = new HashSet<Object>(memoryTable.keySet());
      keys.remove(ObjectType.NULL);
      keys.add(null);
    } else {
      keys = memoryTable.keySet();
    }
  }
  return Collections.unmodifiableSet(UtilGenerics.<Set<? extends K>>cast(keys));
}
/**
 * Sets the targets to search.
 *
 * <p>String entries are replaced with their {@link File} equivalents; plain
 * files are pushed onto the work queue immediately while only directories are
 * kept in {@code _dirs} for the scan. NOTE(review): the Javadoc wording and
 * the guard look contradictory — the exception fires when
 * {@code isFinished() == true}; confirm the intended semantics of
 * {@code isFinished()}.
 *
 * @param targets set with the targets to search.
 * @throws NullPointerException if <code>targets == null</code>
 * @throws IllegalStateException if the scanner hasn't finished processing yet (<code>
 *     isFinished() == true</code>)
 */
public void setTargets(Set targets) {
  if (targets == null) {
    throw new NullPointerException();
  }
  if (isFinished()) {
    throw new IllegalStateException(
        ResourceBundleFactory.getBundle(BUNDLE_NAME).getString("SCANNER_RUNNING" /* NOI18N */));
  }
  // Iterate a copy so the caller's set can be mutated safely in the loop.
  Set copy = new HashSet(targets);
  for (Iterator it = copy.iterator(); it.hasNext(); ) {
    Object file = it.next();
    if (file instanceof String) {
      // Normalize String paths to File objects in the live target set.
      targets.remove(file);
      file = new File((String) file);
      targets.add(file);
    }
    if (!((File) file).isDirectory()) {
      // Plain files go straight onto the work queue; only directories stay.
      _queue.push((File) file);
      targets.remove(file);
    }
  }
  _dirs = new File[targets.size()];
  targets.toArray(_dirs);
}
/**
 * Activates pressure plates that currently have a player standing on them and
 * deactivates plates that no longer do.
 *
 * <p>Every previously activated plate starts out scheduled for release; each
 * plate that is re-confirmed (a player still stands on it) is rescued from
 * that schedule, and whatever remains has its signal stopped.
 */
private void handlePressurePlateEvents() {
  // Assume every active plate should be released unless a player is found on
  // it below.
  Set<Vector3i> toRemoveSignal = Sets.newHashSet(activatedPressurePlates);

  Iterable<EntityRef> players =
      entityManager.getEntitiesWith(CharacterComponent.class, LocationComponent.class);
  for (EntityRef player : players) {
    Vector3f playerLocation = player.getComponent(LocationComponent.class).getWorldPosition();
    // Round to the block directly beneath the player's feet.
    Vector3i locationBeneathPlayer =
        new Vector3i(playerLocation.x + 0.5f, playerLocation.y - 0.5f, playerLocation.z + 0.5f);
    Block blockBeneathPlayer = worldProvider.getBlock(locationBeneathPlayer);
    if (blockBeneathPlayer == signalPressurePlate) {
      EntityRef entityBeneathPlayer = blockEntityRegistry.getBlockEntityAt(locationBeneathPlayer);
      SignalProducerComponent signalProducer =
          entityBeneathPlayer.getComponent(SignalProducerComponent.class);
      if (signalProducer != null) {
        if (signalProducer.signalStrength == 0) {
          // Newly stepped on: start an unbounded (-1) signal.
          startProducingSignal(entityBeneathPlayer, -1);
          activatedPressurePlates.add(locationBeneathPlayer);
        } else {
          // Still occupied: keep this plate active.
          toRemoveSignal.remove(locationBeneathPlayer);
        }
      }
    }
  }

  // Release the plates nobody is standing on anymore.
  for (Vector3i pressurePlateLocation : toRemoveSignal) {
    EntityRef pressurePlate = blockEntityRegistry.getBlockEntityAt(pressurePlateLocation);
    SignalProducerComponent signalProducer =
        pressurePlate.getComponent(SignalProducerComponent.class);
    if (signalProducer != null) {
      stopProducingSignal(pressurePlate);
      activatedPressurePlates.remove(pressurePlateLocation);
    }
  }
}
/** Removal is a no-op before the element is added and succeeds exactly once after. */
@Test
public void remove() {
  assertFalse(toTest.remove("Test1"));
  toTest.add("Test1");
  assertTrue(toTest.remove("Test1"));
}
/**
 * Rebuilds the employee panels to mirror the given solution.
 *
 * <p>Panels are created for new employees, refreshed for existing ones, and
 * disposed for employees no longer present in the roster ("dead" employees).
 * NOTE(review): the null key removed from the dead set appears to back the
 * unassigned panel — confirm against how employeeToPanelMap is populated.
 */
@Override
public void updatePanel(Solution solution) {
  NurseRoster nurseRoster = (NurseRoster) solution;
  List<ShiftDate> shiftDateList = nurseRoster.getShiftDateList();
  List<Shift> shiftList = nurseRoster.getShiftList();
  // Presume every known employee is gone; survivors are removed from this set
  // as they are encountered below.
  Set<Employee> deadEmployeeSet = new LinkedHashSet<Employee>(employeeToPanelMap.keySet());
  deadEmployeeSet.remove(null);
  for (Employee employee : nurseRoster.getEmployeeList()) {
    deadEmployeeSet.remove(employee);
    EmployeePanel employeePanel = employeeToPanelMap.get(employee);
    if (employeePanel == null) {
      // First time we see this employee: create and register a panel.
      employeePanel = new EmployeePanel(this, shiftDateList, shiftList, employee);
      employeeListPanel.add(employeePanel);
      employeeToPanelMap.put(employee, employeePanel);
    }
    employeePanel.clearShiftAssignments();
  }
  unassignedPanel.clearShiftAssignments();
  for (ShiftAssignment shiftAssignment : nurseRoster.getShiftAssignmentList()) {
    Employee employee = shiftAssignment.getEmployee();
    EmployeePanel employeePanel = employeeToPanelMap.get(employee);
    employeePanel.addShiftAssignment(shiftAssignment);
  }
  // Tear down the panels of employees no longer in the roster.
  for (Employee deadEmployee : deadEmployeeSet) {
    EmployeePanel deadEmployeePanel = employeeToPanelMap.remove(deadEmployee);
    employeeListPanel.remove(deadEmployeePanel);
  }
  for (EmployeePanel employeePanel : employeeToPanelMap.values()) {
    employeePanel.update();
  }
}
/**
 * Collects branching instructions whose condition is effectively constant.
 *
 * <p>Returns a pair of (always-true, always-false) instruction sets: an
 * instruction is a candidate for "always true" when its false branch is never
 * reachable, and for "always false" when its true branch is never reachable.
 * A second pass over the instructions retracts any candidate whose supposedly
 * dead branch turns out to be reachable.
 */
public Pair<Set<Instruction>, Set<Instruction>> getConstConditionalExpressions() {
  Set<Instruction> alwaysTrue = new HashSet<Instruction>();
  Set<Instruction> alwaysFalse = new HashSet<Instruction>();

  // Pass 1: gather anchored, constant-condition branches with an unreachable
  // side.
  for (Instruction instruction : myInstructions) {
    if (!(instruction instanceof BranchingInstruction)) {
      continue;
    }
    BranchingInstruction branching = (BranchingInstruction) instruction;
    if (branching.getPsiAnchor() == null || !branching.isConditionConst()) {
      continue;
    }
    if (!branching.isTrueReachable()) {
      alwaysFalse.add(branching);
    }
    if (!branching.isFalseReachable()) {
      alwaysTrue.add(branching);
    }
  }

  // Pass 2: retract candidates whose opposite branch is reachable after all.
  for (Instruction instruction : myInstructions) {
    if (!(instruction instanceof BranchingInstruction)) {
      continue;
    }
    BranchingInstruction branching = (BranchingInstruction) instruction;
    if (branching.isTrueReachable()) {
      alwaysFalse.remove(branching);
    }
    if (branching.isFalseReachable()) {
      alwaysTrue.remove(branching);
    }
  }

  return Pair.create(alwaysTrue, alwaysFalse);
}
/**
 * Tears down a connection: deregisters it from all tracking structures,
 * fires the removal event, and closes it if it is still alive.
 *
 * <p>Null-safe; passing null is a no-op.
 *
 * @param connection the connection to destroy, may be null
 */
@Override
public void destroyConnection(final Connection connection) {
  if (connection == null) {
    return;
  }
  if (logger.isFinestEnabled()) {
    logger.finest("Destroying " + connection);
  }
  // Only notify the threading model for connections we were actually
  // tracking as active.
  if (activeConnections.remove(connection)) {
    // this should not be needed; but some tests are using DroppingConnection which is not a
    // TcpIpConnection.
    if (connection instanceof TcpIpConnection) {
      ioThreadingModel.onConnectionRemoved((TcpIpConnection) connection);
    }
  }
  final Address endPoint = connection.getEndPoint();
  if (endPoint != null) {
    // Forget both the in-progress marker and the established mapping for
    // this endpoint, then tell listeners.
    connectionsInProgress.remove(endPoint);
    connectionsMap.remove(endPoint, connection);
    fireConnectionRemovedEvent(connection, endPoint);
  }
  if (connection.isAlive()) {
    connection.close();
    closedCount.inc();
  }
}
/**
 * Reacts to a change in the number of qualifying days observed for an IMSI,
 * promoting the subscriber into the worker set or demoting it out of it.
 *
 * @param imsi the subscriber identifier
 * @param days how many qualifying days have been observed
 * @param daysThreshold minimum days required to count as a worker
 */
@Override
public void onChange(String imsi, int days, int daysThreshold) {
  if (days >= daysThreshold) {
    if (!workers.contains(imsi)) {
      workers.add(imsi);
      // A subscriber cannot be both tourist and worker at the same time.
      if (tourists.contains(imsi)) {
        tourists.remove(imsi);
        listener.removeTourist(imsi, currentTime);
      }
    }
  } else {
    // Not a worker according to this detector, but it may still qualify as a
    // worker in another detector — keep it in that case.
    for (MetricsDetector detector : detectors) {
      if (detector.isWorker(currentTime, imsi)) {
        return;
      }
    }
    if (workers.contains(imsi)) {
      workers.remove(imsi);
    }
  }
  // NOTE(review): the detailed branch is guarded by isDebugEnabled() yet
  // still logs at info level — confirm whether debug level was intended.
  if (logger.isInfoEnabled()) {
    if (logger.isDebugEnabled()) {
      StringBuilder sb = new StringBuilder();
      for (String worker : workers) {
        sb.append(worker);
        sb.append(",");
      }
      logger.info(
          format("is worker change: imsi:[%s],days:[%d],works:[%s]", imsi, days, sb.toString()));
    } else {
      logger.info(format("is worker change: imsi:[%s],days:[%d]", imsi, days));
    }
  }
}
/**
 * When several modules are registered with a clashing order value, delegation
 * order among them is unspecified — verify that all three modules are invoked
 * exactly once regardless of which order the runtime picks.
 */
@Test
public void modulesShouldBeDelegatedToInRandomOrderWhenOrderClashes() throws InterruptedException {
  GraphDatabaseService database =
      builder()
          .setConfig(
              "com.graphaware.module.test1.1",
              TestModuleBootstrapper.MODULE_ENABLED.getDefaultValue())
          .setConfig(
              "com.graphaware.module.test3.1",
              TestModuleBootstrapper.MODULE_ENABLED.getDefaultValue())
          .setConfig(
              "com.graphaware.module.test2.1",
              TestModuleBootstrapper.MODULE_ENABLED.getDefaultValue())
          .newGraphDatabase();

  registerShutdownHook(database);

  // Trigger the runtime by performing a write transaction.
  try (Transaction tx = database.beginTx()) {
    database.createNode();
    tx.success();
  }

  assertEquals(3, TEST_RUNTIME_MODULES.size());
  // Each module id must appear exactly once, in any order: removing all three
  // ids from the expected set must succeed and leave the set empty.
  Set<String> remaining = new HashSet<>(Arrays.asList("test1", "test2", "test3"));
  assertTrue(remaining.remove(TEST_RUNTIME_MODULES.get(0).getId()));
  assertTrue(remaining.remove(TEST_RUNTIME_MODULES.get(1).getId()));
  assertTrue(remaining.remove(TEST_RUNTIME_MODULES.get(2).getId()));
  assertTrue(remaining.isEmpty());
}
/**
 * Logout a user.
 *
 * <p>This method removes the Principals that were added by the <code>commit</code> method.
 *
 * @exception LoginException if the logout fails.
 * @return true in all cases since this <code>LoginModule</code> should not be ignored.
 */
public boolean logout() throws LoginException {
  if (subject.isReadOnly()) {
    // Cannot modify a read-only subject; still reset our own state first.
    cleanState();
    throw new LoginException("Subject is read-only");
  }
  // Remove exactly the principals that commit() added; the authorization
  // principal only exists when an authorization identity was configured.
  Set principals = subject.getPrincipals();
  principals.remove(ldapPrincipal);
  principals.remove(userPrincipal);
  if (authzIdentity != null) {
    principals.remove(authzPrincipal);
  }

  // clean out state
  cleanState();
  succeeded = false;
  commitSucceeded = false;
  ldapPrincipal = null;
  userPrincipal = null;
  authzPrincipal = null;

  if (debug) {
    System.out.println("\t\t[LdapLoginModule] logged out Subject");
  }
  return true;
}
/**
 * Removes a node from the graph along with every edge that starts or ends at
 * it. Parents left without children become leaves; children left without
 * parents become roots. Does nothing if the node is not in the graph.
 *
 * @param work the node to remove
 */
public void remove(BaseWork work) {
  if (!workGraph.containsKey(work)) {
    return;
  }

  // Capture both adjacency lists before mutating anything.
  List<BaseWork> children = getChildren(work);
  List<BaseWork> parents = getParents(work);

  // Detach from children; a child with no parents left is promoted to root.
  for (BaseWork child : children) {
    edgeProperties.remove(new ImmutablePair<BaseWork, BaseWork>(work, child));
    invertedWorkGraph.get(child).remove(work);
    if (invertedWorkGraph.get(child).size() == 0) {
      roots.add(child);
    }
  }

  // Detach from parents; a parent with no children left becomes a leaf.
  for (BaseWork parent : parents) {
    edgeProperties.remove(new ImmutablePair<BaseWork, BaseWork>(parent, work));
    workGraph.get(parent).remove(work);
    if (workGraph.get(parent).size() == 0) {
      leaves.add(parent);
    }
  }

  // Finally erase the node itself from every index.
  roots.remove(work);
  leaves.remove(work);
  workGraph.remove(work);
  invertedWorkGraph.remove(work);
}
/**
 * Releases a result set, delisting it from the statement-to-result-set
 * cross-reference (or from the unassociated pool) before physically closing
 * it.
 *
 * @param resultSet the result set being released
 * @param statement the owning statement, or null to look it up from the
 *     result set itself
 */
@Override
public void release(ResultSet resultSet, Statement statement) {
  log.tracef("Releasing result set [%s]", resultSet);
  if (statement == null) {
    try {
      statement = resultSet.getStatement();
    } catch (SQLException e) {
      throw convert(e, "unable to access Statement from ResultSet");
    }
  }
  if (statement != null) {
    final Set<ResultSet> resultSets = xref.get(statement);
    if (resultSets == null) {
      log.unregisteredStatement();
    } else {
      resultSets.remove(resultSet);
      // Drop the xref entry entirely once its last result set is gone.
      if (resultSets.isEmpty()) {
        xref.remove(statement);
      }
    }
  } else {
    // No owning statement: the result set should be in the unassociated pool.
    final boolean removed = unassociatedResultSets.remove(resultSet);
    if (!removed) {
      log.unregisteredResultSetWithoutStatement();
    }
  }
  close(resultSet);
}
/**
 * Returns the most promising host to try next, preferring verified hosts and
 * falling back to the legacy ranking over tested and new hosts.
 *
 * <p>Side effects: the returned host is removed from the internal pools and
 * its ping bookkeeping is cleared; new hosts may be pinged afterwards.
 *
 * <p>NOTE(review): despite the declared NoSuchElementException, this method
 * returns null when no hosts remain — confirm which contract callers expect.
 */
public synchronized RemoteFileDesc getBest() throws NoSuchElementException {
  if (!hasMore()) return null;
  RemoteFileDesc ret;
  // try a verified host
  if (!verifiedHosts.isEmpty()) {
    LOG.debug("getting a verified host");
    ret = (RemoteFileDesc) verifiedHosts.first();
    verifiedHosts.remove(ret);
  } else {
    LOG.debug("getting a non-verified host");
    // use the legacy ranking logic to select a non-verified host
    Iterator dual = new DualIterator(testedLocations.iterator(), newHosts.iterator());
    ret = LegacyRanker.getBest(dual);
    newHosts.remove(ret);
    testedLocations.remove(ret);
    // Clear ping bookkeeping: the push proxies for firewalled hosts, the host
    // itself otherwise.
    if (ret.needsPush()) {
      for (Iterator iter = ret.getPushProxies().iterator(); iter.hasNext(); )
        pingedHosts.remove(iter.next());
    } else pingedHosts.remove(ret);
  }
  pingNewHosts();
  if (LOG.isDebugEnabled())
    LOG.debug("the best host we came up with is " + ret + " " + ret.getPushAddr());
  return ret;
}
/**
 * Classifies the capitalization pattern of the given content.
 *
 * @param content the token to classify
 * @return "upperInitial", "allCaps", "lowercase", "mixedCaps", or null when
 *     the content is null/empty or matches none of the known patterns
 */
public static String getOrthographyValue(String content) {
  if (content == null || content.isEmpty()) return null;
  // Collect the Unicode character categories of every char except the first
  // (the first char gets special treatment below).
  Set<Integer> types = new HashSet<Integer>();
  for (int i = 1; i < content.length(); i++) {
    char c = content.charAt(i);
    types.add(Character.getType(c));
  }
  // we are ignoring spaces
  types.remove(Character.getType(' '));
  // we are ignoring CONTROL chars
  // NOTE(review): ':' is punctuation, not a control character — this line
  // does not match the comment above it; confirm which was intended.
  types.remove(Character.getType(':'));
  // Upper-case initial followed by only lower-case categories.
  if (Character.getType(content.charAt(0)) == Character.UPPERCASE_LETTER) {
    if (lowerCaseTypesSet.containsAll(types)) return "upperInitial";
  }
  // Otherwise fold the first char's category back in and test the whole token.
  types.add(Character.getType(content.charAt(0)));
  if (upperCaseTypesSet.containsAll(types)) return "allCaps";
  if (lowerCaseTypesSet.containsAll(types)) return "lowercase";
  if (mixedCaseTypesSet.containsAll(types)) return "mixedCaps";
  return null;
}
/**
 * Analyses an expression's identifiers against the identifiers that are
 * available for binding.
 *
 * <p>Partitions the used identifiers into bound declarations, bound globals,
 * and not-bound identifiers ("this" is always treated as bound).
 *
 * @param identifiers every identifier used by the expression
 * @param availableIdentifiers the declarations and globals in scope
 * @return the analysis result carrying the used/bound/unbound partitions
 */
@SuppressWarnings("unchecked")
private MVELAnalysisResult analyze(
    final Set<String> identifiers, final BoundIdentifiers availableIdentifiers) {
  MVELAnalysisResult result = new MVELAnalysisResult();
  result.setIdentifiers(identifiers);
  // Assume everything is unbound until a declaration or global claims it.
  final Set<String> notBound = new HashSet<String>(identifiers);
  notBound.remove("this");
  Map<String, Class<?>> usedDecls = new HashMap<String, Class<?>>();
  Map<String, Class<?>> usedGlobals = new HashMap<String, Class<?>>();
  for (Entry<String, Class<?>> entry : availableIdentifiers.getDeclarations().entrySet()) {
    if (identifiers.contains(entry.getKey())) {
      usedDecls.put(entry.getKey(), entry.getValue());
      notBound.remove(entry.getKey());
    }
  }
  for (Entry<String, Class<?>> entry : availableIdentifiers.getGlobals().entrySet()) {
    if (identifiers.contains(entry.getKey())) {
      usedGlobals.put(entry.getKey(), entry.getValue());
      notBound.remove(entry.getKey());
    }
  }
  result.setBoundIdentifiers(
      new BoundIdentifiers(usedDecls, usedGlobals, availableIdentifiers.getThisClass()));
  result.setNotBoundedIdentifiers(notBound);
  return result;
}
/**
 * Strips the given user from every channel privilege roster: owner, super-op,
 * op, half-op, and voice.
 *
 * @param user the user whose privileges are being dropped
 */
protected void removeUser(User user) {
  owners.remove(user);
  superOps.remove(user);
  ops.remove(user);
  halfOps.remove(user);
  voices.remove(user);
}
/**
 * Diffs the current file set against a previous snapshot and emits a
 * RESOURCE delta message listing ADDED, CHANGED and REMOVED files.
 *
 * <p>{@code oldfiles} is consumed: snapshot entries matched against current
 * files are removed, so whatever remains afterwards is reported as REMOVED.
 * The message is only sent when at least one delta was produced.
 *
 * @param oldfiles the previous snapshot of known files; mutated by this call
 */
private void handleRefresh(Set<IFileData> oldfiles) {
  IvyXmlWriter xw = pybase_main.beginMessage("RESOURCE");
  int ctr = 0;
  for (IFileData fd : all_files) {
    // Find the snapshot entry describing the same file, if any.
    IFileData old = null;
    for (IFileData ofd : oldfiles) {
      if (ofd.getFile().equals(fd.getFile())) {
        old = ofd;
        break;
      }
    }
    if (old == null) {
      outputDelta(xw, "ADDED", fd);
      ++ctr;
    } else if (old.getLastDateLastModified() != fd.getLastDateLastModified()) {
      oldfiles.remove(old);
      outputDelta(xw, "CHANGED", fd);
      ++ctr;
    } else {
      // Unchanged: just consume the snapshot entry.
      oldfiles.remove(old);
    }
  }
  // Anything left in the snapshot no longer exists on disk.
  for (IFileData fd : oldfiles) {
    outputDelta(xw, "REMOVED", fd);
    ++ctr;
  }
  if (ctr > 0) {
    pybase_main.finishMessage(xw);
  }
}
/**
 * Resumes a depth-first pattern match from a previously saved call position.
 *
 * <p>First restores the traversal state to what it was before the previous
 * match was returned (re-marks the pattern relationship and un-visits the
 * last relationship). Then each remaining concrete relationship is tried
 * tentatively and undone on failure (backtracking). Returns true as soon as a
 * full match is found, recording the relationship so the search can resume
 * from here later; returns false when the position is exhausted, unwinding
 * the traversal stacks.
 *
 * @param callPos saved traversal state to resume from
 * @return true if a match was (re)found from this position
 */
private boolean traverse(CallPosition callPos) {
  // make everything like it was before we returned previous match
  PatternPosition currentPos = callPos.getPatternPosition();
  PatternRelationship pRel = callPos.getPatternRelationship();
  pRel.mark();
  visitedRels.remove(callPos.getLastVisitedRelationship());
  Node currentNode = currentPos.getCurrentNode();
  Iterator<Relationship> relItr = callPos.getRelationshipIterator();
  while (relItr.hasNext()) {
    Relationship rel = relItr.next();
    if (visitedRels.contains(rel)) {
      continue;
    }
    if (!checkProperties(pRel, rel)) {
      continue;
    }
    Node otherNode = rel.getOtherNode(currentNode);
    PatternNode otherPosition = pRel.getOtherNode(currentPos.getPatternNode());
    // Tentatively take this relationship and recurse into the next pattern
    // position.
    pRel.mark();
    visitedRels.add(rel);
    if (traverse(new PatternPosition(otherNode, otherPosition, pRel, rel, optional), true)) {
      callPos.setLastVisitedRelationship(rel);
      return true;
    }
    // Backtrack: undo the tentative step before trying the next relationship.
    visitedRels.remove(rel);
    pRel.unMark();
  }
  pRel.unMark();
  if (callPos.shouldPopUncompleted()) {
    uncompletedPositions.pop();
  }
  callStack.pop();
  foundElements.pop();
  return false;
}
/**
 * Removes the given repository location from every known hg root and from the
 * repository history, firing a removal notification for each place it was
 * actually found.
 *
 * @param hgRepo the repository location to dispose; must not be null
 */
public void disposeRepository(IHgRepositoryLocation hgRepo) {
  Assert.isNotNull(hgRepo);

  // Detach the location from every root that references it. Set.remove is
  // equality-based, so the original find-then-remove scan was redundant.
  for (HgRoot hgRoot : rootRepos.keySet()) {
    Set<IHgRepositoryLocation> pRepos = rootRepos.get(hgRoot);
    if (pRepos == null) {
      continue;
    }
    boolean removed;
    synchronized (entriesLock) {
      removed = pRepos.remove(hgRepo);
    }
    if (removed) {
      repositoryRemoved(hgRepo);
    }
  }

  // History may hold a distinct-but-equal instance; notify listeners with the
  // stored instance, as the original did.
  IHgRepositoryLocation stored = null;
  synchronized (repoHistory) {
    for (IHgRepositoryLocation loc : repoHistory) {
      if (loc.equals(hgRepo)) {
        // Safe during iteration because we break immediately after removing.
        repoHistory.remove(loc);
        stored = loc;
        break;
      }
    }
  }
  if (stored != null) {
    repositoryRemoved(stored);
  }
}