/** java.util.LinkedHashSet#remove(java.lang.Object) */ public void test_removeLjava_lang_Object() { // Test for method boolean // java.util.LinkedHashSet.remove(java.lang.Object) int size = hs.size(); hs.remove(new Integer(98)); assertTrue("Failed to remove element", !hs.contains(new Integer(98))); assertTrue("Failed to decrement set size", hs.size() == size - 1); LinkedHashSet s = new LinkedHashSet(); s.add(null); assertTrue("Cannot handle null", s.remove(null)); }
public boolean addOperator(Operator op) { final String nameTypeKey = op.name + op.type; Operator matchingOp = nameTypeToKey.get(nameTypeKey); if (matchingOp != null) super.remove(matchingOp); // removes found match from the main list nameTypeToKey.put(nameTypeKey, op); // writes over found match in nameTypeToKey map return super.add(op); // adds new operator to the main list }
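To make the replace-on-add behaviour above concrete, here is a self-contained sketch (the OperatorListSketch class, the Op record, and OpList are hypothetical names, not taken from the original code) of a list that keeps at most one element per key: adding an element whose name+type key is already present removes the old entry from the backing list before appending the new one, mirroring what addOperator does.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;

final class OperatorListSketch {
  // Hypothetical operator: only name and type take part in the uniqueness key.
  record Op(String name, String type, String version) {}

  static final class OpList extends ArrayList<Op> {
    private final Map<String, Op> byNameType = new HashMap<>();

    boolean addOperator(Op op) {
      String key = op.name() + op.type();
      Op previous = byNameType.get(key);
      if (previous != null) {
        super.remove(previous); // removes the stale match from the main list
      }
      byNameType.put(key, op); // writes over the previous match in the key map
      return super.add(op); // appends the new operator to the main list
    }
  }

  public static void main(String[] args) {
    OpList ops = new OpList();
    ops.addOperator(new Op("plus", "binary", "v1"));
    ops.addOperator(new Op("plus", "binary", "v2")); // same name+type: replaces v1
    System.out.println(ops); // prints [Op[name=plus, type=binary, version=v2]]
  }
}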
/** * Method to remove an element from the List * * @param element The element to remove * @param allowCascadeDelete Whether cascade-delete of the element is allowed * @return Whether it was removed successfully. */ public boolean remove(Object element, boolean allowCascadeDelete) { boolean success = delegate.remove(element); if (ownerOP != null && ownerOP.getExecutionContext().getManageRelations()) { ownerOP .getExecutionContext() .getRelationshipManager(ownerOP) .relationRemove(ownerMmd.getAbsoluteFieldNumber(), element); } if (ownerOP != null && allowCascadeDelete) { // Cascade delete if (SCOUtils.useQueuedUpdate(ownerOP)) { // Queue the cascade delete ownerOP .getExecutionContext() .addOperationToQueue( new CollectionRemoveOperation( ownerOP, ownerMmd.getAbsoluteFieldNumber(), element, allowCascadeDelete)); } else if (SCOUtils.hasDependentElement(ownerMmd)) { // Perform the cascade delete ownerOP.getExecutionContext().deleteObjectInternal(element); } } if (success) { makeDirty(); if (ownerOP != null && !ownerOP.getExecutionContext().getTransaction().isActive()) { ownerOP.getExecutionContext().processNontransactionalUpdate(); } } return success; }
/** * Removes the given root file object URI from the set of URIs accessible for the given space, * dropping the space entry entirely when no root URIs remain. * * @param spacePart the data space whose root URI is being removed * @param spaceRootFOUri the root file object URI to remove */ private void removeSpaceRootUri(DataSpacesURI spacePart, String spaceRootFOUri) { LinkedHashSet<String> allRootUris = accessibleFileObjectUris.get(spacePart); allRootUris.remove(spaceRootFOUri); if (allRootUris.isEmpty()) { accessibleFileObjectUris.remove(spacePart); } else { accessibleFileObjectUris.put(spacePart, allRootUris); } }
@Override public void remove(String abbreviation, String actionId) { final List<String> actions = myAbbreviation2ActionId.get(abbreviation); if (actions != null) { actions.remove(actionId); } final LinkedHashSet<String> abbreviations = myActionId2Abbreviations.get(actionId); if (abbreviations != null) { abbreviations.remove(abbreviation); } else { final LinkedHashSet<String> abbrs = myActionId2Abbreviations.get(actionId); if (abbrs != null) { final LinkedHashSet<String> customValues = new LinkedHashSet<String>(abbrs); customValues.remove(abbreviation); myActionId2Abbreviations.put(actionId, customValues); } } }
/** * Remove the given subwindow from this UI. * * <p>Since Vaadin 6.5, {@link Window.CloseListener}s are called also when explicitly removing a * window by calling this method. * * <p>Since Vaadin 6.5, returns a boolean indicating if the window was removed or not. * * @param window Window to be removed. * @return true if the subwindow was removed, false otherwise */ public boolean removeWindow(Window window) { if (!windows.remove(window)) { // Window window is not a subwindow of this UI. return false; } window.setParent(null); markAsDirty(); window.fireClose(); return true; }
/** * A thread-safe method for removing a given custom graphic from this DNodeView. * * @param cg the custom graphic layer to remove * @return true if the custom graphic was found and removed. Returns false if cg is null or is not * a custom graphic associated with this DNodeView. * @since Cytoscape 2.6 */ public boolean removeCustomGraphic(CustomGraphicLayer cg) { boolean retVal = false; synchronized (CG_LOCK) { if (orderedCustomGraphicLayers != null) { retVal = orderedCustomGraphicLayers.remove(cg); graphicsPositions.remove(cg); } } ensureContentChanged(); return retVal; }
// This may not catch 100% of packets, but should get most of them, a small number may end up // being compressed by main thread @SuppressWarnings("unchecked") public void manageChunkQueue(boolean flag1) { List<ChunkCoordIntPair> playerChunkQueue = player.chunkCoordIntPairQueue; try { if (!playerChunkQueue.isEmpty()) { chunkUpdateQueue.addAll(playerChunkQueue); playerChunkQueue.clear(); } } catch (ConcurrentModificationException e) { // seems to be called from a separate thread during teleports (rogue plugins?) } int chunkCompressionThreadSize = ChunkCompressionThread.getPlayerQueueSize(this.player); if (!chunkUpdateQueue.isEmpty() && (lowPriorityCount() + chunkCompressionThreadSize + MapChunkThread.getQueueLength(this.player)) < 4) { ChunkCoordIntPair playerChunk = getPlayerChunk(); Iterator<ChunkCoordIntPair> i = chunkUpdateQueue.iterator(); ChunkCoordIntPair first = i.next(); while (first != null && !activeChunks.contains(first)) { i.remove(); if (i.hasNext()) { first = i.next(); } else { first = null; } } if (first != null) { if (updateCounter.get() > 0) { int cx = playerChunk.x; int cz = playerChunk.z; boolean chunkFound = false; for (int c = 0; c < spiralx.length; c++) { ChunkCoordIntPair testChunk = new ChunkCoordIntPair(spiralx[c] + cx, spiralz[c] + cz); if (chunkUpdateQueue.contains(testChunk)) { first = testChunk; chunkFound = true; break; } } if (!chunkFound) { updateCounter.decrementAndGet(); } } chunkUpdateQueue.remove(first); MapChunkThread.sendPacketMapChunk(first, this.player, this.player.world); sendChunkTiles(first.x, first.z, player); } } }
/** * Removes the given ProcedureInfo, along with its overrides, prefetch queries and parameter * mappings. * * @param procInfo The ProcedureInfo to remove */ protected void removeProcedure(ProcedureInfo procInfo) { m_procedures.remove(procInfo); m_procInfoOverrides.remove(procInfo.name); m_prefetchQueries.remove(procInfo.name); m_paramMappings.remove(procInfo.name); if (LOG.isDebugEnabled()) LOG.debug( "Removed Procedure " + procInfo.name + " from project " + this.project_name.toUpperCase()); }
/** * Ensures that all the functions used in this term are compiled. * * @param callStack Set of objects whose compile methods are parents of this method invocation. * Ordered by invocation order. Used to detect cycles. * @return The number of compilation errors encountered while compiling this term's arguments. */ public int compile(LinkedHashSet callStack) { compiled = true; callStack.add(this); int errors = 0; for (int i = 0; i < args.length; ++i) { errors += args[i].compile(callStack); } // errors += f.compile(callStack); callStack.remove(this); return errors; }
@Override public void flushVariable(@NotNull final DfaVariableValue variable) { List<DfaValue> updatedStack = ContainerUtil.map(myStack, value -> handleFlush(variable, value)); myStack.clear(); for (DfaValue value : updatedStack) { myStack.push(value); } doFlush(variable, false); flushDependencies(variable); myUnknownVariables.remove(variable); myUnknownVariables.removeAll(myFactory.getVarFactory().getAllQualifiedBy(variable)); myCachedHash = null; }
private synchronized void removeTaskAttempt(int volumeId, TaskAttempt taskAttempt) { if (!unassignedTaskForEachVolume.containsKey(volumeId)) return; LinkedHashSet<TaskAttempt> tasks = unassignedTaskForEachVolume.get(volumeId); if (tasks.remove(taskAttempt)) { remainTasksNum.getAndDecrement(); } if (tasks.isEmpty()) { unassignedTaskForEachVolume.remove(volumeId); if (volumeId > REMOTE) { diskVolumeLoads.remove(volumeId); } } }
public void unregister(final Class<?> eventType, final Object target, final Method method) { if (listeners != null) { // find with a null default: returns null (rather than throwing NoSuchElementException) when no listener matches ListenerMethod toBeRemoved = Iterators.find( listeners.iterator(), new Predicate<ListenerMethod>() { @Override public boolean apply(ListenerMethod m) { return m.matches(eventType, target, method); } }, null); if (toBeRemoved != null) { listeners.remove(toBeRemoved); } } }
private static String resolveVariableReplacement( String value, Props props, LinkedHashSet<String> visitedVariables) { StringBuffer buffer = new StringBuffer(); int startIndex = 0; Matcher matcher = VARIABLE_REPLACEMENT_PATTERN.matcher(value); while (matcher.find(startIndex)) { if (startIndex < matcher.start()) { // Copy everything up front to the buffer buffer.append(value.substring(startIndex, matcher.start())); } String subVariable = matcher.group(1); // Detected a cycle if (visitedVariables.contains(subVariable)) { throw new IllegalArgumentException( String.format( "Circular variable substitution found: [%s] -> [%s]", StringUtils.join(visitedVariables, "->"), subVariable)); } else { // Add substitute variable and recurse. String replacement = props.get(subVariable); visitedVariables.add(subVariable); if (replacement == null) { throw new UndefinedPropertyException( String.format( "Could not find variable substitution for variable(s) [%s]", StringUtils.join(visitedVariables, "->"))); } buffer.append(resolveVariableReplacement(replacement, props, visitedVariables)); visitedVariables.remove(subVariable); } startIndex = matcher.end(); } if (startIndex < value.length()) { buffer.append(value.substring(startIndex)); } return buffer.toString(); }
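For illustration only, the self-contained sketch below shows the same cycle-detection technique used by resolveVariableReplacement above: a LinkedHashSet records the variables currently being expanded, so a repeated name both signals a cycle and, thanks to insertion order, prints as a readable substitution chain. The SimpleResolver class, its resolve method, and the plain Map standing in for the Props class are hypothetical names, not part of the original code.

import java.util.LinkedHashSet;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

final class SimpleResolver {
  // Hypothetical stand-in for VARIABLE_REPLACEMENT_PATTERN: matches ${name} references.
  private static final Pattern VAR = Pattern.compile("\\$\\{([A-Za-z_][\\w.]*)\\}");

  static String resolve(String value, Map<String, String> props, LinkedHashSet<String> visited) {
    StringBuilder out = new StringBuilder();
    int start = 0;
    Matcher matcher = VAR.matcher(value);
    while (matcher.find(start)) {
      out.append(value, start, matcher.start());
      String name = matcher.group(1);
      // add() returns false if this variable is already being expanded, i.e. a cycle;
      // the insertion-ordered set doubles as the substitution path for the error message.
      if (!visited.add(name)) {
        throw new IllegalArgumentException(
            "Circular variable substitution: " + String.join(" -> ", visited) + " -> " + name);
      }
      String replacement = props.get(name);
      if (replacement == null) {
        throw new IllegalArgumentException("Undefined variable: " + name);
      }
      out.append(resolve(replacement, props, visited));
      visited.remove(name); // done expanding this variable; siblings may reference it again
      start = matcher.end();
    }
    out.append(value.substring(start));
    return out.toString();
  }

  public static void main(String[] args) {
    Map<String, String> props = Map.of("greeting", "hello ${name}", "name", "world");
    // Prints "hello world"; a map such as {"a": "${b}", "b": "${a}"} would throw instead.
    System.out.println(resolve("${greeting}", props, new LinkedHashSet<>()));
  }
}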
private LinkedHashSet<String> filterAliases( final LinkedHashSet<String> aliasCandidates, final BuildListener listener) { // make sure there is no null aliasCandidates.remove(null); final LinkedHashSet<String> aliases = new LinkedHashSet<String>(aliasCandidates.size()); for (final String aliasCandidate : aliasCandidates) { final FormValidation validation = AliasProvider.validateAlias(aliasCandidate); if (validation != null) { printToConsole(listener, validation.getMessage()); continue; } aliases.add(aliasCandidate); } return aliases; }
/** * Sorts the given files topologically by their revisions. This makes most sense when they all * denote different versions of the same file. For non-comparable revisions, the sort is stable. * Does NOT sort in-place. */ public static List<FileInRevision> sortByRevision(Collection<? extends FileInRevision> toSort) { if (toSort.isEmpty()) { return Collections.emptyList(); } // TODO better handling of multiple different repos final Repository repo = toSort.iterator().next().getRepository(); final LinkedHashSet<Revision> remainingRevisions = new LinkedHashSet<>(); final Multimap<Revision, FileInRevision> filesForRevision = new Multimap<>(); for (final FileInRevision f : toSort) { remainingRevisions.add(f.getRevision()); filesForRevision.put(f.getRevision(), f); } final List<FileInRevision> ret = new ArrayList<>(); while (!remainingRevisions.isEmpty()) { final Revision smallest = repo.getSmallestRevision(remainingRevisions); ret.addAll(filesForRevision.get(smallest)); remainingRevisions.remove(smallest); } return ret; }
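As an illustration of the selection loop above (not taken from the original sources), the sketch below orders plain String/Integer pairs the same way: revisions are collected into an insertion-ordered LinkedHashSet, the smallest remaining revision is repeatedly extracted, and files sharing a revision keep their encounter order. Collections.min stands in for repo.getSmallestRevision, so this version assumes a total order on revisions, whereas the real method only needs the repository's notion of "smallest"; all names here are illustrative.

import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;

final class RevisionOrderSketch {
  // Orders file names by ascending revision; files with equal revisions keep input order.
  static List<String> sortByRevision(LinkedHashMap<String, Integer> revisionOfFile) {
    LinkedHashSet<Integer> remainingRevisions = new LinkedHashSet<>();
    Map<Integer, List<String>> filesForRevision = new LinkedHashMap<>();
    for (Map.Entry<String, Integer> e : revisionOfFile.entrySet()) {
      remainingRevisions.add(e.getValue());
      filesForRevision.computeIfAbsent(e.getValue(), r -> new ArrayList<>()).add(e.getKey());
    }
    List<String> ret = new ArrayList<>();
    while (!remainingRevisions.isEmpty()) {
      Integer smallest = Collections.min(remainingRevisions); // stand-in for getSmallestRevision
      ret.addAll(filesForRevision.get(smallest));
      remainingRevisions.remove(smallest);
    }
    return ret;
  }

  public static void main(String[] args) {
    LinkedHashMap<String, Integer> files = new LinkedHashMap<>();
    files.put("b.txt", 2);
    files.put("a.txt", 1);
    files.put("c.txt", 2);
    // Prints [a.txt, b.txt, c.txt]: revision 1 first, then revision 2 in encounter order.
    System.out.println(sortByRevision(files));
  }
}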
/** * Start a PAMR router and a file system server * * @throws Exception */ @BeforeClass public static void setUp() throws Exception { RouterConfig config = new RouterConfig(); config.setPort(PAMRConfig.PA_NET_ROUTER_PORT.getValue()); router = Router.createAndStart(config); protocolsToTest.remove(CentralPAPropertyRepository.PA_COMMUNICATION_PROTOCOL.getValue()); StringBuilder apstring = new StringBuilder(); for (String p : protocolsToTest) { apstring.append(p + ","); } apstring.deleteCharAt(apstring.length() - 1); CentralPAPropertyRepository.PA_COMMUNICATION_ADDITIONAL_PROTOCOLS.setValue(apstring.toString()); spacesDir = new File(System.getProperty("java.io.tmpdir"), "ProActive SpaceMountManagerTest"); if (server == null) { server = new FileSystemServerDeployer("inputserver", spacesDir.toString(), true, true); System.out.println("Started File Server at " + Arrays.toString(server.getVFSRootURLs())); } }
/** * Un-autobundles notifications that are now grouped by the app. Additionally cancels autobundling * if the status change of this notification resulted in the loose notification count being under * the limit. */ private void maybeUnbundle(StatusBarNotification sbn, boolean notificationGone, int user) { List<String> notificationsToUnAutobundle = new ArrayList<>(); boolean removeSummary = false; synchronized (mUnbundledNotifications) { Map<String, LinkedHashSet<String>> unbundledNotificationsByUser = mUnbundledNotifications.get(sbn.getUserId()); if (unbundledNotificationsByUser == null || unbundledNotificationsByUser.size() == 0) { return; } LinkedHashSet<String> notificationsForPackage = unbundledNotificationsByUser.get(sbn.getPackageName()); if (notificationsForPackage == null || notificationsForPackage.size() == 0) { return; } if (notificationsForPackage.remove(sbn.getKey())) { if (!notificationGone) { // Add the current notification to the unbundling list if it still exists. notificationsToUnAutobundle.add(sbn.getKey()); } // If the status change of this notification has brought the number of loose // notifications back below the limit, remove the summary and un-autobundle. if (notificationsForPackage.size() == AUTOBUNDLE_AT_COUNT - 1) { removeSummary = true; for (String key : notificationsForPackage) { notificationsToUnAutobundle.add(key); } } } } if (notificationsToUnAutobundle.size() > 0) { if (removeSummary) { adjustAutobundlingSummary(sbn.getPackageName(), null, false, user); } adjustNotificationBundling(sbn.getPackageName(), notificationsToUnAutobundle, false, user); } }
/** * This is related to the status change listeners ({@link #statusListeners}). * * <p style="color='ORANGE'">There is no need to override this method. * * @param listener Listener to be removed. * @return {@code true} if the listener was registered and has been removed. */ public boolean removeStatusListener(L listener) { synchronized (statusListeners) { return statusListeners.remove(listener); } }
public void projectClosed(Project project) { projects.remove(project); }
public void focusGained(Project project) { projects.remove(project); projects.add(project); }
/** * Removes the given element id from the picked element list * * @param id a valid element id */ public final void removeElement(String id) { LinkedHashSet<String> elements = (LinkedHashSet<String>) selection.getValue(); elements.remove(id); markDirty(); }
/** * Stop supplying events to the specified listener. * * @param sel Listener to be removed. */ public final void removeSimEventListener(SimEventListener sel) { listeners.remove(sel); }
public synchronized void removeListener(BitcoinPaymentListener listener) { listeners.remove(listener); }
public void disconnect(ActionListener listener) { listeners.remove(listener); }
/** * @param addClearListItem whether the "Clear List" action should be appended to the end of the * returned list of actions * @return actions that reopen recent projects, optionally followed by a separator and the * "Clear List" action */ public AnAction[] getRecentProjectsActions(boolean addClearListItem) { validateRecentProjects(); final Set<String> openedPaths = ContainerUtil.newHashSet(); for (Project openProject : ProjectManager.getInstance().getOpenProjects()) { ContainerUtil.addIfNotNull(openedPaths, getProjectPath(openProject)); } final LinkedHashSet<String> paths; synchronized (myStateLock) { paths = ContainerUtil.newLinkedHashSet(myState.recentPaths); } paths.remove(null); paths.removeAll(openedPaths); ArrayList<AnAction> actions = new ArrayList<AnAction>(); Set<String> duplicates = getDuplicateProjectNames(openedPaths, paths); for (final String path : paths) { final String projectName = getProjectName(path); String displayName; synchronized (myStateLock) { displayName = myState.names.get(path); } if (StringUtil.isEmptyOrSpaces(displayName)) { displayName = duplicates.contains(path) ? path : projectName; } // It's better not to remove non-existent projects. Sometimes projects are stored // on USB sticks or flash cards, and it is nice to have them in the list // when the USB device or SD card is mounted if (new File(path).exists()) { actions.add(new ReopenProjectAction(path, projectName, displayName)); } } if (actions.isEmpty()) { return AnAction.EMPTY_ARRAY; } ArrayList<AnAction> list = new ArrayList<AnAction>(); for (AnAction action : actions) { list.add(action); } if (addClearListItem) { AnAction clearListAction = new AnAction(IdeBundle.message("action.clear.list")) { public void actionPerformed(AnActionEvent e) { final int rc = Messages.showOkCancelDialog( e.getData(PlatformDataKeys.PROJECT), "Would you like to clear the list of recent projects?", "Clear Recent Projects List", Messages.getQuestionIcon()); if (rc == 0) { synchronized (myStateLock) { myState.recentPaths.clear(); } WelcomeFrame.clearRecents(); } } }; list.add(Separator.getInstance()); list.add(clearListAction); } return list.toArray(new AnAction[list.size()]); }
public ArrayList<String> PlayerTrack( String playername, boolean IPdisp, boolean recursive, boolean override, boolean wildcard, boolean geolocate) { ArrayList<String> output = new ArrayList<String>(); PreparedStatement ps = null; ResultSet rs = null; try { ps = conn.prepareStatement( "SELECT `ip` " + "FROM `" + table + "` " + "WHERE LOWER(`accountname`) = ? " + "ORDER BY `time` DESC"); ps.setString(1, playername.toLowerCase()); rs = ps.executeQuery(); LinkedHashSet<String> ips = new LinkedHashSet<String>(); while (rs.next()) { if ((!plugin.untraceableIP.contains(rs.getString("ip"))) || (override)) { ips.add(rs.getString("ip")); if (geolocate) { output.add(rs.getString("ip")); geolocate = false; } } // output.addAll( IPRTrack( rs.getString("ip"), wildcard, IPdisp, recursive, override ) ); } if (ips.size() == 0) return null; java.util.Iterator<String> ips_itr = ips.iterator(); LinkedHashSet<String> names = new LinkedHashSet<String>(); names.add(playername); while (ips_itr.hasNext()) { names.addAll(IPTrack(ips_itr.next(), IPdisp, recursive, override)); } if (recursive) { // OH GOD OH GOD OH GOD LinkedHashSet<String> names_spent = new LinkedHashSet<String>(); names_spent.add(playername); java.util.Iterator<String> names_itr = names.iterator(); while (names_itr.hasNext()) { String thisName = names_itr.next(); if (names_spent.contains(thisName)) continue; names_spent.add(thisName); ArrayList<String> trackThis = PlayerTrack(thisName, IPdisp, false, override, false, false); if (trackThis == null) continue; if (names.addAll(trackThis)) names_itr = names.iterator(); } } LinkedHashSet<String> names_check = new LinkedHashSet<String>(names); java.util.Iterator<String> output_itr = names_check.iterator(); while (output_itr.hasNext()) { String thisName = output_itr.next(); if (thisName.equalsIgnoreCase(playername)) names.remove(thisName); } output.addAll(names); } catch (SQLException ex) { PlayerTracker.log.log(Level.SEVERE, "[P-Tracker] Couldn't execute MySQL statement: ", ex); } return output; }
public static Issue createIssue( final TaskRepository repository, final TaskData taskData, final Set<TaskAttribute> oldAttributes, final Configuration cfg) throws CoreException { final Issue issue = taskData.getTaskId().isEmpty() ? new Issue() : new Issue(RedmineUtil.parseIntegerId(taskData.getTaskId())); final TaskAttribute root = taskData.getRoot(); TaskAttribute taskAttribute = null; /* Default Attributes */ for (final RedmineAttribute redmineAttribute : RedmineAttribute.values()) { if (!redmineAttribute.isOperationValue() && !redmineAttribute.isReadOnly()) { setProperty(redmineAttribute, root, issue); } } /* Watcher */ final TaskAttribute watchersAttribute = root.getAttribute(RedmineAttribute.WATCHERS.getTaskKey()); if (watchersAttribute != null) { final LinkedHashSet<String> watchers = new LinkedHashSet<String>(watchersAttribute.getValues()); final TaskAttribute newWatcherAttribute = watchersAttribute.getAttribute(RedmineAttribute.WATCHERS_ADD.getTaskKey()); if (newWatcherAttribute != null && !newWatcherAttribute.getMetaData().isReadOnly()) { issue.setWatchersAddAllowed(true); for (final String newWatcher : newWatcherAttribute.getValues()) { watchers.add(newWatcher); } } final TaskAttribute oldWatcherAttribute = watchersAttribute.getAttribute(RedmineAttribute.WATCHERS_REMOVE.getTaskKey()); if (oldWatcherAttribute != null && !oldWatcherAttribute.getMetaData().isReadOnly()) { issue.setWatchersDeleteAllowed(true); for (final String oldWatcher : oldWatcherAttribute.getValues()) { watchers.remove(oldWatcher); } } if (watchers.size() > 0) { final int[] watcherIds = new int[watchers.size()]; int lv = 0; for (final String idVal : watchers) { watcherIds[lv++] = Integer.parseInt(idVal); } issue.setWatcherIds(watcherIds); } } /* Custom Attributes */ final int[] customFieldIds = cfg.getProjects() .getById(issue.getProjectId()) .getCustomFieldIdsByTrackerId(issue.getTrackerId()); if (customFieldIds != null && customFieldIds.length > 0) { final CustomValues customValues = new CustomValues(); issue.setCustomValues(customValues); for (final int customFieldId : customFieldIds) { taskAttribute = root.getAttribute(IRedmineConstants.TASK_KEY_PREFIX_ISSUE_CF + customFieldId); if (taskAttribute != null) { if (TaskAttribute.TYPE_MULTI_SELECT.equals(taskAttribute.getMetaData().getType())) { final List<String> values = taskAttribute.getValues(); for (final String value : values) { customValues.setCustomValue( customFieldId, formatCustomValue(value, customFieldId, cfg)); } } else { customValues.setCustomValue( customFieldId, formatCustomValue(taskAttribute.getValue(), customFieldId, cfg)); } } } } /* Operations */ taskAttribute = root.getMappedAttribute(TaskAttribute.OPERATION); if (taskAttribute != null) { final RedmineOperation redmineOperation = RedmineOperation.fromTaskKey(taskAttribute.getValue()); taskAttribute = root.getAttribute(TaskAttribute.PREFIX_OPERATION + taskAttribute.getValue()); if (redmineOperation != null && taskAttribute != null) { String value = null; if (redmineOperation.isAssociated()) { taskAttribute = root.getAttribute(redmineOperation.getInputId()); if (taskAttribute != null) { value = taskAttribute.getValue(); } } else if (redmineOperation.needsRestoreValue()) { value = taskAttribute .getMetaData() .getValue(IRedmineConstants.TASK_ATTRIBUTE_OPERATION_RESTORE); } if (value != null) { final RedmineAttribute redmineAttribute = RedmineAttribute.fromTaskKey(redmineOperation.getInputId()); setProperty(redmineAttribute, root, issue); } } } setTaskKind(issue, cfg, root); return issue; }
public List<Repository> getMappedRepositories( Repository repository, ResourceStoreRequest request, List<Repository> resolvedRepositories) throws NoSuchRepositoryException { if (!compiled) { compile(); } // NEXUS-2852: to make our life easier, we will work with repository IDs, // and will fill the result with Repositories at the end LinkedHashSet<String> reposIdSet = new LinkedHashSet<String>(resolvedRepositories.size()); for (Repository resolvedRepository : resolvedRepositories) { reposIdSet.add(resolvedRepository.getId()); } // for tracking what is applied ArrayList<RepositoryPathMapping> appliedMappings = new ArrayList<RepositoryPathMapping>(); // when the first matching include is found, the initial repository list is cleared and rebuilt boolean firstAdd = true; for (RepositoryPathMapping mapping : blockings) { if (mapping.matches(repository, request)) { if (getLogger().isDebugEnabled()) { getLogger() .debug( "The request path [" + request.toString() + "] is blocked by rule " + mapping.toString()); } return Collections.emptyList(); } } // apply the inclusions, if a match is found // NEXUS-2852: watch to not add multiple times same repository // i.e. you may have different inclusive rules that are triggered by the same request // and contain some repositories. This is now solved using LinkedHashSet and using repo IDs. for (RepositoryPathMapping mapping : inclusions) { if (mapping.matches(repository, request)) { appliedMappings.add(mapping); if (firstAdd) { reposIdSet.clear(); firstAdd = false; } // add only those from the initial resolvedRepositories list that are mapped or not user // managed // (preserve ordering) if (mapping.getMappedRepositories().size() == 1 && "*".equals(mapping.getMappedRepositories().get(0))) { for (Repository repo : resolvedRepositories) { reposIdSet.add(repo.getId()); } } else { for (Repository repo : resolvedRepositories) { if (mapping.getMappedRepositories().contains(repo.getId()) || !repo.isUserManaged()) { reposIdSet.add(repo.getId()); } } } } } // then, if an exclusion is found, remove those repositories for (RepositoryPathMapping mapping : exclusions) { if (mapping.matches(repository, request)) { appliedMappings.add(mapping); if (mapping.getMappedRepositories().size() == 1 && "*".equals(mapping.getMappedRepositories().get(0))) { reposIdSet.clear(); break; } for (String repositoryId : mapping.getMappedRepositories()) { Repository mappedRepository = repositoryRegistry.getRepository(repositoryId); // but only if it is user managed if (mappedRepository.isUserManaged()) { reposIdSet.remove(mappedRepository.getId()); } } } } // store the applied mappings to request context ArrayList<String> appliedMappingsList = new ArrayList<String>(appliedMappings.size()); for (RepositoryPathMapping mapping : appliedMappings) { appliedMappingsList.add(mapping.toString()); } request.addAppliedMappingsList(repository, appliedMappingsList); // log it if needed if (getLogger().isDebugEnabled()) { if (appliedMappings.isEmpty()) { getLogger().debug("No mapping exists for request path [" + request.toString() + "]"); } else { StringBuilder sb = new StringBuilder( "Request for path \"" + request.toString() + "\" with the initial list of processable repositories of \"" + ResourceStoreUtils.getResourceStoreListAsString(resolvedRepositories) + "\" got these mappings applied:\n"); for (RepositoryPathMapping mapping : appliedMappings) { sb.append(" * ").append(mapping.toString()).append("\n"); } getLogger().debug(sb.toString()); if (reposIdSet.size() == 0) { getLogger() .debug( "Mapping for path [" + request.toString() + "] excluded all storages from servicing the request."); } else {
getLogger() .debug( "Request path for [" + request.toString() + "] is MAPPED to reposes: " + reposIdSet); } } } ArrayList<Repository> result = new ArrayList<Repository>(reposIdSet.size()); try { for (String repoId : reposIdSet) { result.add(repositoryRegistry.getRepository(repoId)); } } catch (NoSuchRepositoryException e) { getLogger() .error( "Some of the Routes contains references to non-existant repositories! Please check the following mappings: \"" + appliedMappingsList.toString() + "\"."); throw e; } return result; }
public void removeChunk(int x, int z) { activeChunks.remove(convertToKey(x, z)); }