public void discardTask(final ResolveTask task) { // TODO: Could we post "discardTask" tasks to a queue and have them done in batches by a // ContextRunnable? do { final Map<ResolveTask, ResolveTask> tasks = getBuilder().getTasks(task.getValueRequirement()); if (tasks == null) { return; } synchronized (tasks) { if (tasks.containsKey(null)) { continue; } final int rc = task.getRefCount(); if (rc == 0) { // Not referenced by us by definition return; } if (rc != 1) { if (!task.isFinished()) { // Can't discard this -- something might be waiting on a result from it?? return; } } final ResolveTask removed = tasks.remove(task); if (removed == null) { // Task has already been discarded return; } if (removed != task) { // Task has already been discarded and replaced by an equivalent; don't discard that tasks.put(removed, removed); return; } } task.release(this); getBuilder().decrementActiveResolveTasks(); } while (true); }
/**
 * Flushes data that is unlikely to be needed again from the resolution caches. Anything discarded
 * will either never be needed again for any pending resolutions, or is a cached production that
 * can be recalculated if necessary. Discards can be a multiple stage process - repeated calls all
 * the while this function returns true must be used to flush all possible state and make as much
 * memory available as possible for the garbage collector.
 *
 * @return true if one or more states were discarded, false if there was nothing that can be
 *     discarded
 */
@SuppressWarnings("unchecked")
protected boolean flushCachedStates() {
  // TODO: use heuristics to throw data away more sensibly (e.g. LRU)
  int discardCount = 0;
  final List<ResolvedValueProducer> pending = new ArrayList<ResolvedValueProducer>();
  // Created lazily -- only pay for a context if something actually gets released
  GraphBuildingContext flushContext = null;
  final Iterator<MapEx<ResolveTask, ResolvedValueProducer>> specIterator =
      _specifications.values().iterator();
  while (specIterator.hasNext()) {
    final MapEx<ResolveTask, ResolvedValueProducer> producerMap = specIterator.next();
    synchronized (producerMap) {
      if (producerMap.containsKey(null)) {
        // Null key marks a retired map; skip it
        continue;
      }
      final Iterator<ResolvedValueProducer> producerIterator = producerMap.values().iterator();
      while (producerIterator.hasNext()) {
        final ResolvedValueProducer candidate = producerIterator.next();
        if (candidate.getRefCount() == 1) {
          // Only the cache holds this producer; queue it for release
          pending.add(candidate);
          producerIterator.remove();
          discardCount++;
        }
      }
      if (producerMap.isEmpty()) {
        // Retire the now-empty map and flag it with the null-key marker
        specIterator.remove();
        producerMap.put(null, null);
      }
    }
    // Release outside the map's lock
    if (!pending.isEmpty()) {
      if (flushContext == null) {
        flushContext = new GraphBuildingContext(this);
      }
      for (final ResolvedValueProducer producer : pending) {
        producer.release(flushContext);
      }
      pending.clear();
    }
  }
  // Unfinished resolveTasks will be removed from the _requirements cache when their refCount hits
  // 1 (the cache only). Finished ones are kept, but should be released when we are low on memory.
  final Iterator<Map<ResolveTask, ResolveTask>> reqIterator = _requirements.values().iterator();
  while (reqIterator.hasNext()) {
    final Map<ResolveTask, ResolveTask> taskMap = reqIterator.next();
    synchronized (taskMap) {
      if (taskMap.containsKey(null)) {
        // Null key marks a retired map; skip it
        continue;
      }
      final Iterator<ResolveTask> taskIterator = taskMap.keySet().iterator();
      while (taskIterator.hasNext()) {
        final ResolveTask candidateTask = taskIterator.next();
        if (candidateTask.getRefCount() == 1) {
          // Only the cache holds this task; queue it for release
          pending.add(candidateTask);
          taskIterator.remove();
          discardCount++;
        }
      }
      if (taskMap.isEmpty()) {
        // Retire the now-empty map and flag it with the null-key marker
        reqIterator.remove();
        taskMap.put(null, null);
      }
    }
    // Release outside the map's lock
    if (!pending.isEmpty()) {
      if (flushContext == null) {
        flushContext = new GraphBuildingContext(this);
      }
      for (final ResolvedValueProducer producer : pending) {
        producer.release(flushContext);
      }
      pending.clear();
    }
  }
  if (flushContext != null) {
    getContext().mergeThreadContext(flushContext);
  }
  if (s_logger.isInfoEnabled()) {
    if (discardCount > 0) {
      s_logger.info("Discarded {} production task(s)", discardCount);
    } else {
      s_logger.info("No production tasks to discard");
    }
  }
  return discardCount > 0;
}