public ResolvedValueProducer declareTaskProducing(final ValueSpecification valueSpecification, final ResolveTask task,
    final ResolvedValueProducer producer) {
  do {
    final MapEx<ResolveTask, ResolvedValueProducer> tasks = getBuilder().getOrCreateTasks(valueSpecification);
    ResolvedValueProducer result = null;
    if (tasks != null) {
      ResolvedValueProducer discard = null;
      synchronized (tasks) {
        if (!tasks.isEmpty()) {
          if (tasks.containsKey(null)) {
            continue;
          }
          final Map.Entry<ResolveTask, ResolvedValueProducer> resolveTask = tasks.getHashEntry(task);
          if (resolveTask != null) {
            if (resolveTask.getKey() == task) {
              // Replace an earlier attempt from this task with the new producer
              discard = resolveTask.getValue();
              producer.addRef(); // The caller already holds an open reference
              resolveTask.setValue(producer);
              result = producer;
            } else {
              // An equivalent task is doing the work
              result = resolveTask.getValue();
            }
            result.addRef(); // Either the caller holds an open reference on the producer, or we've got the task lock
          }
        }
        if (result == null) {
          final ResolvedValue value = getBuilder().getResolvedValue(valueSpecification);
          if (value != null) {
            result = new SingleResolvedValueProducer(task.getValueRequirement(), value);
          }
        }
        if (result == null) {
          // No matching tasks
          producer.addRef(); // Caller already holds open reference
          tasks.put(task, producer);
          result = producer;
          result.addRef(); // Caller already holds open reference (this is the producer)
        }
      }
      if (discard != null) {
        discard.release(this);
      }
    } else {
      final ResolvedValue value = getBuilder().getResolvedValue(valueSpecification);
      if (value != null) {
        result = new SingleResolvedValueProducer(task.getValueRequirement(), value);
      }
    }
    return result;
  } while (true);
}
@SuppressWarnings("rawtypes") public void checkMapResolve(Map object, Object fieldName) { if (resolveStatus == NeedToResolve) { ResolveFieldDeserializer fieldResolver = new ResolveFieldDeserializer(object, fieldName); ResolveTask task = getLastResolveTask(); task.fieldDeserializer = fieldResolver; task.ownerContext = context; setResolveStatus(DefaultJSONParser.NONE); } }
@SuppressWarnings("rawtypes") public void checkMapResolve(Map object, String fieldName) { if (resolveStatus == NeedToResolve) { MapResolveFieldDeserializer fieldResolver = new MapResolveFieldDeserializer(object, fieldName); ResolveTask task = getLastResolveTask(); task.setFieldDeserializer(fieldResolver); task.setOwnerContext(context); setResolveStatus(DefaultJSONParser.NONE); } }
@SuppressWarnings("rawtypes") public void checkListResolve(Collection array) { if (resolveStatus == NeedToResolve) { final int index = array.size() - 1; final List list = (List) array; ResolveTask task = getLastResolveTask(); task.setFieldDeserializer(new ListResolveFieldDeserializer(this, list, index)); task.setOwnerContext(context); setResolveStatus(DefaultJSONParser.NONE); } }
/**
 * Schedule the task for execution.
 *
 * @param runnable task to execute, not null
 */
public void run(final ResolveTask runnable) {
  // Run inline unless the stack is full, or the task attempts to defer execution. Only run if the
  // task hasn't been discarded (i.e. no-one is going to consume its results).
  if (runnable.addRef()) {
    // Added a reference for the run-queue (which will be removed by tryRun)
    if ((++_stackDepth > MAX_CALLBACK_DEPTH) || !runnable.tryRun(this)) {
      submit(runnable);
    }
    _stackDepth--;
  }
}
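// Illustrative sketch (not part of the original sources): run() above bounds inline recursion with
// a depth counter and falls back to a queue when the stack gets too deep. The names WorkQueue,
// MAX_INLINE_DEPTH and the executor field are hypothetical assumptions introduced for this example.
final class WorkQueue {
  private static final int MAX_INLINE_DEPTH = 16; // assumed limit, analogous to MAX_CALLBACK_DEPTH
  private final java.util.concurrent.ExecutorService executor =
      java.util.concurrent.Executors.newSingleThreadExecutor();
  private int depth; // per-instance counter used from a single thread, like _stackDepth above

  void run(final Runnable task) {
    if (++depth > MAX_INLINE_DEPTH) {
      executor.submit(task); // too deep: defer to the queue instead of recursing further
    } else {
      task.run(); // shallow enough: execute inline and avoid the queue overhead
    }
    depth--;
  }
}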
public ResolveTask getOrCreateTaskResolving(final ValueRequirement valueRequirement, final ResolveTask parentTask,
    final Collection<FunctionExclusionGroup> functionExclusion) {
  final ResolveTask newTask = new ResolveTask(valueRequirement, parentTask, functionExclusion);
  do {
    ResolveTask task;
    final Map<ResolveTask, ResolveTask> tasks = getBuilder().getOrCreateTasks(valueRequirement);
    synchronized (tasks) {
      if (tasks.containsKey(null)) {
        // The cache has been flushed
        continue;
      }
      task = tasks.get(newTask);
      if (task == null) {
        newTask.addRef(); // Already got a reference, increment for the collection
        tasks.put(newTask, newTask);
      } else {
        task.addRef(); // Got the task lock, increment so we can return it
      }
    }
    if (task != null) {
      s_logger.debug("Using existing task {}", task);
      newTask.release(this); // Discard local allocation
      return task;
    } else {
      getBuilder().incrementActiveResolveTasks();
      // Don't call run; we want to fork this out to a new worker thread, never call inline
      newTask.addRef(); // Reference held by the run queue
      submit(newTask);
      return newTask;
    }
  } while (true);
}
private ResolveTask[] getTasksResolving(final ValueRequirement valueRequirement) {
  do {
    final ResolveTask[] result;
    final Map<ResolveTask, ResolveTask> tasks = getBuilder().getTasks(valueRequirement);
    if (tasks == null) {
      return null;
    }
    synchronized (tasks) {
      if (tasks.containsKey(null)) {
        // The cache has been flushed
        continue;
      }
      result = new ResolveTask[tasks.size()];
      int i = 0;
      for (final ResolveTask task : tasks.keySet()) {
        result[i++] = task;
        task.addRef(); // Got the task lock
      }
    }
    return result;
  } while (true);
}
public void discardTask(final ResolveTask task) {
  // TODO: Could we post "discardTask" tasks to a queue and have them done in batches by a ContextRunnable?
  do {
    final Map<ResolveTask, ResolveTask> tasks = getBuilder().getTasks(task.getValueRequirement());
    if (tasks == null) {
      return;
    }
    synchronized (tasks) {
      if (tasks.containsKey(null)) {
        continue;
      }
      final int rc = task.getRefCount();
      if (rc == 0) {
        // Not referenced by us by definition
        return;
      }
      if (rc != 1) {
        if (!task.isFinished()) {
          // Can't discard this -- something might be waiting on a result from it
          return;
        }
      }
      final ResolveTask removed = tasks.remove(task);
      if (removed == null) {
        // Task has already been discarded
        return;
      }
      if (removed != task) {
        // Task has already been discarded and replaced by an equivalent; don't discard that
        tasks.put(removed, removed);
        return;
      }
    }
    task.release(this);
    getBuilder().decrementActiveResolveTasks();
  } while (true);
}
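// Illustrative sketch (an assumption, not taken from the original code base): the methods above all
// follow the same concurrency pattern - fetch the per-requirement map, synchronize on it, and retry
// the outer loop if a null sentinel key shows the map was flushed while we weren't holding the lock.
// SentinelCache and its members are hypothetical names used only for this example.
final class SentinelCache<K, V> {
  private final java.util.concurrent.ConcurrentMap<K, java.util.HashMap<V, V>> cache =
      new java.util.concurrent.ConcurrentHashMap<>();

  private java.util.HashMap<V, V> getOrCreate(final K key) {
    return cache.computeIfAbsent(key, k -> new java.util.HashMap<>());
  }

  V putIfAbsent(final K key, final V value) {
    do {
      final java.util.HashMap<V, V> entries = getOrCreate(key);
      synchronized (entries) {
        if (entries.containsKey(null)) {
          continue; // flushed under us: the null sentinel marks a dead map, so fetch a fresh one
        }
        final V existing = entries.get(value);
        if (existing != null) {
          return existing; // an equivalent entry is already registered
        }
        entries.put(value, value);
        return value;
      }
    } while (true);
  }

  void flush(final K key) {
    final java.util.HashMap<V, V> entries = cache.remove(key);
    if (entries != null) {
      synchronized (entries) {
        entries.put(null, null); // mark as dead so concurrent users retry against a fresh map
      }
    }
  }
}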
@SuppressWarnings("unchecked") protected void abortLoops() { s_logger.debug("Checking for tasks to abort"); s_abortLoops.begin(); try { final Collection<ResolveTask> toCheck = new ArrayList<ResolveTask>(); for (final MapEx<ResolveTask, ResolvedValueProducer> tasks : _specifications.values()) { synchronized (tasks) { if (!tasks.containsKey(null)) { toCheck.addAll((Collection<ResolveTask>) tasks.keySet()); } } } for (final Map<ResolveTask, ResolveTask> tasks : _requirements.values()) { synchronized (tasks) { if (!tasks.containsKey(null)) { toCheck.addAll(tasks.keySet()); } } } final GraphBuildingContext context = new GraphBuildingContext(this); int cancelled = 0; final Map<Chain, Chain.LoopState> checked = Maps.newHashMapWithExpectedSize(toCheck.size()); for (ResolveTask task : toCheck) { cancelled += task.cancelLoopMembers(context, checked); } getContext().mergeThreadContext(context); if (s_logger.isInfoEnabled()) { if (cancelled > 0) { s_logger.info("Cancelled {} looped task(s)", cancelled); } else { s_logger.info("No looped tasks to cancel"); } } } finally { s_abortLoops.end(); } }
@SuppressWarnings("rawtypes") public void checkListResolve(Collection array) { if (resolveStatus == NeedToResolve) { if (array instanceof List) { final int index = array.size() - 1; final List list = (List) array; ResolveTask task = getLastResolveTask(); task.fieldDeserializer = new ResolveFieldDeserializer(this, list, index); task.ownerContext = context; setResolveStatus(DefaultJSONParser.NONE); } else { ResolveTask task = getLastResolveTask(); task.fieldDeserializer = new ResolveFieldDeserializer(array); task.ownerContext = context; setResolveStatus(DefaultJSONParser.NONE); } } }
public ResolvedValueProducer resolveRequirement(final ValueRequirement rawRequirement, final ResolveTask dependent,
    final Collection<FunctionExclusionGroup> functionExclusion) {
  final ValueRequirement requirement = simplifyType(rawRequirement);
  s_logger.debug("Resolve requirement {}", requirement);
  if ((dependent != null) && dependent.hasParent(requirement)) {
    s_logger.debug("Can't introduce a ValueRequirement loop");
    return null;
  }
  RequirementResolver resolver = null;
  final ResolveTask[] tasks = getTasksResolving(requirement);
  if (tasks != null) {
    int i = 0;
    int l = tasks.length;
    while (i < l) {
      final ResolveTask task = tasks[i];
      if ((dependent == null) || !dependent.hasParent(task)) {
        if ((task.isFinished() && !task.wasRecursionDetected())
            || (ObjectUtils.equals(functionExclusion, task.getFunctionExclusion()) && task.hasParentValueRequirements(dependent))) {
          // The task we've found has either already completed without hitting a recursion constraint,
          // or it is identical to the fallback task we'd create naturally. In either case, release
          // everything else and use it.
          for (int j = 0; j < i; j++) {
            tasks[j].release(this);
          }
          for (int j = i + 1; j < l; j++) {
            tasks[j].release(this);
          }
          return task;
        }
        i++;
      } else {
        task.release(this);
        tasks[i] = tasks[--l];
      }
    }
    // Anything left in the array is suitable for use in a RequirementResolver
    if (l > 0) {
      resolver = new RequirementResolver(requirement, dependent, functionExclusion);
      if (l != tasks.length) {
        resolver.setTasks(this, Arrays.copyOf(tasks, l));
      } else {
        resolver.setTasks(this, tasks);
      }
      for (i = 0; i < l; i++) {
        tasks[i].release(this);
      }
    }
  }
  if (resolver != null) {
    resolver.start(this);
    return resolver;
  } else {
    s_logger.debug("Using direct resolution {}/{}", requirement, dependent);
    return getOrCreateTaskResolving(requirement, dependent, functionExclusion);
  }
}
/**
 * Flushes data that is unlikely to be needed again from the resolution caches. Anything discarded
 * will either never be needed again for any pending resolutions, or is a cached production that
 * can be recalculated if necessary. Discarding can be a multi-stage process: repeated calls, for
 * as long as this method returns true, are needed to flush all possible state and make as much
 * memory as possible available to the garbage collector.
 *
 * @return true if one or more states were discarded, false if there was nothing that could be discarded
 */
@SuppressWarnings("unchecked")
protected boolean flushCachedStates() {
  // TODO: use heuristics to throw data away more sensibly (e.g. LRU)
  int removed = 0;
  final List<ResolvedValueProducer> discards = new ArrayList<ResolvedValueProducer>();
  GraphBuildingContext context = null;
  final Iterator<MapEx<ResolveTask, ResolvedValueProducer>> itrSpecifications = _specifications.values().iterator();
  while (itrSpecifications.hasNext()) {
    final MapEx<ResolveTask, ResolvedValueProducer> producers = itrSpecifications.next();
    synchronized (producers) {
      if (producers.containsKey(null)) {
        continue;
      }
      final Iterator<ResolvedValueProducer> itrProducer = producers.values().iterator();
      while (itrProducer.hasNext()) {
        final ResolvedValueProducer producer = itrProducer.next();
        if (producer.getRefCount() == 1) {
          discards.add(producer);
          itrProducer.remove();
          removed++;
        }
      }
      if (producers.isEmpty()) {
        itrSpecifications.remove();
        producers.put(null, null);
      }
    }
    if (!discards.isEmpty()) {
      if (context == null) {
        context = new GraphBuildingContext(this);
      }
      for (final ResolvedValueProducer discard : discards) {
        discard.release(context);
      }
      discards.clear();
    }
  }
  // Unfinished resolve tasks will be removed from the _requirements cache when their refCount hits 1
  // (i.e. the cache holds the only reference). Finished ones are kept, but should be released when
  // we are low on memory.
  final Iterator<Map<ResolveTask, ResolveTask>> itrRequirements = _requirements.values().iterator();
  while (itrRequirements.hasNext()) {
    final Map<ResolveTask, ResolveTask> tasks = itrRequirements.next();
    synchronized (tasks) {
      if (tasks.containsKey(null)) {
        continue;
      }
      final Iterator<ResolveTask> itrTask = tasks.keySet().iterator();
      while (itrTask.hasNext()) {
        final ResolveTask task = itrTask.next();
        if (task.getRefCount() == 1) {
          discards.add(task);
          itrTask.remove();
          removed++;
        }
      }
      if (tasks.isEmpty()) {
        itrRequirements.remove();
        tasks.put(null, null);
      }
    }
    if (!discards.isEmpty()) {
      if (context == null) {
        context = new GraphBuildingContext(this);
      }
      for (final ResolvedValueProducer discard : discards) {
        discard.release(context);
      }
      discards.clear();
    }
  }
  if (context != null) {
    getContext().mergeThreadContext(context);
  }
  if (s_logger.isInfoEnabled()) {
    if (removed > 0) {
      s_logger.info("Discarded {} production task(s)", removed);
    } else {
      s_logger.info("No production tasks to discard");
    }
  }
  return removed > 0;
}
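// Illustrative usage sketch (an assumption, not from the original sources): the Javadoc above notes
// that discarding may take several passes, so a low-memory response would call flushCachedStates()
// repeatedly until it reports nothing more can be discarded. The method name onMemoryPressure is
// hypothetical; it is written as if it were another member of the same builder class.
protected void onMemoryPressure() {
  int passes = 0;
  while (flushCachedStates()) {
    passes++; // each pass may drop references that make more state discardable on the next pass
  }
  s_logger.debug("Flushed cached resolution state in {} pass(es)", passes);
}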