    /** Enqueue the specified job */
    public void addJob(Job job) {
        if (job == null || !_alive) return;

        // This does nothing
        //if (job instanceof JobImpl)
        //    ((JobImpl)job).addedToQueue();

        long numReady = 0;
        boolean alreadyExists = false;
        boolean dropped = false;
        // getNext() is now outside the jobLock, is that ok?
        synchronized (_jobLock) {
            if (_readyJobs.contains(job))
                alreadyExists = true;
            numReady = _readyJobs.size();
            if (!alreadyExists) {
                //if (_timedJobs.contains(job))
                //    alreadyExists = true;
                // Always remove and re-add, since it needs to be
                // re-sorted in the TreeSet.
                boolean removed = _timedJobs.remove(job);
                if (removed && _log.shouldLog(Log.WARN))
                    _log.warn("Rescheduling job: " + job);
            }

            if ((!alreadyExists) && shouldDrop(job, numReady)) {
                job.dropped();
                dropped = true;
            } else {
                if (!alreadyExists) {
                    if (job.getTiming().getStartAfter() <= _context.clock().now()) {
                        // don't skew us - its 'start after' is when it was queued, or later
                        job.getTiming().setStartAfter(_context.clock().now());
                        if (job instanceof JobImpl)
                            ((JobImpl)job).madeReady();
                        _readyJobs.offer(job);
                    } else {
                        _timedJobs.add(job);
                        // only notify for _timedJobs, as _readyJobs does not use that lock
                        // only notify if sooner, to reduce contention
                        if (job.getTiming().getStartAfter() < _nextPumperRun)
                            _jobLock.notifyAll();
                    }
                }
            }
        }

        _context.statManager().addRateData("jobQueue.readyJobs", numReady, 0);
        if (dropped) {
            _context.statManager().addRateData("jobQueue.droppedJobs", 1, 0);
            _log.logAlways(Log.WARN, "Dropping job due to overload! # ready jobs: "
                           + numReady + ": job = " + job);
        }
    }
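    /*
     * Usage sketch (illustrative only, not part of this class): a caller
     * typically subclasses JobImpl, sets the start time, and hands the job to
     * addJob(). The class name MyCleanupJob below is hypothetical; the
     * JobImpl/RouterContext/JobTiming calls are assumed from the standard
     * I2P router API.
     *
     *   class MyCleanupJob extends JobImpl {
     *       public MyCleanupJob(RouterContext ctx) {
     *           super(ctx);
     *           // start roughly 30 seconds from now, so addJob() files this
     *           // under _timedJobs rather than _readyJobs
     *           getTiming().setStartAfter(ctx.clock().now() + 30*1000);
     *       }
     *       public String getName() { return "My cleanup"; }
     *       public void runJob() {
     *           // ... do the work, then optionally schedule another pass
     *           requeue(30*1000);
     *       }
     *   }
     *
     *   // somewhere with a RouterContext in scope:
     *   // ctx.jobQueue().addJob(new MyCleanupJob(ctx));
     */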
        public void run() {
            try {
                while (_alive) {
                    long now = _context.clock().now();
                    long timeToWait = -1;
                    try {
                        synchronized (_jobLock) {
                            Job lastJob = null;
                            long lastTime = Long.MIN_VALUE;
                            for (Iterator<Job> iter = _timedJobs.iterator(); iter.hasNext(); ) {
                                Job j = iter.next();
                                // find jobs due to start before now
                                long timeLeft = j.getTiming().getStartAfter() - now;
                                if (lastJob != null && lastTime > j.getTiming().getStartAfter()) {
                                    _log.error("Job " + lastJob + " out of order with job " + j +
                                               " difference of " +
                                               DataHelper.formatDuration(lastTime - j.getTiming().getStartAfter()));
                                }
                                lastJob = j;
                                lastTime = lastJob.getTiming().getStartAfter();
                                if (timeLeft <= 0) {
                                    if (j instanceof JobImpl)
                                        ((JobImpl)j).madeReady();
                                    _readyJobs.offer(j);
                                    iter.remove();
                                } else {
                                    //if ( (timeToWait <= 0) || (timeLeft < timeToWait) )
                                    // _timedJobs is now a TreeSet, so once we hit one that is
                                    // not ready yet, we can break
                                    // NOTE: By not going through the whole thing, a single job changing
                                    // setStartAfter() to some far-away time, without
                                    // calling addJob(), could clog the whole queue forever.
                                    // Hopefully nobody does that, and as a backup, we hope
                                    // that the TreeSet will eventually resort it from other addJob() calls.
                                    timeToWait = timeLeft;
                                    // failsafe - remove and re-add, peek at the next job,
                                    // break and go around again
                                    if (timeToWait > 10*1000 && iter.hasNext()) {
                                        if (_log.shouldLog(Log.INFO))
                                            _log.info("Failsafe re-sort job " + j +
                                                      " with delay " + DataHelper.formatDuration(timeToWait));
                                        iter.remove();
                                        Job nextJob = iter.next();
                                        _timedJobs.add(j);
                                        long nextTimeLeft = nextJob.getTiming().getStartAfter() - now;
                                        if (timeToWait > nextTimeLeft) {
                                            _log.error("Job " + j + " out of order with job " + nextJob +
                                                       " difference of " +
                                                       DataHelper.formatDuration(timeToWait - nextTimeLeft));
                                            timeToWait = Math.max(10, nextTimeLeft);
                                        }
                                    }
                                    break;
                                }
                            }
                            if (timeToWait < 0)
                                timeToWait = 1000;
                            else if (timeToWait < 10)
                                timeToWait = 10;
                            else if (timeToWait > 10*1000)
                                timeToWait = 10*1000;
                            //if (_log.shouldLog(Log.DEBUG))
                            //    _log.debug("Waiting " + timeToWait + " before rechecking the timed queue");
                            _nextPumperRun = _context.clock().now() + timeToWait;
                            _jobLock.wait(timeToWait);
                        } // synchronized (_jobLock)
                    } catch (InterruptedException ie) {}
                } // while (_alive)
            } catch (Throwable t) {
                _context.clock().removeUpdateListener(this);
                if (_log.shouldLog(Log.ERROR))
                    _log.error("wtf, pumper killed", t);
            }
        }
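    /*
     * Why the remove-and-re-add dance above matters: _timedJobs is a TreeSet,
     * so a job's position is fixed by the comparator at insertion time; if a
     * job's setStartAfter() changes afterwards, the set will not re-sort it on
     * its own. A sketch of the kind of comparator involved is below (assumed,
     * the real ordering may differ); the tie-breaker on a unique job id keeps
     * distinct jobs with identical start times from being collapsed by the set.
     *
     *   private static class JobComparator implements Comparator<Job>, Serializable {
     *       public int compare(Job l, Job r) {
     *           long d = l.getTiming().getStartAfter() - r.getTiming().getStartAfter();
     *           if (d < 0) return -1;
     *           if (d > 0) return 1;
     *           // equal start times: fall back to a unique, stable id
     *           long id = l.getJobId() - r.getJobId();
     *           if (id < 0) return -1;
     *           if (id > 0) return 1;
     *           return 0;
     *       }
     *   }
     */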