/**
 * Removes the cache entry for the job identified by the given history
 * file info.
 *
 * Fix: the debug guard previously called {@code LogGlobal.isDebugEnabled()}
 * while logging through a non-standard fluent call
 * ({@code LOG.removing_from_cache(...).debug()}); both are inconsistent
 * with the {@code LOG.isDebugEnabled()} / {@code LOG.debug(...)} idiom used
 * by every other method in this class. Restored the standard idiom.
 *
 * @param fileInfo the history file info whose job id keys the entry to drop
 */
public void delete(HistoryFileInfo fileInfo) {
  if (LOG.isDebugEnabled()) {
    LOG.debug("Removing from cache " + fileInfo);
  }
  cache.remove(fileInfo.getJobId());
}
/**
 * Inserts fileInfo into the cache if no entry for its job id exists yet,
 * then enforces the size bound: while the cache exceeds maxSize, entries
 * are scanned in key order and removed once their history files have been
 * moved to done — or deleted outright when the move failed and the job
 * finished more than maxAge ago.
 *
 * @param fileInfo the history file info to add
 * @return the value previously cached under the same job id, or null if
 *         this call inserted fileInfo
 */
public HistoryFileInfo addIfAbsent(HistoryFileInfo fileInfo) {
  JobId jobId = fileInfo.getJobId();
  if (LOG.isDebugEnabled()) {
    LOG.debug("Adding " + jobId + " to job list cache with "
        + fileInfo.getJobIndexInfo());
  }
  // putIfAbsent: under concurrent inserts for the same job only one wins;
  // the existing value (if any) is returned to the caller unchanged.
  HistoryFileInfo old = cache.putIfAbsent(jobId, fileInfo);
  if (cache.size() > maxSize) {
    // There is a race here, where more than one thread could be trying to
    // remove entries. This could result in too many entries being removed
    // from the cache. This is considered OK as the size of the cache
    // should be rather large, and we would rather have performance over
    // keeping the cache size exactly at the maximum.
    Iterator<JobId> keys = cache.navigableKeySet().iterator();
    // Entries whose move to done failed become eligible for deletion only
    // once their finish time is older than this cutoff.
    long cutoff = System.currentTimeMillis() - maxAge;
    while (cache.size() > maxSize && keys.hasNext()) {
      JobId key = keys.next();
      HistoryFileInfo firstValue = cache.get(key);
      if (firstValue != null) {
        // Lock the entry so its move/delete state cannot change underneath
        // the checks below.
        synchronized (firstValue) {
          if (firstValue.isMovePending()) {
            if (firstValue.didMoveFail() &&
                firstValue.jobIndexInfo.getFinishTime() <= cutoff) {
              // Move to done failed and the job is old enough: evict and
              // remove its history files entirely.
              cache.remove(key);
              // Now lets try to delete it
              try {
                firstValue.delete();
              } catch (IOException e) {
                LOG.error(
                    "Error while trying to delete history files" +
                    " that could not be moved to done.", e);
              }
            } else {
              // Move still pending (or failed too recently): keep the entry
              // so the job stays visible until it lands in done.
              LOG.warn(
                  "Waiting to remove " + key
                  + " from JobListCache because it is not in done yet.");
            }
          } else {
            // Already moved to done; safe to drop from the cache.
            cache.remove(key);
          }
        }
      }
    }
  }
  return old;
}
/**
 * Looks up the cached history file info for a job.
 *
 * @param jobId the id of the job to look up
 * @return the cached entry, or null when no entry exists for jobId
 */
public HistoryFileInfo get(JobId jobId) {
  HistoryFileInfo cached = cache.get(jobId);
  return cached;
}
/**
 * Returns a point-in-time snapshot of all cached entries.
 *
 * @return a newly allocated collection holding the current cache values;
 *         later cache mutations do not affect the returned collection
 */
public Collection<HistoryFileInfo> values() {
  Collection<HistoryFileInfo> snapshot =
      new ArrayList<HistoryFileInfo>(cache.values());
  return snapshot;
}
/**
 * Drops the cache entry belonging to the given history file info's job.
 *
 * @param fileInfo the history file info whose job id keys the entry to drop
 */
public void delete(HistoryFileInfo fileInfo) {
  if (LOG.isDebugEnabled()) {
    LOG.debug("Removing from cache " + fileInfo);
  }
  JobId evictKey = fileInfo.getJobId();
  cache.remove(evictKey);
}