public void cleanupCompletedTransactions() {
  if (!completedTransactions.isEmpty()) {
    try {
      log.tracef(
          "About to cleanup completed transaction. Initial size is %d",
          completedTransactions.size());
      // this iterator is weakly consistent and will never throw ConcurrentModificationException
      Iterator<Map.Entry<GlobalTransaction, Long>> iterator =
          completedTransactions.entrySet().iterator();
      long timeout = configuration.transaction().completedTxTimeout();
      int removedEntries = 0;
      long beginning = timeService.time();
      while (iterator.hasNext()) {
        Map.Entry<GlobalTransaction, Long> e = iterator.next();
        long ageMillis = timeService.timeDuration(e.getValue(), TimeUnit.MILLISECONDS);
        if (ageMillis >= timeout) {
          iterator.remove();
          removedEntries++;
        }
      }
      long duration = timeService.timeDuration(beginning, TimeUnit.MILLISECONDS);
      log.tracef(
          "Finished cleaning up completed transactions. %d transactions were removed, total duration was %d millis, "
              + "current number of completed transactions is %d",
          removedEntries, duration, completedTransactions.size());
    } catch (Exception e) {
      log.errorf(e, "Failed to cleanup completed transactions: %s", e.getMessage());
    }
  }
}
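// Usage sketch (an assumption, not code from this class): cleanupCompletedTransactions()
// is designed to run periodically. A minimal wiring on a ScheduledExecutorService might
// look like the following; the method name, executor, and interval are hypothetical.
private void scheduleCompletedTxCleanup(ScheduledExecutorService executor, long intervalMillis) {
  // scheduleWithFixedDelay prevents overlapping runs when a single cleanup is slow
  executor.scheduleWithFixedDelay(
      this::cleanupCompletedTransactions, intervalMillis, intervalMillis, TimeUnit.MILLISECONDS);
}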
public void cleanupTimedOutTransactions() {
  if (trace)
    log.tracef(
        "About to cleanup remote transactions older than %d ms",
        configuration.transaction().completedTxTimeout());
  long beginning = timeService.time();
  long cutoffCreationTime =
      beginning - TimeUnit.MILLISECONDS.toNanos(configuration.transaction().completedTxTimeout());
  List<GlobalTransaction> toKill = new ArrayList<>();

  // Check remote transactions.
  for (Map.Entry<GlobalTransaction, RemoteTransaction> e : remoteTransactions.entrySet()) {
    GlobalTransaction gtx = e.getKey();
    RemoteTransaction remoteTx = e.getValue();
    if (remoteTx != null) {
      if (trace) log.tracef("Checking transaction %s", gtx);
      // Check the time; the subtraction keeps the comparison correct even when the
      // monotonic clock value wraps around.
      if (remoteTx.getCreationTime() - cutoffCreationTime < 0) {
        long duration =
            timeService.timeDuration(
                remoteTx.getCreationTime(), beginning, TimeUnit.MILLISECONDS);
        log.remoteTransactionTimeout(gtx, duration);
        toKill.add(gtx);
      }
    }
  }

  // Rollback the orphaned transactions and release any held locks.
  for (GlobalTransaction gtx : toKill) {
    killTransaction(gtx);
  }
}
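// Why the cutoff test above is written as "creationTime - cutoffCreationTime < 0"
// rather than "creationTime < cutoffCreationTime": the values come from a monotonic
// nanosecond clock and may wrap around Long.MAX_VALUE, so only differences are
// meaningful (the same rule applies to System.nanoTime()). A minimal illustration
// with hypothetical clock values:
static void wrapSafeComparisonDemo() {
  long cutoff = Long.MAX_VALUE - 50;    // clock is about to wrap
  long creation = Long.MIN_VALUE + 10;  // sampled just after the wrap, i.e. actually later
  boolean naive = creation < cutoff;         // true  -> wrongly classified as "older than cutoff"
  boolean wrapSafe = creation - cutoff < 0;  // false -> correct: creation is newer than cutoff
}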
@Override
public CompletableFuture<Void> visitGetAllCommand(InvocationContext ctx, GetAllCommand command)
    throws Throwable {
  if (trace) {
    log.tracef(
        "Visit Get All Command %s. Is it in transaction scope? %s. Is it local? %s",
        command, ctx.isInTxScope(), ctx.isOriginLocal());
  }
  if (!ctx.isInTxScope()) {
    return ctx.continueInvocation();
  }
  long start = timeService.time();
  return ctx.onReturn(
      (rCtx, rCommand, rv, throwable) -> {
        if (throwable != null) {
          throw throwable;
        }
        long end = timeService.time();
        initStatsIfNecessary(rCtx);
        int numRemote = 0;
        Collection<?> keys = ((GetAllCommand) rCommand).getKeys();
        for (Object key : keys) {
          if (isRemote(key)) numRemote++;
        }
        // TODO: this skews the statistics: every remote get in this getAll is charged
        // with the duration of the whole getAll. Maybe record an average per key
        // instead? Either way the current value isn't very indicative.
        if (numRemote > 0) {
          cacheStatisticManager.add(
              NUM_REMOTE_GET, numRemote, getGlobalTransaction(rCtx), rCtx.isOriginLocal());
          cacheStatisticManager.add(
              REMOTE_GET_EXECUTION,
              timeService.timeDuration(start, end, NANOSECONDS),
              getGlobalTransaction(rCtx),
              rCtx.isOriginLocal());
        }
        cacheStatisticManager.add(
            ALL_GET_EXECUTION,
            timeService.timeDuration(start, end, NANOSECONDS),
            getGlobalTransaction(rCtx),
            rCtx.isOriginLocal());
        cacheStatisticManager.add(
            NUM_GET, keys.size(), getGlobalTransaction(rCtx), rCtx.isOriginLocal());
        return null;
      });
}
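// A sketch of what the TODO above could look like if implemented (an assumption, not
// current behavior): inside the onReturn handler, charge the remote gets an average
// per-key slice of the total getAll duration instead of the full duration.
long totalNanos = timeService.timeDuration(start, end, NANOSECONDS);
if (numRemote > 0 && !keys.isEmpty()) {
  long avgPerKeyNanos = totalNanos / keys.size();
  cacheStatisticManager.add(
      REMOTE_GET_EXECUTION,
      avgPerKeyNanos * numRemote, // attribute only the remote share of the total time
      getGlobalTransaction(rCtx),
      rCtx.isOriginLocal());
}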
@Override
public <KOut, VOut> Map<KOut, VOut> reduce(ReduceCommand<KOut, VOut> reduceCommand)
    throws InterruptedException {
  Cache<?, ?> cache = cacheManager.getCache(reduceCommand.getCacheName());
  Set<KOut> keys = reduceCommand.getKeys();
  String taskId = reduceCommand.getTaskId();
  Reducer<KOut, VOut> reducer = reduceCommand.getReducer();
  boolean useIntermediateKeys = reduceCommand.isEmitCompositeIntermediateKeys();
  boolean noInputKeys = keys == null || keys.isEmpty();
  Cache<Object, List<VOut>> tmpCache = cacheManager.getCache(reduceCommand.getCacheName());
  Map<KOut, VOut> result = new HashMap<KOut, VOut>();
  if (noInputKeys) {
    // illegal state, raise exception
    throw new IllegalStateException(
        "Reduce phase of MapReduceTask "
            + taskId
            + " on node "
            + cdl.getAddress()
            + " executed with empty input keys");
  } else {
    // first hook into lifecycle
    MapReduceTaskLifecycleService taskLifecycleService =
        MapReduceTaskLifecycleService.getInstance();
    log.tracef("For m/r task %s invoking %s at %s", taskId, reduceCommand, cdl.getAddress());
    int interruptCount = 0;
    long start = log.isTraceEnabled() ? timeService.time() : 0;
    try {
      taskLifecycleService.onPreExecute(reducer, cache);
      for (KOut key : keys) {
        if (checkInterrupt(interruptCount++) && Thread.currentThread().isInterrupted())
          throw new InterruptedException();
        // load result value from map phase
        List<VOut> value;
        if (useIntermediateKeys) {
          value = tmpCache.get(new IntermediateCompositeKey<KOut>(taskId, key));
        } else {
          value = tmpCache.get(key);
        }
        // and reduce it
        VOut reduced = reducer.reduce(key, value.iterator());
        result.put(key, reduced);
        log.tracef(
            "For m/r task %s reduced %s to %s at %s ", taskId, key, reduced, cdl.getAddress());
      }
    } finally {
      if (log.isTraceEnabled()) {
        log.tracef(
            "Reduce for task %s took %s milliseconds",
            reduceCommand.getTaskId(),
            timeService.timeDuration(start, TimeUnit.MILLISECONDS));
      }
      taskLifecycleService.onPostExecute(reducer);
    }
  }
  return result;
}
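// For reference, a minimal Reducer satisfying the contract invoked above:
// reduce(key, iterator) folds all values collected for one key into a single result.
// This word-count style reducer is illustrative only, not taken from this codebase.
public class SumReducer implements Reducer<String, Integer> {
  @Override
  public Integer reduce(String reducedKey, Iterator<Integer> iter) {
    int sum = 0;
    while (iter.hasNext()) {
      sum += iter.next(); // fold every mapped/combined value for this key
    }
    return sum;
  }
}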
@Override
public CompletableFuture<Void> visitGetKeyValueCommand(
    InvocationContext ctx, GetKeyValueCommand command) throws Throwable {
  if (trace) {
    log.tracef(
        "Visit Get Key Value command %s. Is it in transaction scope? %s. Is it local? %s",
        command, ctx.isInTxScope(), ctx.isOriginLocal());
  }
  if (!ctx.isInTxScope()) {
    return ctx.continueInvocation();
  }
  long start = timeService.time();
  return ctx.onReturn(
      (rCtx, rCommand, rv, throwable) -> {
        if (throwable != null) {
          throw throwable;
        }
        long end = timeService.time();
        initStatsIfNecessary(rCtx);
        Object key = ((GetKeyValueCommand) rCommand).getKey();
        if (isRemote(key)) {
          cacheStatisticManager.increment(
              NUM_REMOTE_GET, getGlobalTransaction(rCtx), rCtx.isOriginLocal());
          cacheStatisticManager.add(
              REMOTE_GET_EXECUTION,
              timeService.timeDuration(start, end, NANOSECONDS),
              getGlobalTransaction(rCtx),
              rCtx.isOriginLocal());
        }
        cacheStatisticManager.add(
            ALL_GET_EXECUTION,
            timeService.timeDuration(start, end, NANOSECONDS),
            getGlobalTransaction(rCtx),
            rCtx.isOriginLocal());
        cacheStatisticManager.increment(
            NUM_GET, getGlobalTransaction(rCtx), rCtx.isOriginLocal());
        return null;
      });
}
@Override
@ManagedAttribute(
    description = "Number of seconds since the cluster-wide cache statistics were last reset",
    displayName = "Seconds since cluster-wide cache statistics were reset",
    units = Units.SECONDS,
    displayType = DisplayType.SUMMARY)
public long getTimeSinceReset() {
  long result = -1;
  if (isStatisticsEnabled()) {
    result = ts.timeDuration(resetNanoseconds.get(), TimeUnit.SECONDS);
  }
  return result;
}
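// Reading this attribute over JMX (a sketch under assumptions: the exact ObjectName
// depends on the configured JMX domain, cache manager, and cache name, and the
// attribute name "TimeSinceReset" is derived from the getter above).
static long readTimeSinceReset() throws Exception {
  MBeanServer server = ManagementFactory.getPlatformMBeanServer();
  ObjectName stats =
      new ObjectName(
          "org.infinispan:type=Cache,name=\"myCache\",component=ClusterCacheStats"); // hypothetical
  return (Long) server.getAttribute(stats, "TimeSinceReset");
}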
private void updateTime(
    ExtendedStatistic duration,
    ExtendedStatistic counter,
    long initTime,
    long endTime,
    GlobalTransaction globalTransaction,
    boolean local) {
  cacheStatisticManager.add(
      duration,
      timeService.timeDuration(initTime, endTime, NANOSECONDS),
      globalTransaction,
      local);
  cacheStatisticManager.increment(counter, globalTransaction, local);
}
private <KIn, VIn, KOut, VOut> Map<KOut, List<VOut>> combineForLocalReduction(
    MapCombineCommand<KIn, VIn, KOut, VOut> mcc, CollectableCollector<KOut, VOut> collector) {
  String taskId = mcc.getTaskId();
  Reducer<KOut, VOut> combiner = mcc.getCombiner();
  Map<KOut, List<VOut>> result = null;

  if (combiner != null) {
    result = new HashMap<KOut, List<VOut>>();
    log.tracef("For m/r task %s invoking combiner %s at %s", taskId, mcc, cdl.getAddress());
    MapReduceTaskLifecycleService taskLifecycleService =
        MapReduceTaskLifecycleService.getInstance();
    long start = log.isTraceEnabled() ? timeService.time() : 0;
    try {
      Cache<?, ?> cache = cacheManager.getCache(mcc.getCacheName());
      taskLifecycleService.onPreExecute(combiner, cache);
      Map<KOut, List<VOut>> collectedValues = collector.collectedValues();
      for (Entry<KOut, List<VOut>> e : collectedValues.entrySet()) {
        VOut combined;
        List<VOut> list = e.getValue();
        List<VOut> l = new LinkedList<VOut>();
        if (list.size() > 1) {
          combined = combiner.reduce(e.getKey(), list.iterator());
        } else {
          combined = list.get(0);
        }
        l.add(combined);
        result.put(e.getKey(), l);
        log.tracef(
            "For m/r task %s combined %s to %s at %s",
            taskId, e.getKey(), combined, cdl.getAddress());
      }
    } finally {
      if (log.isTraceEnabled()) {
        log.tracef(
            "Combine for task %s took %s milliseconds",
            mcc.getTaskId(), timeService.timeDuration(start, TimeUnit.MILLISECONDS));
      }
      taskLifecycleService.onPostExecute(combiner);
    }
  } else {
    // Combiner not specified
    result = collector.collectedValues();
  }
  return result;
}
private CompletableFuture<Void> visitWriteCommand(
    InvocationContext ctx, WriteCommand command, Object key) throws Throwable {
  if (trace) {
    log.tracef(
        "Visit write command %s. Is it in transaction scope? %s. Is it local? %s",
        command, ctx.isInTxScope(), ctx.isOriginLocal());
  }
  if (!ctx.isInTxScope()) {
    return ctx.continueInvocation();
  }
  long start = timeService.time();
  return ctx.onReturn(
      (rCtx, rCommand, rv, throwable) -> {
        long end = timeService.time();
        initStatsIfNecessary(rCtx);
        if (throwable != null) {
          processWriteException(rCtx, getGlobalTransaction(rCtx), throwable);
        } else {
          if (isRemote(key)) {
            cacheStatisticManager.add(
                REMOTE_PUT_EXECUTION,
                timeService.timeDuration(start, end, NANOSECONDS),
                getGlobalTransaction(rCtx),
                rCtx.isOriginLocal());
            cacheStatisticManager.increment(
                NUM_REMOTE_PUT, getGlobalTransaction(rCtx), rCtx.isOriginLocal());
          }
        }
        cacheStatisticManager.increment(
            NUM_PUT, getGlobalTransaction(rCtx), rCtx.isOriginLocal());
        cacheStatisticManager.markAsWriteTransaction(
            getGlobalTransaction(rCtx), rCtx.isOriginLocal());
        return null;
      });
}
// Launch a new cluster-wide stats retrieval only when the cached statistics have gone
// stale, i.e. are older than the configured threshold.
private boolean launchNewDistTask() {
  long duration = ts.timeDuration(statsUpdateTimestamp, ts.time(), TimeUnit.MILLISECONDS);
  return duration > staleStatsTreshold;
}
protected <KIn, VIn, KOut, VOut> Set<KOut> combine(
    MapCombineCommand<KIn, VIn, KOut, VOut> mcc, CollectableCollector<KOut, VOut> collector)
    throws Exception {
  String taskId = mcc.getTaskId();
  boolean emitCompositeIntermediateKeys = mcc.isEmitCompositeIntermediateKeys();
  Reducer<KOut, VOut> combiner = mcc.getCombiner();
  Set<KOut> mapPhaseKeys = new HashSet<KOut>();
  Cache<Object, DeltaAwareList<VOut>> tmpCache = null;
  if (emitCompositeIntermediateKeys) {
    tmpCache = cacheManager.getCache(DEFAULT_TMP_CACHE_CONFIGURATION_NAME);
  } else {
    tmpCache = cacheManager.getCache(taskId);
  }
  if (tmpCache == null) {
    throw new IllegalStateException(
        "Temporary cache for MapReduceTask " + taskId + " not found on " + cdl.getAddress());
  }
  DistributionManager dm = tmpCache.getAdvancedCache().getDistributionManager();
  if (combiner != null) {
    Cache<?, ?> cache = cacheManager.getCache(mcc.getCacheName());
    log.tracef("For m/r task %s invoking combiner %s at %s", taskId, mcc, cdl.getAddress());
    MapReduceTaskLifecycleService taskLifecycleService =
        MapReduceTaskLifecycleService.getInstance();
    Map<KOut, VOut> combinedMap = new ConcurrentHashMap<KOut, VOut>();
    long start = log.isTraceEnabled() ? timeService.time() : 0;
    try {
      taskLifecycleService.onPreExecute(combiner, cache);
      Map<KOut, List<VOut>> collectedValues = collector.collectedValues();
      for (Entry<KOut, List<VOut>> e : collectedValues.entrySet()) {
        List<VOut> list = e.getValue();
        VOut combined;
        if (list.size() > 1) {
          combined = combiner.reduce(e.getKey(), list.iterator());
        } else {
          combined = list.get(0);
        }
        combinedMap.put(e.getKey(), combined);
        log.tracef(
            "For m/r task %s combined %s to %s at %s",
            taskId, e.getKey(), combined, cdl.getAddress());
      }
    } finally {
      if (log.isTraceEnabled()) {
        log.tracef(
            "Combine for task %s took %s milliseconds",
            mcc.getTaskId(), timeService.timeDuration(start, TimeUnit.MILLISECONDS));
      }
      taskLifecycleService.onPostExecute(combiner);
    }
    Map<Address, List<KOut>> keysToNodes =
        mapKeysToNodes(dm, taskId, combinedMap.keySet(), emitCompositeIntermediateKeys);
    start = log.isTraceEnabled() ? timeService.time() : 0;
    try {
      for (Entry<Address, List<KOut>> entry : keysToNodes.entrySet()) {
        List<KOut> keysHashedToAddress = entry.getValue();
        try {
          log.tracef(
              "For m/r task %s migrating intermediate keys %s to %s",
              taskId, keysHashedToAddress, entry.getKey());
          for (KOut key : keysHashedToAddress) {
            VOut value = combinedMap.get(key);
            DeltaAwareList<VOut> delta = new DeltaAwareList<VOut>(value);
            if (emitCompositeIntermediateKeys) {
              tmpCache.put(new IntermediateCompositeKey<KOut>(taskId, key), delta);
            } else {
              tmpCache.put(key, delta);
            }
            mapPhaseKeys.add(key);
          }
        } catch (Exception e) {
          throw new CacheException(
              "Could not move intermediate keys/values for M/R task " + taskId, e);
        }
      }
    } finally {
      if (log.isTraceEnabled()) {
        log.tracef(
            "Migrating keys for task %s took %s milliseconds (Migrated %s keys)",
            mcc.getTaskId(),
            timeService.timeDuration(start, TimeUnit.MILLISECONDS),
            mapPhaseKeys.size());
      }
    }
  } else {
    // Combiner not specified so let's insert each key/uncombined-List pair into tmp cache
    Map<KOut, List<VOut>> collectedValues = collector.collectedValues();
    Map<Address, List<KOut>> keysToNodes =
        mapKeysToNodes(dm, taskId, collectedValues.keySet(), emitCompositeIntermediateKeys);
    long start = log.isTraceEnabled() ? timeService.time() : 0;
    try {
      for (Entry<Address, List<KOut>> entry : keysToNodes.entrySet()) {
        List<KOut> keysHashedToAddress = entry.getValue();
        try {
          log.tracef(
              "For m/r task %s migrating intermediate keys %s to %s",
              taskId, keysHashedToAddress, entry.getKey());
          for (KOut key : keysHashedToAddress) {
            List<VOut> value = collectedValues.get(key);
            DeltaAwareList<VOut> delta = new DeltaAwareList<VOut>(value);
            if (emitCompositeIntermediateKeys) {
              tmpCache.put(new IntermediateCompositeKey<KOut>(taskId, key), delta);
            } else {
              tmpCache.put(key, delta);
            }
            mapPhaseKeys.add(key);
          }
        } catch (Exception e) {
          throw new CacheException(
              "Could not move intermediate keys/values for M/R task " + taskId, e);
        }
      }
    } finally {
      if (log.isTraceEnabled()) {
        log.tracef(
            "Migrating keys for task %s took %s milliseconds (Migrated %s keys)",
            mcc.getTaskId(),
            timeService.timeDuration(start, TimeUnit.MILLISECONDS),
            mapPhaseKeys.size());
      }
    }
  }
  return mapPhaseKeys;
}
protected <KIn, VIn, KOut, VOut> CollectableCollector<KOut, VOut> map(
    MapCombineCommand<KIn, VIn, KOut, VOut> mcc) throws InterruptedException {
  Cache<KIn, VIn> cache = cacheManager.getCache(mcc.getCacheName());
  Set<KIn> keys = mcc.getKeys();
  Set<KIn> inputKeysCopy = null;
  Mapper<KIn, VIn, KOut, VOut> mapper = mcc.getMapper();
  DistributionManager dm = cache.getAdvancedCache().getDistributionManager();
  boolean inputKeysSpecified = keys != null && !keys.isEmpty();
  Set<KIn> inputKeys = keys;
  if (!inputKeysSpecified) {
    inputKeys = filterLocalPrimaryOwner(cache.keySet(), dm);
  } else {
    inputKeysCopy = new HashSet<KIn>(keys);
  }
  // hook map function into lifecycle and execute it
  MapReduceTaskLifecycleService taskLifecycleService =
      MapReduceTaskLifecycleService.getInstance();
  DefaultCollector<KOut, VOut> collector = new DefaultCollector<KOut, VOut>();
  log.tracef("For m/r task %s invoking %s with input keys %s", mcc.getTaskId(), mcc, inputKeys);
  int interruptCount = 0;
  long start = log.isTraceEnabled() ? timeService.time() : 0;
  try {
    taskLifecycleService.onPreExecute(mapper, cache);
    for (KIn key : inputKeys) {
      if (checkInterrupt(interruptCount++) && Thread.currentThread().isInterrupted())
        throw new InterruptedException();
      VIn value = cache.get(key);
      mapper.map(key, value, collector);
      if (inputKeysSpecified) {
        inputKeysCopy.remove(key);
      }
    }
    Set<KIn> keysFromCacheLoader = null;
    if (inputKeysSpecified) {
      // load only specified remaining input keys - iff in CL and pinned to this primary owner
      keysFromCacheLoader = filterLocalPrimaryOwner(inputKeysCopy, dm);
    } else {
      // load everything from CL pinned to this primary owner
      keysFromCacheLoader =
          filterLocalPrimaryOwner(loadAllKeysFromCacheLoaderUsingFilter(inputKeys), dm);
    }
    log.tracef(
        "For m/r task %s cache loader input keys %s", mcc.getTaskId(), keysFromCacheLoader);
    interruptCount = 0;
    for (KIn key : keysFromCacheLoader) {
      if (checkInterrupt(interruptCount++) && Thread.currentThread().isInterrupted())
        throw new InterruptedException();
      VIn value = loadValueFromCacheLoader(key);
      if (value != null) {
        mapper.map(key, value, collector);
      }
    }
  } finally {
    if (log.isTraceEnabled()) {
      log.tracef(
          "Map phase for task %s took %s milliseconds",
          mcc.getTaskId(), timeService.timeDuration(start, TimeUnit.MILLISECONDS));
    }
    taskLifecycleService.onPostExecute(mapper);
  }
  return collector;
}
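// For reference, a minimal Mapper matching the contract invoked above:
// map(key, value, collector) emits zero or more intermediate pairs per cache entry.
// This word-count style mapper is illustrative only, not taken from this codebase.
public class WordCountMapper implements Mapper<String, String, String, Integer> {
  @Override
  public void map(String key, String value, Collector<String, Integer> collector) {
    for (String word : value.split("\\s+")) {
      collector.emit(word, 1); // one intermediate pair per token
    }
  }
}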