/**
 * Inserts a key/value pair, evicting the oldest entries while the cache is at capacity.
 *
 * <p>If the key is already cached, its queue entry is dropped first so re-insertion moves
 * it to the back of the eviction queue.
 *
 * <p>NOTE(review): the queue and map operations are not performed atomically; assumes
 * external synchronization or single-threaded use — confirm with callers.
 *
 * @param key cache key (also the eviction-queue element)
 * @param value value to associate with {@code key}
 */
public void put(final Key key, final Value value) {
  if (map.containsKey(key)) {
    // Re-inserting an existing key: remove its old queue position so it is enqueued last.
    queue.remove(key);
  }
  // Evict oldest entries until there is room for the new one.
  while (queue.size() >= maxSize) {
    Key oldestKey = queue.poll();
    if (oldestKey != null) {
      map.remove(oldestKey);
      // Fix: dropped the redundant queue.remove(oldestKey) — poll() has already taken
      // the element off the queue, so the extra call was a dead O(n) scan.
    }
  }
  queue.add(key);
  map.put(key, value);
}
/**
 * Pulls messages via {@code fetcher} into a private queue, converts each one, and appends
 * the results to {@code list}.
 *
 * <p>Timeouts from the fetcher are treated as a normal end-of-fetch (swallowed);
 * {@code InterruptedException} is rethrown as-is; {@code JMSException} is wrapped in
 * {@code DataStreamInfrastructureException}; anything else is propagated unchecked via
 * Guava's {@code Throwables.propagate}.
 *
 * @param list destination for the converted messages
 * @param timeoutDuration NOTE(review): unused inside this method — presumably the fetcher
 *     applies it internally; confirm with callers
 * @param fetcher callback that fills the supplied queue and returns whether the
 *     out-of-range position was reached
 * @return status carrying the position and enqueued time of the last drained message
 *     (both null when nothing was fetched) plus the out-of-range flag
 * @throws DataStreamInfrastructureException when the fetch fails with a JMSException
 * @throws InterruptedException when the fetch is interrupted
 */
protected ReceiveStatus fetch(
    List<? super M> list,
    Duration timeoutDuration,
    FunctionThrowsExceptions<java.util.Queue<Message>, Boolean> fetcher)
    throws DataStreamInfrastructureException, InterruptedException {
  ConcurrentLinkedQueue<Message> fetched = new ConcurrentLinkedQueue<>();
  boolean outOfRangeReached = false;
  try {
    outOfRangeReached = fetcher.apply(fetched);
  } catch (TimeoutException | UncheckedTimeoutException e) {
    // do nothing — a timeout just means we return whatever was fetched so far
  } catch (InterruptedException e) {
    throw e;
  } catch (JMSException e) {
    throw new DataStreamInfrastructureException(e);
  } catch (Exception e) {
    throw Throwables.propagate(e);
  }
  int fetchedCount = fetched.size();
  // the working thread may still be adding new elements to fetched, so only drain the
  // fetchedCount elements that were present at the snapshot above
  if (fetchedCount > 0) {
    Message msg = null;
    while (fetchedCount-- > 0) {
      msg = fetched.remove();
      list.add(convert(msg));
    }
    // msg is the last drained message; its position/enqueued time describe how far we got
    return new SimpleReceiveStatus(position(msg), enqueuedTime(msg), outOfRangeReached);
  } else {
    return new SimpleReceiveStatus(null, null, outOfRangeReached);
  }
}
/**
 * Removes any tasks waiting to be run. Will not interrupt any tasks currently running if
 * {@link #tick(ExceptionHandlerInterface)} is being called. But will avoid additional tasks
 * from being run on the current {@link #tick(ExceptionHandlerInterface)} call.
 *
 * <p>If tasks are added concurrently during this invocation they may or may not be removed.
 *
 * @return List of runnables which were waiting in the task queue to be executed (and were now
 *     removed)
 */
public List<Runnable> clearTasks() {
  List<TaskContainer> containers;
  // Hold the scheduled queue's modification lock so scheduled tasks cannot shift while we
  // collect and clear them.
  synchronized (scheduledQueue.getModificationLock()) {
    containers = new ArrayList<TaskContainer>(executeQueue.size() + scheduledQueue.size());
    Iterator<? extends TaskContainer> it = executeQueue.iterator();
    while (it.hasNext()) {
      TaskContainer tc = it.next();
      /* we must use executeQueue.remove(Object) instead of it.remove()
       * This is to assure it is atomically removed (without executing) */
      if (!tc.running && executeQueue.remove(tc)) {
        // Insert at the sorted position so the returned list preserves execution order.
        int index = ListUtils.getInsertionEndIndex(containers, tc, true);
        containers.add(index, tc);
      }
    }
    it = scheduledQueue.iterator();
    while (it.hasNext()) {
      TaskContainer tc = it.next();
      // Currently running tasks are left alone; everything else is reported as removed.
      if (!tc.running) {
        int index = ListUtils.getInsertionEndIndex(containers, tc, true);
        containers.add(index, tc);
      }
    }
    scheduledQueue.clear();
  }
  return ContainerHelper.getContainedRunnables(containers);
}
/**
 * If no move is currently in progress and moves are queued, dequeues the next destination
 * and starts moving there.
 */
public void checkqueue() {
  if (!ismoving && !movequeue.isEmpty()) {
    ismoving = true;
    // Fix: poll() both reads and removes the head of the queue. The original code
    // additionally called movequeue.remove(0) afterwards, which silently discarded a
    // second queued destination (or was a dead no-op, depending on the queue type).
    UI.instance.mainview.moveto = movequeue.poll();
  }
}
/**
 * Evicts every cached page belonging to the given ledger and drops the ledger from the
 * flush set, keeping the global page counter consistent.
 *
 * @param ledgerId ledger whose pages should be discarded
 */
void removePagesForLedger(long ledgerId) {
  final int removedPages = pageMapAndList.removeEntriesForALedger(ledgerId);
  // A negative page count means the bookkeeping is corrupt; fail loudly.
  if (pageCount.addAndGet(-removedPages) < 0) {
    throw new RuntimeException(
        "Page count of ledger cache has been decremented to be less than zero.");
  }
  ledgersToFlush.remove(ledgerId);
}
/**
 * Cancels the given task and removes it from the pending task queue.
 * A null task is logged and otherwise ignored.
 *
 * @param task the task to cancel; may be null
 */
public void cancelTask(BaseTask task) {
  // Log unconditionally (even for null) to preserve the original diagnostics.
  LogUtils.logD("ReportTaskEngine.cancelTask" + task);
  if (task != null) {
    task.markAsCanceled();
    mTaskQueue.remove(task);
  }
}
/**
 * Appends an element, first evicting the oldest entry once the element counter reaches
 * its cap so the queue stays bounded.
 *
 * @param e element to append
 * @return the result of the underlying queue's add
 */
public synchronized boolean add(E e) {
  // Count this addition; on hitting 16, drop the head and give the slot back.
  if (++counter == 16) {
    queue.remove();
    --counter;
  }
  return queue.add(e);
}
/** Processes a single pending sync request by handing it to the commit processor. */
public synchronized void sync() {
  if (pendingSyncs.isEmpty()) {
    LOG.warn("Not expecting a sync.");
    return;
  }
  // Dequeue the oldest outstanding sync request and commit it.
  commitProcessor.commit(pendingSyncs.remove());
}
public void markForDeletion(File file) { TempFile tempFile = deleteMap.get(file.getPath()); if (tempFile != null) { deleteQueue.remove(tempFile); deleteMap.remove(tempFile); } else { tempFile = new TempFile(file); deleteQueue.add(new TempFile(file)); deleteMap.put(tempFile.getFile().getPath(), tempFile); file.deleteOnExit(); // just in case } }
/**
 * Drains a snapshot of the pending render queue, applying each queued ADD/REMOVE action
 * and then deleting the processed entry from the live queue. Entries enqueued while this
 * runs are left for the next pass.
 */
public void parseRenderQueue() {
  if (render_queue.isEmpty()) {
    return;
  }
  // Work on a snapshot so concurrent additions do not disturb the iteration.
  for (Object queued : render_queue.toArray()) {
    QueueItem item = (QueueItem) queued;
    if (item.getAction() == QueueItem.ADD) {
      addRenderItem(item.getEnt());
    } else if (item.getAction() == QueueItem.REMOVE) {
      removeRenderItem(item.getEnt());
    }
    render_queue.remove(queued);
  }
}
/** * Every time a message gets broadcasted, make sure we update the cluster. * * @param o the message to broadcast. * @return The same message. */ public BroadcastAction filter(Object originalMessage, Object o) { if (o instanceof String) { String message = (String) o; // Avoid re-broadcasting if (!receivedMessages.remove(message)) { try { jchannel.send(new Message(null, null, message)); } catch (ChannelException e) { logger.log(Level.WARNING, "", e); } } return new BroadcastAction(message); } else { return new BroadcastAction(o); } }
/**
 * https://issues.apache.org/jira/browse/WICKET-5316
 *
 * <p>Stress test: 100 threads repeatedly lock page 0, unlock all pages, and sleep, for 20
 * seconds. Any exception recorded by a worker (e.g. a failed lock acquire/release) fails
 * the test by being rethrown after all threads have finished.
 *
 * @throws Exception
 */
@Test
public void failToReleaseUnderLoad() throws Exception {
  final Duration duration = Duration.seconds(20); /* seconds */
  final ConcurrentLinkedQueue<Exception> errors = new ConcurrentLinkedQueue<Exception>();
  final long endTime = System.currentTimeMillis() + duration.getMilliseconds();
  // set the synchronizer timeout one second longer than the test runs to prevent
  // starvation to become an issue
  final PageAccessSynchronizer sync =
      new PageAccessSynchronizer(duration.add(Duration.ONE_SECOND));
  final CountDownLatch latch = new CountDownLatch(100);
  for (int count = 0; count < 100; count++) {
    new Thread() {
      @Override
      public void run() {
        try {
          while (System.currentTimeMillis() < endTime) {
            try {
              logger.debug(Thread.currentThread().getName() + " locking");
              sync.lockPage(0);
              Thread.sleep(1);
              logger.debug(Thread.currentThread().getName() + " locked");
              sync.unlockAllPages();
              logger.debug(Thread.currentThread().getName() + " unlocked");
              Thread.sleep(5);
            } catch (InterruptedException e) {
              throw new RuntimeException(e);
            }
          }
        } catch (Exception e) {
          // Record the failure; it is rethrown on the main thread after latch.await().
          logger.error(e.getMessage(), e);
          errors.add(e);
        } finally {
          // Count down even on failure so latch.await() cannot hang.
          latch.countDown();
        }
      }
    }.start();
  }
  latch.await();
  if (!errors.isEmpty()) {
    logger.error("Number of lock errors that occurred: {}", errors.size());
    // Rethrow the first recorded error to fail the test.
    throw errors.remove();
  }
}
// Handle any requests that have been made against the client. private void handleInputQueue() { if (!addedQueue.isEmpty()) { getLogger().debug("Handling queue"); // If there's stuff in the added queue. Try to process it. Collection<MemcachedNode> toAdd = new HashSet<MemcachedNode>(); // Transfer the queue into a hashset. There are very likely more // additions than there are nodes. Collection<MemcachedNode> todo = new HashSet<MemcachedNode>(); try { MemcachedNode qa = null; while ((qa = addedQueue.remove()) != null) { todo.add(qa); } } catch (NoSuchElementException e) { // Found everything } // Now process the queue. for (MemcachedNode qa : todo) { boolean readyForIO = false; if (qa.isActive()) { if (qa.getCurrentWriteOp() != null) { readyForIO = true; getLogger().debug("Handling queued write %s", qa); } } else { toAdd.add(qa); } qa.copyInputQueue(); if (readyForIO) { try { if (qa.getWbuf().hasRemaining()) { handleWrites(qa.getSk(), qa); } } catch (IOException e) { getLogger().warn("Exception handling write", e); queueReconnect(qa); } } qa.fixupOps(); } addedQueue.addAll(toAdd); } }
/**
 * Applies the language the user picked from a "code --> name" keyboard reply: persists it
 * when supported, answers with a confirmation (or error) message, and hides the custom
 * keyboard for this user only.
 *
 * <p>NOTE(review): setReplayToMessageId / setReplayMarkup are the library's own
 * (misspelled) method names — do not "fix" them here.
 *
 * @param message the incoming Telegram message whose text is "code --> name"
 * @throws InvalidObjectException propagated from the send helper
 */
private void onLanguageSelected(Message message) throws InvalidObjectException {
  // The keyboard option is formatted "code --> human readable name"; keep only the code.
  String[] parts = message.getText().split("-->", 2);
  SendMessage sendMessageRequest = new SendMessage();
  sendMessageRequest.setChatId(message.getChatId().toString());
  if (LocalisationService.getInstance().getSupportedLanguages().containsKey(parts[0].trim())) {
    DatabaseManager.getInstance().putUserLanguage(message.getFrom().getId(), parts[0].trim());
    sendMessageRequest.setText(
        LocalisationService.getInstance().getString("languageModified", parts[0].trim()));
  } else {
    sendMessageRequest.setText(LocalisationService.getInstance().getString("errorLanguage"));
  }
  sendMessageRequest.setReplayToMessageId(message.getMessageId());
  // Hide the language-selection keyboard, but only for this user (selective).
  ReplyKeyboardHide replyKeyboardHide = new ReplyKeyboardHide();
  replyKeyboardHide.setHideKeyboard(true);
  replyKeyboardHide.setSelective(true);
  sendMessageRequest.setReplayMarkup(replyKeyboardHide);
  SenderHelper.SendApiMethod(sendMessageRequest, TOKEN);
  // The language dialog for this user is finished; drop its pending state.
  languageMessages.remove(message.getFrom().getId());
}
/**
 * Per-tick maintenance of parallel tasks: registers newly submitted tasks against every
 * region of the relevant world(s), attaches newly created regions to all active tasks,
 * and strips dead regions from both the pending queue and the active task infos.
 *
 * @param delta tick delta (not used directly in this method)
 */
public void heartbeat(long delta) {
  // The required tick stage differs depending on whether we run engine-wide or per-world.
  if (engine != null) {
    TickStage.checkStage(TickStage.TICKSTART);
  } else {
    TickStage.checkStage(TickStage.STAGE1);
  }
  SpoutRegion region;
  SpoutTask task;
  // Register each newly submitted task; if another thread registered the same task id
  // first, reuse its info (putIfAbsent race).
  while ((task = newTasks.poll()) != null) {
    int taskId = task.getTaskId();
    ParallelTaskInfo info = activeTasks.get(taskId);
    if (info == null) {
      info = new ParallelTaskInfo(task);
      ParallelTaskInfo previous = activeTasks.putIfAbsent(taskId, info);
      if (previous != null) {
        info = previous;
      }
      task.setParallelInfo(info);
    }
    // Attach the task to every region of the target world(s); a null world field means
    // all worlds known to the engine.
    Collection<? extends World> worlds = (this.world == null) ? engine.getWorlds() : world;
    for (World w : worlds) {
      SpoutWorld sw = (SpoutWorld) w;
      for (Region r : sw.getRegions()) {
        info.add((SpoutRegion) r);
      }
    }
  }
  // Newly created regions become visible to every active task.
  while ((region = newRegions.poll()) != null) {
    for (ParallelTaskInfo info : activeTasks.values(ParallelTaskInfo.EMPTY_ARRAY)) {
      info.add(region);
    }
  }
  // Dead regions: purge any still-pending registration, then detach from all tasks.
  // remove() is looped because the collections may hold duplicates.
  while ((region = deadRegions.poll()) != null) {
    while (newRegions.remove(region)) {;
    }
    for (ParallelTaskInfo info : activeTasks.values(ParallelTaskInfo.EMPTY_ARRAY)) {
      while (info.remove(region)) {;
      }
    }
  }
}
/**
 * Idle-timeout callback: expires and closes the channel unless the response future
 * attached to it indicates the request should not be reaped.
 *
 * @param timeout the timer handle; cancelled at the end of this run
 */
public void run(Timeout timeout) {
  if (isClosed.get()) return;
  Object attachment = channel.getPipeline().getContext(NettyAsyncHttpProvider.class).getAttachment();
  if (attachment != null) {
    if (NettyResponseFuture.class.isAssignableFrom(attachment.getClass())) {
      NettyResponseFuture<?> future = (NettyResponseFuture<?>) attachment;
      // NOTE(review): this bails out when the future is neither done nor cancelled,
      // i.e. still in flight — yet the warn text reads like an error state. Confirm the
      // condition is not inverted relative to the intended "skip busy channels" logic.
      if (!future.isDone() && !future.isCancelled()) {
        log.warn("Future not in appropriate state {}", future);
        return;
      }
    }
  }
  // Only close channels we still track; remove() doubles as the membership test.
  if (activeChannels.remove(channel)) {
    log.debug("Channel idle. Expiring {}", channel);
    close(channel);
  }
  timeout.cancel();
}
// Apply common parent filter private boolean isReliable( ConcurrentLinkedQueue<Node> originallist, ConcurrentHashMap<Node, int[]> intermap) { ConcurrentLinkedQueue<Node> lowest = new ConcurrentLinkedQueue<Node>(); for (Node entry : originallist) { boolean toAdd = true; if (isSignificant(intermap.get(entry), this._STATMIN)) { for (Node node : lowest) { // For each node already in the list, if (isChild(entry, node)) { // if entry is child of an existing node, remove the // node lowest.remove(node); } else if (isChild(node, entry)) { // if entry is parent of an existing node, dont add the // entry toAdd = false; break; } } if (toAdd) lowest.add(entry); } } if (lowest.isEmpty()) return false; // Then for each lowest entry, if one is not reliable, return false for (Node node : lowest) { if (getReliabilityScore(intermap.get(node), this._STATMIN) < 0.9) { return false; } } return true; }
/**
 * This function should be called only from ReadersEntry.finalize()
 *
 * @param state The reader's state that we wish to remove from the ConcurrentLinkedQueues
 */
protected void removeState(State state) {
  // We don't need to lock in exclusive mode thanks to the GC
  readersStateQ.remove(state);
  // Invalidate the cached array snapshot so readers rebuild it from the updated queue.
  readersStateArrayRef.set(null);
}
/**
 * Unregisters the given notification listener.
 *
 * @param list listener to detach
 * @return {@code true} when the listener was registered and has been removed
 */
public boolean removeListener(NotificationListener list) {
  return listeners.remove(list);
}
public void recoverMClient(MemcachedClient mc) { // TODO Auto-generated method stub availablePool.add(mc); busyPool.remove(mc); }
/**
 * Detaches a previously registered broadcaster life-cycle policy listener; unknown
 * listeners are ignored.
 *
 * @param b listener to detach
 */
@Override
public void removeBroadcasterLifeCyclePolicyListener(BroadcasterLifeCyclePolicyListener b) {
  lifeCycleListeners.remove(b);
}
/**
 * Stops the given listener from receiving physics tick callbacks; a listener that was
 * never registered is silently ignored.
 *
 * @param listener listener to detach
 */
public void removeTickListener(PhysicsTickListener listener) {
  tickListeners.remove(listener);
}
@Override public synchronized long claimRecords(SDFSEvent evt, LargeBloomFilter bf) throws IOException { if (this.isClosed()) throw new IOException("Hashtable " + this.fileName + " is close"); executor = new ThreadPoolExecutor( Main.writeThreads + 1, Main.writeThreads + 1, 10, TimeUnit.SECONDS, worksQueue, new ProcessPriorityThreadFactory(Thread.MIN_PRIORITY), executionHandler); csz = new AtomicLong(0); Lock l = this.gcLock.writeLock(); l.lock(); this.runningGC = true; lbf = null; lbf = new LargeBloomFilter(maxSz, .01); l.unlock(); try { SDFSLogger.getLog() .info("Claiming Records [" + this.getSize() + "] from [" + this.fileName + "]"); SDFSEvent tEvt = SDFSEvent.claimInfoEvent( "Claiming Records [" + this.getSize() + "] from [" + this.fileName + "]", evt); tEvt.maxCt = this.maps.size(); Iterator<ProgressiveFileByteArrayLongMap> iter = maps.iterator(); while (iter.hasNext()) { tEvt.curCt++; ProgressiveFileByteArrayLongMap m = null; try { m = iter.next(); executor.execute(new ClaimShard(m, bf, lbf, csz)); } catch (Exception e) { tEvt.endEvent( "Unable to claim records for " + m + " because : [" + e.toString() + "]", SDFSEvent.ERROR); SDFSLogger.getLog().error("Unable to claim records for " + m, e); throw new IOException(e); } } executor.shutdown(); try { while (!executor.awaitTermination(10, TimeUnit.SECONDS)) { SDFSLogger.getLog().debug("Awaiting fdisk completion of threads."); } } catch (InterruptedException e) { throw new IOException(e); } this.kSz.getAndAdd(-1 * csz.get()); tEvt.endEvent("removed [" + csz.get() + "] records"); SDFSLogger.getLog().info("removed [" + csz.get() + "] records"); iter = maps.iterator(); while (iter.hasNext()) { ProgressiveFileByteArrayLongMap m = null; try { m = iter.next(); if (m.isFull() && !m.isActive()) { double pf = (double) m.size() / (double) m.maxSize(); // SDFSLogger.getLog().info("pfull=" + pf); if (pf < .4 || pf == Double.NaN) { // SDFSLogger.getLog().info("deleting " + // m.toString()); m.iterInit(); KVPair p = 
m.nextKeyValue(); while (p != null) { ProgressiveFileByteArrayLongMap _m = this.getWriteMap(); try { _m.put(p.key, p.value); } catch (HashtableFullException e) { _m.setActive(false); _m = this.createWriteMap(); _m.put(p.key, p.value); } finally { this.activeWriteMaps.offer(_m); } p = m.nextKeyValue(); } int mapsz = maps.size(); maps.remove(m); mapsz = mapsz - maps.size(); // SDFSLogger.getLog().info( // "removing map " + m.toString() + " sz=" // + maps.size() + " rm=" + mapsz); m.vanish(); m = null; } } } catch (Exception e) { tEvt.endEvent( "Unable to compact " + m + " because : [" + e.toString() + "]", SDFSEvent.ERROR); SDFSLogger.getLog().error("to compact " + m, e); throw new IOException(e); } } return csz.get(); } finally { l.lock(); this.runningGC = false; l.unlock(); executor = null; } }
/**
 * Measures round-trip latency to the owned client by echoing "lagtest" for the first
 * {@code lagTrials - 1} probes and "flagtest" for the final one, accumulating elapsed
 * time per connection in {@code latencies} and averaging on the final probe. Sets
 * {@code isLagHigh} from the combined latency and finally marks this player "ready".
 * If the peer disappears (readLine() returns null) both clients are sent "kill" and this
 * worker stops.
 *
 * @param lagTrials number of round-trip probes (the last one is the "flagtest" probe)
 */
private void performLagTest(int lagTrials) {
  String lagString = "";
  long time = 0;
  try {
    // First lagTrials-1 probes: plain "lagtest" echo, accumulating elapsed time.
    for (int i = 0; i < lagTrials - 1; i++) {
      time = System.currentTimeMillis();
      ownedOutputStream.write(("lagtest" + "\0").getBytes());
      lagString = ownedInputStream.readLine();
      if (lagString != null) {
        lagString = lagString.trim();
      } else {
        // Peer vanished: tell both sides to terminate and stop this worker.
        ownedOutputStream.writeBytes("kill" + "\0");
        otherOutputStream.writeBytes("kill" + "\0");
        System.out.println("Stop.");
        workers.remove(this);
        stop = true;
        return;
      }
      if (lagString.equals("lagtest")) {
        time = System.currentTimeMillis() - time;
        // Connection 1 accumulates under key 1, connection 2 under key 2.
        if (connNum == 1) {
          latencies.put(1, latencies.get(1) + time);
        } else {
          latencies.put(2, latencies.get(2) + time);
        }
      }
    }
    // Final probe uses "flagtest" so the client knows the test is ending.
    time = System.currentTimeMillis();
    ownedOutputStream.write(("flagtest" + "\0").getBytes());
    lagString = ownedInputStream.readLine();
    if (lagString != null) {
      lagString = lagString.trim();
    } else {
      ownedOutputStream.writeBytes("kill" + "\0");
      otherOutputStream.writeBytes("kill" + "\0");
      System.out.println("Stop.");
      workers.remove(this);
      stop = true;
      return;
    }
    if (lagString.equals("flagtest")) {
      time = System.currentTimeMillis() - time;
      if (connNum == 1) {
        latencies.put(1, latencies.get(1) + time);
        // Convert the accumulated total into an average per trial.
        latencies.put(1, latencies.get(1) / lagTrials);
        System.out.println("Conn 1 Lag = " + latencies.get(1));
      } else {
        latencies.put(2, latencies.get(2) + time);
        latencies.put(2, latencies.get(2) / lagTrials);
        System.out.println("Conn 2 Lag = " + latencies.get(2));
      }
    }
  } catch (IOException e) {
    workers.remove(this);
    stop = true;
  }
  // Combined latency above 40 counts as "high lag"; isLagHigh is used as a divisor in
  // the ball-velocity compensation elsewhere.
  if (latencies.get(1) + latencies.get(2) > 40) {
    isLagHigh = 2;
  } else {
    isLagHigh = 1;
  }
  if (connNum == 1) {
    playerStatus.put(1, "ready");
  } else if (connNum == 2) {
    playerStatus.put(2, "ready");
  }
}
/**
 * Relay loop for one player's connection: reads NUL-terminated lines from the owned
 * socket and dispatches them — readiness handshakes, lag tests ("lt"), ball-state
 * messages (prefix 'b') whose velocities are rescaled to compensate for measured latency,
 * goal notifications ("g"), and anything else is forwarded verbatim to the other player.
 * A null read (peer gone) sends "kill" to both sides and stops the worker.
 */
public void run() {
  try {
    String xyLine = "";
    while (!stop) {
      xyLine = ownedInputStream.readLine();
      if (xyLine != null) {
        xyLine = xyLine.trim();
      } else {
        // Connection dropped: instruct both clients to terminate and stop this worker.
        ownedOutputStream.writeBytes("kill" + "\0");
        otherOutputStream.writeBytes("kill" + "\0");
        System.out.println("Stop.");
        workers.remove(this);
        stop = true;
        return;
      }
      if (xyLine.equals("ready")) {
        // Readiness handshake: run a 30-round lag test, then poll for the other player.
        if (connNum == 1) {
          performLagTest(30);
          for (int i = 0; i < waitForOtherPlayerRetries; i++) {
            if (playerStatus.get(2).equals("ready")) {
              ownedOutputStream.write(("ready" + "\0").getBytes());
              try {
                Thread.sleep(waitForOtherPlayerDelay);
              } catch (InterruptedException e) {
                e.printStackTrace();
              }
              break;
            } else {
              try {
                Thread.sleep(waitForOtherPlayerDelay);
              } catch (InterruptedException e) {
                e.printStackTrace();
              }
            }
          }
        } else if (connNum == 2) {
          performLagTest(30);
          for (int i = 0; i < waitForOtherPlayerRetries; i++) {
            if (playerStatus.get(1).equals("ready")) {
              ownedOutputStream.write(("ready" + "\0").getBytes());
              break;
            } else {
              try {
                Thread.sleep(waitForOtherPlayerDelay);
              } catch (InterruptedException e) {
                e.printStackTrace();
              }
            }
          }
        }
      } else if (xyLine.equals("start")) {
        otherOutputStream.write((xyLine + "\0").getBytes());
      } else if (xyLine.equals("lt")) {
        // Quick 5-round lag re-test.
        performLagTest(5);
      } else if (xyLine != null && xyLine.length() > 0 && xyLine.charAt(0) == 'b') {
        // Ball state message "bx<..>y<..>xv<..>yv<..>r<..>": parse position, velocity,
        // and rotation by marker positions.
        ballX = Double.parseDouble(xyLine.substring(xyLine.indexOf("x") + 1, xyLine.indexOf("y")));
        ballY = Double.parseDouble(
            xyLine.substring(xyLine.indexOf("y") + 1, xyLine.lastIndexOf("x")));
        ballXVel = Double.parseDouble(
            xyLine.substring(xyLine.lastIndexOf("x") + 2, xyLine.lastIndexOf("y")));
        ballYVel = Double.parseDouble(
            xyLine.substring(xyLine.lastIndexOf("y") + 2, xyLine.indexOf("r")));
        ballRot = Double.parseDouble(xyLine.substring(xyLine.indexOf("r") + 1));
        double combinedLag = (((latencies.get(1) * 2) + (latencies.get(2) * 2))
            * (constantLagFactor) / isLagHigh) / msPerFrame;
        // convert lag from ms to frames
        // Rescale the X velocity so the ball appears to arrive on time despite the lag;
        // the travel distance depends on which side the message came from.
        if (connNum == 1) {
          distance = screenWidth - (dragRoom * 2) + (dragRoom - ballX);
          time = distance / Math.abs(ballXVel);
          newBallXVel = distance / (time - combinedLag);
        } else {
          distance = screenWidth - (dragRoom * 2) + (ballX - (screenWidth - dragRoom));
          time = distance / Math.abs(ballXVel);
          newBallXVel = distance / (time - combinedLag);
        }
        if (ballXVel < 0) {
          newBallXVel = newBallXVel * -1;
        }
        // Scale the Y velocity the same way to keep the trajectory angle consistent.
        distance = ballYVel * time;
        newBallYVel = distance / (time - combinedLag);
        // Round everything to two decimals before re-serializing.
        ballX = (double) Math.round(ballX * 100) / 100;
        ballY = (double) Math.round(ballY * 100) / 100;
        newBallXVel = (double) Math.round(newBallXVel * 100) / 100;
        newBallYVel = (double) Math.round(newBallYVel * 100) / 100;
        xyLine = "bx" + ballX + "y" + ballY + "xv" + newBallXVel + "yv" + newBallYVel
            + "r" + ballRot + "\0";
        otherOutputStream.write((xyLine + "\0").getBytes());
      } else if (xyLine.equals("g")) {
        // Goal: both players re-run a short lag test, then broadcast "ready" once both
        // report ready again.
        playerStatus.put(1, "lagtest");
        playerStatus.put(2, "lagtest");
        performLagTest(5);
        otherOutputStream.write((xyLine + "\0").getBytes());
        for (int i = 0; i < waitForOtherPlayerRetries; i++) {
          if (playerStatus.get(1).equals("ready") && playerStatus.get(2).equals("ready")) {
            ownedOutputStream.write(("ready" + "\0").getBytes());
            otherOutputStream.write(("ready" + "\0").getBytes());
            System.out.println("Sent ready after goal");
            break;
          } else {
            try {
              Thread.sleep(waitForOtherPlayerDelay);
            } catch (InterruptedException e) {
              e.printStackTrace();
            }
          }
        }
      } else if (xyLine != null) {
        // Anything unrecognized is relayed unchanged to the other player.
        otherOutputStream.write((xyLine + "\0").getBytes());
      }
    }
    ownedInputStream.close();
    ownedOutputStream.close();
  } catch (IOException e) {
    System.out.println("Stop.");
    workers.remove(this);
    stop = true;
    return;
  }
}
/**
 * Unregisters a drone listener; null and unknown listeners are silently ignored.
 *
 * @param listener listener to unregister (may be null)
 */
public void unregisterDroneListener(DroneListener listener) {
  if (listener != null) {
    droneListeners.remove(listener);
  }
}
/**
 * Called when an async request completes: deregisters its context, and when it was the
 * last tracked client, detaches this object from the ticker.
 *
 * @param event the completed async event
 * @throws IOException declared by the listener interface
 */
@Override
public void onComplete(AsyncEvent event) throws IOException {
  // remove() doubles as the membership test; only decrement the client count for
  // contexts we actually tracked (preserves the original short-circuit).
  final boolean wasTracked = clients.remove(event.getAsyncContext());
  if (wasTracked && clientcount.decrementAndGet() == 0) {
    ticker.removeTickListener(this);
  }
}
/**
 * Detaches an observer from this room; nothing happens if it was never registered.
 *
 * @param observer observer to remove
 */
@Override
public void removeObserver(RoomObserver observer) {
  mObservers.remove(observer);
}
/**
 * Removes an event listener and returns this resource for call chaining.
 *
 * @param e listener to remove
 * @return this resource
 */
@Override
public AtmosphereResource removeEventListener(AtmosphereResourceEventListener e) {
  listeners.remove(e);
  return this;
}
/**
 * Removes a gamer from the registered set; unknown gamers are ignored.
 *
 * @param gamer gamer to unregister
 */
public void unregisterGamer(Gamer gamer) {
  gamers.remove(gamer);
}