/**
 * Performs a depth-first, post-order traversal over a DAG.
 *
 * @param initialNodes The nodes from which to perform the traversal. Not allowed to contain
 *     {@code null}.
 * @throws CycleException if a cycle is found while performing the traversal.
 */
@SuppressWarnings("PMD.PrematureDeclaration")
public void traverse(Iterable<? extends T> initialNodes)
    throws CycleException, IOException, InterruptedException {
  // Work stack: the nodes currently being explored, head = deepest. This corresponds to the
  // current chain of nodes being explored. Enforcing this invariant makes this data structure
  // useful for debugging (a CycleException can report the offending chain).
  Deque<Explorable> toExplore = Lists.newLinkedList();
  for (T node : initialNodes) {
    toExplore.add(new Explorable(node));
  }
  // inProgress: nodes on the current exploration chain (cycle detection).
  Set<T> inProgress = Sets.newHashSet();
  // explored: fully-visited nodes, in post-order (insertion order preserved).
  LinkedHashSet<T> explored = Sets.newLinkedHashSet();
  while (!toExplore.isEmpty()) {
    Explorable explorable = toExplore.peek();
    T node = explorable.node;

    // This could happen if one of the initial nodes is a dependency of the other, for example.
    if (explored.contains(node)) {
      toExplore.removeFirst();
      continue;
    }

    inProgress.add(node);

    // Find children that need to be explored to add to the stack.
    int stackSize = toExplore.size();
    for (Iterator<T> iter = explorable.children; iter.hasNext(); ) {
      T child = iter.next();
      if (inProgress.contains(child)) {
        // Child is already on the current chain: a back-edge, i.e. a cycle.
        throw createCycleException(child, toExplore);
      } else if (!explored.contains(child)) {
        toExplore.addFirst(new Explorable(child));
        // Without this break statement:
        // (1) Children will be explored in reverse order instead of the specified order.
        // (2) CycleException may contain extra nodes.
        // Comment out the break statement and run the unit test to verify this for yourself.
        break;
      }
    }
    if (stackSize == toExplore.size()) {
      // Nothing was added to toExplore, so the current node can be popped off the stack and
      // marked as explored.
      toExplore.removeFirst();
      inProgress.remove(node);
      explored.add(node);

      // Now that the internal state of this traversal has been updated, notify the observer.
      onNodeExplored(node);
    }
  }
  Preconditions.checkState(inProgress.isEmpty(), "No more nodes should be in progress.");
  onTraversalComplete(Iterables.unmodifiableIterable(explored));
}
/**
 * Updates the history capacity and immediately trims both the undo ({@code past}) and redo
 * ({@code future}) deques down to that capacity, discarding their oldest (tail-most) entries.
 *
 * @param limit the maximum number of entries each deque may hold
 */
public void setLimit(int limit) {
  this.limit = limit;
  // Drop tail entries until each deque fits within the new cap.
  for (int excess = past.size() - limit; excess > 0; excess--) {
    past.removeLast();
  }
  for (int excess = future.size() - limit; excess > 0; excess--) {
    future.removeLast();
  }
}
/**
 * Evaluates {@code op} against {@code input} by pushing the input iterator, dispatching the
 * visitor, and popping the produced iterator. Warns if the visit left the operand stack at a
 * different depth than it found it.
 *
 * @param op the algebra operator to evaluate
 * @param input the iterator feeding the operator
 * @return the iterator produced by visiting {@code op}
 */
QueryIterator exec(Op op, QueryIterator input) {
  push(input);
  int depthBefore = stack.size();
  op.visit(this);
  int depthAfter = stack.size();
  // A well-behaved visit consumes exactly what it pushes.
  if (depthBefore != depthAfter) {
    Log.warn(this, "Possible stack misalignment");
  }
  return pop();
}
private void solveCaptchas() { boolean isAnotherChallenge = (reChallenges.size() + workingOn < files.length); if (isAnotherChallenge) { captchan.frame.setTitle("*Captchan"); boolean isFirstCaptcha = (workingOn == 0); if (isFirstCaptcha) // If initializing captchas, show progress reCaptcha.reFrame.setTitle( "Enter Captcha (" + (1 + reChallenges.size()) + "/" + files.length + ")"); reCaptcha.reFrame.setVisible(true); } }
/**
 * Iteratively walks the JSON tree rooted at {@code rootNode} and flattens every scalar value
 * into {@code targetStore}, keyed by its namespace-prefixed path. Nested objects are descended
 * into via an explicit stack of {@link VisitingContext}s; arrays are rejected.
 *
 * @throws ConfigException if an array is encountered or the value type is unrecognized
 */
public void run() {
  Deque<VisitingContext> stack = new ArrayDeque<>();
  stack.add(new VisitingContext(rootNode));
  boolean goOn = stack.peek().hasNext();
  if (goOn) {
    do {
      Map.Entry<String, JsonValue> current = stack.peek().nextElement();
      if (!(current.getValue() instanceof JsonStructure)) {
        // Scalar leaf: render it as a String and store it under its prefixed key.
        String key = stack.peek().getNSPrefix() + current.getKey();
        String value = null;
        JsonValue jsonValue = current.getValue();
        switch (jsonValue.getValueType()) {
          case NULL:
            value = null;
            break;
          case FALSE:
            value = Boolean.FALSE.toString();
            break;
          case TRUE:
            // BUG FIX: the result of Boolean.TRUE.toString() was previously discarded,
            // so TRUE values were silently stored as null.
            value = Boolean.TRUE.toString();
            break;
          case NUMBER:
            value = jsonValue.toString();
            break;
          case STRING:
            value = ((JsonString) jsonValue).getString();
            break;
          default:
            throw new ConfigException("Internal failure while processing JSON document.");
        }
        targetStore.put(key, value);
      } else if (current.getValue() instanceof JsonObject) {
        // Nested object: descend with an extended namespace prefix.
        String key = stack.peek().getNSPrefix() + current.getKey();
        JsonObject node = (JsonObject) current.getValue();
        stack.push(new VisitingContext(node, key));
      } else if (current.getValue() instanceof JsonArray) {
        throw new ConfigException("Arrays are not supported at the moment.");
      } else {
        throw new ConfigException("Internal failure while processing JSON document.");
      }
      // Pop every exhausted context; keep going while anything remains to visit.
      goOn = stack.peek().hasNext();
      while (!goOn && stack.size() > 0) {
        stack.remove();
        goOn = (stack.size() > 0) && stack.peek().hasNext();
      }
    } while (goOn);
  }
}
/**
 * Records that {@code symbol} references an enclosing class instance: pops the flag entries for
 * the levels skipped over, then re-pads the flag stack with {@code TRUE} until it is level with
 * the outer-class stack. No-op when no flags are tracked or the symbol is not such a reference.
 */
private void checkSymbol(Symbol symbol) {
  if (atLeastOneReference.isEmpty()) {
    return;
  }
  int enclosingLevel = referenceInstance(symbol);
  if (enclosingLevel < 0) {
    // Not a reference to an enclosing instance — nothing to record.
    return;
  }
  for (int popped = 0; popped < enclosingLevel; popped++) {
    atLeastOneReference.pop();
  }
  // Restore the invariant: one flag per tracked outer class.
  while (atLeastOneReference.size() != outerClasses.size()) {
    atLeastOneReference.push(Boolean.TRUE);
  }
}
/** Verifies LIFO behavior of addLast/removeLast: two appended items come back newest-first. */
@Test
public void Deque_addLast_removeLast_AddsRemovesTwo() {
  deque.addLast("firstString");
  deque.addLast("secondString");

  // Both items in place.
  assertFalse(deque.isEmpty());
  assertEquals(deque.size(), 2);

  // removeLast yields the most recently appended item first.
  String popped = deque.removeLast();
  assertEquals(popped, "secondString");
  assertFalse(deque.isEmpty());
  assertEquals(deque.size(), 1);

  // Then the older item, leaving the deque empty.
  popped = deque.removeLast();
  assertEquals(popped, "firstString");
  assertTrue(deque.isEmpty());
  assertEquals(deque.size(), 0);
}
/**
 * Validates a RAML file that pulls in two nested !include resources, each containing an unknown
 * key, and checks that every error reports the correct message, source location (all positions
 * are asserted 1-based via the {@code + 1} shifts), include name, and include-context chain.
 */
@Test
public void includeWithError() {
  String includedResource1 = "org/raml/parser/rules/included-with-error.yaml";
  String includedResource2 = "org/raml/parser/rules/included-with-error-2.yaml";
  List<ValidationResult> errors =
      validateRaml("org/raml/parser/rules/includes-yaml-with-error.yaml");
  assertThat(errors.size(), is(3));

  // Error 0: bad key in the root document itself — no include name, empty context.
  assertThat(errors.get(0).getMessage(), containsString("Unknown key: invalidKeyRoot"));
  assertThat(errors.get(0).getIncludeName(), nullValue());
  assertThat(errors.get(0).getLine() + 1, is(6));
  assertThat(errors.get(0).getStartColumn() + 1, is(1));
  assertThat(errors.get(0).getEndColumn() + 1, is(15));

  // Error 1: bad key inside the first-level included resource.
  assertThat(errors.get(1).getMessage(), containsString("Unknown key: invalidKey1"));
  assertThat(errors.get(1).getIncludeName(), is(includedResource1));
  assertThat(errors.get(1).getLine() + 1, is(2));
  assertThat(errors.get(1).getStartColumn() + 1, is(1));
  assertThat(errors.get(1).getEndColumn() + 1, is(12));
  // Its include context holds exactly one frame: where the root included resource 1.
  Deque<IncludeInfo> includeContext = errors.get(1).getIncludeContext();
  assertThat(includeContext.size(), is(1));
  IncludeInfo includeInfo = includeContext.pop();
  assertThat(includeInfo.getLine() + 1, is(7));
  assertThat(includeInfo.getStartColumn() + 1, is(14));
  assertThat(includeInfo.getEndColumn() + 1, is(69));
  assertThat(includeInfo.getIncludeName(), is(includedResource1));
  assertThat(includeContext.isEmpty(), is(true));

  // Error 2: bad key inside the second-level included resource.
  assertThat(errors.get(2).getMessage(), containsString("Unknown key: invalidKey2"));
  assertThat(errors.get(2).getIncludeName(), is(includedResource2));
  assertThat(errors.get(2).getLine() + 1, is(3));
  assertThat(errors.get(2).getStartColumn() + 1, is(1));
  assertThat(errors.get(2).getEndColumn() + 1, is(12));
  // Its context chain is two frames deep, innermost first: resource1 included resource2,
  // which was itself included by the root document.
  includeContext = errors.get(2).getIncludeContext();
  assertThat(includeContext.size(), is(2));
  includeInfo = includeContext.pop();
  assertThat(includeInfo.getLine() + 1, is(3));
  assertThat(includeInfo.getStartColumn() + 1, is(6));
  assertThat(includeInfo.getEndColumn() + 1, is(63));
  assertThat(includeInfo.getIncludeName(), is(includedResource2));
  includeInfo = includeContext.pop();
  assertThat(includeInfo.getLine() + 1, is(7));
  assertThat(includeInfo.getStartColumn() + 1, is(14));
  assertThat(includeInfo.getEndColumn() + 1, is(69));
  assertThat(includeInfo.getIncludeName(), is(includedResource1));
  assertThat(includeContext.isEmpty(), is(true));
}
/**
 * Appends {@code key} to the FIFO key list and, when the list exceeds the configured capacity,
 * evicts the oldest key from both the list and the backing cache delegate.
 */
private void cycleKeyList(Object key) {
  keyList.addLast(key);
  if (keyList.size() > size) {
    // Over capacity: the head of the list is the least recently added key.
    delegate.removeObject(keyList.removeFirst());
  }
}
/**
 * Removes several last tasks in the queue, leaving only {@code remaining} amount of them,
 * counted from the head of the queue.
 *
 * @param remaining how many tasks to keep at the head of the queue
 */
public void dismissLastTasks(int remaining) {
  synchronized (myQueue) {
    // Poll from the tail until only the requested head portion is left.
    for (int excess = myQueue.size() - remaining; excess > 0; excess--) {
      myQueue.pollLast();
    }
  }
}
/**
 * Pushes {@code entry} onto the head of the log deque (newest-first order) and, if the deque
 * now exceeds {@code maxSize}, drops the oldest entry from the tail.
 */
public void addEntry(LogEntry entry) {
  logDeque.push(entry);
  boolean overCapacity = logDeque.size() > maxSize;
  if (overCapacity) {
    logDeque.removeLast();
  }
}
/** Verifies that two addLast calls leave the deque non-empty with exactly two elements. */
@Test
public void Deque_addLast_AddsTwo() {
  deque.addLast("firstString");
  deque.addLast("secondString");

  assertFalse(deque.isEmpty());
  assertEquals(deque.size(), 2);
}
/**
 * Breadth-first search from {@code initState}. On reaching a final state, prints the path via
 * {@code outputState} and returns; if the whole space is exhausted without success, prints the
 * initial triple followed by a separator line.
 */
private static void solve(State initState) {
  Set<State> visited = new HashSet<State>();
  // pred: child -> the state it was first discovered from (for path reconstruction).
  Map<State, State> pred = new HashMap<State, State>();
  // dist: BFS depth of each discovered state; doubles as the "already queued" marker.
  Map<State, Integer> dist = new HashMap<State, Integer>();
  Deque<State> frontier = new ArrayDeque<State>();
  frontier.offer(initState);
  dist.put(initState, 0);
  while (!frontier.isEmpty()) {
    State current = frontier.poll();
    int depth = dist.get(current);
    visited.add(current);
    if (current.isFinal()) {
      outputState(current, pred);
      return;
    }
    for (State next : current.getChildren()) {
      if (visited.contains(next)) {
        continue;
      }
      if (!pred.containsKey(next)) {
        pred.put(next, current);
      }
      if (!dist.containsKey(next)) {
        dist.put(next, depth + 1);
        frontier.offer(next);
      }
    }
  }
  // No final state reachable.
  System.out.printf("%d %d %d\n", initState.a, initState.b, initState.c);
  System.out.println("============");
}
/**
 * Splits {@code s} around {@code regex} like {@link String#split(String)}, but restores the
 * trailing empty fields that {@code split} silently drops (e.g. {@code "a,b,,"} yields
 * {@code ["a", "b", "", ""]} for separator {@code ","}).
 *
 * <p>NOTE(review): the trailing-separator count below compares {@code regex} as a literal
 * string, while {@code String.split} interprets it as a regular expression; the two can
 * disagree when the pattern contains metacharacters. Preserved as-is — TODO confirm callers
 * only pass literal separators.
 *
 * @param s the string to split
 * @param regex the separator (treated as a regex by split, as a literal by the trailing count)
 * @return the split fields, with trailing empty fields restored
 */
private String[] breakpoint(String s, String regex) {
  int len = regex.length();
  // Count back-to-back occurrences of the separator at the very end of s.
  int trailing = 0;
  int pos = s.length() - len;
  while (pos >= 0 && s.substring(pos, pos + len).equals(regex)) {
    trailing++;
    pos -= len;
  }
  String[] fields = s.split(regex);
  // copyOf pads the new slots with null; overwrite them with empty strings.
  String[] result = Arrays.copyOf(fields, fields.length + trailing);
  for (int i = fields.length; i < result.length; i++) {
    result[i] = "";
  }
  return result;
}
public void recoverTree(TreeNode root) { Deque<TreeNode> stack = new ArrayDeque<>(); TreeNode first = null; TreeNode second = null; TreeNode prev = null; TreeNode curr = root; // in order traverse while (curr != null || stack.size() > 0) { if (curr != null) { stack.push(curr); curr = curr.left; } else { curr = stack.pop(); if (prev != null && prev.val > curr.val) { if (first == null) { first = prev; } second = curr; // this needs to be outside of if } prev = curr; curr = curr.right; } } // now swap first and second int tmp = first.val; first.val = second.val; second.val = tmp; }
/**
 * Finalizes the message: pops the root group as the current record. In DEBUG builds, first
 * asserts the group stack has unwound to exactly the root (i.e. no record is still open).
 */
@Override
public void endMessage() {
  if (DEBUG && groups.size() != 1) {
    throw new IllegalStateException("end of message in the middle of a record " + fields);
  }
  this.currentRecord = groups.pop();
}
/**
 * Returns how many buffers are currently available, or 0 when running in series mode (which
 * never hands out buffers).
 */
private int getAvailableBufferCount() {
  return RUN_IN_SERIES ? 0 : availableBuffers.size();
}
/** * Uses the extraction template generated by the {@link #templateGeneration} process and utilizes * some own characteristics of the target page pt to extract the content of it which first needs * to be parsed to a DOM tree <em>tp</em>. * * @param tt The previouslz generated extraction template * @param tp_i The root DOM element of the document to predict the main content for */ public void newsContentExtraction(Deque<Token> tt, Token tp_i) { // templateNode <-- tt.firstElement Token templateNode = tt.getFirst(); if (templateNode.getName().equals(tp_i.getName()) && templateNode.getLevel() == tp_i.getLevel() && templateNode.getSibNo() == tp_i.getSibNo()) { tt.removeFirst(); if (tt.size() > 0) { Token nextTemplateNode = tt.getFirst(); if (tp_i.getChildren() == null || tp_i.getChildren().length == 0) { LOG.info(tp_i.getText()); while (nextTemplateNode.getParentNo() == templateNode.getNo()) { tt.removeFirst(); templateNode = nextTemplateNode; nextTemplateNode = tt.getFirst(); } } else { if (nextTemplateNode.getParentNo() != templateNode.getNo()) { System.out.println(this.deleteEmbeddedNoise(tp_i.getSubtreeText())); } for (int j = 0; j < tp_i.getChildren().length; j++) { this.newsContentExtraction(tt, tp_i.getChildren()[j]); } } } else { LOG.info(this.deleteEmbeddedNoise(tp_i.getSubtreeText())); } } }
private void updateEstimates(int secondsRemaining) { // Archivo.logger.debug("Adding {} to estimates", secondsRemaining); recentEndTimeEstimates.addLast(secondsRemaining); if (recentEndTimeEstimates.size() > MAX_END_TIME_ESTIMATES) { recentEndTimeEstimates.removeFirst(); } }
/**
 * Remove all the constants from the stash.
 *
 * @return a list of the removed constant definitions, in removal order
 */
public List<ConstantDefinition> removeAll() {
  // Presize to the current stash size to avoid list growth while draining.
  final List<ConstantDefinition> drained = new ArrayList<>(constantDeque.size());
  while (!constantDeque.isEmpty()) {
    drained.add(remove());
  }
  return drained;
}
/** * Given the current configurations (e.g., hadoop version and execution mode), return the correct * file name to compare with the current test run output. * * @param outDir The directory where the reference log files are stored. * @param testName The test file name (terminated by ".out"). * @return The file name appended with the configuration values if it exists. */ public String outPath(String outDir, String testName) { String ret = (new File(outDir, testName)).getPath(); // List of configurations. Currently the list consists of hadoop version and execution // mode only List<String> configs = new ArrayList<String>(); configs.add(this.hadoopVer); Deque<String> stack = new LinkedList<String>(); StringBuilder sb = new StringBuilder(); sb.append(testName); stack.push(sb.toString()); // example file names are input1.q.out_0.20.0_minimr or input2.q.out_0.17 for (String s : configs) { sb.append('_'); sb.append(s); stack.push(sb.toString()); } while (stack.size() > 0) { String fileName = stack.pop(); File f = new File(outDir, fileName); if (f.exists()) { ret = f.getPath(); break; } } return ret; }
/**
 * Records {@code state} as the new present value: the old present is pushed onto the undo
 * history (trimmed to {@code limit}), and the redo history is discarded.
 */
public void insert(T state) {
  // The outgoing present becomes the newest undo entry.
  past.addFirst(present);
  if (past.size() > limit) {
    past.removeLast();
  }
  // A fresh insertion invalidates anything that could be redone.
  future.clear();
  present = state;
}
/**
 * Pops the top two operands, multiplies them, and pushes the product.
 *
 * @param stack the operand stack; must hold at least two entries
 * @return always {@code true}
 * @throws IllegalArgumentException if fewer than two operands are available
 */
@Override
public boolean execute(Deque<Operand> stack) {
  if (stack.size() < 2) {
    throw new IllegalArgumentException("Stack requires at least 2 operands!");
  }
  // Pop order: the top of the stack is the right-hand operand.
  Operand rhs = stack.pop();
  Operand lhs = stack.pop();
  stack.push(new Operand(lhs.m_iValue * rhs.m_iValue));
  return true;
}
/**
 * Add an entry to the history list. This causes the redo list to be cleared, and in case the
 * undo list is getting too long the oldest entries are removed.
 *
 * @param entry the entry to add to this list
 */
@SuppressWarnings("nls")
public void addEntry(final HistoryAction entry) {
  undoList.addLast(entry);
  // Any new action invalidates the redo chain.
  redoList.clear();
  // Evict from the front (oldest first) until within the cap.
  while (undoList.size() > MAX_HISTORY_LENGHT) {
    undoList.removeFirst();
  }
}
/**
 * Get a JdbConnection object either by the ones available in the queue or replace the first
 * expired connection. When a connection is given to a client, it is tagged with the current
 * time, which enables us to check how long it has been out and replace it if required.
 *
 * @return JdbConnection wrapping the actual jdbc connection to the db
 * @throws ConnectionPoolException if no connection is available
 */
public synchronized JdbConnection borrow() throws ConnectionPoolException {
  if (pooled.isEmpty()) {
    return createReplacementIfExpiredConnFound();
  }
  // Tag the head connection with its checkout time, then hand it out.
  borrowed.put(pooled.peek(), Instant.now());
  return pooled.removeFirst();
}
/**
 * Enforces a per-account concurrent-session limit. Each authenticated account has a deque of
 * session ids in {@code cache}; when the deque grows beyond {@code maxSession}, surplus
 * sessions are marked with a "kickout" attribute. A request arriving on a kicked-out session
 * is logged out and redirected to {@code kickoutUrl}.
 *
 * @return {@code true} to continue the filter chain, {@code false} when the request was
 *     redirected because its session was kicked out
 */
@Override
protected boolean onAccessDenied(ServletRequest request, ServletResponse response)
    throws Exception {
  Subject subject = getSubject(request, response);
  if (!subject.isAuthenticated() && !subject.isRemembered()) {
    // Not logged in: let the rest of the filter chain proceed untouched.
    return true;
  }
  Session session = subject.getSession();
  // String username = (String) subject.getPrincipal();
  String account = ((ShiroUser) subject.getPrincipal()).getAccount();
  Serializable sessionId = session.getId();

  // TODO: synchronize access to the per-account deque (concurrent requests may race here).
  Deque<Serializable> deque = cache.get(account);
  if (deque == null) {
    deque = new LinkedList<Serializable>();
    cache.put(account, deque);
  }

  // If this sessionId is not yet queued and the session has not been kicked out, enqueue it.
  if (!deque.contains(sessionId) && session.getAttribute("kickout") == null) {
    deque.push(sessionId);
  }

  // While the account exceeds its max session count, kick sessions out.
  while (deque.size() > maxSession) {
    Serializable kickoutSessionId = null;
    if (kickoutAfter) {
      // Kick out the newer sessions (head of the deque).
      kickoutSessionId = deque.removeFirst();
    } else {
      // Otherwise kick out the older sessions (tail of the deque).
      kickoutSessionId = deque.removeLast();
    }
    try {
      Session kickoutSession =
          sessionManager.getSession(new DefaultSessionKey(kickoutSessionId));
      if (kickoutSession != null) {
        // Mark the session as kicked out so its next request gets rejected.
        kickoutSession.setAttribute("kickout", true);
      }
    } catch (Exception e) {
      // ignore exception: the session may already have expired or been destroyed.
    }
  }

  // If the current session was kicked out, log out and redirect to the kickout page.
  if (session.getAttribute("kickout") != null) {
    // This session has been kicked out.
    try {
      subject.logout();
    } catch (Exception e) {
      // ignore: best-effort logout.
    }
    saveRequest(request);
    WebUtils.issueRedirect(request, response, kickoutUrl);
    return false;
  }
  return true;
}
/**
 * Pops the top two numbers off the calculator stack, multiplies them, and pushes the product.
 *
 * @param inCommand the command text, echoed in error messages
 * @throws CalcCommandsException if the stack holds fewer than two numbers
 */
public void exec(String inCommand) throws CalcCommandsException {
  if (stack.size() == 0) {
    throw new CalcCommandsException(
        "List is empty! " + this.getClass().getSimpleName() + ".exec(" + inCommand + ") fail!");
  }
  if (stack.size() == 1) {
    throw new CalcCommandsException(
        "List contains only one number! "
            + this.getClass().getSimpleName()
            + ".exec("
            + inCommand
            + ") fail!");
  }
  // Top of stack is the right-hand factor.
  Double top = stack.removeLast();
  Double next = stack.removeLast();
  stack.addLast(next * top);
}
/**
 * SAX callback for a closing tag: pops the element-name stack only when the closing tag
 * matches the element currently on top.
 */
@Override
public void endElement(String uri, String localName, String qName) {
  if (!parentNames.isEmpty() && qName.equals(parentNames.peek())) {
    parentNames.pop();
  }
}
/**
 * Logs the completion of the current task and pops it off the workunit stack, always leaving
 * the root workunit in place.
 *
 * @param message the completion message to log
 */
public void endTask(final String message) {
  if (logger.isInfoEnabled()) {
    logger.info("{}: finished: {}", getStackedWorkunitNames(), message);
  }
  boolean hasNestedUnit = workunits.size() > 1;
  if (hasNestedUnit) {
    workunits.pop();
  }
}
/**
 * Appends {@code buffer} to the message pipe and wakes one waiting consumer. When the pipe
 * grows past {@code PIPE_PRESSURE_THRESHOLD}, back-pressure is applied by stopping the
 * upstream message flow.
 */
@Override
public synchronized void pushMessage(T buffer) {
  messageBuffer.addLast(buffer);
  boolean overPressure = messageBuffer.size() > PIPE_PRESSURE_THRESHOLD;
  if (overPressure) {
    channelPushController.stopMessageFlow();
  }
  // Wake a consumer blocked waiting for messages (monitor is held: method is synchronized).
  notify();
}