/**
 * Renders the chain of extraction contexts leading to {@code extractionContext}
 * into {@code out}, one description per line, each nesting level marked with an
 * ASCII tree branch and growing indentation.
 */
private static void createPathString(
    DefaultModelSchemaExtractionContext<?> extractionContext, StringBuilder out) {
  // Walk up to the root, pushing so the root ends up on top of the deque.
  Deque<String> descriptions = Lists.newLinkedList();
  for (DefaultModelSchemaExtractionContext<?> ctx = extractionContext;
      ctx != null;
      ctx = ctx.getParent()) {
    descriptions.push(ctx.getDescription());
  }
  // Root line has no branch marker.
  out.append(descriptions.pop());
  out.append('\n');
  StringBuilder indent = new StringBuilder(" ");
  while (!descriptions.isEmpty()) {
    out.append(indent);
    out.append("\\--- ");
    out.append(descriptions.pop());
    if (!descriptions.isEmpty()) {
      out.append('\n');
      indent.append(" ");
    }
  }
}
/**
 * Visits a class declaration: records the node, recurses into the class body,
 * then unwinds the per-class tracking stacks.
 */
@Override
public void visitClass(ClassTree tree) {
  visitNode(tree);
  // Recurse into members; the stacks popped below are presumably pushed by
  // visitNode for this class — TODO confirm against visitNode's implementation.
  super.visitClass(tree);
  classTrees.pop();
  currentParents.pop();
}
/** * Remove a Security Manager and restore the Security Manager it replaced. This handles the case * where another Security Manager was set in place of this one to insure the previous security * manager is not replaced prematurely. * * @param sm Security Manager to remove */ public void removeSecurityManager(SecurityManager sm) { if (sm == null) { throw new IllegalArgumentException("SecurityManager is required"); } synchronized (stack) { if (stack.peek() == null) { throw new IllegalStateException("empty stack"); } if (sm.equals(stack.peek().sm)) { // If sm was the last SecurityManager to be registered restore an older // one by finding the last contiguous element which has been released // and restore the Security Manager that one replaced SecurityManager smToRestore = stack.pop().original; while (stack.peek() != null && stack.peek().released == true) { smToRestore = stack.pop().original; } restore(smToRestore); } else { // If another Security Manager has been registered since we were called // mark ourself as being released so when the more recent ones are removed // our original is restored properly for (StackElement e : stack) { if (e.sm.equals(sm)) { e.released = true; } } } } }
// Append the line mapping entries. void traverse(MappingVisitor v) throws IOException { // The mapping list is ordered as a pre-order traversal. The mapping // positions give us enough information to rebuild the stack and this // allows the building of the source map in O(n) time. Deque<Mapping> stack = new ArrayDeque<>(); for (Mapping m : mappings) { // Find the closest ancestor of the current mapping: // An overlapping mapping is an ancestor of the current mapping, any // non-overlapping mappings are siblings (or cousins) and must be // closed in the reverse order of when they encountered. while (!stack.isEmpty() && !isOverlapped(stack.peek(), m)) { Mapping previous = stack.pop(); maybeVisit(v, previous); } // Any gaps between the current line position and the start of the // current mapping belong to the parent. Mapping parent = stack.peek(); maybeVisitParent(v, parent, m); stack.push(m); } // There are no more children to be had, simply close the remaining // mappings in the reverse order of when they encountered. while (!stack.isEmpty()) { Mapping m = stack.pop(); maybeVisit(v, m); } }
/**
 * SAX end-of-element hook: closes the node on top of the stack, records its end
 * offsets, and restores {@code current} to the nearest enclosing instance node.
 */
@Override
public void endElement(String uri, String localName, String qName) throws SAXException {
  addElementErrors();
  FxNode node = nodeStack.pop();
  i(node).endsAt(contentLocator.getEndOffset()).endContent(contentLocator.getElementOffset());
  if (node instanceof PropertySetter) {
    PropertySetter s = (PropertySetter) node;
    if (s.isImplicit()) {
      // actually the outer element ends
      node = nodeStack.pop();
      // copy the offset information
      i(node).endsAt(contentLocator.getEndOffset()).endContent(contentLocator.getElementOffset());
    }
  }
  String tn = node.getSourceName();
  if (!tn.equals(localName)) {
    // sanity check: the popped node must correspond to the element being closed
    throw new IllegalStateException();
  }
  // special hack for parent nodes, which are implicit property setters:
  FxNode parentNode = nodeStack.peek();
  if (parentNode instanceof PropertySetter) {
    PropertySetter ps = (PropertySetter) parentNode;
    if (ps.isImplicit() && ps.getContent() == null) {
      i(ps).endsAt(contentLocator.getEndOffset()).endContent(contentLocator.getEndOffset());
    }
  }
  // Track the innermost enclosing instance node, if any remains on the stack.
  if (!nodeStack.isEmpty() && nodeStack.peek().getKind() == Kind.Instance) {
    current = (FxInstance) nodeStack.peek();
  } else {
    current = null;
  }
}
/**
 * Returns whether every bracket in {@code s} is properly opened and closed in
 * the correct order. Non-bracket characters are ignored.
 *
 * <p>Refactor: the three identical pop-and-compare branches are collapsed into
 * a single closing-bracket lookup.
 *
 * @param s the expression to check
 * @return true if all parentheses/brackets/braces are balanced
 */
public boolean isBalancedExp(String s) {
  for (int i = 0; i < s.length(); i++) {
    char ch = s.charAt(i);
    if (ch == PARANTHESIS.LP.getSymbol()
        || ch == PARANTHESIS.LSB.getSymbol()
        || ch == PARANTHESIS.LCB.getSymbol()) {
      paranthesisStack.push(ch);
    } else {
      char expectedOpener = matchingOpener(ch);
      // A closer must match the most recently pushed opener.
      if (expectedOpener != 0
          && (paranthesisStack.isEmpty() || paranthesisStack.pop() != expectedOpener)) {
        return false;
      }
    }
  }
  return paranthesisStack.isEmpty();
}

/** Maps a closing bracket to its opening partner, or 0 if {@code ch} is not a closer. */
private char matchingOpener(char ch) {
  if (ch == PARANTHESIS.RP.getSymbol()) {
    return PARANTHESIS.LP.getSymbol();
  }
  if (ch == PARANTHESIS.RSB.getSymbol()) {
    return PARANTHESIS.LSB.getSymbol();
  }
  if (ch == PARANTHESIS.RCB.getSymbol()) {
    return PARANTHESIS.LCB.getSymbol();
  }
  return 0;
}
/**
 * Scrolls the current XHTML editor to the element identified by walking
 * {@code nodeChain} (outermost entry first) through the freshly parsed
 * document. Does nothing when the current editor is not XHTML.
 */
public void scrollTo(Deque<ElementPosition> nodeChain) {
  if (currentEditor.getValue().getMediaType().equals(MediaType.XHTML)) {
    XHTMLCodeEditor xhtmlCodeEditor = (XHTMLCodeEditor) currentEditor.getValue();
    String code = xhtmlCodeEditor.getCode();
    /* int index = code.indexOf(html);
    logger.info("index of clicked html " + index + " html: " + html);
    xhtmlCodeEditor.scrollTo(index);*/
    // LocatedJDOMFactory attaches source line/column positions to parsed elements.
    LocatedJDOMFactory factory = new LocatedJDOMFactory();
    try {
      org.jdom2.Document document = XHTMLUtils.parseXHTMLDocument(code, factory);
      org.jdom2.Element currentElement = document.getRootElement();
      ElementPosition currentElementPosition = nodeChain.pop();
      // Descend one chain entry at a time; each entry selects the n-th
      // descendant with the recorded name (and namespace, if any).
      while (currentElementPosition != null) {
        IteratorIterable<org.jdom2.Element> children;
        if (StringUtils.isNotEmpty(currentElementPosition.getNamespaceUri())) {
          // Resolve the recorded namespace URI against those in scope here.
          List<Namespace> namespaces = currentElement.getNamespacesInScope();
          Namespace currentNamespace = null;
          for (Namespace namespace : namespaces) {
            if (namespace.getURI().equals(currentElementPosition.getNamespaceUri())) {
              currentNamespace = namespace;
              break;
            }
          }
          Filter<org.jdom2.Element> filter =
              Filters.element(currentElementPosition.getNodeName(), currentNamespace);
          children = currentElement.getDescendants(filter);
        } else {
          Filter<org.jdom2.Element> filter =
              Filters.element(currentElementPosition.getNodeName());
          children = currentElement.getDescendants(filter);
        }
        // Pick the descendant whose index matches the recorded position.
        int currentNumber = 0;
        for (org.jdom2.Element child : children) {
          if (currentNumber == currentElementPosition.getPosition()) {
            currentElement = child;
            break;
          }
          currentNumber++;
        }
        try {
          currentElementPosition = nodeChain.pop();
        } catch (NoSuchElementException e) {
          // Chain exhausted: currentElement is the scroll target.
          logger.info("no more element in node chain");
          currentElementPosition = null;
        }
      }
      // The located element carries its source position from the parser;
      // editor lines are 0-based, hence the -1.
      LocatedElement locatedElement = (LocatedElement) currentElement;
      EditorPosition pos =
          new EditorPosition(locatedElement.getLine() - 1, locatedElement.getColumn());
      logger.info("pos for scrolling to is " + pos.toJson());
      xhtmlCodeEditor.scrollTo(pos);
    } catch (IOException | JDOMException e) {
      logger.error("", e);
    }
  }
}
/** Leaves the innermost scope, discarding every variable it declared. */
void decrementScope() {
  // The scope stack records how many variables were declared in that scope.
  final int declared = scopes.pop();
  for (int i = 0; i < declared; i++) {
    variables.pop();
  }
}
/**
 * Records that the parser's current line contains code; a comment previously
 * counted on the same line is discarded from the comment counters, since a
 * line shared with code is not a pure comment line.
 */
private void registerLineOfCode() {
  lastCodeLine = locator.getLineNumber();
  if (lastEffectiveCommentLine() == lastCodeLine) {
    effectiveCommentLines.pop();
  }
  if (lastCommentLine() == lastCodeLine) {
    commentLines.pop();
  }
}
/**
 * Applies the operator on top of the operator stack to the two topmost
 * operands and pushes the result back onto the operand stack.
 */
private void popOperator() {
  final BinaryOperator operator = operatorStack.pop();
  // Operands were pushed left-to-right, so the right operand is on top.
  final Double right = operandStack.pop();
  final Double left = operandStack.pop();
  operandStack.push(operator.execute(left, right));
}
/**
 * Translates a SPARQL function call into its SQL equivalent. Single-argument
 * casts to double/float, integer/int, boolean and date are handled inline by
 * pushing the corresponding coercion type and visiting the argument; all other
 * functions are looked up in the native-function registry.
 *
 * @throws IllegalArgumentException if the function has no SQL translation for
 *     the current dialect
 */
@Override
public void meet(FunctionCall fc) throws RuntimeException {
  // special optimizations for frequent cases with variables
  if ((XMLSchema.DOUBLE.toString().equals(fc.getURI())
          || XMLSchema.FLOAT.toString().equals(fc.getURI()))
      && fc.getArgs().size() == 1) {
    optypes.push(ValueType.DOUBLE);
    fc.getArgs().get(0).visit(this);
    optypes.pop();
  } else if ((XMLSchema.INTEGER.toString().equals(fc.getURI())
          || XMLSchema.INT.toString().equals(fc.getURI()))
      && fc.getArgs().size() == 1) {
    optypes.push(ValueType.INT);
    fc.getArgs().get(0).visit(this);
    optypes.pop();
  } else if (XMLSchema.BOOLEAN.toString().equals(fc.getURI()) && fc.getArgs().size() == 1) {
    optypes.push(ValueType.BOOL);
    fc.getArgs().get(0).visit(this);
    optypes.pop();
  } else if (XMLSchema.DATE.toString().equals(fc.getURI()) && fc.getArgs().size() == 1) {
    optypes.push(ValueType.DATE);
    fc.getArgs().get(0).visit(this);
    optypes.pop();
  } else {
    String fnUri = fc.getURI();
    String[] args = new String[fc.getArgs().size()];
    NativeFunction nf = functionRegistry.get(fnUri);
    if (nf != null && nf.isSupported(parent.getDialect())) {
      // Render each argument under the type the native function expects.
      for (int i = 0; i < args.length; i++) {
        args[i] =
            new ValueExpressionEvaluator(fc.getArgs().get(i), parent, nf.getArgumentType(i))
                .build();
      }
      // Cast the rendered call when the surrounding expression expects a
      // different type than the function returns.
      if (optypes.peek() != nf.getReturnType()) {
        builder.append(castExpression(nf.getNative(parent.getDialect(), args), optypes.peek()));
      } else {
        builder.append(nf.getNative(parent.getDialect(), args));
      }
    } else {
      throw new IllegalArgumentException(
          "the function " + fnUri + " is not supported by the SQL translation");
    }
  }
}
/**
 * Evaluates a whitespace-separated postfix (RPN) expression and returns its
 * value. Tokens are processed in input order; numbers are pushed onto the
 * operand stack and operator tokens consume it via the operations registry.
 *
 * <p>Fix: the original {@code \d+} pattern only accepted bare non-negative
 * integers even though values are doubles; the pattern now also accepts a
 * leading minus sign and a decimal part (backward-compatible generalization).
 *
 * @param input the expression, e.g. {@code "3 4 +"}
 * @return the computed value
 * @throws BadInputException on any token that is neither a number nor a
 *     registered operation
 */
public double calculate(String input) {
  Deque<String> tokens = new ArrayDeque<>(asList(input.split("\\s+")));
  Deque<Double> outputStack = new ArrayDeque<>();
  // pop() takes from the head, so tokens are consumed left-to-right.
  while (!tokens.isEmpty()) {
    String token = tokens.pop();
    if (token.matches("-?\\d+(\\.\\d+)?")) {
      outputStack.push(Double.parseDouble(token));
    } else if (operations.containsKey(token)) {
      operations.get(token).operate(outputStack);
    } else {
      throw new BadInputException("Invalid input: " + token);
    }
  }
  return outputStack.pop();
}
/**
 * Verifies that validation errors raised inside included RAML files carry the
 * correct include name, position, and include-context chain. All line/column
 * assertions add 1 because the parser reports 0-based positions.
 */
@Test
public void includeWithError() {
  String includedResource1 = "org/raml/parser/rules/included-with-error.yaml";
  String includedResource2 = "org/raml/parser/rules/included-with-error-2.yaml";
  List<ValidationResult> errors =
      validateRaml("org/raml/parser/rules/includes-yaml-with-error.yaml");
  assertThat(errors.size(), is(3));
  // Error 0: in the root file itself, so no include name.
  assertThat(errors.get(0).getMessage(), containsString("Unknown key: invalidKeyRoot"));
  assertThat(errors.get(0).getIncludeName(), nullValue());
  assertThat(errors.get(0).getLine() + 1, is(6));
  assertThat(errors.get(0).getStartColumn() + 1, is(1));
  assertThat(errors.get(0).getEndColumn() + 1, is(15));
  // Error 1: one include level deep.
  assertThat(errors.get(1).getMessage(), containsString("Unknown key: invalidKey1"));
  assertThat(errors.get(1).getIncludeName(), is(includedResource1));
  assertThat(errors.get(1).getLine() + 1, is(2));
  assertThat(errors.get(1).getStartColumn() + 1, is(1));
  assertThat(errors.get(1).getEndColumn() + 1, is(12));
  Deque<IncludeInfo> includeContext = errors.get(1).getIncludeContext();
  assertThat(includeContext.size(), is(1));
  IncludeInfo includeInfo = includeContext.pop();
  assertThat(includeInfo.getLine() + 1, is(7));
  assertThat(includeInfo.getStartColumn() + 1, is(14));
  assertThat(includeInfo.getEndColumn() + 1, is(69));
  assertThat(includeInfo.getIncludeName(), is(includedResource1));
  assertThat(includeContext.isEmpty(), is(true));
  // Error 2: two include levels deep; the context pops innermost-first.
  assertThat(errors.get(2).getMessage(), containsString("Unknown key: invalidKey2"));
  assertThat(errors.get(2).getIncludeName(), is(includedResource2));
  assertThat(errors.get(2).getLine() + 1, is(3));
  assertThat(errors.get(2).getStartColumn() + 1, is(1));
  assertThat(errors.get(2).getEndColumn() + 1, is(12));
  includeContext = errors.get(2).getIncludeContext();
  assertThat(includeContext.size(), is(2));
  includeInfo = includeContext.pop();
  assertThat(includeInfo.getLine() + 1, is(3));
  assertThat(includeInfo.getStartColumn() + 1, is(6));
  assertThat(includeInfo.getEndColumn() + 1, is(63));
  assertThat(includeInfo.getIncludeName(), is(includedResource2));
  includeInfo = includeContext.pop();
  assertThat(includeInfo.getLine() + 1, is(7));
  assertThat(includeInfo.getStartColumn() + 1, is(14));
  assertThat(includeInfo.getEndColumn() + 1, is(69));
  assertThat(includeInfo.getIncludeName(), is(includedResource1));
  assertThat(includeContext.isEmpty(), is(true));
}
/**
 * Advances the streaming cursor to the next event. Scalar children yield a
 * value event; container children additionally push a nested iterator, whose
 * exhaustion later yields a matching END_MAP/END_ARRAY event. Sets
 * {@code nextEvent} to null once the whole structure has been traversed.
 */
@SuppressWarnings("unchecked")
private void processNextNode() {
  if (stateStack.isEmpty()) {
    // Traversal complete.
    nextEvent = null;
    return;
  }
  currentItr = stateStack.peek();
  if (currentItr.hasNext()) {
    Object o = currentItr.next();
    if (inMap()) {
      // Map entries expose both key and value.
      Entry<String, HValue> entry = (Entry<String, HValue>) o;
      key = entry.getKey();
      value = entry.getValue();
    } else {
      // Array elements carry no key.
      key = null;
      value = HValue.initFromObject(o);
    }
    nextEvent = Types.getEventTypeForType(value.getType());
    if (!value.getType().isScalar()) {
      // Descend into the nested container; END event is emitted on exhaustion.
      stateStack.push(new IteratorWithType(value));
    }
  } else {
    // Current container exhausted: emit its END event and resume the parent.
    IteratorWithType iter = stateStack.pop();
    key = null;
    value = iter.getValue();
    nextEvent = (iter.getType() == Type.MAP) ? EventType.END_MAP : EventType.END_ARRAY;
    currentItr = stateStack.isEmpty() ? null : stateStack.peek();
  }
}
/**
 * Wires a function node into the control-flow graph and retires it as the
 * innermost exception-handler scope.
 */
private void handleFunction(Node node) {
  // A function node is expected to have at least (name, params, body);
  // control transfers unconditionally into the body (the third child).
  Preconditions.checkState(node.getChildCount() >= 3);
  createEdge(node, Branch.UNCOND, computeFallThrough(node.getFirstChild().getNext().getNext()));
  // This function must be the current handler scope before we leave it.
  Preconditions.checkState(exceptionHandler.peek() == node);
  exceptionHandler.pop();
}
/** * Main method to solve the maze. pre: args[0] contains the name of the input file. * * @param args Command line argument */ public static void main(String[] args) { int numV = 0; // The number of vertices. Graph theMaze = null; // Load the graph data from a file. try { Scanner scan = new Scanner(new FileReader(args[0])); theMaze = AbstractGraph.createGraph(scan, false, "List"); numV = theMaze.getNumV(); } catch (IOException ex) { System.err.println("IO Error while reading graph"); System.err.println(ex.toString()); System.exit(1); } // Perform breadth-first search. int parent[] = BreadthFirstSearch.breadthFirstSearch(theMaze, 0); // Construct the path. Deque<Integer> thePath = new ArrayDeque<Integer>(); int v = numV - 1; while (parent[v] != -1) { thePath.push(new Integer(v)); v = parent[v]; } // Output the path. System.out.println("The Shortest path is:"); while (!thePath.isEmpty()) { System.out.println(thePath.pop()); } }
@Override public AST next() { if (!hasNext()) throw new IllegalStateException("This traversal is done"); final AST thisNode = nextNode; // look forward for the next next node if (nextNode.getFirstChild() != null) { context.push(nextNode); nextNode = nextNode.getFirstChild(); } else if (nextNode.getNextSibling() != null) { nextNode = nextNode.getNextSibling(); } else { nextNode = null; while (!context.isEmpty() && nextNode == null) { AST beenThere = context.pop(); if (beenThere.getNextSibling() != null) { nextNode = beenThere.getNextSibling(); break; } } } return thisNode; }
/**
 * Translates a SPARQL arithmetic expression into SQL. String "+" maps to the
 * dialect's CONCAT function; all other operators are rendered infix with both
 * operands evaluated under the coerced numeric type.
 *
 * @throws IllegalArgumentException for any string operation other than "+"
 */
@Override
public void meet(MathExpr expr) throws RuntimeException {
  ValueType ot = new OPTypeFinder(expr).coerce();
  if (ot == ValueType.STRING) {
    if (expr.getOperator() == MathExpr.MathOp.PLUS) {
      // String "+" becomes the dialect-specific CONCAT(left, right).
      builder.append(
          functionRegistry
              .get(FN.CONCAT.stringValue())
              .getNative(
                  parent.getDialect(),
                  new ValueExpressionEvaluator(expr.getLeftArg(), parent, ot).build(),
                  new ValueExpressionEvaluator(expr.getRightArg(), parent, ot).build()));
    } else {
      throw new IllegalArgumentException(
          "operation " + expr.getOperator() + " is not supported on strings");
    }
  } else {
    // NODE/TERM have no arithmetic type of their own; fall back to double.
    if (ot == ValueType.NODE || ot == ValueType.TERM) {
      ot = ValueType.DOUBLE;
    }
    optypes.push(ot);
    expr.getLeftArg().visit(this);
    builder.append(getSQLOperator(expr.getOperator()));
    expr.getRightArg().visit(this);
    optypes.pop();
  }
}
/**
 * Walks the given abstract path and marks every abstraction predicate found at
 * a location as relevant for every block that is open at that point.
 *
 * <p>Fix: the block-closing loop guarded only on {@code peek()} and would
 * throw a NullPointerException if the open-block stack ever drained; it is now
 * guarded with an explicit emptiness check.
 *
 * @param pPath the abstract error path being refined
 * @param pReached the reached set the current precision is taken from
 */
private void refineRelevantPredicatesComputer(List<ARGState> pPath, ARGReachedSet pReached) {
  UnmodifiableReachedSet reached = pReached.asReachedSet();
  Precision oldPrecision = reached.getPrecision(reached.getLastState());
  PredicatePrecision oldPredicatePrecision =
      Precisions.extractPrecisionByType(oldPrecision, PredicatePrecision.class);
  BlockPartitioning partitioning = predicateCpa.getPartitioning();
  // Stack of blocks currently "open" while walking down the path.
  Deque<Block> openBlocks = new ArrayDeque<>();
  openBlocks.push(partitioning.getMainBlock());
  for (ARGState pathElement : pPath) {
    CFANode currentNode = AbstractStates.extractLocation(pathElement);
    Integer currentNodeInstance =
        getPredicateState(pathElement).getAbstractionLocationsOnPath().get(currentNode);
    if (partitioning.isCallNode(currentNode)) {
      openBlocks.push(partitioning.getBlockForCallNode(currentNode));
    }
    Collection<AbstractionPredicate> localPreds =
        oldPredicatePrecision.getPredicates(currentNode, currentNodeInstance);
    // Every predicate at this location is relevant for every open block.
    for (Block block : openBlocks) {
      for (AbstractionPredicate pred : localPreds) {
        relevantPredicatesComputer.considerPredicateAsRelevant(block, pred);
      }
    }
    // Close all blocks that return at this node (guarded against empty stack).
    while (!openBlocks.isEmpty() && openBlocks.peek().isReturnNode(currentNode)) {
      openBlocks.pop();
    }
  }
}
/**
 * Renders a SPARQL COUNT aggregate as SQL. A bare COUNT() (no argument) is
 * expanded to COUNT of an ARRAY of all grouped variable bindings, since SQL's
 * "*" does not correspond to SPARQL's notion of variables.
 */
@Override
public void meet(Count node) throws RuntimeException {
  builder.append("COUNT(");
  if (node.isDistinct()) {
    builder.append("DISTINCT ");
  }
  if (node.getArg() == null) {
    // this is a weird special case where we need to expand to all variables selected in the query
    // wrapped
    // by the group; we cannot simply use "*" because the concept of variables is a different one
    // in SQL,
    // so instead we construct an ARRAY of the bindings of all variables
    List<String> countVariables = new ArrayList<>();
    for (SQLVariable v : parent.getVariables().values()) {
      if (v.getProjectionType() == ValueType.NONE) {
        Preconditions.checkState(
            v.getExpressions().size() > 0, "no expressions available for variable");
        countVariables.add(v.getExpressions().get(0));
      }
    }
    builder.append("ARRAY[");
    Joiner.on(',').appendTo(builder, countVariables);
    builder.append("]");
  } else {
    // Ordinary COUNT(expr): render the argument in NODE context.
    optypes.push(ValueType.NODE);
    node.getArg().visit(this);
    optypes.pop();
  }
  builder.append(")");
}
/**
 * Closes the message: in debug builds, asserts that exactly the root group
 * remains open, then captures it as the completed record.
 */
@Override
public void endMessage() {
  // Merged the nested ifs; the check still only runs when DEBUG is set.
  if (DEBUG && groups.size() != 1) {
    throw new IllegalStateException("end of message in the middle of a record " + fields);
  }
  this.currentRecord = groups.pop();
}
/** * Given the current configurations (e.g., hadoop version and execution mode), return the correct * file name to compare with the current test run output. * * @param outDir The directory where the reference log files are stored. * @param testName The test file name (terminated by ".out"). * @return The file name appended with the configuration values if it exists. */ public String outPath(String outDir, String testName) { String ret = (new File(outDir, testName)).getPath(); // List of configurations. Currently the list consists of hadoop version and execution // mode only List<String> configs = new ArrayList<String>(); configs.add(this.hadoopVer); Deque<String> stack = new LinkedList<String>(); StringBuilder sb = new StringBuilder(); sb.append(testName); stack.push(sb.toString()); // example file names are input1.q.out_0.20.0_minimr or input2.q.out_0.17 for (String s : configs) { sb.append('_'); sb.append(s); stack.push(sb.toString()); } while (stack.size() > 0) { String fileName = stack.pop(); File f = new File(outDir, fileName); if (f.exists()) { ret = f.getPath(); break; } } return ret; }
/**
 * Declares a new variable in the current scope, assigning it the next free
 * local slot after the most recently declared variable.
 *
 * @param source parse-tree location for error reporting; null for arguments
 * @param name the variable name; must not already be defined in scope
 * @param type the variable's type
 * @return the newly created variable
 * @throws IllegalArgumentException if the name is already defined in scope
 */
Variable addVariable(final ParserRuleContext source, final String name, final Type type) {
  if (getVariable(name) != null) {
    if (source == null) {
      throw new IllegalArgumentException(
          "Argument name [" + name + "] already defined within the scope.");
    } else {
      throw new IllegalArgumentException(
          error(source) + "Variable name [" + name + "] already defined within the scope.");
    }
  }
  // Next slot starts after the previous variable's slot plus its type size.
  final Variable previous = variables.peekFirst();
  int slot = 0;
  if (previous != null) {
    slot += previous.slot + previous.type.type.getSize();
  }
  final Variable variable = new Variable(name, type, slot);
  variables.push(variable);
  // Bump the declaration count of the innermost scope.
  final int update = scopes.pop() + 1;
  scopes.push(update);
  return variable;
}
/**
 * Zips the given roots (files or directory trees) into an in-memory archive.
 * Entry names are relativized against each root's parent (or the root itself
 * for directories); macOS ".DS_Store" files are skipped.
 *
 * <p>Fix: the manual try/finally is replaced by try-with-resources so the zip
 * stream is always closed (and flushed) even on exceptions.
 *
 * @param roots files or directories to archive
 * @return the zip archive bytes
 * @throws IOException on any read or zip error
 */
public static byte[] zip(List<File> roots) throws IOException {
  ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
  Deque<File> queue = new LinkedList<File>();
  Set<String> names = new HashSet<String>();
  try (ZipOutputStream zipOutputStream = new ZipOutputStream(outputStream)) {
    for (File root : roots) {
      // Entry names are relative to the root's parent for plain files.
      URI base = root.isDirectory() ? root.toURI() : root.getParentFile().toURI();
      queue.push(root);
      // Iterative depth-first walk of the tree under this root.
      while (!queue.isEmpty()) {
        File file = queue.pop();
        if (file.getName().equals(".DS_Store")) {
          continue; // skip macOS metadata files
        }
        if (file.isDirectory()) {
          addDirectory(zipOutputStream, queue, names, base, file);
        } else {
          addFile(zipOutputStream, base, file);
        }
      }
    }
  }
  return outputStream.toByteArray();
}
/**
 * Returns the transitive dependencies of {@code rootInputs} (including the
 * roots themselves), emitted in import order when {@code sorted} is true and
 * in the user-specified order otherwise.
 */
@Override
public List<INPUT> getDependenciesOf(List<INPUT> rootInputs, boolean sorted) {
  Preconditions.checkArgument(userOrderedInputs.containsAll(rootInputs));
  // Transitive closure over the "requires" edges, seeded with the roots.
  Set<INPUT> reachable = new HashSet<>();
  Deque<INPUT> pending = new ArrayDeque<>(rootInputs);
  while (!pending.isEmpty()) {
    INPUT next = pending.pop();
    if (!reachable.add(next)) {
      continue; // already expanded
    }
    for (String symbolName : next.getRequires()) {
      INPUT provider = exportingInputBySymbolName.get(symbolName);
      if (provider != null) {
        pending.add(provider);
      }
    }
  }
  // Emit in the requested global ordering, filtered to the reachable set.
  ImmutableList.Builder<INPUT> builder = ImmutableList.builder();
  for (INPUT input : (sorted ? importOrderedInputs : userOrderedInputs)) {
    if (reachable.contains(input)) {
      builder.add(input);
    }
  }
  return builder.build();
}
/**
 * Inserts phi functions for register {@code regNum} using the standard
 * dominance-frontier worklist algorithm: seed with every block that assigns
 * the register, then add a phi in each dominance-frontier block where the
 * register is live, iterating to a fixpoint.
 */
private static void placePhi(MethodNode mth, int regNum, LiveVarAnalysis la) {
  List<BlockNode> blocks = mth.getBasicBlocks();
  int blocksCount = blocks.size();
  BitSet hasPhi = new BitSet(blocksCount);
  BitSet processed = new BitSet(blocksCount);
  Deque<BlockNode> workList = new LinkedList<BlockNode>();
  // Seed the worklist with all blocks that assign this register.
  BitSet assignBlocks = la.getAssignBlocks(regNum);
  for (int id = assignBlocks.nextSetBit(0); id >= 0; id = assignBlocks.nextSetBit(id + 1)) {
    processed.set(id);
    workList.add(blocks.get(id));
  }
  while (!workList.isEmpty()) {
    BlockNode block = workList.pop();
    BitSet domFrontier = block.getDomFrontier();
    for (int id = domFrontier.nextSetBit(0); id >= 0; id = domFrontier.nextSetBit(id + 1)) {
      // A phi is only needed where the register is still live.
      if (!hasPhi.get(id) && la.isLive(id, regNum)) {
        BlockNode df = blocks.get(id);
        addPhi(df, regNum);
        hasPhi.set(id);
        // The inserted phi is itself a new assignment, so the frontier block
        // must be processed too (once).
        if (!processed.get(id)) {
          processed.set(id);
          workList.add(df);
        }
      }
    }
  }
}
/**
 * Pops the current tree context, runs its on-pop handlers, and remembers it as
 * {@code lastContext}. For expression nodes with a parent, a transformer is
 * built that would substitute a registered replacement expression — but it is
 * currently never applied (see the todo below).
 */
protected TreeContext popContext() {
  final TreeContext treeContext = treeContextStack.pop();
  // Notify registered handlers that this context is being closed.
  List<TreeContextAction> actions = treeContext.getOnPopHandlers();
  for (TreeContextAction contextAction : actions) {
    contextAction.call(treeContext);
  }
  lastContext = treeContext;
  ASTNode parentNode = treeContext.parent != null ? treeContext.parent.node : null;
  if (treeContext.node instanceof Expression && parentNode != null) {
    ClassCodeExpressionTransformer trn =
        new ClassCodeExpressionTransformer() {
          @Override
          protected SourceUnit getSourceUnit() {
            return null;
          }

          @Override
          public Expression transform(final Expression exp) {
            // Swap in the replacement only for the exact popped node.
            if (exp == treeContext.node) {
              Expression replacement = treeContext.getReplacement();
              if (replacement != null) {
                return replacement;
              }
            }
            return super.transform(exp);
          }
        };
    // todo: reliable way to call the transformer
    // parentNode.visit(trn);
  }
  return treeContext;
}
/**
 * Renders a SPARQL IF(cond, result, alternative) as a SQL CASE expression.
 * The condition is evaluated in boolean context; both branches are rendered
 * under the coerced common result type.
 */
@Override
public void meet(If node) throws RuntimeException {
  builder.append("CASE WHEN ");
  // The condition must be evaluated as a boolean.
  optypes.push(ValueType.BOOL);
  node.getCondition().visit(this);
  optypes.pop();
  // Both branches share the coerced type; popped only after END.
  optypes.push(new OPTypeFinder(node).coerce());
  builder.append(" THEN ");
  node.getResult().visit(this);
  builder.append(" ELSE ");
  node.getAlternative().visit(this);
  builder.append(" END");
  optypes.pop();
}
/**
 * Runs the descriptor-driven (GLL-style) parse loop from {@code startSymbol}:
 * schedules a descriptor for each of the symbol's first slots — using the
 * data-dependent descriptor variant when an environment is supplied — then
 * processes descriptors until none remain.
 */
protected void parse(NonterminalGrammarSlot startSymbol, Environment env) {
  // if(!startSymbol.testPredict(input.charAt(ci))) {
  // recordParseError(startSymbol);
  // return;
  // }
  if (env == null)
    startSymbol
        .getFirstSlots()
        .forEach(s -> scheduleDescriptor(new Descriptor(s, cu, ci, DummyNode.getInstance())));
  else
    startSymbol
        .getFirstSlots()
        .forEach(
            s ->
                scheduleDescriptor(
                    new org.iguana.datadependent.descriptor.Descriptor(
                        s, cu, ci, DummyNode.getInstance(), env)));
  // Main loop: each descriptor restores the parser state (input index ci,
  // GSS node cu, SPPF node cn) before executing.
  while (!descriptorsStack.isEmpty()) {
    Descriptor descriptor = descriptorsStack.pop();
    ci = descriptor.getInputIndex();
    cu = descriptor.getGSSNode();
    cn = descriptor.getSPPFNode();
    logger.log("Processing %s", descriptor);
    descriptor.execute(this);
  }
}
/**
 * Completes the current type declaration: closes the scope, unwinds the type
 * stack, and emits the closing brace.
 *
 * @return this writer, for chaining
 * @throws IOException if writing the closing brace fails
 */
public JavaWriter endType() throws IOException {
  popScope(Scope.TYPE_DECLARATION);
  types.pop();
  indent();
  out.write("}\n");
  return this;
}