/**
 * Finds an Euler tour of the given graph using Hierholzer's algorithm.
 *
 * @param g input graph for which the tour is to be found
 * @return the list of edges comprising the Euler tour
 */
public static List<Edge> findEulerTour(Graph<Vertex> g) {
  Stack<Edge> forward = new Stack<Edge>();
  Stack<Edge> backtrack = new Stack<Edge>();
  // Walk forward from the start vertex, marking edges, until we get stuck.
  for (Edge cur = getUnvisitedEdge(g.verts.get(1)); cur != null; cur = getUnvisitedEdge(cur.To)) {
    cur.visited = true;
    forward.push(cur);
  }
  // Unwind the walk; whenever an earlier vertex still has unvisited edges,
  // splice in the detour before continuing to unwind.
  while (!forward.isEmpty()) {
    Edge cur = forward.pop();
    backtrack.push(cur);
    for (Edge detour = getUnvisitedEdge(cur.From);
        detour != null;
        detour = getUnvisitedEdge(detour.To)) {
      detour.visited = true;
      forward.push(detour);
    }
  }
  // Popping the backtrack stack yields the tour in traversal order.
  List<Edge> path = new LinkedList<Edge>();
  while (!backtrack.isEmpty()) {
    path.add(backtrack.pop());
  }
  return path;
}
// if any ancestor node of given path is selected then unselect it // and selection all its descendants except given path and descendants. // otherwise just unselect the given path private void toggleRemoveSelection(TreePath path) { Stack stack = new Stack(); TreePath parent = path.getParentPath(); while (parent != null && !isPathSelected(parent)) { stack.push(parent); parent = parent.getParentPath(); } if (parent != null) stack.push(parent); else { super.removeSelectionPaths(new TreePath[] {path}); return; } while (!stack.isEmpty()) { TreePath temp = (TreePath) stack.pop(); TreePath peekPath = stack.isEmpty() ? path : (TreePath) stack.peek(); Object node = temp.getLastPathComponent(); Object peekNode = peekPath.getLastPathComponent(); int childCount = model.getChildCount(node); for (int i = 0; i < childCount; i++) { Object childNode = model.getChild(node, i); if (childNode != peekNode) super.addSelectionPaths(new TreePath[] {temp.pathByAddingChild(childNode)}); } } super.removeSelectionPaths(new TreePath[] {parent}); }
/**
 * Strips a past-tense conditional marker ("Al" / "Alum") from the word part on
 * top of the analysis stack, pushing the recognized suffix entries followed by
 * the remaining stem, then continuing with the human-tense analysis.
 * NOTE(review): suffix semantics inferred from Constant/Tag names — confirm
 * against the morphology tables.
 *
 * @param s analysis stack whose top Entry holds the word bytes being segmented
 * @return true if a marker was recognized and the stack rewritten, false otherwise
 */
public static boolean pastTM_Al(Stack s) {
  byte[] topElmt = ((Entry) s.peek()).getPart();
  byte[] oldTopElmt = topElmt; // original bytes, kept on the residual Entry
  if (BooleanMethod.endsWith_PastTMHuman_Al(topElmt)) {
    // clia.unl.unicode.utils.Utils.printOut(Analyser.print, x + "TM_Al");
    s.pop();
    s.push(new Entry(Constant.Al, Tag.ConditionalSuffix));
    // Drop the "Al" suffix bytes from the stem.
    topElmt = ByteMeth.subArray(topElmt, 0, topElmt.length - Constant.Al.length);
    s.push(new Entry(topElmt, -1, oldTopElmt));
    Tense.human(s); // continue analysis of the human-tense form
    return true;
  }
  if (BooleanMethod.endsWith_PastTMHuman_Alum(topElmt)) {
    // clia.unl.unicode.utils.Utils.printOut(Analyser.print, x + "TM_Alum");
    s.pop();
    // "Alum" decomposes into clitic "um" plus conditional "Al".
    s.push(new Entry(Constant.um, Tag.Clitic));
    s.push(new Entry(Constant.Al, Tag.ConditionalSuffix));
    topElmt = ByteMeth.subArray(topElmt, 0, topElmt.length - Constant.Alum.length);
    s.push(new Entry(topElmt, -1, oldTopElmt));
    Tense.human(s);
    return true;
  }
  return false;
}
/**
 * Records an inner tag encountered while segmenting text, assigning it an
 * index and replacing the tag content in {@code text} with a placeholder.
 *
 * <p>A START tag gets a fresh index pushed on {@code indexStack}; the matching
 * END tag pops that index so the pair shares one index. A STANDALONE tag gets
 * its own index and never touches the stack.
 *
 * @param tagType whether the tag is START, END or STANDALONE
 * @param text buffer rewritten in place (placeholder substituted at {@code start})
 * @param tagContent the raw tag text being replaced
 * @param tagName the tag's name
 */
private void addInnerTagBean(
    TagType tagType, StringBuffer text, String tagContent, String tagName) {
  /* Insert an index into the text. */
  int index = -1;
  if (tagType == START) {
    HasStartTag = true;
    maxIndex++;
    indexStack.push(maxIndex);
    index = maxIndex;
  } else if (tagType == END) {
    if (!HasStartTag) {
      // An END tag without a preceding START still needs its own index.
      maxIndex++;
      indexStack.push(maxIndex);
    }
    HasStartTag = false;
    if (!indexStack.empty()) {
      index = indexStack.pop();
    }
  } else if (tagType == STANDALONE) {
    maxIndex++;
    index = maxIndex;
  }
  if (index > -1) {
    InnerTagBean bean = new InnerTagBean(index, tagName, tagContent, tagType);
    beans.add(bean);
    // Record the segment the tag covered, then swap it for a placeholder.
    SegmentText stText = new SegmentText(start, start + tagContent.length(), tagContent);
    lstSegment.add(stText);
    String placeHolder = placeHolderCreater.getPlaceHolder(beans, beans.size() - 1);
    text.replace(start, start + tagContent.length(), placeHolder);
  }
}
public static int decode(String chromosome) { Stack<Integer> bits = new Stack<Integer>(); for (int i = 0; i <= chromosome.length() - 4; i += 4) { String gene = chromosome.substring(i, i + 4); int geneIndex = Integer.parseInt(gene, 2); if (geneIndex >= genes.length) { // skip this "gene" we don"t know what to do with it continue; } else if (geneIndex < 10) { // Add the number to the stack bits.push(Integer.parseInt(genes[geneIndex])); } else { // Do simple arithmatic operation int a = bits.pop(); int b = bits.pop(); if (genes[geneIndex].equals("+")) { bits.push(a + b); } else if (genes[geneIndex].equals("-")) { bits.push(a - b); } else if (genes[geneIndex].equals("*")) { bits.push(a * b); } else if (genes[geneIndex].equals("/")) { bits.push(a / b); } } } return bits.pop(); }
/**
 * Evaluates a basic arithmetic expression of non-negative integers, the
 * operators + - * and /, and spaces (no parentheses). Multiplication and
 * division are applied immediately; addition and subtraction are deferred by
 * pushing signed terms that are summed at the end.
 *
 * @param s the expression to evaluate
 * @return the integer value of the expression
 */
public int calculate(String s) {
  Stack<Integer> terms = new Stack<Integer>();
  int current = 0;
  char pendingOp = '+'; // operator waiting to be applied to `current`
  for (int i = 0; i < s.length(); i++) {
    char ch = s.charAt(i);
    boolean digit = Character.isDigit(ch);
    if (digit) {
      current = current * 10 + (ch - '0');
    }
    // Apply the pending operator once the next operator (or end of input)
    // is reached; spaces are skipped.
    if (i == s.length() - 1 || (!digit && ch != ' ')) {
      switch (pendingOp) {
        case '+':
          terms.push(current);
          break;
        case '-':
          terms.push(-current);
          break;
        case '*':
          terms.push(terms.pop() * current);
          break;
        case '/':
          terms.push(terms.pop() / current);
          break;
      }
      pendingOp = ch;
      current = 0;
    }
  }
  // The remaining terms are already correctly signed; sum them up.
  int total = 0;
  while (!terms.isEmpty()) {
    total += terms.pop();
  }
  return total;
}
/**
 * @see org.geotools.filter.FilterVisitor#visit(org.geotools.filter.FunctionExpression)
 *
 * <p>Visits a function expression. If the function type, its name, or any of
 * its parameters cannot be handled, the whole expression is deferred to the
 * post-processing stack; otherwise it goes on the pre-processing stack.
 */
public Object visit(Function expression, Object notUsed) {
  if (!fcs.supports(expression.getClass())) {
    postStack.push(expression);
    return null;
  }
  if (expression.getName() == null) {
    postStack.push(expression);
    return null;
  }
  int i = postStack.size(); // marks, for detecting a deferred parameter
  int j = preStack.size(); // marks, for unwinding parameter pushes
  for (int k = 0; k < expression.getParameters().size(); k++) {
    // FIX: visit the k-th parameter. The old code indexed with "i" (the
    // saved postStack size), visiting the wrong parameter on every pass.
    ((Expression) expression.getParameters().get(k)).accept(this, null);
    if (i < postStack.size()) {
      // A parameter was deferred: unwind whatever the parameters put on the
      // pre stack and defer the whole function expression instead.
      while (j < preStack.size()) preStack.pop();
      postStack.pop();
      postStack.push(expression);
      return null;
    }
  }
  // All parameters supported: clear their pre-stack entries and push the
  // function itself.
  while (j < preStack.size()) preStack.pop();
  preStack.push(expression);
  return null;
}
/**
 * @see FilterVisitor#visit(ExcludeFilter, Object)
 * @param filter the {@link Filter} to visit
 */
public void visit(ExcludeFilter filter) {
  // Unsupported EXCLUDE filters are deferred to post-processing.
  if (!fcs.supports(Filter.EXCLUDE)) {
    postStack.push(filter);
    return;
  }
  preStack.push(filter);
}
/** * "Normalize" the given absolute path. * * <p>This includes: * * <ul> * <li>Uppercase the drive letter if there is one. * <li>Remove redundant slashes after the drive spec. * <li>Resolve all ./, .\, ../ and ..\ sequences. * <li>DOS style paths that start with a drive letter will have \ as the separator. * </ul> * * Unlike {@link File#getCanonicalPath()} this method specifically does not resolve symbolic * links. * * @param path the path to be normalized. * @return the normalized version of the path. * @throws java.lang.NullPointerException if path is null. */ public static File normalize(final String path) { Stack s = new Stack(); String[] dissect = dissect(path); s.push(dissect[0]); StringTokenizer tok = new StringTokenizer(dissect[1], File.separator); while (tok.hasMoreTokens()) { String thisToken = tok.nextToken(); if (".".equals(thisToken)) { continue; } if ("..".equals(thisToken)) { if (s.size() < 2) { // Cannot resolve it, so skip it. return new File(path); } s.pop(); } else { // plain component s.push(thisToken); } } StringBuffer sb = new StringBuffer(); for (int i = 0; i < s.size(); i++) { if (i > 1) { // not before the filesystem root and not after it, since root // already contains one sb.append(File.separatorChar); } sb.append(s.elementAt(i)); } return new File(sb.toString()); }
/**
 * Builds the path of the given artifact node relative to its nearest ancestor
 * that has a registered prefix.
 *
 * @param node the node whose relative path is wanted
 * @return the ancestor's prefix followed by '/'-separated node names
 * @throws ArtifactsRelativePathHelper.RelativePathException if no prefixed ancestor exists
 */
public String getRelativePath(SNode node) throws ArtifactsRelativePathHelper.RelativePathException {
  // Climb from the node to the nearest ancestor with a known prefix,
  // remembering every node on the way so the path can be emitted top-down.
  Stack<SNode> pending = new Stack<SNode>();
  pending.push(node);
  SNode ancestor = artifacts.parent(node);
  while (ancestor != null && !MapSequence.fromMap(prefixes).containsKey(ancestor)) {
    pending.push(ancestor);
    ancestor = artifacts.parent(ancestor);
  }
  if (ancestor == null) {
    throw new ArtifactsRelativePathHelper.RelativePathException("no common folder");
  }
  StringBuilder result = new StringBuilder(MapSequence.fromMap(prefixes).get(ancestor));
  while (!pending.isEmpty()) {
    SNode elem = pending.pop();
    boolean lastElement = pending.isEmpty();
    // Transparent containers contribute nothing to the path.
    if (SNodeOperations.isInstanceOf(
        elem, "jetbrains.mps.build.structure.BuildLayout_TransparentContainer")) {
      continue;
    }
    result.append(getNodeName(elem, lastElement));
    if (!lastElement) {
      result.append("/");
    }
  }
  return result.toString();
}
/**
 * Parses one expression from the lexer and pushes its boolean value onto
 * {@code stack}. As implemented, an expression is a field reference, NOT
 * followed by an expression, or a parenthesized expression; FIELD and
 * parenthesized forms continue with {@code expressionRest}.
 *
 * @param fields known field names mapped to their boolean values
 * @param stack evaluation stack receiving the expression's value
 * @param lex token source
 * @throws IOException if the lexer fails to read
 * @throws AnnotationParseException on an unknown field or malformed expression
 */
private void expression(Map<String, Boolean> fields, Stack<Boolean> stack, ConditionLexer lex)
    throws IOException, AnnotationParseException {
  ConditionToken tok = lex.lex();
  if (tok == null) {
    return; // end of input: nothing to parse
  }
  switch (tok.type) {
    case FIELD:
      if (!fields.containsKey(tok.value)) {
        throw new AnnotationParseException("Field not found", lex.yyline());
      } else {
        stack.push(fields.get(tok.value));
      }
      expressionRest(fields, stack, lex);
      break;
    case NOT:
      // Evaluate the operand, then replace it with its negation.
      expression(fields, stack, lex);
      Boolean invOp = stack.pop();
      stack.push(!invOp);
      break;
    case PARENT_OPEN:
      expression(fields, stack, lex);
      tok = lex.lex();
      if (tok.type != ConditionTokenType.PARENT_CLOSE) {
        throw new AnnotationParseException("End of parent expected", lex.yyline());
      }
      expressionRest(fields, stack, lex);
      break;
    default:
      throw new AnnotationParseException("Expression expected", lex.yyline());
  }
}
/**
 * Adjusts the open-list bookkeeping so the current nesting level and list type
 * match the ({@code type}, {@code level}) just encountered, opening or closing
 * builder blocks as needed.
 *
 * <p>Going deeper opens a wrapping LIST_ITEM (if one is not already open) plus
 * a new list block one level down; a shallower level or a type mismatch closes
 * one state per iteration until the stack agrees. NOTE(review): assumes
 * {@code listState} is non-empty on entry — confirm callers push a root state.
 */
private void adjustLevel(BlockType type, String typeSpec, int level) {
  for (ListState previousState = listState.peek();
      level != previousState.level || previousState.type != type;
      previousState = listState.peek()) {
    if (level > previousState.level) {
      // Going deeper: a nested list must live inside a list item.
      if (!previousState.openItem) {
        builder.beginBlock(BlockType.LIST_ITEM, new Attributes());
        previousState.openItem = true;
      }
      Attributes blockAttributes = new Attributes();
      computeAttributes(blockAttributes, type, typeSpec);
      listState.push(new ListState(previousState.level + 1, type));
      builder.beginBlock(type, blockAttributes);
    } else {
      // Same level but wrong type, or going shallower: close one state.
      closeOne();
      if (listState.isEmpty()) {
        // Everything closed: restart at level 1 with the requested type.
        Attributes blockAttributes = new Attributes();
        computeAttributes(blockAttributes, type, typeSpec);
        listState.push(new ListState(1, type));
        builder.beginBlock(type, blockAttributes);
      }
    }
  }
}
/**
 * Iterative post-order traversal. Children are pushed and then detached from
 * their parent, so a node is emitted once it has been reduced to a leaf.
 * NOTE: this traversal is destructive — it nulls out child links.
 *
 * @param root root of the tree (may be null)
 * @return node values in post-order (left, right, root)
 */
public List<Integer> postorderTraverse2(TreeNode root) {
  List<Integer> result = new ArrayList<Integer>();
  if (root == null) {
    return result;
  }
  Stack<TreeNode> stack = new Stack<TreeNode>();
  stack.push(root);
  while (!stack.isEmpty()) {
    TreeNode top = stack.peek();
    if (top.left != null || top.right != null) {
      // Push children (right first so left is handled first), then sever
      // the links so this node reads as a leaf next time it is seen.
      if (top.right != null) {
        stack.push(top.right);
        top.right = null;
      }
      if (top.left != null) {
        stack.push(top.left);
        top.left = null;
      }
    } else {
      // A (now) childless node is ready to be emitted.
      result.add(stack.pop().val);
    }
  }
  return result;
}
@Override protected void startElement() throws AnimoException, IOException { String prefix = reader.getPrefix(); String name = reader.getLocalName(); if ("ptrn".equals(prefix) && "language".equals(name)) { s.push(false); } else if ("have".equals(prefix) || "is".equals(prefix) || "ic".equals(prefix)) { builder.start(AN._); builder._(REF._, name.equals("name") ? "word" : name); s.push(true); } else { Statement s = Statements.relationshipType(prefix); if (s != null) { if (s instanceof DEF) { builder.start(DEF._, name); } else if (s instanceof Instruction) { builder.start(AN._); builder._(REF._, prefix); builder.start(AN._); builder._(REF._, name); builder.end(); } else { builder.start(s); builder._(REF._, name); } } else { super.startElement(); } this.s.push(true); } }
/**
 * Collects all {@link PsiLanguageInjectionHost} elements whose text starts
 * inside the given range, walking the PSI tree iteratively with an explicit
 * stack.
 *
 * @param file file to scan
 * @param range offsets delimiting the region of interest
 * @return the hosts found, or an empty set if there are none
 */
@NotNull
private static Collection<PsiLanguageInjectionHost> collectInjectionHosts(
    @NotNull PsiFile file, @NotNull TextRange range) {
  Stack<PsiElement> toProcess = new Stack<PsiElement>();
  // Seed with the top-level siblings that begin inside the range.
  for (PsiElement e = file.findElementAt(range.getStartOffset()); e != null; e = e.getNextSibling()) {
    if (e.getTextRange().getStartOffset() >= range.getEndOffset()) {
      break;
    }
    toProcess.push(e);
  }
  if (toProcess.isEmpty()) {
    return Collections.emptySet();
  }
  Set<PsiLanguageInjectionHost> result = null; // allocated lazily on first hit
  while (!toProcess.isEmpty()) {
    PsiElement e = toProcess.pop();
    if (e instanceof PsiLanguageInjectionHost) {
      if (result == null) {
        result = ContainerUtilRt.newHashSet();
      }
      result.add((PsiLanguageInjectionHost) e);
    } else {
      for (PsiElement child = e.getFirstChild(); child != null; child = child.getNextSibling()) {
        // FIX: bound-check the CHILD's range. The old code re-tested
        // e.getTextRange(), a loop-invariant condition that either skipped
        // or kept an entire child list regardless of where each child lies.
        if (child.getTextRange().getStartOffset() >= range.getEndOffset()) {
          break;
        }
        toProcess.push(child);
      }
    }
  }
  return result == null ? Collections.<PsiLanguageInjectionHost>emptySet() : result;
}
@Override public void exitMillennium(MillenniumContext ctx) { if (ctx.exception != null) return; Era era = (Era) stack.pop(); Integer n = (Integer) stack.pop(); if (era != null) { // If the era was explicitly specified, the start and end years // may be calculated now. stack.push(DateUtils.getMillenniumStartDate(n, era)); stack.push(DateUtils.getMillenniumEndDate(n, era)); } else { // If the era was not explicitly specified, the start and end years // can't be calculated yet. The calculation must be deferred until // later. For example, this millennium may be the start of a hyphenated // range, where the era will be inherited from the era of the end of // the range; this era won't be known until farther up the parse tree, // when both sides of the range will have been parsed. stack.push(new DeferredMillenniumStartDate(n)); stack.push(new DeferredMillenniumEndDate(n)); } }
/**
 * Iterative pre-order traversal (root, left, right).
 *
 * @param root root of the tree (may be null)
 * @return node values in pre-order; empty list for an empty tree
 */
public List<Integer> preorderTraversal(TreeNode root) {
  List<Integer> list = new ArrayList<Integer>();
  if (root == null) {
    // FIX: return the empty list instead of null — callers should never
    // have to null-check a collection result (and the sibling post-order
    // traversal already returns an empty list).
    return list;
  }
  Stack<TreeNode> stack = new Stack<TreeNode>();
  stack.push(root);
  while (!stack.empty()) {
    TreeNode node = stack.pop();
    list.add(node.val);
    // Push right first so the left subtree is visited first.
    if (node.right != null) {
      stack.push(node.right);
    }
    if (node.left != null) {
      stack.push(node.left);
    }
  }
  return list;
}
public void push(int number) { // write your code here if (s.isEmpty() || number <= minStack.peek()) { minStack.push(number); } s.push(number); }
/**
 * Visits a literal expression: a null-valued literal cannot be processed, so
 * it is deferred to the post-processing stack; any other literal is supported
 * and goes on the pre-processing stack.
 */
public Object visit(Literal expression, Object notUsed) {
  if (expression.getValue() == null) {
    postStack.push(expression);
    // FIX: return here — the old code fell through and pushed the same
    // expression onto preStack as well, landing it on both stacks.
    return null;
  }
  preStack.push(expression);
  return null;
}
public static int calculate(String s) { // 使用栈来计算表达式的值 if (s == null || s.length() == 0) return 0; Stack<Integer> stack = new Stack<Integer>(); // 只有在遇到 () 时才会用到栈 int ans = 0; // 用来存放当前计算的结果 int sign = 1; // 用来存放下一个运算的符号 for (int i = 0; i < s.length(); i++) { char c = s.charAt(i); // 拿出字符 if (Character.isDigit(c)) { // 如果是数字 int cur = c - '0'; while (i + 1 < s.length() && Character.isDigit(s.charAt(i + 1))) { cur = 10 * cur + s.charAt(++i) - '0'; } // 把数字拼凑完成 ans += sign * cur; // 计算下结果 } else if (c == '-') { sign = -1; // 下一次做- } else if (c == '+') { sign = 1; // 下一次做+ } else if (c == '(') { // 如果是( stack.push(ans); // 则把 结果 和 运算符号一起入栈,ans sign重置 ans = 0; // 必须,不然括号里第一个数字立即运算了 stack.push(sign); sign = 1; // 必须,不然括号里第一个数字之前的符号不确定 } else if (c == ')') { // 如果是) ans = stack.pop() * ans + stack.pop(); // 则把 运算符 和 结果一起出栈, 计算当前的结果 } } return ans; }
/** * Stack an object onto {@link this}. Stacking means: sequentially adding {@PListObject}s onto the * {@PList}. The previous object that was stacked affects the context of the current object being * stacked. For example - if the previous element stacked was an {@link Array} or {@link Dict} - * the current object being stacked will be a child. * * @param obj * @param key If the parent of the element being added is a {@link Dict} - this is required and * must be non-null. Otherwise it's not used. * @throws Exception TODO: refactor - move me */ public void stackObject(PListObject obj, java.lang.String key) throws Exception { if (null == key && stackCtxInDict) { throw new Exception("PList objects with Dict parents require a key."); } if (stackCtxNestedDepth > 0 && !stackCtxInDict && !stackCtxInArray) { // if obj is not at root, its parent should be an Array or // Dict throw new Exception( "PList elements that are not at the root should have an Array or Dict parent."); } switch (obj.getType()) { case DICT: attachPListObjToParent(obj, key); stack.push(obj); stackCtxInArray = false; stackCtxInDict = true; stackCtxNestedDepth++; break; case ARRAY: attachPListObjToParent(obj, key); stack.push(obj); stackCtxInArray = true; stackCtxInDict = false; stackCtxNestedDepth++; break; default: attachPListObjToParent(obj, key); } }
/**
 * Depth-first traversal over all components of the graph held in the static
 * AdjacencyMap/visitedMap fields, printing each vertex when first visited.
 *
 * <p>NOTE(review): the outer condition "!isThereUnvisitedVertex()" reads as
 * "while NO unvisited vertex remains", which looks inverted given that the
 * body then fetches the next not-visited vertex — confirm that helper's
 * actual semantics before changing it.
 */
private static void dfsTraversal() {
  Stack<Character> stack = new Stack<>();
  while (!isThereUnvisitedVertex()) {
    char startNode = getNextNotVisitedVertex();
    stack.push(startNode);
    visitedMap.put(startNode, true);
    System.out.println(startNode);
    visitedVertexCount++;
    while (!stack.isEmpty()) {
      // FIX: peek (not pop) so a vertex stays on the stack until ALL of its
      // neighbors are explored. The old code popped the vertex and pushed
      // only its first unvisited neighbor, silently dropping the rest of
      // that vertex's adjacency list.
      char current = stack.peek();
      boolean advanced = false;
      for (char adjacentNode : AdjacencyMap.get(current)) {
        if (visitedMap.containsKey(adjacentNode) && (!visitedMap.get(adjacentNode))) {
          visitedMap.put(adjacentNode, true);
          visitedVertexCount++;
          System.out.print(adjacentNode + " ");
          stack.push(adjacentNode);
          advanced = true;
          break;
        }
      }
      if (!advanced) {
        stack.pop(); // vertex fully explored — backtrack
      }
    }
  }
}
public void printSolution() { String currState = "www0bbb"; File file = new File("solution_slidingtile_dfs.txt"); try { FileWriter fw = new FileWriter(file); path.push(currState); while (parentMap.get(currState) != null) { // System.out.println("inside while, parentMap.get(currState)!=null"); path.push(parentMap.get(currState)); currState = parentMap.get(currState); } System.out.println("Solution path:"); while (!path.empty()) { // System.out.println("inside while, stack not empty"); // System.out.println(path.pop()); fw.write(path.pop()); fw.write("\n"); } fw.close(); } catch (IOException ie) { ie.printStackTrace(); } finally { } }
@Override public void exitUncertainDate(UncertainDateContext ctx) { if (ctx.exception != null) return; Date latestDate = (Date) stack.pop(); Date earliestDate = (Date) stack.pop(); int earliestInterval = DateUtils.getCircaIntervalYears(earliestDate.getYear(), earliestDate.getEra()); int latestInterval = DateUtils.getCircaIntervalYears(latestDate.getYear(), latestDate.getEra()); // Express the circa interval as a qualifier. // stack.push(earliestDate.withQualifier(QualifierType.MINUS, earliestInterval, // QualifierUnit.YEARS)); // stack.push(latestDate.withQualifier(QualifierType.PLUS, latestInterval, // QualifierUnit.YEARS)); // OR: // Express the circa interval as an offset calculated into the year. DateUtils.subtractYears(earliestDate, earliestInterval); DateUtils.addYears(latestDate, latestInterval); stack.push(earliestDate); stack.push(latestDate); }
@Override public void exitCertainDate(CertainDateContext ctx) { if (ctx.exception != null) return; Date latestDate = (Date) stack.pop(); Date earliestDate = (Date) stack.pop(); // Set null eras to the default. if (earliestDate.getEra() == null) { earliestDate.setEra(Date.DEFAULT_ERA); } if (latestDate.getEra() == null) { latestDate.setEra(Date.DEFAULT_ERA); } // Finalize any deferred calculations. if (latestDate instanceof DeferredDate) { ((DeferredDate) latestDate).resolveDate(); } if (earliestDate instanceof DeferredDate) { ((DeferredDate) earliestDate).resolveDate(); } stack.push(earliestDate); stack.push(latestDate); }
@Override public void exitHyphenatedRange(HyphenatedRangeContext ctx) { if (ctx.exception != null) return; Date latestEndDate = (Date) stack.pop(); stack.pop(); // latestStartDate stack.pop(); // earliestEndDate Date earliestStartDate = (Date) stack.pop(); // If no era was explicitly specified for the first date, // make it inherit the era of the second date. if (earliestStartDate.getEra() == null && latestEndDate.getEra() != null) { earliestStartDate.setEra(latestEndDate.getEra()); } // Finalize any deferred calculations. if (earliestStartDate instanceof DeferredDate) { ((DeferredDate) earliestStartDate).resolveDate(); } if (latestEndDate instanceof DeferredDate) { ((DeferredDate) latestEndDate).resolveDate(); } stack.push(earliestStartDate); stack.push(latestEndDate); }
/**
 * Strips the adverbial particle suffix "kayil" or "poothu" from the word part
 * on top of the analysis stack, pushing the particle entry followed by the
 * remaining stem, then applying the "k" sandhi rule to the stem.
 *
 * @param s analysis stack whose top Entry holds the word bytes being segmented
 * @return true if a particle was recognized and the stack rewritten
 */
public static boolean adverbial_Particle(Stack s) {
  byte[] topElmt = ((Entry) s.peek()).getPart();
  byte[] oldTopElmt = topElmt; // original bytes, kept on the residual Entry
  // kayil
  if (ByteMeth.endsWith(topElmt, Constant.kayil)) {
    // clia.unl.unicode.utils.Utils.printOut(Analyser.print, x + "kayil");
    s.pop();
    s.push(new Entry(Constant.kayil, Tag.ParticleSuffix)); // change
    // Drop the suffix bytes from the stem.
    topElmt = ByteMeth.subArray(topElmt, 0, topElmt.length - Constant.kayil.length);
    s.push(new Entry(topElmt, -1, oldTopElmt));
    Sandhi.k(s);
    return true;
  }
  // poothu
  if (ByteMeth.endsWith(topElmt, Constant.poothu)) {
    // clia.unl.unicode.utils.Utils.printOut(Analyser.print, x + "poothu");
    s.pop();
    s.push(new Entry(Constant.poothu, Tag.ParticleSuffix)); // change
    topElmt = ByteMeth.subArray(topElmt, 0, topElmt.length - Constant.poothu.length);
    s.push(new Entry(topElmt, -1, oldTopElmt));
    Sandhi.k(s);
    return true;
  }
  return false;
}
/**
 * Leaves an "nth-mth century" range node (e.g. "17th-18th century"), possibly
 * with partial-century qualifiers and a single trailing era.
 *
 * <p>Pops (top-down): era, end ordinal, end part, start ordinal, start part.
 * Pushes the start century's start/end dates followed by the end century's
 * start/end dates. NOTE(review): four dates are pushed here, whereas other
 * range handlers in this file (e.g. exitHyphenatedRange) leave only two —
 * confirm the consumer farther up the parse tree expects all four.
 */
@Override
public void exitNthCenturyRange(NthCenturyRangeContext ctx) {
  if (ctx.exception != null) return;
  Era era = (Era) stack.pop();
  Integer endN = (Integer) stack.pop();
  Part endPart = (Part) stack.pop();
  Integer startN = (Integer) stack.pop();
  Part startPart = (Part) stack.pop();
  // One era qualifies the whole range; default it when absent.
  if (era == null) {
    era = Date.DEFAULT_ERA;
  }
  int startYear = DateUtils.nthCenturyToYear(startN);
  int endYear = DateUtils.nthCenturyToYear(endN);
  // A null part means the whole century; otherwise only the named part
  // (e.g. "early", "late") of that century.
  stack.push(
      startPart == null
          ? DateUtils.getCenturyStartDate(startYear, era)
          : DateUtils.getPartialCenturyStartDate(startYear, startPart, era));
  stack.push(
      startPart == null
          ? DateUtils.getCenturyEndDate(startYear, era)
          : DateUtils.getPartialCenturyEndDate(startYear, startPart, era));
  stack.push(
      endPart == null
          ? DateUtils.getCenturyStartDate(endYear, era)
          : DateUtils.getPartialCenturyStartDate(endYear, endPart, era));
  stack.push(
      endPart == null
          ? DateUtils.getCenturyEndDate(endYear, era)
          : DateUtils.getPartialCenturyEndDate(endYear, endPart, era));
}
/**
 * Strips the "umpadi" suffix (decomposed as "um" + "pati") from the word part
 * on top of the analysis stack, pushing the two suffix entries and then the
 * remaining stem after several irregular-stem rewrites.
 *
 * <p>NOTE(review): the stem rewrites (kEtk, var/thar, kaRk/viRk/n_iRk,
 * sAk/pOk) encode irregular morphology and their order is significant; they
 * are documented here only as the code performs them.
 *
 * @param s analysis stack whose top Entry holds the word bytes being segmented
 * @return true if the suffix was recognized and the stack rewritten
 */
public static boolean umpadi(Stack s) {
  byte[] topElmt = ((Entry) s.peek()).getPart();
  byte[] oldTopElmt = topElmt; // original bytes, kept on the residual Entry
  if (ByteMeth.endsWith(topElmt, Constant.umpadi)) {
    // clia.unl.unicode.utils.Utils.printOut(Analyser.print, x + "umpadi");
    s.pop();
    s.push(new Entry(Constant.pati, Tag.ParticleSuffix));
    s.push(new Entry(Constant.um, Tag.ThirdFutureNeuterSingularORRP));
    // Remove the suffix, then normalize irregular stems.
    topElmt = ByteMeth.subArray(topElmt, 0, topElmt.length - Constant.umpadi.length);
    if (ByteMeth.isEqual(topElmt, Constant.kEtk)) {
      topElmt = ByteMeth.replace(topElmt, Constant.L, 2);
    }
    if (ByteMeth.endsWith(topElmt, Constant.var) || ByteMeth.endsWith(topElmt, Constant.thar)) {
      topElmt = ByteMeth.replace(topElmt, Constant.A, Constant.ar.length);
    }
    if (ByteMeth.isEqual(topElmt, Constant.kaRk)
        || ByteMeth.isEqual(topElmt, Constant.viRk)
        || ByteMeth.isEqual(topElmt, Constant.n_iRk)) {
      topElmt = ByteMeth.replace(topElmt, Constant.l, 2);
    }
    if (ByteMeth.isEqual(topElmt, Constant.sAk) || ByteMeth.isEqual(topElmt, Constant.pOk)) {
      topElmt = ByteMeth.subArray(topElmt, 0, topElmt.length - 1);
    }
    s.push(new Entry(topElmt, -1, oldTopElmt));
    // Re-apply sandhi rules to the normalized stem.
    Sandhi.kk(s);
    Sandhi.check(s);
    return true;
  }
  return false;
}
/**
 * Flattens a binary tree in place into a right-skewed "linked list" in
 * pre-order, iteratively, using an explicit stack of ancestors whose right
 * subtrees still need to be spliced in.
 *
 * <p>NOTE(review): the pointer rewiring below is order-sensitive; the inline
 * invariants are inferred from the code — verify against tree fixtures before
 * modifying.
 */
public static void flattenIterative1(TreeNode root) {
  Stack<TreeNode> stack = new Stack<>();
  while (root != null) {
    if (root.left != null) {
      // Descend left, remembering the parent so its right subtree can be
      // re-attached after the left chain is flattened.
      stack.push(root);
      root = root.left;
    } else if (root.right != null) {
      stack.push(root);
      root = root.right;
    } else if (!stack.isEmpty()) {
      // At a leaf: walk back up past ancestors that did not reach this
      // node through their left child.
      TreeNode end = root;
      while (!stack.isEmpty()
          && (stack.peek().left == null
              || (stack.peek().left != null && stack.peek().left != root))) {
        root = stack.pop();
      }
      if (stack.isEmpty()) root = root.right;
      else {
        // Splice: the flattened left chain becomes the ancestor's right
        // pointer, and the ancestor's old right subtree hangs off the
        // chain's end.
        TreeNode top = stack.peek();
        TreeNode temp = top.right;
        top.right = root;
        top.left = null;
        end.right = temp;
        root = (end.left != null || temp == null) ? end : temp;
      }
    } else root = root.right;
  }
}