// Builds a depth-first file finder rooted at `root`. `filter` selects which
// file names are reported; `canonical` presumably controls canonical-path
// resolution inside blossomDirectory — TODO confirm.
// Throws IllegalArgumentException when `root` is not a directory.
public DirectoryDescendingFileFinderImpl(File root, FilenameFilter filter, boolean canonical) throws IOException {
    if (!root.isDirectory())
        throw new IllegalArgumentException(root.getName() + " is not a directory.");
    this.filter = filter;
    this.canonical = canonical;
    blossomDirectory(root);
    // Keep expanding queued subdirectories until at least one file is
    // available or no directories remain, so hasMoreFiles() is accurate
    // immediately after construction.
    while (files.empty() && !direx.empty())
        blossomDirectory((File) direx.pop());
}
/**
 * Returns true when every bracket in {@code s} is correctly matched and
 * nested. Any character that is not one of "()[]{}", or any closer that
 * does not match the most recent opener, makes the string invalid.
 */
public static boolean isValid(String s) {
    Stack<Character> openers = new Stack<Character>();
    for (char c : s.toCharArray()) {
        switch (c) {
            case '(':
            case '[':
            case '{':
                openers.push(c);
                break;
            case ')':
                if (openers.empty() || openers.peek() != '(') return false;
                openers.pop();
                break;
            case ']':
                if (openers.empty() || openers.peek() != '[') return false;
                openers.pop();
                break;
            case '}':
                if (openers.empty() || openers.peek() != '{') return false;
                openers.pop();
                break;
            default:
                // non-bracket characters are rejected outright
                return false;
        }
    }
    // valid only if nothing was left unclosed
    return openers.empty();
}
/**
 * Pops two operands from {@code oper} and combines them with {@code op}:
 * "*" multiplies them; anything else (in practice "**") computes
 * second-popped raised to the first-popped power by repeated multiplication.
 * Returns -1 when fewer than two operands are available (the first pop,
 * if it happened, is not undone).
 */
private static long computeExpression(Stack<Long> oper, String op) {
    if (oper.empty()) return -1;
    long rhs = oper.pop();
    if (oper.empty()) return -1;
    long lhs = oper.pop();
    if ("*".equals(op)) return rhs * lhs;
    // power: lhs ^ rhs via repeated multiplication
    long power = 1;
    for (long n = 0; n < rhs; n++) {
        power *= lhs;
    }
    return power;
}
/**
 * Handles a ')' during infix-to-postfix translation: moves pending
 * operators from the stack to the output until the matching '(' is
 * popped and discarded (or the stack runs out).
 */
public void dealWithCloser() {
    for (;;) {
        if (stack.empty()) {
            return; // no matching '(' found; stop quietly
        }
        char top = popChar();
        if (top == '(') {
            return; // discard the opener; bracket group complete
        }
        out.append(top);
    }
}
public StringBuffer translate() { for (index = 0; index < line.length(); index++) { char c = line.charAt(index); if (Character.isDigit(c)) { dealWithOperand(); } else if (isOperator(c)) { dealWithOperator(c); } else if (c == '(') { stack.push(new Character(c)); } else if (c == ')') { dealWithCloser(); } else if (Character.isSpaceChar(c)) { // do nothing } else { System.out.println("Error: unknown character" + c); } } // pop and output all the operators left on the stack while (!stack.empty()) { out.append(popChar()); } return out; }
// Demo: merges overlapping intervals. The pairs from arr1/arr2 are ordered
// by a PriorityQueue (Interval presumably compares by begin — TODO confirm),
// then folded onto a stack: each interval either starts a new group or
// extends the group on top of the stack. Prints merged intervals in reverse
// order of creation.
static void test00() {
    Aron.beg(); // NOTE(review): external harness marker — purpose assumed
    PriorityQueue<Interval> queue = new PriorityQueue<Interval>();
    Stack<Interval> stack = new Stack<Interval>();
    int[] arr1 = {4, 1, 2, 6, 9};  // interval begins
    int[] arr2 = {5, 1, 4, 9, 10}; // interval ends
    for (int i = 0; i < arr1.length; i++) {
        queue.add(new Interval(arr1[i], arr2[i]));
    }
    // seed the stack with the smallest interval
    if (queue.size() > 0) {
        stack.push(queue.remove());
    }
    while (!queue.isEmpty()) {
        Interval top = stack.peek();
        Interval inter = queue.remove();
        if (top.end < inter.begin) stack.push(inter); // disjoint: start a new merged interval
        else {
            // overlapping: extend the interval on top of the stack in place
            stack.peek().end = Math.max(stack.peek().end, inter.end);
        }
    }
    // pop and print; output is last-merged-first
    while (!stack.empty()) {
        System.out.println("[" + stack.peek().begin + " " + stack.peek().end + "]");
        stack.pop();
    }
    Aron.end();
}
/* This method evaluates the given arithmetic expression and returns
 * its Integer value. The method throws an Exception if the expression
 * is malformed.
 *
 * NOTE(review): operands are popped most-recent-first and handed to
 * performOperation in that order, so for non-commutative operators the
 * effective argument order depends on performOperation's contract — confirm.
 * The final reduction loop also applies remaining operators right-to-left
 * with no precedence handling; presumably performOperation/delimiters
 * account for this — verify against the grammar this parser accepts. */
static Integer evaluate(String expr) throws Exception {
    // `delimiters` is a class-level constant; returnDelims=true keeps operators as tokens
    StringTokenizer st = new StringTokenizer(expr, delimiters, true);
    Stack<Integer> intArgs = new Stack<Integer>();
    Stack<String> delimiterArgs = new Stack<String>();
    if (!bracketMatch(expr)) {
        throw new Exception(); // Malformed expression given as input, throw exception
    }
    /* Fill the integer and delimiter argument stacks with tokens from input string */
    while (st.hasMoreTokens()) {
        String token = st.nextToken(); // Store current token for sorting
        if (token.equals(")")) {
            /* Do not add bracket to delimiterArgs, stripping it from expression. Instead, take
             * the contents of the brackets and perform the specified operation on them */
            Integer product = performOperation(intArgs.pop(), intArgs.pop(), delimiterArgs.pop());
            intArgs.push(product);
        } else if (token.equals("(")) ; // Do nothing in this case. This strips the bracket from the expression
        else if (isInt(token)) { // Store the integer in the stack
            intArgs.push(Integer.parseInt(token));
        } else { // Store the operator
            delimiterArgs.push(token);
        }
    }
    /* Evaluate the simplified (bracket-free) expression contained in our stacks */
    while (!(delimiterArgs.empty())) {
        Integer product = performOperation(intArgs.pop(), intArgs.pop(), delimiterArgs.pop());
        intArgs.push(product);
    }
    return intArgs.pop();
} // end of evaluate
/*\ * Helper method for evaluate. Takes as input the user generated expression, and tests it to make sure that * , at least in terms of brackets, it is a well formed expression. Method tests for: * ->Bracket matching. Every open bracket must have a closing bracket * ->Integer values and arithmatic operators inside brackets. An expression such as 3+() would be considered malformed. * Method returns boolean value true if expression is well formed, and false otherwise \*/ static boolean bracketMatch(String expression) throws Exception { Stack<Character> bracketSum = new Stack<Character>(); // This stack will keep track of brackets which are yet to be closed for (int i = 0; i < expression.length() - 1; i++) { if (expression.charAt(i) == '(') { // Check for empty sets of brackets if (expression.charAt(i + 1) == ')') { return false; } Character openBrac = new Character(expression.charAt(i)); bracketSum.push(openBrac); // Add open bracket to total } else if (expression.charAt(i) == ')') { /*\ Two cases covered here. Either we have a closing bracket where there should not be one, in which case * an EmptyStackException is thrown, or else we have closed an open bracket, in which case we * remove a bracket from bracketSun \*/ bracketSum.pop(); } } // Check last character in expression. if (expression.charAt(expression.length() - 1) == '(') return false; else if (expression.charAt(expression.length() - 1) == ')') { bracketSum.pop(); } // Check to see if all brackets closed else if (!bracketSum.empty()) { return false; } return true; }
/**
 * Evaluates an expression of non-negative integers combined with "*"
 * (multiplication) and "**" (power, reduced immediately after its right
 * operand is read via computeExpression). Returns -1 for malformed input:
 * an operator run longer than "**", an unknown character, leftover
 * operators, or no result.
 *
 * Fix over the original: a character that is neither a digit nor '*' now
 * returns -1; previously `i` was never advanced for such characters, so
 * the loop spun forever.
 */
private static long evaluateExpression(String expr) {
    Stack<Long> oper = new Stack<Long>();   // operand stack
    Stack<String> op = new Stack<String>(); // operator stack, "*" or "**"
    int i = 0;
    while (i < expr.length()) {
        char ch = expr.charAt(i);
        if (ch >= '0' && ch <= '9') {
            // scan one full multi-digit number
            StringBuffer sb = new StringBuffer();
            sb.append(ch);
            while (++i < expr.length()) {
                ch = expr.charAt(i);
                if (ch >= '0' && ch <= '9') {
                    sb.append(ch);
                } else {
                    break;
                }
            }
            oper.push(Long.parseLong(sb.toString()));
            // power binds tighter than multiplication: reduce "**" immediately
            if (!op.empty() && ("**".equals(op.peek()))) {
                oper.push(computeExpression(oper, op.pop()));
            }
            // at end of input, reduce all remaining multiplications
            if (i == expr.length()) {
                while (!op.empty() && "*".equals(op.peek())) {
                    oper.push(computeExpression(oper, op.pop()));
                }
            }
        } else if (ch == '*') {
            // scan a run of '*': only "*" and "**" are valid operators
            StringBuffer sb = new StringBuffer();
            sb.append(ch);
            while (++i < expr.length()) {
                ch = expr.charAt(i);
                if (ch == '*') {
                    sb.append(ch);
                } else {
                    break;
                }
            }
            if (sb.length() > 2) {
                return -1; // "***" or longer is malformed
            }
            op.push(sb.toString());
        } else {
            // unknown character: malformed (the original looped forever here)
            return -1;
        }
    }
    // malformed if operators remain or no operand was produced
    if (!op.empty() || oper.empty()) return -1;
    return oper.pop();
}
/**
 * Drains the job queue, loading each job in order, then loads every
 * shader queued on the shader stack.
 */
public synchronized void workAllJobs() {
    // process queued jobs first
    while (!jobs.isEmpty()) {
        loadJob(jobs.remove());
    }
    // then flush pending shader loads
    while (!shadertoset.empty()) {
        shadertoset.pop().load();
    }
}
/**
 * Returns a dictionary-entry interpreter, reusing a pooled instance when
 * one is available; otherwise allocates a fresh one outside the lock.
 */
public DictionaryEntryInterpreter newEntryInterpreter() {
    synchronized (interpreterPool) {
        if (interpreterPool.empty()) {
            // fall through: allocate a new instance below, outside the lock
        } else {
            return (DictionaryEntryInterpreter) interpreterPool.pop();
        }
    }
    return new DictionaryEntryInterpreter(this);
}
public void ignorableAtRule(String atRule) throws CSSException { // Create the unknown rule and add it to the rule list CSSUnknownRuleImpl ir = new CSSUnknownRuleImpl(_parentStyleSheet, null, atRule); if (!_nodeStack.empty()) { ((CSSRuleListImpl) _nodeStack.peek()).add(ir); } else { // _nodeStack.push(ir); _root = ir; } }
/**
 * Returns a tokenized-input extension bound to {@code input}, reusing a
 * pooled instance when available; otherwise constructs a new one.
 */
public TokenizedInputDictionaryExtension newTokenizedInputExtension(DefaultTokenizedInput input) {
    synchronized (tokenizedInputExtensionPool) {
        if (!tokenizedInputExtensionPool.empty()) {
            TokenizedInputDictionaryExtension pooled =
                (TokenizedInputDictionaryExtension) tokenizedInputExtensionPool.pop();
            // rebind the recycled extension to the caller's input
            pooled.setInput(input);
            return pooled;
        }
    }
    return new TokenizedInputDictionaryExtension(this, input);
}
public void importStyle(String uri, SACMediaList media, String defaultNamespaceURI) throws CSSException { // Create the import rule and add it to the rule list CSSImportRuleImpl ir = new CSSImportRuleImpl(_parentStyleSheet, null, uri, new MediaListImpl(media)); if (!_nodeStack.empty()) { ((CSSRuleListImpl) _nodeStack.peek()).add(ir); } else { // _nodeStack.push(ir); _root = ir; } }
/**
 * Reflectively disables NetX (javaws) codebase classloading by nulling the
 * `codeBaseLoader` field on every JNLPClassLoader reachable from our own
 * classloader through the `loaders` array field.
 * Throws Exception with a diagnostic message when NetX is absent or its
 * internals do not match the expected structure.
 * NOTE(review): relies on private JNLPClassLoader internals; field names
 * may differ across IcedTea-Web versions — confirm against the targeted
 * release.
 */
private static void netxsurgery() throws Exception {
    /* Force off NetX codebase classloading. */
    Class<?> nxc;
    // Try the modern package name first, then the legacy "netx" package.
    try {
        nxc = Class.forName("net.sourceforge.jnlp.runtime.JNLPClassLoader");
    } catch (ClassNotFoundException e1) {
        try {
            nxc = Class.forName("netx.jnlp.runtime.JNLPClassLoader");
        } catch (ClassNotFoundException e2) {
            throw (new Exception("No known NetX on classpath"));
        }
    }
    ClassLoader cl = MainFrame.class.getClassLoader();
    if (!nxc.isInstance(cl)) {
        throw (new Exception("Not running from a NetX classloader"));
    }
    Field cblf, lf;
    try {
        cblf = nxc.getDeclaredField("codeBaseLoader");
        lf = nxc.getDeclaredField("loaders");
    } catch (NoSuchFieldException e) {
        throw (new Exception("JNLPClassLoader does not conform to its known structure"));
    }
    cblf.setAccessible(true);
    lf.setAccessible(true);
    // Depth-first walk: collect every JNLPClassLoader reachable through
    // the `loaders` arrays, de-duplicating with a visited set.
    Set<Object> loaders = new HashSet<Object>();
    Stack<Object> open = new Stack<Object>();
    open.push(cl);
    while (!open.empty()) {
        Object cur = open.pop();
        if (loaders.contains(cur)) continue; // already visited
        loaders.add(cur);
        Object curl;
        try {
            curl = lf.get(cur);
        } catch (IllegalAccessException e) {
            throw (new Exception("Reflection accessibility not available even though set"));
        }
        for (int i = 0; i < Array.getLength(curl); i++) {
            Object other = Array.get(curl, i);
            if (nxc.isInstance(other)) open.push(other);
        }
    }
    // Null out the codebase loader on every collected loader.
    for (Object cur : loaders) {
        try {
            cblf.set(cur, null);
        } catch (IllegalAccessException e) {
            throw (new Exception("Reflection accessibility not available even though set"));
        }
    }
}
public static void main(String[] args) throws IOException { br = new BufferedReader(new InputStreamReader(System.in)); out = new PrintWriter(new OutputStreamWriter(System.out)); // br = new BufferedReader(new FileReader("in.txt")); // out = new PrintWriter(new FileWriter("out.txt")); N = readInt(); val = new int[N]; hi = new int[N]; lo = new int[N]; poss = new ArrayList<ArrayList<Integer>>(); intervals = new TreeSet<Interval>(); for (int i = 0; i < N; i++) val[i] = readInt(); for (int i = 0; i < 2 * N; i++) poss.add(new ArrayList<Integer>()); Stack<State> s = new Stack<State>(); // processing upper bound (first number less than the current number) for (int i = N - 1; i >= 0; i--) { while (!s.isEmpty() && val[i] < s.peek().val) s.pop(); if (s.isEmpty()) hi[val[i]] = N; else hi[val[i]] = s.peek().index; s.push(new State(val[i], i)); } s.clear(); // processing lower bound (last number greater than the current number) for (int i = 0; i < N; i++) { while (!s.isEmpty() && val[i] > s.peek().val) s.pop(); if (s.empty()) lo[val[i]] = -1; else lo[val[i]] = s.peek().index; s.push(new State(val[i], i)); } for (int i = 0; i < N; i++) { int diff = val[i] - i + N - 1; poss.get(diff).add(i); } for (int i = 0; i < 2 * N; i++) { sweep(poss.get(i)); } out.println(intervals.size()); for (Interval i : intervals) out.printf("%d %d\n", i.l + 1, i.r + 1); out.close(); }
public void startSelector(SelectorList selectors) throws CSSException { // Create the style rule and add it to the rule list CSSStyleRuleImpl sr = new CSSStyleRuleImpl(_parentStyleSheet, null, selectors); if (!_nodeStack.empty()) { ((CSSRuleListImpl) _nodeStack.peek()).add(sr); } // Create the style declaration CSSStyleDeclarationImpl decl = new CSSStyleDeclarationImpl(sr); sr.setStyle(decl); _nodeStack.push(sr); _nodeStack.push(decl); }
public void startPage(String name, String pseudo_page) throws CSSException { // Create the page rule and add it to the rule list CSSPageRuleImpl pr = new CSSPageRuleImpl(_parentStyleSheet, null, name, pseudo_page); if (!_nodeStack.empty()) { ((CSSRuleListImpl) _nodeStack.peek()).add(pr); } // Create the style declaration CSSStyleDeclarationImpl decl = new CSSStyleDeclarationImpl(pr); pr.setStyle(decl); _nodeStack.push(pr); _nodeStack.push(decl); }
public void startFontFace() throws CSSException { // Create the font face rule and add it to the rule list CSSFontFaceRuleImpl ffr = new CSSFontFaceRuleImpl(_parentStyleSheet, null); if (!_nodeStack.empty()) { ((CSSRuleListImpl) _nodeStack.peek()).add(ffr); } // Create the style declaration CSSStyleDeclarationImpl decl = new CSSStyleDeclarationImpl(ffr); ffr.setStyle(decl); _nodeStack.push(ffr); _nodeStack.push(decl); }
public void startDocument(InputSource source) throws CSSException { if (_nodeStack.empty()) { CSSStyleSheetImpl ss = new CSSStyleSheetImpl(); _parentStyleSheet = ss; // Create the rule list CSSRuleListImpl rules = new CSSRuleListImpl(); ss.setRuleList(rules); _nodeStack.push(ss); _nodeStack.push(rules); } else { // Error } }
public void startMedia(SACMediaList media) throws CSSException { // Create the media rule and add it to the rule list CSSMediaRuleImpl mr = new CSSMediaRuleImpl(_parentStyleSheet, null, new MediaListImpl(media)); if (!_nodeStack.empty()) { ((CSSRuleListImpl) _nodeStack.peek()).add(mr); } // Create the rule list CSSRuleListImpl rules = new CSSRuleListImpl(); mr.setRuleList(rules); _nodeStack.push(mr); _nodeStack.push(rules); }
public void dealWithOperator(char operator) { while (!stack.empty()) { char pending = peekChar(); if (pending == '(') break; if (outranks(operator, pending)) { break; } else { out.append(popChar()); } } // every time we push an argument, we output a space to // separate the arguments out.append(' '); stack.push(new Character(operator)); return; }
private ArrayList<Cluster> getClusters(Cluster clustering, float threshold) { ArrayList<Cluster> clusters = new ArrayList<Cluster>(); // First determine the clusters Stack<Cluster> stack = new Stack<Cluster>(); stack.push(clustering); while (!stack.empty()) { Cluster current = stack.pop(); if (current.size() == 1) { clusters.add(current); // singleton clusters } else { if (current.getSimilarity() >= threshold) { clusters.add(current); } else { // current.size() != 1 !!! stack.push(current.getLeft()); stack.push(current.getRight()); } } } return clusters; }
/*
 * Finds the number of routes from a specified start node to a specified end node.
 * Restrictions include max, distance, and lessthan.
 *
 * Input format (inferred): "X.Y:restriction==value" where X/Y are the
 * start/end node names at positions 0 and 2 — TODO confirm.
 *
 * NOTE(review): the substring calls index into the static field `input`
 * rather than the `inputString` parameter — this only works when both hold
 * the same text; confirm or change to `inputString`.
 * NOTE(review): in the "max"/"distance" branches, `current` is checked for
 * null in the routes test but then dereferenced unconditionally on the next
 * line — a null poll would throw NPE.
 * Uses the shared statics i, j, edgeList, nodeMap, first, second, bw.
 */
private static void findNumberofRoutes(String inputString) {
    int routes = 0;
    int counter1 = 0; // nodes at the current BFS depth
    int counter2 = 0; // nodes queued for the next BFS depth
    int depth = 0;
    String restriction = inputString.substring(input.indexOf(":") + 1, input.indexOf("="));
    String restrictionValueString = inputString.substring(input.indexOf("=") + 2);
    int restrictionValue = Integer.parseInt(restrictionValueString);
    LinkedList<Edge> bfs = new LinkedList<Edge>();
    Stack<Edge> dfs = new Stack<Edge>();
    Stack<Integer> dfslength = new Stack<Integer>(); // path length paired with each dfs entry
    Edge current;
    int currentLength;
    first = inputString.charAt(0);  // start node
    second = inputString.charAt(2); // end node
    if (restriction.equals("max")) {
        // BFS level by level: count every arrival at `second` within restrictionValue hops
        edgeList = nodeMap.get(first).getEdges();
        for (i = 0; i < edgeList.size(); i++) {
            bfs.add(edgeList.get(i));
            counter2++;
        }
        while (depth < restrictionValue) {
            counter1 = counter2;
            counter2 = 0;
            for (i = 0; i < counter1; i++) {
                current = bfs.poll();
                if (current != null && current.getEdgeDestination() == second) {
                    routes++;
                }
                edgeList = nodeMap.get(current.getEdgeDestination()).getEdges();
                for (j = 0; j < edgeList.size(); j++) {
                    bfs.add(edgeList.get(j));
                    counter2++;
                }
            }
            depth++;
        }
    } else if (restriction.equals("distance")) {
        // BFS, but only count arrivals at exactly restrictionValue hops
        edgeList = nodeMap.get(first).getEdges();
        for (i = 0; i < edgeList.size(); i++) {
            bfs.add(edgeList.get(i));
            counter2++;
        }
        while (depth <= restrictionValue) {
            counter1 = counter2;
            counter2 = 0;
            for (i = 0; i < counter1; i++) {
                current = bfs.poll();
                if (current != null && current.getEdgeDestination() == second && depth == restrictionValue) {
                    routes++;
                }
                edgeList = nodeMap.get(current.getEdgeDestination()).getEdges();
                for (j = 0; j < edgeList.size(); j++) {
                    bfs.add(edgeList.get(j));
                    counter2++;
                }
            }
            depth++;
        }
    } else if (restriction.equals("lessthan")) {
        // DFS over paths whose accumulated edge length stays under restrictionValue
        edgeList = nodeMap.get(first).getEdges();
        for (i = 0; i < edgeList.size(); i++) {
            if (edgeList.get(i).getEdgeLength() < restrictionValue) {
                dfs.push(edgeList.get(i));
                dfslength.push(edgeList.get(i).getEdgeLength());
            }
        }
        while (!dfs.empty()) {
            current = dfs.pop();
            currentLength = dfslength.pop();
            edgeList = nodeMap.get(current.getEdgeDestination()).getEdges();
            for (i = 0; i < edgeList.size(); i++) {
                if (currentLength + edgeList.get(i).getEdgeLength() < restrictionValue) {
                    if (edgeList.get(i).getEdgeDestination() == second) {
                        routes++;
                    }
                    dfs.push(edgeList.get(i));
                    dfslength.push(edgeList.get(i).getEdgeLength() + currentLength);
                }
            }
        }
    }
    // Write the count to the shared output writer
    try {
        bw.write(Integer.toString(routes));
        bw.newLine();
    } catch (IOException e) {
        System.out.println(e);
    }
}
/**
 * Reads a StarUML JSON project file and generates Java source files from it:
 * parses classes (fields, operations, associations, generalizations,
 * realizations) from the first UMLPackage, sequence-diagram behaviour
 * (lifelines, combined fragments, messages) from the second, maps messages
 * onto operations, fills operation bodies from the call tree, and finally
 * writes one .java file per class under a hard-coded output directory.
 *
 * NOTE(review): the output path is hard-coded to a developer home
 * directory; the project JSON schema (field names such as _type, $ref,
 * operands, tags) is assumed from the Element model — confirm against the
 * StarUML export format.
 */
public void generate(String inputFileName) throws Exception {
    List<MetaClass> metaClasses = new ArrayList<>();
    List<LifeLine> lifeLines = new ArrayList<>();
    List<MethodInvocation> rootMessages = new ArrayList<>();
    MethodInvocation parentMessage = new MethodInvocation();
    GsonBuilder builder = new GsonBuilder();
    List<MethodInvocation> methodInvocations = new ArrayList<>();
    Package mainPackage = new Package();
    List<Guard> listOfGuards = new ArrayList<>();
    Map<Guard, Instruction> guardToCFMap = new HashMap<>();
    List<Instruction> combinedFragments = new ArrayList<Instruction>();
    List<Operation> operationsList = new ArrayList<>();
    builder.registerTypeAdapter(RefObject.class, new RefObjectJsonDeSerializer());
    Gson gson = builder.create();
    Element myTypes = gson.fromJson(new FileReader(inputFileName), Element.class);
    if (myTypes._type.equals("Project")) {
        List<Element> umlElements = myTypes
            .ownedElements
            .stream()
            .filter(f -> f._type.equals("UMLModel"))
            .collect(Collectors.toList());
        if (umlElements.size() > 0) { // There has to be at least one UMLModel package
            Element element = umlElements.get(0); // package that the classes are supposed to be in
            mainPackage.setName(element.name);
            List<Element> umlPackages = element
                .ownedElements
                .stream()
                .filter(g -> g._type.equals("UMLPackage"))
                .collect(Collectors.toList());
            if (umlPackages.size() > 1) { // There have to be two packages - one for classes, one for behaviour
                Element classes = umlPackages.get(0);
                Element behaviour = umlPackages.get(1);
                // *--------------------------CLASSES-------------------------------*//
                // First pass: create every class with the details that can be read
                // directly from the JSON (fields and operations), which do not
                // refer to other classes.
                for (Element umlClass : classes.getOwnedElements()) {
                    MetaClass metaClass = new MetaClass(umlClass.name, umlClass._id);
                    // check if class is an interface — there is no distinction in the json
                    if (umlClass._type.equals("UMLClass")) {
                        metaClass.setInterface(false);
                    } else {
                        metaClass.setInterface(true);
                    }
                    if (umlClass.operations != null) {
                        metaClass.setOperations(umlClass.operations);
                        operationsList.addAll(metaClass.operations);
                    }
                    if (umlClass.attributes != null) {
                        metaClass.setFields(umlClass.attributes);
                    }
                    metaClasses.add(metaClass);
                }
                // Second pass: resolve associations, generalizations and interface
                // realizations, which reference the classes created above.
                for (Element umlClass : classes.getOwnedElements()) {
                    if (umlClass.ownedElements != null) {
                        // find the corresponding metaclass, then populate the secondary inferences
                        List<MetaClass> correspondingMetaClassList = metaClasses
                            .stream()
                            .filter(f -> f._id.equals(umlClass._id))
                            .collect(Collectors.toList());
                        MetaClass correspondingMetaClass = correspondingMetaClassList.get(0);
                        List<Element> umlAssociations = umlClass
                            .ownedElements
                            .stream()
                            .filter(f -> f._type.equals("UMLAssociation"))
                            .collect(Collectors.toList());
                        if (umlAssociations.size() > 0) {
                            correspondingMetaClass.setAssociations(metaClasses, umlAssociations);
                        }
                        List<Element> umlGeneralization = umlClass
                            .ownedElements
                            .stream()
                            .filter(f -> f._type.equals("UMLGeneralization"))
                            .collect(Collectors.toList());
                        if (umlGeneralization.size() > 0) {
                            correspondingMetaClass.setGeneralizations(metaClasses, umlGeneralization);
                        }
                        List<Element> umlRealization = umlClass
                            .ownedElements
                            .stream()
                            .filter(f -> f._type.equals("UMLInterfaceRealization"))
                            .collect(Collectors.toList());
                        if (umlRealization.size() > 0) {
                            correspondingMetaClass.setInterfaceRealization(metaClasses, umlRealization);
                        }
                    }
                }
                // *--------------------------CLASSES-------------------------------*//
                // *----------------------- BEHAVIOUR---------------------------------*//
                for (Element umlCollaboration : behaviour.getOwnedElements()) {
                    // Role-to-class mapping: collaboration attributes name the roles
                    ArrayList<Element> attributes = umlCollaboration.attributes;
                    HashMap<String, MetaClass> roleToClassMap = new HashMap<>();
                    if (attributes != null) {
                        for (Element attribute : attributes) {
                            List<MetaClass> roleClass = metaClasses
                                .stream()
                                .filter(f -> f._id.equals(attribute.type.$ref))
                                .collect(Collectors.toList());
                            roleToClassMap.put(attribute._id, roleClass.get(0));
                        }
                    }
                    for (Element umlInteraction : umlCollaboration.ownedElements) {
                        // map lifelines to the classes they represent
                        ArrayList<Element> participants = umlInteraction.participants;
                        if (participants != null && participants.size() > 0) {
                            for (Element participant : participants) {
                                MetaClass participantClass = roleToClassMap.get(participant.represent.$ref);
                                LifeLine lifeLine = new LifeLine();
                                lifeLine.setName(participant.name);
                                lifeLine.setId(participant._id);
                                lifeLine.setMetaClass(participantClass);
                                lifeLines.add(lifeLine);
                            }
                        }
                        // First parse all the combined fragments (loop/alt) so that
                        // messages can later be attached to their operands.
                        if (umlInteraction.fragments != null) {
                            for (Element fragment : umlInteraction.fragments) {
                                // depending on the fragment, pick the Instruction subclass
                                Instruction instruction = null;
                                if (fragment.interactionOperator.equals("loop")) {
                                    Loop loop = new Loop();
                                    loop.setId(fragment._id);
                                    loop.setWeight(0);
                                    Guard guard = new Guard(fragment.operands.get(0)._id);
                                    // a loop has exactly one operand: its condition
                                    guard.setCondition(fragment.operands.get(0).guard);
                                    loop.setGuard(guard);
                                    instruction = loop;
                                    combinedFragments.add(loop);
                                    listOfGuards.add(guard);
                                    guardToCFMap.put(guard, loop);
                                }
                                if (fragment.interactionOperator.equals("alt")) {
                                    Conditional c = new Conditional();
                                    c.setId(fragment._id);
                                    c.setWeight(0);
                                    instruction = c;
                                    combinedFragments.add(c);
                                    // first operand = consequence (then-branch)
                                    Guard consequence = new Guard(fragment.operands.get(0)._id);
                                    consequence.setCondition(fragment.operands.get(0).guard);
                                    c.setCons(consequence);
                                    listOfGuards.add(consequence);
                                    guardToCFMap.put(consequence, c);
                                    consequence.setConsequence(true);
                                    // optional second operand = alternative (else-branch)
                                    if (fragment.operands.size() > 1) {
                                        Guard alternate = new Guard(fragment.operands.get(1)._id);
                                        alternate.setCondition(fragment.operands.get(1).guard);
                                        c.setAlt(alternate);
                                        listOfGuards.add(alternate);
                                        guardToCFMap.put(alternate, c);
                                        alternate.setAlternative(true);
                                    }
                                }
                                // a "parent" tag nests this fragment inside another one
                                if (fragment.tags != null) {
                                    for (Element tag : fragment.tags) {
                                        if (tag.name.equals("parent")) {
                                            List<Instruction> instructionList = combinedFragments
                                                .stream()
                                                .filter(e -> e.getId().equals(tag.reference.$ref))
                                                .collect(Collectors.toList());
                                            if (instructionList.size() > 0) {
                                                instructionList.get(0).getBlock().add(instruction);
                                                instruction.setParent(instructionList.get(0));
                                            }
                                        }
                                    }
                                }
                            }
                        }
                        // Parse the messages and make call nodes out of them to later
                        // build a tree from the lifelines.
                        ArrayList<Element> messages = umlInteraction.messages;
                        Element startMessage = messages.get(0);
                        String sourceRef = startMessage.source.$ref;
                        String targetRef = startMessage.target.$ref;
                        Element endMessage = null;
                        LifeLine sourceLifeLine = getLifeLine(lifeLines, sourceRef);
                        LifeLine targetLifeLine = getLifeLine(lifeLines, targetRef);
                        // First message processing
                        parentMessage = new MethodInvocation();
                        parentMessage.setAssignmentTarget(startMessage.assignmentTarget);
                        parentMessage.setMessageSort(startMessage.messageSort);
                        parentMessage.setSource(sourceLifeLine.getMetaClass());
                        parentMessage.setTarget(targetLifeLine.getMetaClass());
                        parentMessage.setName(startMessage.name);
                        parentMessage.setId(startMessage._id);
                        // self-calls are invoked on "this", otherwise on the target lifeline's name
                        if (sourceLifeLine.getId().equals(targetLifeLine.getId())) {
                            parentMessage.setCallerObject("this");
                        } else {
                            parentMessage.setCallerObject(targetLifeLine.getName());
                        }
                        int weight = 0;
                        parentMessage.setWeight(weight++);
                        if (startMessage.signature != null) {
                            parentMessage.setSignature(startMessage.signature.$ref);
                        }
                        if (startMessage.tags != null) {
                            for (Element tag : startMessage.tags) {
                                // an "operand" tag links the message to a combined-fragment operand
                                if (tag.name.equals("operand")) {
                                    parentMessage.setOperandId(tag.reference.$ref);
                                }
                            }
                        }
                        MethodInvocation rootMessage = parentMessage;
                        methodInvocations.add(rootMessage);
                        rootMessages.add(rootMessage);
                        // Walk the message list front-to-back, consuming processed
                        // messages. NOTE(review): `messages` is mutated through the
                        // iterator while getChildMessages also reads it — the
                        // endMessage skip preserves the last sibling for the next
                        // round; confirm this invariant holds for all diagrams.
                        Iterator<Element> iter = messages.iterator();
                        while (iter.hasNext()) {
                            if (iter.next() == endMessage) {
                                continue;
                            }
                            iter.remove();
                            List<Element> childMessages = getChildMessages(messages, targetRef);
                            for (Element child : childMessages) {
                                LifeLine childSource = getLifeLine(lifeLines, child.source.$ref);
                                LifeLine childTarget = getLifeLine(lifeLines, child.target.$ref);
                                MethodInvocation childMessage = new MethodInvocation();
                                childMessage.setMessageSort(child.messageSort);
                                childMessage.setSource(childSource.getMetaClass());
                                childMessage.setTarget(childTarget.getMetaClass());
                                childMessage.setAssignmentTarget(child.assignmentTarget);
                                childMessage.setName(child.name);
                                childMessage.setId(child._id);
                                childMessage.setWeight(weight++);
                                childMessage.setArguments(child.arguments);
                                if (childSource.getId().equals(childTarget.getId())) {
                                    childMessage.setCallerObject("this");
                                } else {
                                    childMessage.setCallerObject(childTarget.getName());
                                }
                                if (child.signature != null) {
                                    childMessage.setSignature(child.signature.$ref);
                                }
                                if (child.tags != null) {
                                    for (Element tag : child.tags) {
                                        if (tag.name.equals("operand")) {
                                            childMessage.setOperandId(tag.reference.$ref);
                                        }
                                    }
                                }
                                parentMessage.childNodes.add(childMessage);
                                methodInvocations.add(childMessage);
                            }
                            if (childMessages.size() > 0) {
                                // descend: the next parent is the first non-self child call
                                List<MethodInvocation> nextMessage = parentMessage
                                    .childNodes
                                    .stream()
                                    .filter(f -> !f.source.equals(f.target))
                                    .collect(Collectors.toList());
                                List<Element> startMessageNext = childMessages
                                    .stream()
                                    .filter(f -> !f.source.$ref.equals(f.target.$ref))
                                    .collect(Collectors.toList());
                                startMessage = startMessageNext.get(0);
                                targetRef = startMessage.target.$ref;
                                sourceRef = startMessage.source.$ref;
                                parentMessage = nextMessage.get(0);
                                if (childMessages.size() > 1) {
                                    endMessage = childMessages.get(childMessages.size() - 1);
                                }
                            }
                        }
                    }
                    // Resolve each invocation's signature reference to its Operation
                    for (MethodInvocation methodInvocation : methodInvocations) {
                        List<Operation> matchingOperation = operationsList
                            .stream()
                            .filter(f -> f._id.equals(methodInvocation.getSignature()))
                            .collect(Collectors.toList());
                        if (matchingOperation.size() > 0) {
                            operationMap.put(methodInvocation, matchingOperation.get(0)._id);
                            methodInvocation.setOperation(matchingOperation.get(0));
                        }
                    }
                    // Walk every call tree and fill each operation's body: plain
                    // children go straight into the block; children tagged with an
                    // operand go into their combined fragment (loop/conditional).
                    Stack stack = new Stack();
                    for (MethodInvocation root : methodInvocations) {
                        stack.push(root);
                        while (!stack.empty()) {
                            MethodInvocation methodInvocation = (MethodInvocation) stack.pop();
                            Operation currentOperation = methodInvocation.getOperation();
                            if (currentOperation != null) {
                                // all child nodes of this node make up its body
                                List<MethodInvocation> childNodes = methodInvocation.childNodes;
                                for (MethodInvocation child : childNodes) {
                                    stack.push(child);
                                }
                                for (MethodInvocation childNode : childNodes) {
                                    if (childNode.getOperandId() != null) {
                                        List<Instruction> combinedFragmentsList = combinedFragments
                                            .stream()
                                            .filter(f -> f.getId().equals(childNode.getCfID()))
                                            .collect(Collectors.toList());
                                        List<Guard> guardList = listOfGuards
                                            .stream()
                                            .filter(f -> f.id.equals(childNode.getOperandId()))
                                            .collect(Collectors.toList());
                                        if (guardList.size() > 0) {
                                            Guard currentGuard = guardList.get(0);
                                            Instruction instruction = guardToCFMap.get(guardList.get(0));
                                            // climb to the topmost CF if fragments are nested
                                            Instruction parent = instruction.getParent();
                                            while (instruction.getParent() != null) {
                                                instruction = instruction.getParent();
                                            }
                                            if (currentGuard.isConsequence) {
                                                Conditional conditional = (Conditional) instruction;
                                                if (!conditional.getConsequence().contains(childNode)) {
                                                    conditional.getConsequence().add(childNode);
                                                }
                                            }
                                            if (currentGuard.isAlternative) {
                                                Conditional conditional = (Conditional) instruction;
                                                if (!conditional.getAlternative().contains(childNode)) {
                                                    conditional.getAlternative().add(childNode);
                                                }
                                            }
                                            if (!currentGuard.isAlternative && !currentGuard.isConsequence) {
                                                // neither branch of an alt: must be a loop body
                                                Loop loop = (Loop) instruction;
                                                loop.getBlock().add(childNode);
                                            } else {
                                                // ensure the fragment itself appears once in the operation body
                                                if (!currentOperation.getBlock().contains(instruction)) {
                                                    currentOperation.getBlock().add(instruction);
                                                }
                                            }
                                        }
                                    } else {
                                        // plain call: add it directly to the operation body once
                                        if (!currentOperation.getBlock().contains(childNode)) {
                                            currentOperation.getBlock().add(childNode);
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
    }
    mainPackage.print();
    // Emit one .java file per class. NOTE(review): destination is hard-coded.
    File dir = new File("/home/ramyashenoy/Desktop/DemoFolder/" + mainPackage.getName());
    boolean successful = dir.mkdir();
    if (successful) {
        System.out.println("directory was created successfully");
        for (MetaClass metaClass : metaClasses) {
            if (metaClass.name.equals("Main")) {
                continue;
            } else {
                String data = metaClass.print();
                BufferedWriter out = null;
                try {
                    FileWriter fstream = new FileWriter(
                        dir.getPath() + "/" + metaClass.name + ".java", true); // true tells to append data.
                    out = new BufferedWriter(fstream);
                    out.write(data);
                } catch (IOException e) {
                    System.err.println("Error: " + e.getMessage());
                } finally {
                    if (out != null) {
                        out.close();
                    }
                }
            }
        }
    } else {
        // creating the directory failed
        System.out.println("failed trying to create the directory");
    }
    mainPackage.setClasses(metaClasses);
}
// Graham Scan: computes the convex hull of pointSet.
// Returns the hull points popped from the working stack (so in clockwise
// order starting from the last-accepted point — TODO confirm orientation
// against isLeft's sign convention).
// NOTE(review): when a candidate fails the left-turn test, the popped point
// is discarded and n is NOT advanced, so the same candidate is retried
// against the shorter stack. If the stack ever shrinks to just the base,
// the peek() after pop() could throw EmptyStackException for degenerate
// (collinear) inputs — verify.
public Point[] grahamScan(Set<Point> pointSet) {
    Point[] myPS = pointSet.toArray(new Point[pointSet.size()]);
    // (0): find the lowest point (ties broken toward larger x)
    int min = 0;
    for (int i = 1; i < myPS.length; i++) {
        if (myPS[i].y == myPS[min].y) {
            if (myPS[i].x > myPS[min].x) min = i;
        } else if (myPS[i].y < myPS[min].y) min = i;
    }
    // move min to the beginning of the array: this is the base point
    swapPoints(myPS, 0, min);
    // (1): find angles from the base point to every other point
    ArrayList<PointData> ccw = new ArrayList<PointData>();
    double ang;
    for (int i = 1; i < myPS.length; i++) {
        ang = findAngle(myPS[0], myPS[i]);
        ccw.add(new PointData(ang, myPS[i]));
    }
    // (2): sort candidates by counter-clockwise angle around the base
    Collections.sort(ccw);
    // (3): build the hull on a stack of points
    Stack<Point> pointStack = new Stack<Point>();
    // push the base
    pointStack.push(myPS[0]);
    // degenerate case: only the base point exists
    if (ccw.isEmpty()) {
        Point[] basehull = new Point[1];
        basehull[0] = pointStack.pop();
        return basehull;
    }
    // push the rightmost (smallest-angle) candidate
    pointStack.push(ccw.get(0).p);
    // scan the remaining candidates, keeping only left turns
    int n = 1;
    while (n < ccw.size()) {
        Point c = pointStack.pop();      // current hull candidate
        Point p = pointStack.peek();     // point below it
        Point nPoint = ccw.get(n).p;     // next input point
        if (isLeft(p, c, nPoint) < 0) {
            // left turn: keep c and accept nPoint
            pointStack.push(c);
            pointStack.push(nPoint);
            n++;
        }
        // otherwise c is discarded and nPoint is retried (n unchanged)
    }
    // copy the stack into the result array (pop order)
    Point[] convexhull = new Point[pointStack.size()];
    int z = 0;
    while (pointStack.empty() == false) {
        Point hullpoint = pointStack.pop();
        convexhull[z] = hullpoint;
        z++;
    }
    // return the array form of the stack
    return convexhull;
}
public boolean hasMoreFiles() { return !files.empty(); }
public File nextFile() throws IOException { if (files.empty()) throw new NoSuchElementException(); File out = (File) files.pop(); while (files.empty() && !direx.empty()) blossomDirectory((File) direx.pop()); return out; }
/**
 * Initialize DFSCopyFileMapper specific job-configuration.
 *
 * <p>Walks every source tree, writing three SequenceFiles under a per-run job
 * directory: the src file list (consumed by the mappers), the dst file list
 * (used for duplicate checking), and the dst dir list (used to recreate
 * directory structure). Also configures labels, the log path, the temp dir,
 * and the map count on {@code jobConf}.
 *
 * @param conf : The dfs/mapred configuration.
 * @param jobConf : The handle to the jobConf object to be initialized.
 * @param args Arguments
 * @throws IOException on any filesystem or writer failure
 */
private static void setup(Configuration conf, JobConf jobConf, final Arguments args)
    throws IOException {
  jobConf.set(DST_DIR_LABEL, args.dst.toUri().toString());
  // set boolean values
  final boolean update = args.flags.contains(Options.UPDATE);
  // OVERWRITE is ignored when UPDATE is requested — update wins.
  final boolean overwrite = !update && args.flags.contains(Options.OVERWRITE);
  jobConf.setBoolean(Options.UPDATE.propertyname, update);
  jobConf.setBoolean(Options.OVERWRITE.propertyname, overwrite);
  jobConf.setBoolean(
      Options.IGNORE_READ_FAILURES.propertyname,
      args.flags.contains(Options.IGNORE_READ_FAILURES));
  jobConf.setBoolean(
      Options.PRESERVE_STATUS.propertyname, args.flags.contains(Options.PRESERVE_STATUS));

  // Per-run unique id keeps concurrent distcp jobs from colliding on paths.
  final String randomId = getRandomId();
  JobClient jClient = new JobClient(jobConf);
  Path jobDirectory = new Path(jClient.getSystemDir(), NAME + "_" + randomId);
  jobConf.set(JOB_DIR_LABEL, jobDirectory.toString());

  FileSystem dstfs = args.dst.getFileSystem(conf);
  boolean dstExists = dstfs.exists(args.dst);
  boolean dstIsDir = false;
  if (dstExists) {
    dstIsDir = dstfs.getFileStatus(args.dst).isDir();
  }

  // default logPath — when none given, place logs beside (or inside) the dst.
  Path logPath = args.log;
  if (logPath == null) {
    String filename = "_distcp_logs_" + randomId;
    if (!dstExists || !dstIsDir) {
      // dst is (or will be) a file: logs go into its parent directory.
      Path parent = args.dst.getParent();
      if (!dstfs.exists(parent)) {
        dstfs.mkdirs(parent);
      }
      logPath = new Path(parent, filename);
    } else {
      logPath = new Path(args.dst, filename);
    }
  }
  FileOutputFormat.setOutputPath(jobConf, logPath);

  // create src list, dst list
  FileSystem jobfs = jobDirectory.getFileSystem(jobConf);

  Path srcfilelist = new Path(jobDirectory, "_distcp_src_files");
  jobConf.set(SRC_LIST_LABEL, srcfilelist.toString());
  SequenceFile.Writer src_writer =
      SequenceFile.createWriter(
          jobfs, jobConf, srcfilelist, LongWritable.class, FilePair.class,
          SequenceFile.CompressionType.NONE);

  Path dstfilelist = new Path(jobDirectory, "_distcp_dst_files");
  SequenceFile.Writer dst_writer =
      SequenceFile.createWriter(
          jobfs, jobConf, dstfilelist, Text.class, Text.class,
          SequenceFile.CompressionType.NONE);

  Path dstdirlist = new Path(jobDirectory, "_distcp_dst_dirs");
  jobConf.set(DST_DIR_LIST_LABEL, dstdirlist.toString());
  SequenceFile.Writer dir_writer =
      SequenceFile.createWriter(
          jobfs, jobConf, dstdirlist, Text.class, FilePair.class,
          SequenceFile.CompressionType.NONE);

  // handle the case where the destination directory doesn't exist
  // and we've only a single src directory OR we're updating/overwriting
  // the contents of the destination directory.
  // "special" flattens the copy: paths are made relative to src itself
  // rather than to src's parent.
  final boolean special = (args.srcs.size() == 1 && !dstExists) || update || overwrite;
  int srcCount = 0, cnsyncf = 0, dirsyn = 0;
  long fileCount = 0L, byteCount = 0L, cbsyncs = 0L;
  try {
    for (Iterator<Path> srcItr = args.srcs.iterator(); srcItr.hasNext(); ) {
      final Path src = srcItr.next();
      FileSystem srcfs = src.getFileSystem(conf);
      FileStatus srcfilestat = srcfs.getFileStatus(src);
      Path root = special && srcfilestat.isDir() ? src : src.getParent();
      if (srcfilestat.isDir()) {
        ++srcCount;
      }

      // Iterative depth-first traversal of the source tree.
      Stack<FileStatus> pathstack = new Stack<FileStatus>();
      for (pathstack.push(srcfilestat); !pathstack.empty(); ) {
        FileStatus cur = pathstack.pop();
        FileStatus[] children = srcfs.listStatus(cur.getPath());
        for (int i = 0; i < children.length; i++) {
          boolean skipfile = false;
          final FileStatus child = children[i];
          final String dst = makeRelative(root, child.getPath());
          ++srcCount;

          if (child.isDir()) {
            pathstack.push(child);
          } else {
            // skip file if the src and the dst files are the same.
            skipfile = update && sameFile(srcfs, child, dstfs, new Path(args.dst, dst));
            // skip file if it exceed file limit or size limit
            skipfile |=
                fileCount == args.filelimit || byteCount + child.getLen() > args.sizelimit;

            if (!skipfile) {
              ++fileCount;
              byteCount += child.getLen();

              if (LOG.isTraceEnabled()) {
                LOG.trace("adding file " + child.getPath());
              }

              // Periodically sync the writers so the SequenceFiles get split
              // points roughly every SYNC_FILE_MAX files / BYTES_PER_MAP bytes.
              ++cnsyncf;
              cbsyncs += child.getLen();
              if (cnsyncf > SYNC_FILE_MAX || cbsyncs > BYTES_PER_MAP) {
                src_writer.sync();
                dst_writer.sync();
                cnsyncf = 0;
                cbsyncs = 0L;
              }
            }
          }

          if (!skipfile) {
            // Directories are recorded with length 0 (key drives map sizing).
            src_writer.append(
                new LongWritable(child.isDir() ? 0 : child.getLen()), new FilePair(child, dst));
          }

          // Every child (even skipped ones) lands in the dst list for
          // later duplicate detection.
          dst_writer.append(new Text(dst), new Text(child.getPath().toString()));
        }

        if (cur.isDir()) {
          String dst = makeRelative(root, cur.getPath());
          dir_writer.append(new Text(dst), new FilePair(cur, dst));
          if (++dirsyn > SYNC_FILE_MAX) {
            dirsyn = 0;
            dir_writer.sync();
          }
        }
      }
    }
  } finally {
    // Close all three writers even if the traversal failed part-way.
    checkAndClose(src_writer);
    checkAndClose(dst_writer);
    checkAndClose(dir_writer);
  }

  FileStatus dststatus = null;
  try {
    dststatus = dstfs.getFileStatus(args.dst);
  } catch (FileNotFoundException fnfe) {
    LOG.info(args.dst + " does not exist.");
  }

  // create dest path dir if copying > 1 file
  if (dststatus == null) {
    if (srcCount > 1 && !dstfs.mkdirs(args.dst)) {
      throw new IOException("Failed to create" + args.dst);
    }
  }

  final Path sorted = new Path(jobDirectory, "_distcp_sorted");
  checkDuplication(jobfs, dstfilelist, sorted, conf);

  if (dststatus != null && args.flags.contains(Options.DELETE)) {
    deleteNonexisting(dstfs, dststatus, sorted, jobfs, jobDirectory, jobConf, conf);
  }

  // Temp dir lives next to a file dst, or inside a directory dst.
  Path tmpDir =
      new Path(
          (dstExists && !dstIsDir) || (!dstExists && srcCount == 1)
              ? args.dst.getParent()
              : args.dst,
          "_distcp_tmp_" + randomId);
  jobConf.set(TMP_DIR_LABEL, tmpDir.toUri().toString());
  LOG.info("srcCount=" + srcCount);
  jobConf.setInt(SRC_COUNT_LABEL, srcCount);
  jobConf.setLong(TOTAL_SIZE_LABEL, byteCount);
  setMapCount(byteCount, jobConf);
}
/**
 * Parses one statement from the token stream and returns the generated SaM
 * assembly code for it.
 *
 * <p>Handles: {@code return}, {@code if}/{@code else}, {@code while},
 * {@code break}, assignment, block ({@code '{'}) and the empty statement
 * ({@code ';'}). As a side effect, pushes exactly one entry onto
 * {@code returnFlags} per parsed statement ({@code true} iff every path
 * through the statement returns); {@code while} additionally pushes/pops
 * {@code labels} (break target) and {@code flags} (in-loop marker).
 *
 * @param f the tokenizer positioned at the start of a statement
 * @return the generated assembly for the statement
 * @throws TokenizerException on any malformed statement
 */
static String getStmt(SamTokenizer f) throws TokenizerException {
  try {
    String asmCode = "";
    Boolean returnFlag1;
    Boolean returnFlag2;
    switch (f.peekAtKind()) {
      case WORD:
        {
          String newWord = f.getWord();
          switch (newWord) {
            case "return":
              {
                asmCode += getExp(f);
                myCheck(f, ';');
                // SaM code for return: store result into the return slot,
                // unwind locals, jump to the saved return address.
                asmCode +=
                    "STOREOFF -"
                        + Integer.toString(params.size() + 1)
                        + "\n"
                        + "ADDSP -"
                        + Integer.toString(varCounter - 2)
                        + "\n"
                        + "JUMPIND\n";
                returnFlags.push(true);
                return asmCode;
              }
            case "if":
              {
                // Generate two valid lables for divergence.
                String label1 = getLabel();
                String label2 = getLabel();
                myCheck(f, '(');
                asmCode += getExp(f);
                myCheck(f, ')');
                asmCode += ("JUMPC " + label1 + "\n");
                // Buffer the statements for if condition.
                // Note the layout: else-branch is emitted inline, the
                // then-branch (tempString) is placed after label1.
                String tempString = getStmt(f);
                returnFlag1 = returnFlags.pop();
                myCheck(f, "else");
                asmCode += getStmt(f);
                returnFlag2 = returnFlags.pop();
                // Manage the divergence.
                asmCode += ("JUMP " + label2 + "\n" + label1 + ":\n" + tempString + label2 + ":\n");
                // The if-statement returns only if BOTH branches return.
                if (returnFlag1 && returnFlag2) returnFlags.push(true);
                else returnFlags.push(false);
                return asmCode;
              }
            case "while":
              {
                String label1 = getLabel();
                String label2 = getLabel();
                // Push the label as the return position.
                labels.push(label2);
                myCheck(f, '(');
                asmCode += (label1 + ":\n");
                asmCode += getExp(f);
                // Exit the loop when the condition evaluates to nil/false.
                asmCode += ("ISNIL\n" + "JUMPC " + label2 + "\n");
                myCheck(f, ')');
                // Flag indicating that we are in a while loop (possibly nested while loops.)
                flags.push(true);
                asmCode += getStmt(f);
                asmCode += ("JUMP " + label1 + "\n" + label2 + ":\n");
                // Once finish parsing the while loop, pop out the labels and flags.
                labels.pop();
                flags.pop();
                return asmCode;
              }
            case "break":
              {
                // flags.empty() indicates that we are not currently in a while loop.
                if (flags.empty()) throw new TokenizerException("Error: Invalid break statement.");
                myCheck(f, ';');
                returnFlags.push(false);
                // Jump to the current inner most return postion.
                return "JUMP " + labels.peek() + "\n";
              }
            default:
              // Assign statement
              // Check if the variable is already defined.
              if (!symTables.lastElement().containsKey(newWord))
                throw new TokenizerException("Error: Variable not declared.");
              myCheck(f, '=');
              asmCode += getExp(f);
              myCheck(f, ';');
              // Get the address of the variable from the symbol table.
              int addr = symTables.lastElement().get(newWord);
              asmCode += ("STOREOFF " + Integer.toString(addr) + "\n");
              returnFlags.push(false);
              return asmCode;
          }
        }
      case OPERATOR:
        {
          switch (f.getOp()) {
            case '{':
              {
                // Delegate brace-delimited blocks; getBlock presumably pushes
                // its own returnFlags entry — confirm in getBlock.
                asmCode += getBlock(f);
                return asmCode;
              }
            case ';':
              // Empty statement: no code, never returns.
              returnFlags.push(false);
              return asmCode;
            default:
              throw new TokenizerException("Error: Invalid operator inside a statement.");
          }
        }
      default:
        throw new TokenizerException("Error: Invalid Statement.");
    }
  } catch (Exception e) {
    // NOTE(review): this catch swallows the original exception (and its type),
    // printing only the message and rethrowing a generic TokenizerException —
    // the cause chain is lost. Consider attaching e as the cause if
    // TokenizerException supports it.
    System.out.println(e.getMessage());
    throw new TokenizerException("Error: Invalid Statement.");
  }
}