/** Mimics {@link android.graphics.Matrix#set(Matrix)}: clears this matrix, then copies all recorded operations from {@code src}. A null source leaves the matrix reset. */
@Implementation
public void set(Matrix src) {
  reset();
  if (src == null) {
    return;
  }
  ShadowMatrix other = Shadows.shadowOf(src);
  preOps.addAll(other.preOps);
  postOps.addAll(other.postOps);
  setOps.putAll(other.setOps);
}
/**
 * Grows (or shrinks, for negative {@code incr}) the font of every component in this
 * container's subtree by {@code incr} points, via a breadth-first traversal.
 *
 * @param incr point-size delta applied to each component's current font
 */
@Override
public void incrementFont(float incr) {
  Deque<Component> pending = new ArrayDeque<>(Arrays.asList(getComponents()));
  for (Component comp = pending.poll(); comp != null; comp = pending.poll()) {
    Font current = comp.getFont();
    comp.setFont(current.deriveFont(current.getSize2D() + incr));
    // Containers contribute their own children to the traversal.
    if (comp instanceof Container) {
      pending.addAll(Arrays.asList(((Container) comp).getComponents()));
    }
  }
}
/**
 * Authentication entry point: requests that are already authenticated (or carry an
 * auth record attached upstream) pass straight through; all others go through the
 * negotiation across the configured authentication profiles. The not-yet-tried
 * profiles are kept as a stack in the HTTP session so a multi-round-trip
 * negotiation resumes where it left off.
 */
@SuppressWarnings("unchecked")
@Override
public void doFilter(ServletRequest req, ServletResponse rsp, FilterChain chain)
    throws IOException, ServletException {
  HttpServletRequest httpreq = (HttpServletRequest) req;
  if (FiltersHelper.isAuthenticated(httpreq)
      || httpreq.getAttribute(FiltersHelper.Constants.REQUEST_AUTH_RECORD_KEY) != null) {
    // Already authenticated: nothing to negotiate.
    chain.doFilter(req, rsp);
  } else {
    // Advertise the supported schemes to downstream components.
    ((HttpServletRequest) req).setAttribute(FiltersHelper.Constants.REQUEST_SCHEMES_KEY, schemes);
    // Resume an in-progress negotiation if the session holds a partially-consumed stack.
    HttpSession session = httpreq.getSession(false);
    Deque<AuthenticationProfile> stack = null;
    if (session != null) {
      stack = (Deque<AuthenticationProfile>) session.getAttribute(STACK_ATTR);
    }
    if (stack == null) {
      // Fresh negotiation: start from the full profile list.
      stack = new ArrayDeque<>();
      stack.addAll(profiles);
    }
    doAuth(httpreq, (HttpServletResponse) rsp, stack);
    if (!stack.isEmpty()) {
      // Profiles remain: persist them (creating a session if needed) for the next round-trip.
      httpreq.getSession(true).setAttribute(STACK_ATTR, stack);
    } else {
      // Negotiation exhausted: clean up the session state and continue the chain.
      if (session != null) {
        session.removeAttribute(STACK_ATTR);
      }
      chain.doFilter(req, rsp);
    }
  }
}
/**
 * Returns all elements of the dependency graph that match the given predicate, and any elements
 * upstream of those matching elements.
 *
 * <p>The graph may contain cycles.
 *
 * <p>Each key in the dependency graph depends on/is downstream of its associated values.
 *
 * @param allNodes all nodes of the graph
 * @param dependencyGraph maps each node to the nodes it depends on
 * @param matcher selects the seed nodes
 * @return the matching nodes plus everything transitively upstream of them
 */
public static <T> ImmutableSet<T> getMatchingAndUpstream(
    Set<T> allNodes, SetMultimap<T, T> dependencyGraph, Predicate<T> matcher) {
  Set<T> results = Sets.newHashSet();
  Deque<T> toTry = Queues.newArrayDeque();
  toTry.addAll(Collections2.filter(allNodes, matcher));
  while (!toTry.isEmpty()) {
    T curElem = toTry.remove();
    // Set.add returns false for already-visited elements, so each node is
    // expanded exactly once (and cycles terminate) with a single hash lookup
    // instead of the previous contains-then-add pair.
    if (results.add(curElem)) {
      toTry.addAll(dependencyGraph.get(curElem));
    }
  }
  return ImmutableSet.copyOf(results);
}
/**
 * Queue up some output buffers to be eventually written to the socket.
 *
 * @param outBufs buffers appended (in order) to the pending output queue
 */
public void enqueueOutput(List<ByteBuffer> outBufs) {
  // Acquire the lock *before* entering the try block: if lock() itself were to
  // fail, the previous code's finally clause would call unlock() on a lock that
  // was never acquired (IllegalMonitorStateException), masking the real failure.
  outputAppendLock.lock();
  try {
    outputQueue.addAll(outBufs);
  } finally {
    outputAppendLock.unlock();
  }
}
public void reset() { playedList.addAll(musicList); musicList = playedList; playedList = new LinkedList<PlaylistEntry>(); // reset the play status on all of the entries for (PlaylistEntry entry : musicList) { entry.setPlayed(false); } }
/**
 * Initializes this dispatcher: validates configuration, sets up the dispatch-time
 * metric, builds the Kafka producer configuration, creates the producer pool, and
 * registers the stats hooks. Must complete before any dispatching happens.
 *
 * @throws Exception if validation or producer creation fails
 */
@Override
public void init() throws Exception {
  // Fail fast on bad configuration before allocating any resources.
  _validate();
  // Metric that tracks dispatch latency under statKeyDispatchTime.
  stats = new IntervalMetric(statKeyDispatchTime);
  Properties props = buildProperties();
  ProducerConfig config = new ProducerConfig(props);
  // Create the fixed producer pool, then make every producer available for
  // checkout by pushing the whole pool onto the stack.
  _createProducersPool(config);
  producersStack.addAll(producersPool);
  _registerStatsCollector();
  _registerStatsCalculator();
}
/**
 * Collects into {@code interfaces} every interface implemented by {@code type},
 * any of its superclasses, or (transitively) any of those interfaces'
 * super-interfaces.
 *
 * @param type the starting type; may be null, in which case nothing is collected
 * @param interfaces output set, receives all discovered interfaces
 */
public static void collectAllInterfaces(ITypeBinding type, Set<ITypeBinding> interfaces) {
  // Seed the worklist with the whole superclass chain.
  Deque<ITypeBinding> typeQueue = Lists.newLinkedList();
  while (type != null) {
    typeQueue.add(type);
    type = type.getSuperclass();
  }
  while (!typeQueue.isEmpty()) {
    ITypeBinding nextType = typeQueue.poll();
    for (ITypeBinding itf : nextType.getInterfaces()) {
      // Only enqueue interfaces we have not seen before. The previous code
      // re-queued every interface unconditionally, re-traversing shared
      // super-interfaces repeatedly in diamond-shaped hierarchies; the result
      // set is identical, the redundant work is gone.
      if (interfaces.add(itf)) {
        typeQueue.add(itf);
      }
    }
  }
}
/**
 * Parses the incoming game data, updates the player character, and appends to the
 * resulting actions the state-specific actions of every raised flag (except
 * LOGGEDIN). Each handled flag is lowered again on the player.
 *
 * @param gameData raw game data received from the server
 * @return the parsed actions plus the actions triggered by raised flags
 */
public Deque<Action> processGameData(String gameData) {
  Deque<Action> actions = rw.parseAndUpdatePlayerCharacter(gameData);
  log.fine(actions.toString());
  Flag flag = null;
  for (Entry<Flag, Boolean> entry : player.getFlags().entrySet()) {
    // LOGGEDIN is handled elsewhere; skip flags that are not currently raised.
    if (entry.getKey() == Flag.LOGGEDIN || !entry.getValue()) {
      continue;
    }
    flag = entry.getKey();
    log.log(Level.INFO, "detected\t{0}", flag);
    player.setFlag(flag, false);
    actions.addAll(flag.getActionsForState());
  }
  return actions;
}
/**
 * For each qualified name N in the global scope, we check if: (a) No ancestor of N is ever
 * aliased or assigned an unknown value type. (If N = "a.b.c", "a" and "a.b" are never aliased).
 * (b) N has exactly one write, and it lives in the global scope. (c) N is aliased in a local
 * scope. (d) N is aliased in global scope
 *
 * <p>If (a) is true, then GlobalNamespace must know all the writes to N. If (a) and (b) are true,
 * then N cannot change during the execution of a local scope. If (a) and (b) and (c) are true,
 * then the alias can be inlined if the alias obeys the usual rules for how we decide whether a
 * variable is inlineable. If (a) and (b) and (d) are true, then inline the alias if possible (if
 * it is assigned exactly once unconditionally).
 *
 * @see InlineVariables
 */
private void inlineAliases(GlobalNamespace namespace) {
  // Invariant: All the names in the worklist meet condition (a).
  Deque<Name> workList = new ArrayDeque<>(namespace.getNameForest());
  while (!workList.isEmpty()) {
    Name name = workList.pop();
    // Don't attempt to inline a getter or setter property as a variable.
    if (name.type == Name.Type.GET || name.type == Name.Type.SET) {
      continue;
    }
    if (!name.inExterns && name.globalSets == 1 && name.localSets == 0 && name.aliasingGets > 0) {
      // {@code name} meets condition (b). Find all of its local aliases
      // and try to inline them.
      // Copy the refs first: inlining mutates name's ref list via removeRef below.
      List<Ref> refs = new ArrayList<>(name.getRefs());
      for (Ref ref : refs) {
        if (ref.type == Type.ALIASING_GET && ref.scope.isLocal()) {
          // {@code name} meets condition (c). Try to inline it.
          // TODO(johnlenz): consider picking up new aliases at the end
          // of the pass instead of immediately like we do for global
          // inlines.
          if (inlineAliasIfPossible(name, ref, namespace)) {
            name.removeRef(ref);
          }
        } else if (ref.type == Type.ALIASING_GET
            && ref.scope.isGlobal()
            && ref.getTwin() == null) { // ignore aliases in chained assignments
          // {@code name} meets condition (d).
          if (inlineGlobalAliasIfPossible(name, ref, namespace)) {
            name.removeRef(ref);
          }
        }
      }
    }
    // Check if {@code name} has any aliases left after the
    // local-alias-inlining above.
    if ((name.type == Name.Type.OBJECTLIT || name.type == Name.Type.FUNCTION)
        && name.aliasingGets == 0
        && name.props != null) {
      // All of {@code name}'s children meet condition (a), so they can be
      // added to the worklist.
      workList.addAll(name.props);
    }
  }
}
/**
 * Returns the target states in the subtree rooted at the given state. The search
 * stops descending below a state as soon as it is itself a target.
 *
 * @param state the state for which to collect the target states in its subtree
 * @return target states in the subtree of the given state
 */
public Collection<ARGState> getTargetsInSubtree(ARGState state) {
  Collection<ARGState> targetStates = new HashSet<>();
  Deque<ARGState> todo = new ArrayDeque<>();
  todo.add(state);
  while (!todo.isEmpty()) {
    ARGState current = todo.removeFirst();
    if (current.isTarget()) {
      // Record the target; its subtree is not explored further.
      targetStates.add(current);
    } else {
      todo.addAll(successorRelation.get(current));
    }
  }
  return targetStates;
}
/**
 * Obtains, for the IMPACT-like approach, the cut-off roots: on each disjunct path
 * from the root towards the target states, the highest state that has a false
 * interpolant associated.
 *
 * @return the set of cut-off roots
 */
public Collection<ARGState> obtainCutOffRoots() {
  Collection<ARGState> refinementRoots = new HashSet<>();
  Deque<ARGState> todo = new ArrayDeque<>();
  todo.add(root);
  while (!todo.isEmpty()) {
    ARGState current = todo.removeFirst();
    if (stateHasFalseInterpolant(current)) {
      // Highest false-interpolant state on this path: record it and stop descending.
      refinementRoots.add(current);
    } else {
      todo.addAll(successorRelation.get(current));
    }
  }
  return refinementRoots;
}
private MethodInformation findMatching(String fqn, ArgumentList al, MethodFinder mf) throws IOException { Deque<String> typesToCheck = new ArrayDeque<>(); typesToCheck.addLast(fqn); Set<String> visitedTypes = new HashSet<>(); while (!typesToCheck.isEmpty()) { String clz = typesToCheck.removeFirst(); if (visitedTypes.contains(clz)) continue; visitedTypes.add(clz); List<MethodInformation> ls = mf.getAlternatives(clz); if (ls != null) { for (MethodInformation mi : ls) if (match(al, mi)) return mi; } // not found in current class, try super class Optional<List<String>> o = cip.getSuperTypes(clz); if (o.isPresent() && mf.mayUseSuperclassMethods()) typesToCheck.addAll(o.get()); } return null; }
/**
 * Flood-fill search outward from the player's position to decide whether an
 * exposed position is reachable. Uses the mOpen/mClosed frontier sets, which are
 * cleared on every call.
 *
 * @return true if some reachable position is exposed, false otherwise
 */
protected boolean slowCheck() {
  EntityPlayer player = Minecraft.getMinecraft().thePlayer;
  mOpen.clear();
  mClosed.clear();
  boolean newValue = false;
  // Boundary problems because doubles-to-ints truncate: prefer the ceiled
  // position, and fall back to the floored one if the ceiled one is not a
  // good successor.
  Pos current = new Pos(Math.ceil(player.posX), Math.ceil(player.posY), Math.ceil(player.posZ));
  if (!goodSuccessor(current, null))
    current = new Pos(Math.floor(player.posX), Math.floor(player.posY), Math.floor(player.posZ));
  // Expand the frontier until an exposed position is found or it is exhausted.
  // NOTE(review): successors(current) presumably excludes mClosed positions;
  // otherwise this loop could revisit states — confirm in successors().
  while (current != null && !newValue) {
    if (current.isExposed()) {
      newValue = true;
      break;
    }
    mOpen.addAll(successors(current));
    mClosed.add(current);
    current = mOpen.poll();
  }
  return newValue;
}
/**
 * @return The list of all basic block in this control flow graph in reversed depth-first
 *     postorder sequence.
 *     <p>Blocks may appear more than once in the sequence.
 */
public List<Block> getDepthFirstOrderedBlocks() {
  List<Block> dfsOrderResult = new LinkedList<>();
  Set<Block> visited = new HashSet<>();
  Deque<Block> worklist = new LinkedList<>();
  worklist.add(entryBlock);
  // Iterative DFS: a block stays on the stack across two encounters — the first
  // marks it visited and pushes its unvisited successors, the second (once those
  // successors are finished) emits it in postorder.
  while (!worklist.isEmpty()) {
    Block cur = worklist.getLast();
    if (!visited.contains(cur)) {
      visited.add(cur);
      Deque<Block> successors = getSuccessors(cur);
      successors.removeAll(visited);
      worklist.addAll(successors);
    } else {
      dfsOrderResult.add(cur);
      worklist.removeLast();
    }
  }
  // Postorder reversed gives the required reverse-postorder sequence.
  Collections.reverse(dfsOrderResult);
  return dfsOrderResult;
}
/**
 * Get a list of all successor Blocks for cur.
 *
 * @return a Deque of successor Blocks
 */
private Deque<Block> getSuccessors(Block cur) {
  Deque<Block> succs = new LinkedList<>();
  if (cur.getType() == BlockType.CONDITIONAL_BLOCK) {
    // Conditional blocks branch to both arms.
    ConditionalBlock cond = (ConditionalBlock) cur;
    succs.add(cond.getThenSuccessor());
    succs.add(cond.getElseSuccessor());
  } else {
    // Everything else has (at most) a single regular successor.
    assert cur instanceof SingleSuccessorBlock;
    Block next = ((SingleSuccessorBlock) cur).getSuccessor();
    if (next != null) {
      succs.add(next);
    }
  }
  // Exception blocks additionally branch to all of their exceptional successors.
  if (cur.getType() == BlockType.EXCEPTION_BLOCK) {
    ExceptionBlock exc = (ExceptionBlock) cur;
    for (Set<Block> exceptional : exc.getExceptionalSuccessors().values()) {
      succs.addAll(exceptional);
    }
  }
  return succs;
}
/**
 * This method extracts the precision increment for the given refinement root. It does so by
 * collection all non-trivial interpolants in the subtree of the given refinement root.
 *
 * @param pRefinementRoot the root of the subtree to collect from
 * @return the precision increment for the given refinement root
 */
public Multimap<CFANode, MemoryLocation> extractPrecisionIncrement(ARGState pRefinementRoot) {
  Multimap<CFANode, MemoryLocation> increment = HashMultimap.create();
  // Traversal starts at the *predecessor* of the refinement root.
  // NOTE(review): if pRefinementRoot has no predecessor this seeds the deque
  // with null and ArrayDeque.addAll throws NPE — confirm callers never pass
  // the ARG root here.
  Deque<ARGState> todo =
      new ArrayDeque<>(Collections.singleton(predecessorRelation.get(pRefinementRoot)));
  while (!todo.isEmpty()) {
    final ARGState currentState = todo.removeFirst();
    // Non-target states with a non-trivial interpolant contribute the
    // interpolant's memory locations at the state's CFA location.
    if (stateHasNonTrivialInterpolant(currentState) && !currentState.isTarget()) {
      I itp = interpolants.get(currentState);
      for (MemoryLocation memoryLocation : itp.getMemoryLocations()) {
        increment.put(AbstractStates.extractLocation(currentState), memoryLocation);
      }
    }
    // Do not descend below states with a false interpolant (infeasible subtree).
    if (!stateHasFalseInterpolant(currentState)) {
      todo.addAll(successorRelation.get(currentState));
    }
  }
  return increment;
}
/**
 * This method obtains the refinement roots, i.e., for each disjunct path from target states to
 * the root, it collects the highest state that has a non-trivial interpolant associated. With
 * non-lazy abstraction, the root of the interpolation tree is used as refinement root.
 *
 * @param strategy whether to perform lazy abstraction or not
 * @return the set of refinement roots
 */
public Collection<ARGState> obtainRefinementRoots(GenericRefiner.RestartStrategy strategy) {
  if (strategy == GenericRefiner.RestartStrategy.ROOT) {
    // Non-lazy abstraction: restart from the single child of the ARG root.
    assert successorRelation.get(root).size() == 1 : "ARG root has more than one successor";
    return new HashSet<>(Collections.singleton(successorRelation.get(root).iterator().next()));
  }
  ARGState commonRoot = null;
  Collection<ARGState> refinementRoots = new HashSet<>();
  // BFS from the root; subtrees below a found refinement root are not explored.
  Deque<ARGState> todo = new ArrayDeque<>(Collections.singleton(root));
  while (!todo.isEmpty()) {
    final ARGState currentState = todo.removeFirst();
    // determine the first branching point, which is the lowest node common to all refinement
    // roots
    if (commonRoot == null && successorRelation.get(currentState).size() > 1) {
      commonRoot = currentState;
    }
    if (stateHasNonTrivialInterpolant(currentState)) {
      refinementRoots.add(currentState);
      // With the COMMON strategy, more than two roots collapse to the first
      // branching point instead.
      if (strategy == GenericRefiner.RestartStrategy.COMMON && refinementRoots.size() > 2) {
        assert commonRoot != null : "common root not yet set";
        return new HashSet<>(Collections.singleton(commonRoot));
      }
      continue;
    }
    Collection<ARGState> successors = successorRelation.get(currentState);
    todo.addAll(successors);
  }
  return refinementRoots;
}
/**
 * Builds the navigator tree for a compiled ANTLR v4 grammar: a root description
 * with (up to) two children — parser rules and lexer rules — populated from the
 * grammar itself and, first, from all transitively imported grammars.
 *
 * @param baseModel the compiled model; must be a CompiledModelV4
 * @return the root description, or null if processing threw a RuntimeException
 */
@Override
public GrammarNode.GrammarNodeDescription scanImpl(CompiledModel baseModel) {
  try {
    CompiledModelV4 model = (CompiledModelV4) baseModel;
    // don't update if there were errors and a result is already displayed
    /*if (!result.getParser().getSyntaxErrors().isEmpty() && !ui.isShowingWaitNode()) {
    return; }*/
    GrammarNode.GrammarNodeDescription rootDescription =
        new GrammarNode.GrammarNodeDescription(DeclarationKind.UNDEFINED);
    rootDescription.setFileObject(model.getSnapshot().getVersionedDocument().getFileObject());
    // The "1"/"2" prefixes on the names control the sort order of the two groups.
    GrammarNode.GrammarNodeDescription parserRulesRootDescription =
        new GrammarNode.GrammarNodeDescription(
            DeclarationKind.PARSER_RULE, "1" + Bundle.LBL_ParserRules());
    parserRulesRootDescription.setHtmlHeader(Bundle.LBL_ParserRules());
    GrammarNode.GrammarNodeDescription lexerRulesRootDescription =
        new GrammarNode.GrammarNodeDescription(
            DeclarationKind.LEXER_RULE, "2" + Bundle.LBL_LexerRules());
    lexerRulesRootDescription.setHtmlHeader(Bundle.LBL_LexerRules());
    // Depth-first over the import graph: a grammar seen for the first time that
    // itself has imports is re-pushed so its imports are processed before it.
    Deque<CompiledFileModelV4> importedWorkList =
        new ArrayDeque<>(model.getImportedGrammarResults());
    Set<String> visitedImports = new HashSet<>();
    Set<String> visitedRules = new HashSet<>();
    while (!importedWorkList.isEmpty()) {
      CompiledFileModelV4 importedParseResult = importedWorkList.pop();
      Grammar grammar = importedParseResult.getGrammar();
      if (grammar == null || grammar.fileName == null) {
        continue;
      }
      if (visitedImports.add(grammar.fileName)
          && !importedParseResult.getImportedGrammarResults().isEmpty()) {
        // First visit and it has its own imports: handle those first.
        importedWorkList.push(importedParseResult);
        importedWorkList.addAll(importedParseResult.getImportedGrammarResults());
        continue;
      }
      if (!visitedRules.add(grammar.fileName)) {
        // Rules of this grammar were already collected.
        continue;
      }
      processParseResult(
          null, importedParseResult, parserRulesRootDescription, lexerRulesRootDescription);
    }
    // Finally process the grammar itself, with its snapshot attached.
    processParseResult(
        model.getSnapshot(),
        model.getResult(),
        parserRulesRootDescription,
        lexerRulesRootDescription);
    // Only attach non-empty groups to the root.
    if (!parserRulesRootDescription.getChildren().isEmpty()) {
      rootDescription.getChildren().add(parserRulesRootDescription);
    }
    if (!lexerRulesRootDescription.getChildren().isEmpty()) {
      rootDescription.getChildren().add(lexerRulesRootDescription);
    }
    return rootDescription;
  } catch (RuntimeException ex) {
    Exceptions.printStackTrace(ex);
    return null;
  }
}
// Decodes your encoded data to tree. public TreeNode deserialize(String data) { Deque<String> nodes = new LinkedList<>(); nodes.addAll(Arrays.asList(data.split(SPLITER))); return buildTree(nodes); }
/**
 * Computes the block formulas of the ARG rooted at {@code pRoot} via a BFS over
 * the ARG: path formulas are built edge by edge (tracking the call stack across
 * function call/return edges), merged at join points, and the accumulated
 * formula of each abstraction state is emitted as one block formula.
 *
 * @param pRoot the ARG root; must have no parents
 * @return the block formulas, one per abstraction state encountered
 * @throws CPATransferException if building a path formula fails
 * @throws InterruptedException on shutdown request
 */
private List<BooleanFormula> computeBlockFormulas(final ARGState pRoot)
    throws CPATransferException, InterruptedException {
  final Map<ARGState, ARGState> callStacks =
      new HashMap<>(); // contains states and their next higher callstate
  final Map<ARGState, PathFormula> finishedFormulas = new HashMap<>();
  final List<BooleanFormula> abstractionFormulas = new ArrayList<>();
  final Deque<ARGState> waitlist = new ArrayDeque<>();
  // initialize
  assert pRoot.getParents().isEmpty() : "rootState must be the first state of the program";
  callStacks.put(pRoot, null); // main-start has no callstack
  finishedFormulas.put(pRoot, pfmgr.makeEmptyPathFormula());
  waitlist.addAll(pRoot.getChildren());
  // iterate over all elements in the ARG with BFS
  while (!waitlist.isEmpty()) {
    final ARGState currentState = waitlist.pollFirst();
    if (finishedFormulas.containsKey(currentState)) {
      continue; // already handled
    }
    if (!finishedFormulas.keySet().containsAll(currentState.getParents())) {
      // parent not handled yet, re-queue current element and wait for all parents
      waitlist.addLast(currentState);
      continue;
    }
    // collect formulas for current location: one formula and one call-stack
    // entry per parent.
    final List<PathFormula> currentFormulas = new ArrayList<>(currentState.getParents().size());
    final List<ARGState> currentStacks = new ArrayList<>(currentState.getParents().size());
    for (ARGState parentElement : currentState.getParents()) {
      PathFormula parentFormula = finishedFormulas.get(parentElement);
      final CFAEdge edge = parentElement.getEdgeToChild(currentState);
      assert edge != null : "ARG is invalid: parent has no edge to child";
      final ARGState prevCallState;
      // we enter a function, so lets add the previous state to the stack
      if (edge.getEdgeType() == CFAEdgeType.FunctionCallEdge) {
        prevCallState = parentElement;
      } else if (edge.getEdgeType() == CFAEdgeType.FunctionReturnEdge) {
        // we leave a function, so rebuild return-state before assigning the return-value.
        // rebuild states with info from previous state
        assert callStacks.containsKey(parentElement);
        final ARGState callState = callStacks.get(parentElement);
        assert extractLocation(callState).getLeavingSummaryEdge().getSuccessor()
                == extractLocation(currentState)
            : "callstack does not match entry of current function-exit.";
        assert callState != null || currentState.getChildren().isEmpty()
            : "returning from empty callstack is only possible at program-exit";
        prevCallState = callStacks.get(callState);
        parentFormula =
            rebuildStateAfterFunctionCall(
                parentFormula,
                finishedFormulas.get(callState),
                (FunctionExitNode) extractLocation(parentElement));
      } else {
        assert callStacks.containsKey(parentElement); // check for null is not enough
        prevCallState = callStacks.get(parentElement);
      }
      final PathFormula currentFormula = pfmgr.makeAnd(parentFormula, edge);
      currentFormulas.add(currentFormula);
      currentStacks.add(prevCallState);
    }
    assert currentFormulas.size() >= 1 : "each state except root must have parents";
    assert currentStacks.size() == currentFormulas.size()
        : "number of callstacks must match predecessors";
    // merging after functioncall with different callstates is ugly.
    // this is also guaranteed by the abstraction-locations at function-entries
    // (--> no merge of states with different latest abstractions).
    assert Sets.newHashSet(currentStacks).size() <= 1
        : "function with multiple entry-states not supported";
    callStacks.put(currentState, currentStacks.get(0));
    PathFormula currentFormula;
    final PredicateAbstractState predicateElement =
        extractStateByType(currentState, PredicateAbstractState.class);
    if (predicateElement.isAbstractionState()) {
      // abstraction element is the start of a new part of the ARG
      assert waitlist.isEmpty() : "todo should be empty, because of the special ARG structure";
      assert currentState.getParents().size() == 1
          : "there should be only one parent, because of the special ARG structure";
      // finishedFormulas.clear(); // free some memory
      // TODO disabled, we need to keep callStates for later usage
      // start new block with empty formula
      currentFormula = getOnlyElement(currentFormulas);
      abstractionFormulas.add(currentFormula.getFormula());
      currentFormula = pfmgr.makeEmptyPathFormula(currentFormula);
    } else {
      // merge the formulas (disjunction over all incoming paths)
      Iterator<PathFormula> it = currentFormulas.iterator();
      currentFormula = it.next();
      while (it.hasNext()) {
        currentFormula = pfmgr.makeOr(currentFormula, it.next());
      }
    }
    assert !finishedFormulas.containsKey(currentState) : "a state should only be finished once";
    finishedFormulas.put(currentState, currentFormula);
    waitlist.addAll(currentState.getChildren());
  }
  return abstractionFormulas;
}
/**
 * Recursively converts one node of the ANTLR C# AST into the corresponding model
 * element (DatatypeInfo, MethodInfo, FieldInfo, ParameterInfo, a String for
 * name/type nodes, a List for parameter lists), attaching it to {@code parent}
 * where appropriate.
 *
 * @param node the AST node to process; may be null for absent optional nodes
 * @param parent the datatype the produced element belongs to
 * @return the model element created for this node, or null for structural nodes
 */
@SuppressWarnings("unchecked")
private Object walk(final Tree node, final DatatypeInfo parent) {
  // an optional node that is not present in the current context, is null
  if (node == null) {
    return null;
  }
  DatatypeInfo newParent = parent;
  CommonTree commonTreeNode = (CommonTree) node;
  Tree modifiers;
  switch (node.getType()) {
      // case CsRewriteRulesParser.ATTRIBUTE:
      // System.out.println("ATTRIBUTE");
      // break;
    case CSharp4AST.NAMESPACE:
      // Push the namespace segments, walk the members, then pop the segments again.
      CommonTree qid =
          (CommonTree) commonTreeNode.getFirstChildWithType(CSharp4AST.QUALIFIED_IDENTIFIER);
      Collection<String> namespaces = TreeHelper.treeListToStringList(qid.getChildren());
      namespaceStack.addAll(namespaces);
      walk(commonTreeNode.getFirstChildWithType(CSharp4AST.NAMESPACE_MEMBER_DECLARATIONS), parent);
      for (int i = 0; i < qid.getChildren().size(); i++) namespaceStack.removeLast();
      return null;
    case CSharp4AST.QUALIFIED_IDENTIFIER:
    case CSharp4AST.EXTERN_ALIAS_DIRECTIVES:
    case CSharp4AST.USING_DIRECTIVES:
    case CSharp4AST.NAMESPACE_MEMBER_DECLARATIONS:
    case CSharp4AST.ATTRIBUTES:
    case CSharp4AST.CLASS_MEMBER_DECLARATIONS:
    case CSharp4AST.INTERFACE_MEMBER_DECLARATIONS:
    case CSharp4AST.ENUM_MEMBER_DECLARATIONS:
    case CSharp4AST.STRUCT_MEMBER_DECLARATIONS:
    case CSharp4AST.CONST:
      // Purely structural container nodes: walk every child with the same parent.
      for (int i = 0; i < commonTreeNode.getChildCount(); i++) {
        walk(commonTreeNode.getChild(i), parent);
      }
      return null;
    case CSharp4AST.CLASS:
      newParent = new DatatypeInfo("class");
      newParent.setNamespace(namespaceStack);
      datatypeInfos.add(newParent);
      String className =
          walk(
              commonTreeNode.getFirstChildWithType(CSharp4AST.IDENTIFIER),
              newParent,
              String.class);
      newParent.setName(className);
      LOGGER.fine("class: " + className);
      // modifiers
      modifiers = commonTreeNode.getFirstChildWithType(CSharp4AST.MODIFIERS);
      if (modifiers != null) {
        List<String> modifierNames =
            TreeHelper.treeListToStringList(((CommonTree) modifiers).getChildren());
        if (modifierNames.contains(ABSTRACT)) {
          newParent.setIsAbstract(Boolean.TRUE);
        }
      }
      setFullPath(parent, newParent);
      // must be invoked after setFullPath
      walk(commonTreeNode.getFirstChildWithType(CSharp4AST.CLASS_MEMBER_DECLARATIONS), newParent);
      addToReferences(newParent);
      return newParent;
    case CSharp4AST.INTERFACE:
      newParent = new DatatypeInfo("interface");
      newParent.setNamespace(namespaceStack);
      datatypeInfos.add(newParent);
      String interfaceName =
          walk(
              commonTreeNode.getFirstChildWithType(CSharp4AST.IDENTIFIER),
              newParent,
              String.class);
      newParent.setName(interfaceName);
      LOGGER.fine("interface: " + interfaceName);
      // transform(commonTreeNode.getFirstChildWithType(CSharp4AST.IMPLEMENTS),
      // newParent,
      // false);
      // modifiers
      modifiers = commonTreeNode.getFirstChildWithType(CSharp4AST.MODIFIERS);
      if (modifiers != null) {
        List<String> modifierNames =
            TreeHelper.treeListToStringList(((CommonTree) modifiers).getChildren());
        if (modifierNames.contains(ABSTRACT)) {
          newParent.setIsAbstract(Boolean.TRUE);
        }
      }
      setFullPath(parent, newParent);
      // must be invoked after setFullPath
      walk(
          commonTreeNode.getFirstChildWithType(CSharp4AST.INTERFACE_MEMBER_DECLARATIONS),
          newParent);
      addToReferences(newParent);
      return newParent;
    case CSharp4AST.ENUM:
      newParent = new DatatypeInfo("enum");
      newParent.setNamespace(namespaceStack);
      datatypeInfos.add(newParent);
      String enumName =
          walk(
              commonTreeNode.getFirstChildWithType(CSharp4AST.IDENTIFIER),
              newParent,
              String.class);
      newParent.setName(enumName);
      LOGGER.fine("enum: " + enumName);
      // modifiers
      modifiers = commonTreeNode.getFirstChildWithType(CSharp4AST.MODIFIERS);
      if (modifiers != null) {
        for (int i = 0; i < modifiers.getChildCount(); i++) {
          Tree modTree = modifiers.getChild(i);
          String modName = modTree.getText();
          if (ABSTRACT.equals(modName)) {
            newParent.setIsAbstract(Boolean.TRUE);
          }
        }
      }
      setFullPath(parent, newParent);
      // must be invoked after setFullPath
      walk(commonTreeNode.getFirstChildWithType(CSharp4AST.ENUM_MEMBER_DECLARATIONS), newParent);
      addToReferences(newParent);
      return newParent;
    case CSharp4AST.STRUCT:
      newParent = new DatatypeInfo("struct");
      newParent.setNamespace(namespaceStack);
      datatypeInfos.add(newParent);
      String structName =
          walk(
              commonTreeNode.getFirstChildWithType(CSharp4AST.IDENTIFIER),
              newParent,
              String.class);
      newParent.setName(structName);
      // IMPLEMENTS
      setFullPath(parent, newParent);
      walk(commonTreeNode.getFirstChildWithType(CSharp4AST.STRUCT_MEMBER_DECLARATIONS), newParent);
      addToReferences(newParent);
      LOGGER.fine("struct: " + newParent);
      return newParent;
    case CSharp4AST.DELEGATE:
      // see http://msdn.microsoft.com/de-de/library/900fyy8e%28v=vs.80%29.aspx
      newParent = new DatatypeInfo("delegate");
      newParent.setNamespace(namespaceStack);
      String delegateName =
          walk(
              commonTreeNode.getFirstChildWithType(CSharp4AST.IDENTIFIER),
              newParent,
              String.class);
      newParent.setName(delegateName);
      // TODO handle signature and generics
      setFullPath(parent, newParent);
      addToReferences(newParent);
      LOGGER.fine("delegate: " + newParent);
      break;
    case CSharp4AST.METHOD_DECL:
      MethodInfo methodInfo = new MethodInfo(parent);
      String returnType =
          walk(commonTreeNode.getFirstChildWithType(CSharp4AST.TYPE), parent, String.class);
      String methodName =
          walk(commonTreeNode.getFirstChildWithType(CSharp4AST.MEMBER_NAME), parent, String.class);
      List<ParameterInfo> formalParameters =
          walk(
              commonTreeNode.getFirstChildWithType(CSharp4AST.FORMAL_PARAMETER_LIST),
              parent,
              List.class);
      LOGGER.fine("method: " + methodName);
      methodInfo.setName(methodName);
      methodInfo.setReturnType(returnType);
      if (formalParameters != null) methodInfo.getParameters().addAll(formalParameters);
      parent.addMethodInfo(methodInfo);
      addToReferences(methodInfo);
      return methodInfo;
    case CSharp4AST.MEMBER_NAME:
      String typeName =
          walk(
              commonTreeNode.getFirstChildWithType(CSharp4AST.NAMESPACE_OR_TYPE_NAME),
              parent,
              String.class);
      return typeName;
    case CSharp4AST.FORMAL_PARAMETER_LIST:
      List<ParameterInfo> parameters = new LinkedList<ParameterInfo>();
      for (int i = 0; i < commonTreeNode.getChildCount(); i++) {
        Tree child = commonTreeNode.getChild(i);
        ParameterInfo parameter = walk(child, parent, ParameterInfo.class);
        parameters.add(parameter);
      }
      return parameters;
    case CSharp4AST.FIXED_PARAMETER:
      String fixedParamName =
          walk(commonTreeNode.getFirstChildWithType(CSharp4AST.IDENTIFIER), parent, String.class);
      String fixedParamType =
          walk(commonTreeNode.getFirstChildWithType(CSharp4AST.TYPE), parent, String.class);
      if (fixedParamType != null) {
        // if not __arglist
        fixedParamType = KeyStringHelper.normalize(fixedParamType);
      } else {
        fixedParamType = fixedParamName;
      }
      return new ParameterInfo(fixedParamType, fixedParamName);
    case CSharp4AST.PARAMETER_ARRAY:
      String paramArrayName =
          walk(commonTreeNode.getFirstChildWithType(CSharp4AST.IDENTIFIER), parent, String.class);
      String paramArrayType =
          walk(commonTreeNode.getFirstChildWithType(CSharp4AST.TYPE), parent, String.class);
      paramArrayType = KeyStringHelper.normalize(paramArrayType);
      return new ParameterInfo(paramArrayType, paramArrayName);
    case CSharp4AST.FIELD_DECL:
      // do not process children of type TYPE
      for (int i = 0; i < commonTreeNode.getChildCount(); i++) {
        Tree child = commonTreeNode.getChild(i);
        if (child.getType() == CSharp4AST.VARIABLE_DECLARATOR) {
          walk(child, parent);
        }
      }
      break;
    case CSharp4AST.PROPERTY_DECL:
      FieldInfo propertyInfo = new FieldInfo(parent);
      String propertyName =
          walk(commonTreeNode.getFirstChildWithType(CSharp4AST.MEMBER_NAME), parent, String.class);
      if (propertyName != null) propertyInfo.setName(propertyName);
      String propertyType =
          walk(commonTreeNode.getFirstChildWithType(CSharp4AST.TYPE), parent, String.class);
      propertyInfo.setType(propertyType);
      parent.addFieldInfo(propertyInfo);
      addToReferences(propertyInfo);
      LOGGER.fine("LOAD PROPERTY: " + propertyInfo.getName());
      return propertyInfo;
    case CSharp4AST.VARIABLE_DECLARATOR:
      FieldInfo fieldInfo = new FieldInfo(parent);
      String fieldName =
          walk(commonTreeNode.getFirstChildWithType(CSharp4AST.IDENTIFIER), parent, String.class);
      if (fieldName != null) fieldInfo.setName(fieldName);
      String variableType =
          walk(commonTreeNode.getFirstChildWithType(CSharp4AST.TYPE), parent, String.class);
      fieldInfo.setType(variableType);
      parent.addFieldInfo(fieldInfo);
      addToReferences(fieldInfo);
      return fieldInfo;
    case CSharp4AST.ENUM_MEMBER_DECLARATION:
      LOGGER.warning("UNSUPPORTED: enum member declarations are not yet supported.");
      break;
    case CSharp4AST.TYPE:
      // Render the type as a string: base type plus ?, [..] and * extensions.
      StringBuilder builder = new StringBuilder();
      Tree baseTypeNode = commonTreeNode.getChild(0);
      switch (baseTypeNode.getType()) {
        case CSharp4AST.NAMESPACE_OR_TYPE_NAME:
          String qualifiedBaseType = walk(baseTypeNode, parent, String.class);
          builder.append(qualifiedBaseType);
          break;
        default:
          // OBJECT, STRING, VOID, IDENTIFIER(dynamic), and primitive
          // types
          builder.append(baseTypeNode.getText());
          break;
      }
      for (int i = 1; i < commonTreeNode.getChildCount(); i++) {
        Tree typeExtension = commonTreeNode.getChild(i);
        // INTERR, rank_specifier, STAR
        switch (typeExtension.getType()) {
          case CSharp4AST.INTERR:
            LOGGER.warning("UNSUPPORTED: INTERR is not yet supported");
            break;
          case CSharp4AST.RANK_SPECIFIER:
            builder.append("[");
            int numCommas = typeExtension.getChildCount();
            // NOTE(review): the do-while emits at least one ',' even for a
            // rank-1 array (child count 0), producing "[,]" rather than "[]" —
            // confirm this is the intended rendering.
            do {
              builder.append(",");
            } while (numCommas-- > 0);
            builder.append("]");
            break;
          case CSharp4AST.STAR:
            builder.append("*");
            break;
          default:
            break;
        }
      }
      return builder.toString();
    case CSharp4AST.NAMESPACE_OR_TYPE_NAME:
      // Join the identifier, any qualified-alias members, and the trailing
      // NAMESPACE_OR_TYPE_PART children with '.'.
      builder = new StringBuilder();
      String nsIdentifier =
          walk(commonTreeNode.getFirstChildWithType(CSharp4AST.IDENTIFIER), parent, String.class);
      if (nsIdentifier != null) builder.append(nsIdentifier);
      List<String> qualified_alias_member =
          walk(
              commonTreeNode.getFirstChildWithType(CSharp4AST.QUALIFIED_ALIAS_MEMBER),
              parent,
              List.class);
      if (qualified_alias_member != null) {
        for (String qam : qualified_alias_member) {
          builder.append(".");
          builder.append(qam);
        }
      }
      for (int i = 1; i < node.getChildCount(); i++) {
        CommonTree child = (CommonTree) node.getChild(i);
        if (child.getType() == CSharp4AST.NAMESPACE_OR_TYPE_PART) {
          String nsPart =
              walk(child.getFirstChildWithType(CSharp4AST.IDENTIFIER), parent, String.class);
          builder.append(".");
          builder.append(nsPart);
        }
      }
      return builder.toString();
    case CSharp4AST.IDENTIFIER:
      return node.getText();
    default:
      return null;
  }
  return null;
}
/**
 * Resumes dispatching after a suspend. The CAS ensures that even with concurrent
 * resume() calls, only the single caller that flips the flag repopulates the
 * producer stack.
 */
@Override
public void resume() {
  if (isSuspended.compareAndSet(true, false)) {
    // NOTE(review): this pushes the entire pool back onto the stack without
    // checking what is currently checked out — confirm suspend() drains the
    // stack first, otherwise producers could appear on the stack twice.
    producersStack.addAll(producersPool);
  }
}