public void reset(@NotNull Collection<X509Certificate> certificates) {
  myCertificates.clear();
  for (X509Certificate certificate : certificates) {
    addCertificate(certificate);
  }
  // expand organization nodes at the same time
  //initRootNode();
  queueUpdateFrom(RootDescriptor.ROOT, true)
      .doWhenDone(() -> CertificateTreeBuilder.this.expandAll(null));
}
public void search() {
  myFiles.clear();
  myJars.clear();
  PsiManager psiManager = PsiManager.getInstance(myProject);
  for (PsiFile file : search(myModule, myProject)) {
    // group each usage by its library jar; otherwise by its module; otherwise by its parent directory
    VirtualFile jar = JarFileSystem.getInstance().getVirtualFileForJar(file.getVirtualFile());
    if (jar != null) {
      myJars.putValue(jar, file);
    }
    else {
      Module module = ModuleUtilCore.findModuleForPsiElement(file);
      if (module != null) {
        myFiles.putValue(module, file);
      }
      else {
        VirtualFile virtualFile = file.getVirtualFile();
        myVirtualFiles.putValue(virtualFile.getParent(), psiManager.findFile(virtualFile));
      }
    }
  }
}
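// A small JDK-only sketch of the classification performed by search() above: each hit is bucketed
// under its library jar if it comes from one, otherwise under its module, otherwise under its
// parent directory. Map/List stands in for MultiMap.putValue; the type parameters and the
// UsageBucketsSketch name are hypothetical, not part of the original API.
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;

final class UsageBucketsSketch<F, J, M, D> {
  final Map<J, List<F>> byJar = new HashMap<>();
  final Map<M, List<F>> byModule = new HashMap<>();
  final Map<D, List<F>> byDirectory = new HashMap<>();

  void classify(F file,
                Optional<J> jar,          // present when the file lives inside a library jar
                Optional<M> module,       // present when the file belongs to a project module
                D parentDirectory) {      // fallback bucket
    if (jar.isPresent()) {
      byJar.computeIfAbsent(jar.get(), j -> new ArrayList<>()).add(file);
    }
    else if (module.isPresent()) {
      byModule.computeIfAbsent(module.get(), m -> new ArrayList<>()).add(file);
    }
    else {
      byDirectory.computeIfAbsent(parentDirectory, d -> new ArrayList<>()).add(file);
    }
  }
}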
public void doClearFoldRegions() {
  myGroups.clear();
  myFoldTree.clear();
}
public final RunnerResult analyzeMethod(@NotNull PsiElement psiBlock,
                                        InstructionVisitor visitor,
                                        boolean ignoreAssertions,
                                        @NotNull Collection<DfaMemoryState> initialStates) {
  try {
    prepareAnalysis(psiBlock, initialStates);

    final ControlFlow flow = createControlFlowAnalyzer().buildControlFlow(psiBlock, ignoreAssertions);
    if (flow == null) return RunnerResult.NOT_APPLICABLE;

    int endOffset = flow.getInstructionCount();
    myInstructions = flow.getInstructions();
    myFields = flow.getFields();
    myNestedClosures.clear();

    if (LOG.isDebugEnabled()) {
      LOG.debug("Analyzing code block: " + psiBlock.getText());
      for (int i = 0; i < myInstructions.length; i++) {
        Instruction instruction = myInstructions[i];
        LOG.debug(i + ": " + instruction.toString());
      }
    }

    Integer tooExpensiveHash = psiBlock.getUserData(TOO_EXPENSIVE_HASH);
    if (tooExpensiveHash != null && tooExpensiveHash == psiBlock.getText().hashCode()) {
      LOG.debug("Too complex because hasn't changed since being too complex already");
      return RunnerResult.TOO_COMPLEX;
    }

    final ArrayList<DfaInstructionState> queue = new ArrayList<DfaInstructionState>();
    for (final DfaMemoryState initialState : initialStates) {
      queue.add(new DfaInstructionState(myInstructions[0], initialState));
    }

    long timeLimit = ourTimeLimit;
    final boolean unitTestMode = ApplicationManager.getApplication().isUnitTestMode();
    WorkingTimeMeasurer measurer = new WorkingTimeMeasurer(timeLimit);
    int count = 0;
    while (!queue.isEmpty()) {
      if (count % 50 == 0 && !unitTestMode && measurer.isTimeOver()) {
        LOG.debug("Too complex because the analysis took too long");
        psiBlock.putUserData(TOO_EXPENSIVE_HASH, psiBlock.getText().hashCode());
        return RunnerResult.TOO_COMPLEX;
      }
      ProgressManager.checkCanceled();

      DfaInstructionState instructionState = queue.remove(0);
      if (LOG.isDebugEnabled()) {
        LOG.debug(instructionState.toString());
      }
      //System.out.println(instructionState.toString());

      Instruction instruction = instructionState.getInstruction();
      long distance = instructionState.getDistanceFromStart();

      if (instruction instanceof BranchingInstruction) {
        if (!instruction.setMemoryStateProcessed(instructionState.getMemoryState().createCopy())) {
          LOG.debug("Too complex because too many different possible states");
          return RunnerResult.TOO_COMPLEX; // Too complex :(
        }
      }

      DfaInstructionState[] after = acceptInstruction(visitor, instructionState);
      for (DfaInstructionState state : after) {
        Instruction nextInstruction = state.getInstruction();
        if ((!(nextInstruction instanceof BranchingInstruction) ||
             !nextInstruction.isMemoryStateProcessed(state.getMemoryState())) &&
            instruction.getIndex() < endOffset) {
          state.setDistanceFromStart(distance + 1);
          queue.add(state);
        }
      }

      count++;
    }

    psiBlock.putUserData(TOO_EXPENSIVE_HASH, null);
    LOG.debug("Analysis ok");
    return RunnerResult.OK;
  }
  catch (ArrayIndexOutOfBoundsException e) {
    LOG.error(psiBlock.getText(), e); // TODO fix in better times
    return RunnerResult.ABORTED;
  }
  catch (EmptyStackException e) {
    if (LOG.isDebugEnabled()) {
      LOG.error(e); // TODO fix in better times
    }
    return RunnerResult.ABORTED;
  }
}
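// A minimal, self-contained JDK-only sketch of the worklist exploration used by analyzeMethod()
// above: seed the queue with the initial states, pop a state, deduplicate at branching
// instructions so loops terminate, and bail out on a time budget. All names here (Insn, State,
// Node, step) are hypothetical stand-ins for Instruction/DfaMemoryState/acceptInstruction; it
// only illustrates the shape of the loop, not the real contract handling or distance tracking.
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

final class WorklistSketch {
  interface State { /* assumed to define value-based equals/hashCode, like DfaMemoryState */ }
  interface Insn {
    boolean isBranch();
    List<Node> step(State in);   // successor (instruction, state) pairs
  }
  record Node(Insn insn, State state) {}

  enum Result { OK, TOO_COMPLEX }

  static Result run(Insn start, List<State> initialStates, long budgetNanos) {
    long deadline = System.nanoTime() + budgetNanos;
    Deque<Node> queue = new ArrayDeque<>();
    Set<Node> seenAtBranches = new HashSet<>();   // plays the role of setMemoryStateProcessed
    for (State s : initialStates) queue.add(new Node(start, s));
    int count = 0;
    while (!queue.isEmpty()) {
      // check the time budget only every 50 iterations, as the original does
      if (count++ % 50 == 0 && System.nanoTime() > deadline) return Result.TOO_COMPLEX;
      Node node = queue.removeFirst();
      // at branching instructions, skip states that were already explored; this is what keeps
      // the analysis finite on loops (and what can blow up into TOO_COMPLEX)
      if (node.insn().isBranch() && !seenAtBranches.add(node)) continue;
      queue.addAll(node.insn().step(node.state()));
    }
    return Result.OK;
  }
}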
public void dispose() {
  myGroups.clear();
  myFoldTree.clear();
}
public boolean updateStep() {
  final MultiMap<VcsRoot, String> dirty = new MultiMap<VcsRoot, String>();
  final long oldPoint = System.currentTimeMillis() -
                        (myVcsConfiguration.CHANGED_ON_SERVER_INTERVAL > 0
                         ? myVcsConfiguration.CHANGED_ON_SERVER_INTERVAL * 60000
                         : DISCRETE);

  synchronized (myLock) {
    // just copy the myQueries MultiMap into the dirty MultiMap
    for (VcsRoot root : myQueries.keySet()) {
      final Collection<String> collection = myQueries.get(root);
      for (String s : collection) {
        dirty.putValue(root, s);
      }
    }
    myQueries.clear();

    // collect the roots for which a cache update should be performed (by timestamp)
    final Set<VcsRoot> roots = new HashSet<VcsRoot>();
    for (Map.Entry<VcsRoot, Long> entry : myTs.entrySet()) {
      // ignore the timestamp here, since checking for remote changes is still required
      // TODO: why not add to roots anyway? - dirty is still checked when adding myChanged files
      if (!dirty.get(entry.getKey()).isEmpty()) continue;

      // update only if the timeout has expired
      final Long ts = entry.getValue();
      if ((ts == null) || (oldPoint > ts)) {
        roots.add(entry.getKey());
      }
    }

    // add dirty files from those vcs roots that
    // - need to be updated by the timestamp criterion
    // - already contain files to update through manually added requests
    for (Map.Entry<String, Pair<Boolean, VcsRoot>> entry : myChanged.entrySet()) {
      final VcsRoot vcsRoot = entry.getValue().getSecond();
      if ((!dirty.get(vcsRoot).isEmpty()) || roots.contains(vcsRoot)) {
        dirty.putValue(vcsRoot, entry.getKey());
      }
    }
  }

  if (dirty.isEmpty()) return false;

  final Map<String, Pair<Boolean, VcsRoot>> results = new HashMap<String, Pair<Boolean, VcsRoot>>();
  for (VcsRoot vcsRoot : dirty.keySet()) {
    // todo - currently this check means little, since the only known VCS using this scheme is Git,
    // todo - which always allows background operations; when that changes, develop more flexible behavior here
    if (!vcsRoot.getVcs().isVcsBackgroundOperationsAllowed(vcsRoot.getPath())) continue;
    final TreeDiffProvider provider = vcsRoot.getVcs().getTreeDiffProvider();
    if (provider == null) continue;

    final Collection<String> paths = dirty.get(vcsRoot);
    final Collection<String> remotelyChanged = provider.getRemotelyChanged(vcsRoot.getPath(), paths);
    for (String path : paths) {
      // TODO: contains() is invoked for each file - better to use a Set (implementations just use a List)
      // TODO: why store a boolean for changed/unchanged - why not just remove such values from myChanged?
      results.put(path, new Pair<Boolean, VcsRoot>(remotelyChanged.contains(path), vcsRoot));
    }
  }

  final long curTime = System.currentTimeMillis();
  synchronized (myLock) {
    myChanged.putAll(results);
    for (VcsRoot vcsRoot : dirty.keySet()) {
      myTs.put(vcsRoot, curTime);
    }
  }
  return true;
}
public boolean updateStep(final AtomicSectionsAware atomicSectionsAware) {
  final MultiMap<VcsRoot, String> dirty = new MultiMap<VcsRoot, String>();
  final long oldPoint = System.currentTimeMillis() -
                        (myVcsConfiguration.CHANGED_ON_SERVER_INTERVAL > 0
                         ? myVcsConfiguration.CHANGED_ON_SERVER_INTERVAL * 60000
                         : DISCRETE);

  synchronized (myLock) {
    for (VcsRoot root : myQueries.keySet()) {
      final Collection<String> collection = myQueries.get(root);
      for (String s : collection) {
        dirty.putValue(root, s);
      }
    }
    myQueries.clear();

    final Set<VcsRoot> roots = new HashSet<VcsRoot>();
    for (Map.Entry<VcsRoot, Long> entry : myTs.entrySet()) {
      if (!dirty.get(entry.getKey()).isEmpty()) continue;
      final Long ts = entry.getValue();
      if ((ts == null) || (oldPoint > ts)) {
        roots.add(entry.getKey());
      }
    }

    for (Map.Entry<String, Pair<Boolean, VcsRoot>> entry : myChanged.entrySet()) {
      final VcsRoot vcsRoot = entry.getValue().getSecond();
      if ((!dirty.get(vcsRoot).isEmpty()) || roots.contains(vcsRoot)) {
        dirty.putValue(vcsRoot, entry.getKey());
      }
    }
  }

  if (dirty.isEmpty()) return false;

  final Map<String, Pair<Boolean, VcsRoot>> results = new HashMap<String, Pair<Boolean, VcsRoot>>();
  for (VcsRoot vcsRoot : dirty.keySet()) {
    atomicSectionsAware.checkShouldExit();

    // todo - currently this check means little, since the only known VCS using this scheme is Git,
    // todo - which always allows background operations; when that changes, develop more flexible behavior here
    if (!vcsRoot.vcs.isVcsBackgroundOperationsAllowed(vcsRoot.path)) continue;
    final TreeDiffProvider provider = vcsRoot.vcs.getTreeDiffProvider();
    if (provider == null) continue;

    final Collection<String> paths = dirty.get(vcsRoot);
    final Collection<String> remotelyChanged = provider.getRemotelyChanged(vcsRoot.path, paths);
    for (String path : paths) {
      results.put(path, new Pair<Boolean, VcsRoot>(remotelyChanged.contains(path), vcsRoot));
    }
  }

  final long curTime = System.currentTimeMillis();
  synchronized (myLock) {
    myChanged.putAll(results);
    for (VcsRoot vcsRoot : dirty.keySet()) {
      myTs.put(vcsRoot, curTime);
    }
  }
  return true;
}
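// A JDK-only sketch of the polling policy shared by both updateStep() variants above: explicitly
// queued paths are always refreshed, other roots are refreshed only once their last-checked
// timestamp is older than the configured interval, and results plus timestamps are written back
// under the lock while the remote query runs outside it. RemoteChangeProvider stands in for
// TreeDiffProvider; all names are hypothetical, and the real code additionally skips roots whose
// VCS forbids background operations.
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

final class RemoteRefreshSketch<R> {
  interface RemoteChangeProvider<R> {
    Collection<String> remotelyChanged(R root, Collection<String> paths);
  }

  private final Object lock = new Object();
  private final Map<R, Long> lastChecked = new HashMap<>();              // like myTs
  private final Map<R, Set<String>> queued = new HashMap<>();            // like myQueries
  private final Map<R, Set<String>> knownPaths = new HashMap<>();
  private final Map<String, Boolean> changedOnServer = new HashMap<>();  // like myChanged
  private final long intervalMs;
  private final RemoteChangeProvider<R> provider;

  RemoteRefreshSketch(long intervalMs, RemoteChangeProvider<R> provider) {
    this.intervalMs = intervalMs;
    this.provider = provider;
  }

  void queue(R root, String path) {
    synchronized (lock) {
      queued.computeIfAbsent(root, r -> new HashSet<>()).add(path);
      knownPaths.computeIfAbsent(root, r -> new HashSet<>()).add(path);
      lastChecked.putIfAbsent(root, 0L);
    }
  }

  /** One polling step; returns false when nothing needed refreshing. */
  boolean updateStep() {
    long now = System.currentTimeMillis();
    Map<R, Set<String>> dirty;
    synchronized (lock) {
      dirty = new HashMap<>(queued);
      queued.clear();
      for (Map.Entry<R, Long> e : lastChecked.entrySet()) {
        // roots with explicit requests are refreshed anyway; others only after the interval expires
        if (!dirty.containsKey(e.getKey()) && now - e.getValue() > intervalMs) {
          dirty.put(e.getKey(), new HashSet<>(knownPaths.getOrDefault(e.getKey(), Set.of())));
        }
      }
    }
    if (dirty.isEmpty()) return false;

    // query the remote side outside the lock, as the original does
    Map<String, Boolean> results = new HashMap<>();
    for (Map.Entry<R, Set<String>> e : dirty.entrySet()) {
      Collection<String> remote = provider.remotelyChanged(e.getKey(), e.getValue());
      for (String path : e.getValue()) results.put(path, remote.contains(path));
    }

    synchronized (lock) {
      changedOnServer.putAll(results);
      for (R root : dirty.keySet()) lastChecked.put(root, now);
    }
    return true;
  }
}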
@NotNull
final RunnerResult analyzeMethod(@NotNull PsiElement psiBlock,
                                 @NotNull InstructionVisitor visitor,
                                 boolean ignoreAssertions,
                                 @NotNull Collection<DfaMemoryState> initialStates) {
  if (PsiTreeUtil.findChildOfType(psiBlock, OuterLanguageElement.class) != null) return RunnerResult.NOT_APPLICABLE;

  try {
    final ControlFlow flow = new ControlFlowAnalyzer(myValueFactory, psiBlock, ignoreAssertions).buildControlFlow();
    if (flow == null) return RunnerResult.NOT_APPLICABLE;

    int[] loopNumber = LoopAnalyzer.calcInLoop(flow);

    int endOffset = flow.getInstructionCount();
    myInstructions = flow.getInstructions();
    myNestedClosures.clear();

    Set<Instruction> joinInstructions = ContainerUtil.newHashSet();
    for (int index = 0; index < myInstructions.length; index++) {
      Instruction instruction = myInstructions[index];
      if (instruction instanceof GotoInstruction) {
        joinInstructions.add(myInstructions[((GotoInstruction)instruction).getOffset()]);
      }
      else if (instruction instanceof ConditionalGotoInstruction) {
        joinInstructions.add(myInstructions[((ConditionalGotoInstruction)instruction).getOffset()]);
      }
      else if (instruction instanceof MethodCallInstruction && !((MethodCallInstruction)instruction).getContracts().isEmpty()) {
        joinInstructions.add(myInstructions[index + 1]);
      }
    }

    if (LOG.isDebugEnabled()) {
      LOG.debug("Analyzing code block: " + psiBlock.getText());
      for (int i = 0; i < myInstructions.length; i++) {
        LOG.debug(i + ": " + myInstructions[i]);
      }
    }
    //for (int i = 0; i < myInstructions.length; i++) System.out.println(i + ": " + myInstructions[i].toString());

    Integer tooExpensiveHash = psiBlock.getUserData(TOO_EXPENSIVE_HASH);
    if (tooExpensiveHash != null && tooExpensiveHash == psiBlock.getText().hashCode()) {
      LOG.debug("Too complex because hasn't changed since being too complex already");
      return RunnerResult.TOO_COMPLEX;
    }

    final StateQueue queue = new StateQueue();
    for (final DfaMemoryState initialState : initialStates) {
      queue.offer(new DfaInstructionState(myInstructions[0], initialState));
    }

    MultiMap<BranchingInstruction, DfaMemoryState> processedStates = MultiMap.createSet();
    MultiMap<BranchingInstruction, DfaMemoryState> incomingStates = MultiMap.createSet();

    long msLimit = shouldCheckTimeLimit()
                   ? Registry.intValue("ide.dfa.time.limit.online")
                   : Registry.intValue("ide.dfa.time.limit.offline");
    WorkingTimeMeasurer measurer = new WorkingTimeMeasurer(msLimit * 1000 * 1000);
    int count = 0;
    while (!queue.isEmpty()) {
      List<DfaInstructionState> states = queue.getNextInstructionStates(joinInstructions);
      for (DfaInstructionState instructionState : states) {
        if (count++ % 1024 == 0 && measurer.isTimeOver()) {
          LOG.debug("Too complex because the analysis took too long");
          psiBlock.putUserData(TOO_EXPENSIVE_HASH, psiBlock.getText().hashCode());
          return RunnerResult.TOO_COMPLEX;
        }
        ProgressManager.checkCanceled();

        if (LOG.isDebugEnabled()) {
          LOG.debug(instructionState.toString());
        }
        //System.out.println(instructionState.toString());

        Instruction instruction = instructionState.getInstruction();

        if (instruction instanceof BranchingInstruction) {
          BranchingInstruction branching = (BranchingInstruction)instruction;
          Collection<DfaMemoryState> processed = processedStates.get(branching);
          if (processed.contains(instructionState.getMemoryState())) {
            continue;
          }
          if (processed.size() > MAX_STATES_PER_BRANCH) {
            LOG.debug("Too complex because too many different possible states");
            return RunnerResult.TOO_COMPLEX; // Too complex :(
          }
          if (loopNumber[branching.getIndex()] != 0) {
            processedStates.putValue(branching, instructionState.getMemoryState().createCopy());
          }
        }

        DfaInstructionState[] after = acceptInstruction(visitor, instructionState);
        for (DfaInstructionState state : after) {
          Instruction nextInstruction = state.getInstruction();
          if (nextInstruction.getIndex() >= endOffset) {
            continue;
          }
          handleStepOutOfLoop(instruction, nextInstruction, loopNumber, processedStates, incomingStates, states, after, queue);
          if (nextInstruction instanceof BranchingInstruction) {
            BranchingInstruction branching = (BranchingInstruction)nextInstruction;
            if (processedStates.get(branching).contains(state.getMemoryState()) ||
                incomingStates.get(branching).contains(state.getMemoryState())) {
              continue;
            }
            if (loopNumber[branching.getIndex()] != 0) {
              incomingStates.putValue(branching, state.getMemoryState().createCopy());
            }
          }
          queue.offer(state);
        }
      }
    }

    psiBlock.putUserData(TOO_EXPENSIVE_HASH, null);
    LOG.debug("Analysis ok");
    return RunnerResult.OK;
  }
  catch (ArrayIndexOutOfBoundsException e) {
    LOG.error(psiBlock.getText(), e);
    return RunnerResult.ABORTED;
  }
  catch (EmptyStackException e) {
    LOG.error(psiBlock.getText(), e);
    return RunnerResult.ABORTED;
  }
}
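// A JDK-only sketch of the extra bookkeeping this newer analyzeMethod() adds on top of the plain
// worklist: visited memory states are remembered per branching instruction, but only for branches
// that sit inside a loop (loopNumber != 0), and the whole analysis is abandoned once any single
// branch accumulates more than a fixed number of distinct states. Names are hypothetical; the real
// code keeps MultiMap<BranchingInstruction, DfaMemoryState> and MAX_STATES_PER_BRANCH, and S is
// assumed to define value-based equals/hashCode, like DfaMemoryState.
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

final class BranchStateBudget<I, S> {
  enum Decision { PROCEED, SKIP_ALREADY_SEEN, TOO_COMPLEX }

  private final Map<I, Set<S>> processed = new HashMap<>();  // states already explored per branch
  private final int maxStatesPerBranch;

  BranchStateBudget(int maxStatesPerBranch) {
    this.maxStatesPerBranch = maxStatesPerBranch;
  }

  // Mirrors the checks performed when the current instruction is a BranchingInstruction.
  Decision onBranch(I branch, S memoryState, boolean branchIsInsideLoop) {
    Set<S> seen = processed.computeIfAbsent(branch, b -> new HashSet<>());
    if (seen.contains(memoryState)) return Decision.SKIP_ALREADY_SEEN;
    if (seen.size() > maxStatesPerBranch) return Decision.TOO_COMPLEX;
    // outside loops a branch cannot be revisited with a new state, so remembering it is unnecessary
    if (branchIsInsideLoop) seen.add(memoryState);
    return Decision.PROCEED;
  }
}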