// returns a macro adder for the given morph item
private MacroAdder getMacAdder(MorphItem mi) {

  // check map
  MacroAdder retval = macAdderMap.get(mi);
  if (retval != null) return retval;

  // set up macro adder
  IntHashSetMap macrosFromLex = new IntHashSetMap();
  String[] newMacroNames = mi.getMacros();
  List<MacroItem> macroItems = new ArrayList<MacroItem>();
  for (int i = 0; i < newMacroNames.length; i++) {
    Set<FeatureStructure> featStrucs = (Set<FeatureStructure>) _macros.get(newMacroNames[i]);
    if (featStrucs != null) {
      for (Iterator<FeatureStructure> fsIt = featStrucs.iterator(); fsIt.hasNext(); ) {
        FeatureStructure fs = fsIt.next();
        macrosFromLex.put(fs.getIndex(), fs);
      }
    }
    MacroItem macroItem = _macroItems.get(newMacroNames[i]);
    if (macroItem != null) {
      macroItems.add(macroItem);
    } else {
      // should be checked earlier too
      System.err.println(
          "Warning: macro " + newMacroNames[i] + " not found for word '" + mi.getWord() + "'");
    }
  }
  retval = new MacroAdder(macrosFromLex, macroItems);

  // update map and return
  macAdderMap.put(mi, retval);
  return retval;
}
private void applyChildrenChangeEvents(VirtualFile parent, List<VFileEvent> events) {
  final NewVirtualFileSystem delegate = getDelegate(parent);
  TIntArrayList childrenIdsUpdated = new TIntArrayList();
  List<VirtualFile> childrenToBeUpdated = new SmartList<VirtualFile>();

  assert parent != null && parent != mySuperRoot;
  final int parentId = getFileId(parent);
  assert parentId != 0;
  TIntHashSet parentChildrenIds = new TIntHashSet(FSRecords.list(parentId));

  boolean hasRemovedChildren = false;

  for (VFileEvent event : events) {
    if (event instanceof VFileCreateEvent) {
      String name = ((VFileCreateEvent) event).getChildName();
      final VirtualFile fake = new FakeVirtualFile(parent, name);
      final FileAttributes attributes = delegate.getAttributes(fake);

      if (attributes != null) {
        final int childId = createAndFillRecord(delegate, fake, parentId, attributes);
        assert parent instanceof VirtualDirectoryImpl : parent;
        final VirtualDirectoryImpl dir = (VirtualDirectoryImpl) parent;
        VirtualFileSystemEntry child = dir.createChild(name, childId, dir.getFileSystem());
        childrenToBeUpdated.add(child);
        childrenIdsUpdated.add(childId);
        parentChildrenIds.add(childId);
      }
    } else if (event instanceof VFileDeleteEvent) {
      VirtualFile file = ((VFileDeleteEvent) event).getFile();
      if (!file.exists()) {
        LOG.error("Deleting a file, which does not exist: " + file.getPath());
        continue;
      }

      hasRemovedChildren = true;
      int id = getFileId(file);

      childrenToBeUpdated.add(file);
      childrenIdsUpdated.add(-id);
      parentChildrenIds.remove(id);
    }
  }

  FSRecords.updateList(parentId, parentChildrenIds.toArray());

  if (hasRemovedChildren) clearIdCache();
  VirtualDirectoryImpl parentImpl = (VirtualDirectoryImpl) parent;

  for (int i = 0, len = childrenIdsUpdated.size(); i < len; ++i) {
    final int childId = childrenIdsUpdated.get(i);
    final VirtualFile childFile = childrenToBeUpdated.get(i);

    if (childId > 0) {
      parentImpl.addChild((VirtualFileSystemEntry) childFile);
    } else {
      FSRecords.deleteRecordRecursively(-childId);
      parentImpl.removeChild(childFile);
      invalidateSubtree(childFile);
    }
  }
}
@NotNull
private static String toVfString(@NotNull Collection<VirtualFile> list) {
  List<VirtualFile> sub = new ArrayList<VirtualFile>(list).subList(0, Math.min(list.size(), 100));
  return list.size() + " files: "
      + StringUtil.join(sub, file -> file.getName(), ", ")
      + (list.size() == sub.size() ? "" : "...");
}
private boolean processFilesConcurrently(
    @NotNull Set<VirtualFile> files,
    @NotNull final ProgressIndicator indicator,
    @NotNull final Processor<VirtualFile> processor) {
  final List<VirtualFile> fileList = new ArrayList<VirtualFile>(files);
  // fine but grabs all CPUs
  // return JobLauncher.getInstance().invokeConcurrentlyUnderProgress(fileList, indicator, false,
  //     false, processor);

  int parallelism = CacheUpdateRunner.indexingThreadCount();
  final Callable<Boolean> processFileFromSet =
      () -> {
        final boolean[] result = {true};
        ProgressManager.getInstance()
            .executeProcessUnderProgress(
                () -> {
                  while (true) {
                    ProgressManager.checkCanceled();
                    VirtualFile file;
                    synchronized (fileList) {
                      file = fileList.isEmpty() ? null : fileList.remove(fileList.size() - 1);
                    }
                    if (file == null) {
                      break;
                    }
                    if (!processor.process(file)) {
                      result[0] = false;
                      break;
                    }
                  }
                },
                indicator);
        return result[0];
      };
  List<Future<Boolean>> futures =
      ContainerUtil.map(
          Collections.nCopies(parallelism, ""),
          s -> myApplication.executeOnPooledThread(processFileFromSet));
  List<Boolean> results =
      ContainerUtil.map(
          futures,
          future -> {
            try {
              return future.get();
            } catch (Exception e) {
              LOG.error(e);
            }
            return false;
          });
  return !ContainerUtil.exists(
      results,
      result -> {
        return result != null && !result; // null means PCE
      });
}
// look up and apply coarts for given rels to each sign in result
private void applyCoarts(List<String> coartRels, Collection<Sign> result) {
  List<Sign> inputSigns = new ArrayList<Sign>(result);
  result.clear();
  List<Sign> outputSigns = new ArrayList<Sign>(inputSigns.size());

  // for each rel, lookup coarts and apply to input signs, storing results in output signs
  for (Iterator<String> it = coartRels.iterator(); it.hasNext(); ) {
    String rel = it.next();
    Collection<String> preds = (Collection<String>) _coartRelsToPreds.get(rel);
    if (preds == null) continue; // not expected
    Collection<Sign> coartResult = getSignsFromRelAndPreds(rel, preds);
    if (coartResult == null) continue;
    for (Iterator<Sign> it2 = coartResult.iterator(); it2.hasNext(); ) {
      Sign coartSign = it2.next();
      // apply to each input
      for (int j = 0; j < inputSigns.size(); j++) {
        Sign sign = inputSigns.get(j);
        grammar.rules.applyCoart(sign, coartSign, outputSigns);
      }
    }
    // switch output to input for next iteration
    inputSigns.clear();
    inputSigns.addAll(outputSigns);
    outputSigns.clear();
  }

  // add results back
  result.addAll(inputSigns);
}
LinkedHashSet<UnorderedPair<EqClass>> getDistinctClassPairs() {
  if (myCachedDistinctClassPairs != null) return myCachedDistinctClassPairs;

  LinkedHashSet<UnorderedPair<EqClass>> result = ContainerUtil.newLinkedHashSet();
  for (long encodedPair : myDistinctClasses.toArray()) {
    result.add(
        new UnorderedPair<>(myEqClasses.get(low(encodedPair)), myEqClasses.get(high(encodedPair))));
  }
  return myCachedDistinctClassPairs = result;
}
@NotNull
private static List<VirtualFile> toVf(@NotNull int[] ids) {
  List<VirtualFile> res = new ArrayList<VirtualFile>();
  for (int id : ids) {
    VirtualFile file = PersistentFS.getInstance().findFileById(id);
    if (file != null) {
      res.add(file);
    }
  }
  return res;
}
// get signs for rel via preds, or null if none
private Collection<Sign> getSignsFromRelAndPreds(String rel, Collection<String> preds) {
  List<Sign> retval = new ArrayList<Sign>();
  for (Iterator<String> it = preds.iterator(); it.hasNext(); ) {
    String pred = it.next();
    Collection<Sign> signs = getSignsFromPredAndTargetRel(pred, rel);
    if (signs != null) retval.addAll(signs);
  }
  // return null if none survive filter
  if (retval.size() > 0) return retval;
  else return null;
}
// get signs using an additional arg for a target rel
private Collection<Sign> getSignsFromPredAndTargetRel(String pred, String targetRel) {

  Collection<Word> words = (Collection<Word>) _predToWords.get(pred);
  String specialTokenConst = null;

  // for robustness, when using supertagger, add words for pred sans sense index
  int dotIndex = -1;
  if (_supertagger != null
      && !Character.isDigit(pred.charAt(0)) // skip numbers
      && (dotIndex = pred.lastIndexOf('.')) > 0
      && pred.length() > dotIndex + 1
      && pred.charAt(dotIndex + 1) != '_') // skip titles, eg Mr._Smith
  {
    String barePred = pred.substring(0, dotIndex);
    Collection<Word> barePredWords = (Collection<Word>) _predToWords.get(barePred);
    if (words == null) words = barePredWords;
    else if (barePredWords != null) {
      Set<Word> unionWords = new HashSet<Word>(words);
      unionWords.addAll(barePredWords);
      words = unionWords;
    }
  }

  if (words == null) {
    specialTokenConst = tokenizer.getSpecialTokenConstant(tokenizer.isSpecialToken(pred));
    if (specialTokenConst == null) return null;
    // lookup words with pred = special token const
    Collection<Word> specialTokenWords = (Collection<Word>) _predToWords.get(specialTokenConst);
    // replace special token const with pred
    if (specialTokenWords == null) return null;
    words = new ArrayList<Word>(specialTokenWords.size());
    for (Iterator<Word> it = specialTokenWords.iterator(); it.hasNext(); ) {
      Word stw = it.next();
      Word w = Word.createSurfaceWord(stw, pred);
      words.add(w);
    }
  }

  List<Sign> retval = new ArrayList<Sign>();
  for (Iterator<Word> it = words.iterator(); it.hasNext(); ) {
    Word w = it.next();
    try {
      SignHash signs = getSignsFromWord(w, specialTokenConst, pred, targetRel);
      retval.addAll(signs.asSignSet());
    }
    // shouldn't happen
    catch (LexException exc) {
      System.err.println("Unexpected lex exception for word " + w + ": " + exc);
    }
  }
  return retval;
}
// get licensing features, with appropriate defaults
@SuppressWarnings("unchecked")
private void loadLicensingFeatures(Element licensingElt) {
  List<LicensingFeature> licensingFeats = new ArrayList<LicensingFeature>();
  boolean containsLexFeat = false;
  if (licensingElt != null) {
    for (Iterator<Element> it = licensingElt.getChildren("feat").iterator(); it.hasNext(); ) {
      Element featElt = it.next();
      String attr = featElt.getAttributeValue("attr");
      if (attr.equals("lex")) containsLexFeat = true;
      String val = featElt.getAttributeValue("val");

      List<String> alsoLicensedBy = null;
      String alsoVals = featElt.getAttributeValue("also-licensed-by");
      if (alsoVals != null) {
        alsoLicensedBy = Arrays.asList(alsoVals.split("\\s+"));
      }

      boolean licenseEmptyCats = true;
      boolean licenseMarkedCats = false;
      boolean instantiate = true;
      byte loc = LicensingFeature.BOTH;

      String lmc = featElt.getAttributeValue("license-marked-cats");
      if (lmc != null) {
        licenseMarkedCats = Boolean.valueOf(lmc).booleanValue();
        // change defaults
        licenseEmptyCats = false;
        loc = LicensingFeature.TARGET_ONLY;
        instantiate = false;
      }

      String lec = featElt.getAttributeValue("license-empty-cats");
      if (lec != null) {
        licenseEmptyCats = Boolean.valueOf(lec).booleanValue();
      }

      String inst = featElt.getAttributeValue("instantiate");
      if (inst != null) {
        instantiate = Boolean.valueOf(inst).booleanValue();
      }

      String locStr = featElt.getAttributeValue("location");
      if (locStr != null) {
        if (locStr.equals("target-only")) loc = LicensingFeature.TARGET_ONLY;
        if (locStr.equals("args-only")) loc = LicensingFeature.ARGS_ONLY;
        if (locStr.equals("both")) loc = LicensingFeature.BOTH;
      }

      licensingFeats.add(
          new LicensingFeature(
              attr, val, alsoLicensedBy, licenseEmptyCats, licenseMarkedCats, instantiate, loc));
    }
  }
  if (!containsLexFeat) {
    licensingFeats.add(LicensingFeature.defaultLexFeature);
  }
  _licensingFeatures = new LicensingFeature[licensingFeats.size()];
  licensingFeats.toArray(_licensingFeatures);
}
/**
 * For a string of one or more surface words, returns all of the lexical entries for each word as
 * a list of sign hashes. Tokenization is performed using the configured tokenizer.
 *
 * @param s the words in string format
 * @return a list of sign hashes
 * @exception LexException thrown if a word is not found in the lexicon
 */
public List<SignHash> getEntriesFromWords(String s) throws LexException {
  List<SignHash> entries = new ArrayList<SignHash>();
  List<Word> words = tokenizer.tokenize(s);
  for (Iterator<Word> it = words.iterator(); it.hasNext(); ) {
    Word w = it.next();
    SignHash signs = getSignsFromWord(w);
    if (signs.size() == 0) {
      throw new LexException("Word not in lexicon: \"" + w + "\"");
    }
    entries.add(signs);
  }
  return entries;
}
// look up and apply coarts for w to each sign in result
@SuppressWarnings("unchecked")
private void applyCoarts(Word w, SignHash result) throws LexException {
  List<Sign> inputSigns = new ArrayList<Sign>(result.asSignSet());
  result.clear();
  List<Sign> outputSigns = new ArrayList<Sign>(inputSigns.size());

  // for each surface attr, lookup coarts and apply to input signs, storing results in output signs
  for (Iterator<Pair<String, String>> it = w.getSurfaceAttrValPairs(); it.hasNext(); ) {
    Pair<String, String> p = it.next();
    String attr = (String) p.a;
    if (!_indexedCoartAttrs.contains(attr)) continue;
    String val = (String) p.b;
    Word coartWord = Word.createWord(attr, val);
    SignHash coartResult = getSignsFromWord(coartWord, null, null, null);
    for (Iterator<Sign> it2 = coartResult.iterator(); it2.hasNext(); ) {
      Sign coartSign = it2.next();
      // apply to each input
      for (int j = 0; j < inputSigns.size(); j++) {
        Sign sign = inputSigns.get(j);
        grammar.rules.applyCoart(sign, coartSign, outputSigns);
      }
    }
    // switch output to input for next iteration
    inputSigns.clear();
    inputSigns.addAll(outputSigns);
    outputSigns.clear();
  }

  // add results back
  result.addAll(inputSigns);
}
@RequiredWriteAction
@Override
public void processEvents(@NotNull List<VFileEvent> events) {
  ApplicationManager.getApplication().assertWriteAccessAllowed();

  List<VFileEvent> validated = validateEvents(events);

  BulkFileListener publisher = myEventBus.syncPublisher(VirtualFileManager.VFS_CHANGES);
  publisher.before(validated);

  THashMap<VirtualFile, List<VFileEvent>> parentToChildrenEventsChanges = null;
  for (VFileEvent event : validated) {
    VirtualFile changedParent = null;
    if (event instanceof VFileCreateEvent) {
      changedParent = ((VFileCreateEvent) event).getParent();
      ((VFileCreateEvent) event).resetCache();
    } else if (event instanceof VFileDeleteEvent) {
      changedParent = ((VFileDeleteEvent) event).getFile().getParent();
    }

    if (changedParent != null) {
      if (parentToChildrenEventsChanges == null) {
        parentToChildrenEventsChanges = new THashMap<VirtualFile, List<VFileEvent>>();
      }
      List<VFileEvent> parentChildrenChanges = parentToChildrenEventsChanges.get(changedParent);
      if (parentChildrenChanges == null) {
        parentToChildrenEventsChanges.put(
            changedParent, parentChildrenChanges = new SmartList<VFileEvent>());
      }
      parentChildrenChanges.add(event);
    } else {
      applyEvent(event);
    }
  }

  if (parentToChildrenEventsChanges != null) {
    parentToChildrenEventsChanges.forEachEntry(
        new TObjectObjectProcedure<VirtualFile, List<VFileEvent>>() {
          @Override
          public boolean execute(VirtualFile parent, List<VFileEvent> childrenEvents) {
            applyChildrenChangeEvents(parent, childrenEvents);
            return true;
          }
        });
    parentToChildrenEventsChanges.clear();
  }

  publisher.after(validated);
}
public boolean equals(Object obj) {
  if (!(obj instanceof PredLookup)) return false;
  PredLookup pLook = (PredLookup) obj;
  if (!pred.equals(pLook.pred)) return false;
  if (coartRels == null) return (pLook.coartRels == null);
  return coartRels.equals(pLook.coartRels);
}
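// Note: a hedged sketch, not part of the original source. If PredLookup does not already
// override hashCode() elsewhere, an override consistent with the equals(Object) above would
// be needed to keep the class usable as a hash key; assuming pred is non-null and coartRels
// may be null (as the equals logic implies), one possible implementation is:
//
// @Override
// public int hashCode() {
//   return 31 * pred.hashCode() + (coartRels == null ? 0 : coartRels.hashCode());
// }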
@Override
@Nullable
public DfaConstValue getConstantValue(@NotNull DfaVariableValue value) {
  int index = getEqClassIndex(value);
  EqClass ec = index == -1 ? null : myEqClasses.get(index);
  return ec == null ? null : (DfaConstValue) unwrap(ec.findConstant(true));
}
@Nullable
private Double getDoubleValue(int eqClassIndex) {
  EqClass ec = myEqClasses.get(eqClassIndex);
  DfaValue dfaConst = ec == null ? null : ec.findConstant(false);
  Object constValue =
      dfaConst instanceof DfaConstValue ? ((DfaConstValue) dfaConst).getValue() : null;
  return constValue instanceof Number ? ((Number) constValue).doubleValue() : null;
}
@Override
@NotNull
public VirtualFile[] getRoots(@NotNull final NewVirtualFileSystem fs) {
  final List<VirtualFile> roots = new ArrayList<VirtualFile>();

  myRootsLock.readLock().lock();
  try {
    for (NewVirtualFile root : myRoots.values()) {
      if (root.getFileSystem() == fs) {
        roots.add(root);
      }
    }
  } finally {
    myRootsLock.readLock().unlock();
  }

  return VfsUtilCore.toVirtualFileArray(roots);
}
@Override
@NotNull
public VirtualFile[] getLocalRoots() {
  List<VirtualFile> roots = ContainerUtil.newSmartList();

  myRootsLock.readLock().lock();
  try {
    for (NewVirtualFile root : myRoots.values()) {
      if (root.isInLocalFileSystem() && !(root.getFileSystem() instanceof TempFileSystem)) {
        roots.add(root);
      }
    }
  } finally {
    myRootsLock.readLock().unlock();
  }

  return VfsUtilCore.toVirtualFileArray(roots);
}
@NotNull
List<DfaValue> getEquivalentValues(@NotNull DfaValue dfaValue) {
  int index = getEqClassIndex(dfaValue);
  EqClass set = index == -1 ? null : myEqClasses.get(index);
  if (set == null) {
    return Collections.emptyList();
  }
  return set.getMemberValues();
}
@SuppressWarnings("HardCodedStringLiteral")
public String toString() {
  StringBuilder result = new StringBuilder();
  result.append('<');
  if (myEphemeral) {
    result.append("ephemeral, ");
  }

  for (EqClass set : getNonTrivialEqClasses()) {
    result.append(set);
  }

  if (!myDistinctClasses.isEmpty()) {
    result.append("\n distincts: ");
    List<String> distincts = new ArrayList<>();
    for (UnorderedPair<EqClass> pair : getDistinctClassPairs()) {
      distincts.add("{" + pair.first + ", " + pair.second + "}");
    }
    Collections.sort(distincts);
    result.append(StringUtil.join(distincts, " "));
  }

  if (!myStack.isEmpty()) {
    result.append("\n stack: ").append(StringUtil.join(myStack, ","));
  }
  if (!myVariableStates.isEmpty()) {
    result.append("\n vars: ");
    for (Map.Entry<DfaVariableValue, DfaVariableState> entry : myVariableStates.entrySet()) {
      result.append("[").append(entry.getKey()).append("->").append(entry.getValue()).append("] ");
    }
  }
  if (!myUnknownVariables.isEmpty()) {
    result.append("\n unknowns: ").append(new HashSet<>(myUnknownVariables));
  }
  result.append('>');
  return result.toString();
}
private boolean applyRelation(
    @NotNull final DfaValue dfaLeft, @NotNull final DfaValue dfaRight, boolean isNegated) {
  if (isUnknownState(dfaLeft) || isUnknownState(dfaRight)) {
    return true;
  }

  // DfaConstValue || DfaVariableValue
  Integer c1Index = getOrCreateEqClassIndex(dfaLeft);
  Integer c2Index = getOrCreateEqClassIndex(dfaRight);
  if (c1Index == null || c2Index == null) {
    return true;
  }

  if (!isNegated) { // Equals
    if (c1Index.equals(c2Index) || areCompatibleConstants(c1Index, c2Index)) return true;
    if (!uniteClasses(c1Index, c2Index)) return false;

    for (long encodedPair : myDistinctClasses.toArray()) {
      EqClass c1 = myEqClasses.get(low(encodedPair));
      EqClass c2 = myEqClasses.get(high(encodedPair));
      DfaConstValue const1 = (DfaConstValue) c1.findConstant(false);
      DfaConstValue const2 = (DfaConstValue) c2.findConstant(false);
      if (const1 != null
          && const2 != null
          && !preserveConstantDistinction(const1.getValue(), const2.getValue())) {
        myDistinctClasses.remove(encodedPair);
      }
    }
    myCachedDistinctClassPairs = null;
    myCachedNonTrivialEqClasses = null;
    myCachedHash = null;
  } else { // Not Equals
    if (c1Index.equals(c2Index) || areCompatibleConstants(c1Index, c2Index)) return false;
    if (isNull(dfaLeft) && isPrimitive(dfaRight) || isNull(dfaRight) && isPrimitive(dfaLeft)) {
      return true;
    }
    makeClassesDistinct(c1Index, c2Index);
    myCachedDistinctClassPairs = null;
    myCachedHash = null;
  }

  return true;
}
@Override
public boolean queue(@NotNull Collection<VirtualFile> files, @NotNull Object reason) {
  if (files.isEmpty()) {
    return false;
  }
  boolean queued = false;
  List<VirtualFile> added = new ArrayList<VirtualFile>(files.size());
  for (VirtualFile file : files) {
    boolean wasAdded = queueIfNeeded(file, myProject);
    if (wasAdded) {
      added.add(file);
    }
    queued |= wasAdded;
  }
  if (queued) {
    log("Queued to resolve (from " + reason + "): " + toVfString(added));
    flushLog();
  }
  return queued;
}
public void handleElement(Element e) {
  // create morph item
  if (e.getName().equals("entry")) {
    try {
      morphItems.add(new MorphItem(e));
    } catch (RuntimeException exc) {
      System.err.println("Skipping morph item: " + e.getAttributeValue("word"));
      System.err.println(exc.toString());
    }
  }
  // create macro item
  else if (e.getName().equals("macro")) {
    try {
      macroItems.add(new MacroItem(e));
    } catch (RuntimeException exc) {
      System.err.println("Skipping macro item: " + e.getAttributeValue("name"));
      System.err.println(exc.toString());
    }
  }
}
@Override
public void addListener(@NotNull Disposable parent, @NotNull final Listener listener) {
  myListeners.add(listener);
  Disposer.register(
      parent,
      new Disposable() {
        @Override
        public void dispose() {
          myListeners.remove(listener);
        }
      });
}
@NotNull
private FSRecords.NameId[] persistAllChildren(
    @NotNull final VirtualFile file, final int id, @NotNull FSRecords.NameId[] current) {
  assert file != mySuperRoot;

  final NewVirtualFileSystem fs = replaceWithNativeFS(getDelegate(file));

  String[] delegateNames = VfsUtil.filterNames(fs.list(file));
  if (delegateNames.length == 0 && current.length > 0) {
    return current;
  }

  Set<String> toAdd = ContainerUtil.newHashSet(delegateNames);
  for (FSRecords.NameId nameId : current) {
    toAdd.remove(nameId.name);
  }

  final TIntArrayList childrenIds = new TIntArrayList(current.length + toAdd.size());
  final List<FSRecords.NameId> nameIds =
      ContainerUtil.newArrayListWithExpectedSize(current.length + toAdd.size());
  for (FSRecords.NameId nameId : current) {
    childrenIds.add(nameId.id);
    nameIds.add(nameId);
  }
  for (String newName : toAdd) {
    FakeVirtualFile child = new FakeVirtualFile(file, newName);
    FileAttributes attributes = fs.getAttributes(child);
    if (attributes != null) {
      int childId = createAndFillRecord(fs, child, id, attributes);
      childrenIds.add(childId);
      nameIds.add(new FSRecords.NameId(childId, FileNameCache.storeName(newName), newName));
    }
  }

  FSRecords.updateList(id, childrenIds.toNativeArray());
  setChildrenCached(id);

  return nameIds.toArray(new FSRecords.NameId[nameIds.size()]);
}
@Nullable("for boxed values which can't be compared by ==")
private Integer getOrCreateEqClassIndex(@NotNull DfaValue dfaValue) {
  int i = getEqClassIndex(dfaValue);
  if (i != -1) return i;
  if (!canBeInRelation(dfaValue)
      || !canBeReused(dfaValue)
          && !(((DfaBoxedValue) dfaValue).getWrappedValue() instanceof DfaConstValue)) {
    return null;
  }
  int freeIndex = myEqClasses.indexOf(null);
  int resultIndex = freeIndex >= 0 ? freeIndex : myEqClasses.size();
  EqClass aClass = new EqClass(myFactory);
  aClass.add(dfaValue.getID());

  if (freeIndex >= 0) {
    myEqClasses.set(freeIndex, aClass);
  } else {
    myEqClasses.add(aClass);
  }
  addToMap(dfaValue.getID(), resultIndex);

  return resultIndex;
}
private int getEqClassIndex(@NotNull final DfaValue dfaValue) {
  final int id = unwrap(dfaValue).getID();
  int[] classes = myIdToEqClassesIndices.get(id);

  int result = -1;
  if (classes != null) {
    for (int index : classes) {
      EqClass aClass = myEqClasses.get(index);
      if (!aClass.contains(dfaValue.getID())) continue;
      if (!canBeReused(dfaValue) && aClass.size() > 1) break;
      result = index;
      break;
    }
  }
  return result;
}
private boolean shouldMarkUnknown(@NotNull DfaVariableValue value) {
  int eqClassIndex = getEqClassIndex(value);
  if (eqClassIndex < 0) return false;

  EqClass eqClass = myEqClasses.get(eqClassIndex);
  if (eqClass == null) return false;
  if (eqClass.findConstant(true) != null) return true;

  for (UnorderedPair<EqClass> pair : getDistinctClassPairs()) {
    if (pair.first == eqClass && pair.second.findConstant(true) != null
        || pair.second == eqClass && pair.first.findConstant(true) != null) {
      return true;
    }
  }
  return false;
}
public void handleElement(Element e) {
  // create family
  if (e.getName().equals("family")) {
    try {
      lexicon.add(new Family(e));
    } catch (RuntimeException exc) {
      System.err.println("Skipping family: " + e.getAttributeValue("name"));
      System.err.println(exc.toString());
    }
  }
  // save distributive attributes
  else if (e.getName().equals("distributive-features")) distrElt = e;
  // save licensing features
  else if (e.getName().equals("licensing-features")) licensingElt = e;
  // save relation sort order
  else if (e.getName().equals("relation-sorting")) relationSortingElt = e;
}
@NotNull
private static List<VFileEvent> validateEvents(@NotNull List<VFileEvent> events) {
  final List<EventWrapper> deletionEvents = ContainerUtil.newArrayList();
  for (int i = 0, size = events.size(); i < size; i++) {
    final VFileEvent event = events.get(i);
    if (event instanceof VFileDeleteEvent && event.isValid()) {
      deletionEvents.add(new EventWrapper((VFileDeleteEvent) event, i));
    }
  }

  final TIntHashSet invalidIDs;
  if (deletionEvents.isEmpty()) {
    invalidIDs = EmptyIntHashSet.INSTANCE;
  } else {
    ContainerUtil.quickSort(deletionEvents, DEPTH_COMPARATOR);

    invalidIDs = new TIntHashSet(deletionEvents.size());
    final Set<VirtualFile> dirsToBeDeleted = new THashSet<VirtualFile>(deletionEvents.size());
    nextEvent:
    for (EventWrapper wrapper : deletionEvents) {
      final VirtualFile candidate = wrapper.event.getFile();
      VirtualFile parent = candidate;
      while (parent != null) {
        if (dirsToBeDeleted.contains(parent)) {
          invalidIDs.add(wrapper.id);
          continue nextEvent;
        }
        parent = parent.getParent();
      }

      if (candidate.isDirectory()) {
        dirsToBeDeleted.add(candidate);
      }
    }
  }

  final List<VFileEvent> filtered = new ArrayList<VFileEvent>(events.size() - invalidIDs.size());
  for (int i = 0, size = events.size(); i < size; i++) {
    final VFileEvent event = events.get(i);
    if (event.isValid() && !(event instanceof VFileDeleteEvent && invalidIDs.contains(i))) {
      filtered.add(event);
    }
  }
  return filtered;
}