private void applyChildrenChangeEvents(VirtualFile parent, List<VFileEvent> events) {
    assert parent != null && parent != mySuperRoot;
    final NewVirtualFileSystem delegate = getDelegate(parent);
    TIntArrayList childrenIdsUpdated = new TIntArrayList();
    List<VirtualFile> childrenToBeUpdated = new SmartList<VirtualFile>();

    final int parentId = getFileId(parent);
    assert parentId != 0;
    TIntHashSet parentChildrenIds = new TIntHashSet(FSRecords.list(parentId));
    boolean hasRemovedChildren = false;

    for (VFileEvent event : events) {
      if (event instanceof VFileCreateEvent) {
        String name = ((VFileCreateEvent) event).getChildName();
        final VirtualFile fake = new FakeVirtualFile(parent, name);
        final FileAttributes attributes = delegate.getAttributes(fake);

        if (attributes != null) {
          final int childId = createAndFillRecord(delegate, fake, parentId, attributes);
          assert parent instanceof VirtualDirectoryImpl : parent;
          final VirtualDirectoryImpl dir = (VirtualDirectoryImpl) parent;
          VirtualFileSystemEntry child = dir.createChild(name, childId, dir.getFileSystem());
          childrenToBeUpdated.add(child);
          childrenIdsUpdated.add(childId);
          parentChildrenIds.add(childId);
        }
      } else if (event instanceof VFileDeleteEvent) {
        VirtualFile file = ((VFileDeleteEvent) event).getFile();
        if (!file.exists()) {
          LOG.error("Deleting a file, which does not exist: " + file.getPath());
          continue;
        }

        hasRemovedChildren = true;
        int id = getFileId(file);

        childrenToBeUpdated.add(file);
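        // negate the id so the apply loop below can tell removals (negative) from additions (positive)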
        childrenIdsUpdated.add(-id);
        parentChildrenIds.remove(id);
      }
    }

    FSRecords.updateList(parentId, parentChildrenIds.toArray());

    if (hasRemovedChildren) clearIdCache();
    VirtualDirectoryImpl parentImpl = (VirtualDirectoryImpl) parent;

    for (int i = 0, len = childrenIdsUpdated.size(); i < len; ++i) {
      final int childId = childrenIdsUpdated.get(i);
      final VirtualFile childFile = childrenToBeUpdated.get(i);

      if (childId > 0) {
        parentImpl.addChild((VirtualFileSystemEntry) childFile);
      } else {
        FSRecords.deleteRecordRecursively(-childId);
        parentImpl.removeChild(childFile);
        invalidateSubtree(childFile);
      }
    }
  }
Example #2
 // get licensing features, with appropriate defaults
 @SuppressWarnings("unchecked")
 private void loadLicensingFeatures(Element licensingElt) {
   List<LicensingFeature> licensingFeats = new ArrayList<LicensingFeature>();
   boolean containsLexFeat = false;
   if (licensingElt != null) {
     for (Iterator<Element> it = licensingElt.getChildren("feat").iterator(); it.hasNext(); ) {
       Element featElt = it.next();
       String attr = featElt.getAttributeValue("attr");
       if (attr.equals("lex")) containsLexFeat = true;
       String val = featElt.getAttributeValue("val");
       List<String> alsoLicensedBy = null;
       String alsoVals = featElt.getAttributeValue("also-licensed-by");
       if (alsoVals != null) {
         alsoLicensedBy = Arrays.asList(alsoVals.split("\\s+"));
       }
       boolean licenseEmptyCats = true;
       boolean licenseMarkedCats = false;
       boolean instantiate = true;
       byte loc = LicensingFeature.BOTH;
       String lmc = featElt.getAttributeValue("license-marked-cats");
       if (lmc != null) {
         licenseMarkedCats = Boolean.valueOf(lmc).booleanValue();
         // change defaults
         licenseEmptyCats = false;
         loc = LicensingFeature.TARGET_ONLY;
         instantiate = false;
       }
       String lec = featElt.getAttributeValue("license-empty-cats");
       if (lec != null) {
         licenseEmptyCats = Boolean.valueOf(lec).booleanValue();
       }
       String inst = featElt.getAttributeValue("instantiate");
       if (inst != null) {
         instantiate = Boolean.valueOf(inst).booleanValue();
       }
       String locStr = featElt.getAttributeValue("location");
       if (locStr != null) {
         if (locStr.equals("target-only")) loc = LicensingFeature.TARGET_ONLY;
         if (locStr.equals("args-only")) loc = LicensingFeature.ARGS_ONLY;
         if (locStr.equals("both")) loc = LicensingFeature.BOTH;
       }
       licensingFeats.add(
           new LicensingFeature(
               attr, val, alsoLicensedBy, licenseEmptyCats, licenseMarkedCats, instantiate, loc));
     }
   }
   if (!containsLexFeat) {
     licensingFeats.add(LicensingFeature.defaultLexFeature);
   }
   _licensingFeatures = new LicensingFeature[licensingFeats.size()];
   licensingFeats.toArray(_licensingFeatures);
 }
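For reference, a sketch of the kind of licensing-features element the method above expects, written here as a Java string. The attribute names are the ones the parsing code reads; the concrete values are illustrative and not taken from any real grammar.

 // Illustrative input only: attribute names match those read above, values are made up.
 String licensingXml =
     "<licensing-features>"
         + "<feat attr=\"lex\" val=\"+\" location=\"target-only\"/>"
         + "<feat attr=\"mod\" license-empty-cats=\"false\" instantiate=\"false\" also-licensed-by=\"lex num\"/>"
         + "</licensing-features>";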
Example #3
  // returns a macro adder for the given morph item
  private MacroAdder getMacAdder(MorphItem mi) {

    // check map
    MacroAdder retval = macAdderMap.get(mi);
    if (retval != null) return retval;

    // set up macro adder
    IntHashSetMap macrosFromLex = new IntHashSetMap();
    String[] newMacroNames = mi.getMacros();
    List<MacroItem> macroItems = new ArrayList<MacroItem>();
    for (int i = 0; i < newMacroNames.length; i++) {
      Set<FeatureStructure> featStrucs = (Set<FeatureStructure>) _macros.get(newMacroNames[i]);
      if (featStrucs != null) {
        for (Iterator<FeatureStructure> fsIt = featStrucs.iterator(); fsIt.hasNext(); ) {
          FeatureStructure fs = fsIt.next();
          macrosFromLex.put(fs.getIndex(), fs);
        }
      }
      MacroItem macroItem = _macroItems.get(newMacroNames[i]);
      if (macroItem != null) {
        macroItems.add(macroItem);
      } else {
        // should be checked earlier too
        System.err.println(
            "Warning: macro " + newMacroNames[i] + " not found for word '" + mi.getWord() + "'");
      }
    }
    retval = new MacroAdder(macrosFromLex, macroItems);

    // update map and return
    macAdderMap.put(mi, retval);
    return retval;
  }
Example #4
  @NotNull
  private static List<VFileEvent> validateEvents(@NotNull List<VFileEvent> events) {
    final List<EventWrapper> deletionEvents = ContainerUtil.newArrayList();
    for (int i = 0, size = events.size(); i < size; i++) {
      final VFileEvent event = events.get(i);
      if (event instanceof VFileDeleteEvent && event.isValid()) {
        deletionEvents.add(new EventWrapper((VFileDeleteEvent) event, i));
      }
    }

    final TIntHashSet invalidIDs;
    if (deletionEvents.isEmpty()) {
      invalidIDs = EmptyIntHashSet.INSTANCE;
    } else {
      ContainerUtil.quickSort(deletionEvents, DEPTH_COMPARATOR);

      invalidIDs = new TIntHashSet(deletionEvents.size());
      final Set<VirtualFile> dirsToBeDeleted = new THashSet<VirtualFile>(deletionEvents.size());
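      // a deletion event is redundant if the file lies under a directory that is itself being deleted;
      // record its index as invalid so it gets filtered out below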
      nextEvent:
      for (EventWrapper wrapper : deletionEvents) {
        final VirtualFile candidate = wrapper.event.getFile();
        VirtualFile parent = candidate;
        while (parent != null) {
          if (dirsToBeDeleted.contains(parent)) {
            invalidIDs.add(wrapper.id);
            continue nextEvent;
          }
          parent = parent.getParent();
        }

        if (candidate.isDirectory()) {
          dirsToBeDeleted.add(candidate);
        }
      }
    }

    final List<VFileEvent> filtered = new ArrayList<VFileEvent>(events.size() - invalidIDs.size());
    for (int i = 0, size = events.size(); i < size; i++) {
      final VFileEvent event = events.get(i);
      if (event.isValid() && !(event instanceof VFileDeleteEvent && invalidIDs.contains(i))) {
        filtered.add(event);
      }
    }
    return filtered;
  }
 @NotNull
 private static List<VirtualFile> toVf(@NotNull int[] ids) {
   List<VirtualFile> res = new ArrayList<VirtualFile>();
   for (int id : ids) {
     VirtualFile file = PersistentFS.getInstance().findFileById(id);
     if (file != null) {
       res.add(file);
     }
   }
   return res;
 }
Example #6
 public void handleElement(Element e) {
   // create morph item
   if (e.getName().equals("entry")) {
     try {
       morphItems.add(new MorphItem(e));
     } catch (RuntimeException exc) {
       System.err.println("Skipping morph item: " + e.getAttributeValue("word"));
       System.err.println(exc.toString());
     }
   }
   // create macro item
   else if (e.getName().equals("macro")) {
     try {
       macroItems.add(new MacroItem(e));
     } catch (RuntimeException exc) {
       System.err.println("Skipping macro item: " + e.getAttributeValue("name"));
       System.err.println(exc.toString());
     }
   }
 }
 @Override
 public void addListener(@NotNull Disposable parent, @NotNull final Listener listener) {
   myListeners.add(listener);
   Disposer.register(
       parent,
       new Disposable() {
         @Override
         public void dispose() {
           myListeners.remove(listener);
         }
       });
 }
Example #8
 /**
  * For a string of 1 or more surface words, return all of the lexical entries for each word as a
  * list of sign hashes. Tokenization is performed using the configured tokenizer.
  *
  * @param s the words in string format
  * @return a list of sign hashes
  * @exception LexException thrown if a word is not found in the lexicon
  */
 public List<SignHash> getEntriesFromWords(String s) throws LexException {
   List<SignHash> entries = new ArrayList<SignHash>();
   List<Word> words = tokenizer.tokenize(s);
   for (Iterator<Word> it = words.iterator(); it.hasNext(); ) {
     Word w = it.next();
     SignHash signs = getSignsFromWord(w);
     if (signs.size() == 0) {
       throw new LexException("Word not in lexicon: \"" + w + "\"");
     }
     entries.add(signs);
   }
   return entries;
 }
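A minimal usage sketch for the method above, assuming an already-initialized lexicon object; the variable name and the sample phrase are illustrative only.

 // Hypothetical caller: `lexicon` stands in for an initialized instance of the class above.
 try {
   List<SignHash> entries = lexicon.getEntriesFromWords("the quick test");
   for (SignHash signs : entries) {
     System.out.println("found " + signs.size() + " signs for a word");
   }
 } catch (LexException e) {
   System.err.println("lookup failed: " + e.getMessage());
 }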
  @NotNull
  private FSRecords.NameId[] persistAllChildren(
      @NotNull final VirtualFile file, final int id, @NotNull FSRecords.NameId[] current) {
    assert file != mySuperRoot;
    final NewVirtualFileSystem fs = replaceWithNativeFS(getDelegate(file));

    String[] delegateNames = VfsUtil.filterNames(fs.list(file));
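    // the delegate reports no children while some are already recorded; keep the cached list as-is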
    if (delegateNames.length == 0 && current.length > 0) {
      return current;
    }

    Set<String> toAdd = ContainerUtil.newHashSet(delegateNames);
    for (FSRecords.NameId nameId : current) {
      toAdd.remove(nameId.name);
    }

    final TIntArrayList childrenIds = new TIntArrayList(current.length + toAdd.size());
    final List<FSRecords.NameId> nameIds =
        ContainerUtil.newArrayListWithExpectedSize(current.length + toAdd.size());
    for (FSRecords.NameId nameId : current) {
      childrenIds.add(nameId.id);
      nameIds.add(nameId);
    }
    for (String newName : toAdd) {
      FakeVirtualFile child = new FakeVirtualFile(file, newName);
      FileAttributes attributes = fs.getAttributes(child);
      if (attributes != null) {
        int childId = createAndFillRecord(fs, child, id, attributes);
        childrenIds.add(childId);
        nameIds.add(new FSRecords.NameId(childId, FileNameCache.storeName(newName), newName));
      }
    }

    FSRecords.updateList(id, childrenIds.toNativeArray());
    setChildrenCached(id);

    return nameIds.toArray(new FSRecords.NameId[nameIds.size()]);
  }
Example #10
  @RequiredWriteAction
  @Override
  public void processEvents(@NotNull List<VFileEvent> events) {
    ApplicationManager.getApplication().assertWriteAccessAllowed();

    List<VFileEvent> validated = validateEvents(events);

    BulkFileListener publisher = myEventBus.syncPublisher(VirtualFileManager.VFS_CHANGES);
    publisher.before(validated);

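    // group create/delete events by parent directory so each directory's child list is updated in a single pass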
    THashMap<VirtualFile, List<VFileEvent>> parentToChildrenEventsChanges = null;
    for (VFileEvent event : validated) {
      VirtualFile changedParent = null;
      if (event instanceof VFileCreateEvent) {
        changedParent = ((VFileCreateEvent) event).getParent();
        ((VFileCreateEvent) event).resetCache();
      } else if (event instanceof VFileDeleteEvent) {
        changedParent = ((VFileDeleteEvent) event).getFile().getParent();
      }

      if (changedParent != null) {
        if (parentToChildrenEventsChanges == null)
          parentToChildrenEventsChanges = new THashMap<VirtualFile, List<VFileEvent>>();
        List<VFileEvent> parentChildrenChanges = parentToChildrenEventsChanges.get(changedParent);
        if (parentChildrenChanges == null) {
          parentToChildrenEventsChanges.put(
              changedParent, parentChildrenChanges = new SmartList<VFileEvent>());
        }
        parentChildrenChanges.add(event);
      } else {
        applyEvent(event);
      }
    }

    if (parentToChildrenEventsChanges != null) {
      parentToChildrenEventsChanges.forEachEntry(
          new TObjectObjectProcedure<VirtualFile, List<VFileEvent>>() {
            @Override
            public boolean execute(VirtualFile parent, List<VFileEvent> childrenEvents) {
              applyChildrenChangeEvents(parent, childrenEvents);
              return true;
            }
          });
      parentToChildrenEventsChanges.clear();
    }

    publisher.after(validated);
  }
Example #11
 public void handleElement(Element e) {
   // create family
   if (e.getName().equals("family")) {
     try {
       lexicon.add(new Family(e));
     } catch (RuntimeException exc) {
       System.err.println("Skipping family: " + e.getAttributeValue("name"));
       System.err.println(exc.toString());
     }
   }
   // save distributive attributes
   else if (e.getName().equals("distributive-features")) distrElt = e;
   // save licensing features
   else if (e.getName().equals("licensing-features")) licensingElt = e;
   // save relation sort order
   else if (e.getName().equals("relation-sorting")) relationSortingElt = e;
 }
Example #12
  @Override
  @NotNull
  public VirtualFile[] getLocalRoots() {
    List<VirtualFile> roots = ContainerUtil.newSmartList();

    myRootsLock.readLock().lock();
    try {
      for (NewVirtualFile root : myRoots.values()) {
        if (root.isInLocalFileSystem() && !(root.getFileSystem() instanceof TempFileSystem)) {
          roots.add(root);
        }
      }
    } finally {
      myRootsLock.readLock().unlock();
    }

    return VfsUtilCore.toVirtualFileArray(roots);
  }
Example #13
  @Override
  @NotNull
  public VirtualFile[] getRoots(@NotNull final NewVirtualFileSystem fs) {
    final List<VirtualFile> roots = new ArrayList<VirtualFile>();

    myRootsLock.readLock().lock();
    try {
      for (NewVirtualFile root : myRoots.values()) {
        if (root.getFileSystem() == fs) {
          roots.add(root);
        }
      }
    } finally {
      myRootsLock.readLock().unlock();
    }

    return VfsUtilCore.toVirtualFileArray(roots);
  }
Example #14
  @SuppressWarnings("HardCodedStringLiteral")
  public String toString() {
    StringBuilder result = new StringBuilder();
    result.append('<');
    if (myEphemeral) {
      result.append("ephemeral, ");
    }

    for (EqClass set : getNonTrivialEqClasses()) {
      result.append(set);
    }

    if (!myDistinctClasses.isEmpty()) {
      result.append("\n  distincts: ");
      List<String> distincts = new ArrayList<>();
      for (UnorderedPair<EqClass> pair : getDistinctClassPairs()) {
        distincts.add("{" + pair.first + ", " + pair.second + "}");
      }
      Collections.sort(distincts);
      result.append(StringUtil.join(distincts, " "));
    }

    if (!myStack.isEmpty()) {
      result.append("\n  stack: ").append(StringUtil.join(myStack, ","));
    }
    if (!myVariableStates.isEmpty()) {
      result.append("\n  vars: ");
      for (Map.Entry<DfaVariableValue, DfaVariableState> entry : myVariableStates.entrySet()) {
        result
            .append("[")
            .append(entry.getKey())
            .append("->")
            .append(entry.getValue())
            .append("] ");
      }
    }
    if (!myUnknownVariables.isEmpty()) {
      result.append("\n  unknowns: ").append(new HashSet<>(myUnknownVariables));
    }
    result.append('>');
    return result.toString();
  }
 @Override
 public boolean queue(@NotNull Collection<VirtualFile> files, @NotNull Object reason) {
   if (files.isEmpty()) {
     return false;
   }
   boolean queued = false;
   List<VirtualFile> added = new ArrayList<VirtualFile>(files.size());
   for (VirtualFile file : files) {
     boolean wasAdded = queueIfNeeded(file, myProject);
     if (wasAdded) {
       added.add(file);
     }
     queued |= wasAdded;
   }
   if (queued) {
     log("Queued to resolve (from " + reason + "): " + toVfString(added));
     flushLog();
   }
   return queued;
 }
Example #16
  @Nullable("for boxed values which can't be compared by ==")
  private Integer getOrCreateEqClassIndex(@NotNull DfaValue dfaValue) {
    int i = getEqClassIndex(dfaValue);
    if (i != -1) return i;
    if (!canBeInRelation(dfaValue)
        || !canBeReused(dfaValue)
            && !(((DfaBoxedValue) dfaValue).getWrappedValue() instanceof DfaConstValue)) {
      return null;
    }
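    // reuse a freed slot in myEqClasses when one exists, otherwise append a new class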
    int freeIndex = myEqClasses.indexOf(null);
    int resultIndex = freeIndex >= 0 ? freeIndex : myEqClasses.size();
    EqClass aClass = new EqClass(myFactory);
    aClass.add(dfaValue.getID());

    if (freeIndex >= 0) {
      myEqClasses.set(freeIndex, aClass);
    } else {
      myEqClasses.add(aClass);
    }
    addToMap(dfaValue.getID(), resultIndex);

    return resultIndex;
  }
Example #17
  /** Loads the lexicon and morph files. */
  public void init(URL lexiconUrl, URL morphUrl) throws IOException {

    List<Family> lexicon = null;
    List<MorphItem> morph = null;
    List<MacroItem> macroModel = null;

    // load category families (lexicon), morph forms and macros
    lexicon = getLexicon(lexiconUrl);
    Pair<List<MorphItem>, List<MacroItem>> morphInfo = getMorph(morphUrl);
    morph = morphInfo.a;
    macroModel = morphInfo.b;

    // index words; also index stems to words, as default preds
    // store indexed coarticulation attrs too
    _words = new GroupMap<Word, MorphItem>();
    _predToWords = new GroupMap<String, Word>();
    _coartAttrs = new HashSet<String>();
    _indexedCoartAttrs = new HashSet<String>();
    for (MorphItem morphItem : morph) {
      Word surfaceWord = morphItem.getSurfaceWord();
      _words.put(surfaceWord, morphItem);
      _predToWords.put(morphItem.getWord().getStem(), surfaceWord);
      if (morphItem.isCoart()) {
        Word indexingWord = morphItem.getCoartIndexingWord();
        _words.put(indexingWord, morphItem);
        Pair<String, String> first = indexingWord.getSurfaceAttrValPairs().next();
        _indexedCoartAttrs.add(first.a);
        for (Iterator<Pair<String, String>> it = surfaceWord.getSurfaceAttrValPairs();
            it.hasNext(); ) {
          Pair<String, String> p = it.next();
          _coartAttrs.add(p.a);
        }
      }
    }

    // index entries based on stem+pos
    _stems = new GroupMap<String, Object>();
    _posToEntries = new GroupMap<String, EntriesItem[]>();
    // index entries by supertag+pos, for supertagging
    _stagToEntries = new GroupMap<String, EntriesItem>();
    // also index rels and coart rels to preds
    _relsToPreds = new GroupMap<String, String>();
    _coartRelsToPreds = new GroupMap<String, String>();
    // and gather list of attributes used per atomic category type
    _catsToAttrs = new GroupMap<String, String>();
    _lfAttrs = new HashSet<String>();
    // and remember family and ent, names, for checking excluded list on morph items
    HashSet<String> familyAndEntryNames = new HashSet<String>();

    // index each family
    for (Family family : lexicon) {

      familyAndEntryNames.add(family.getName());
      EntriesItem[] entries = family.getEntries();
      DataItem[] data = family.getData();

      // for generic use when we get an unknown stem
      // from the morphological analyzer
      if (!family.isClosed()) {
        _posToEntries.put(family.getPOS(), entries);
      }

      // scan through entries
      for (int j = 0; j < entries.length; j++) {
        // index
        EntriesItem eItem = entries[j];
        _stagToEntries.put(eItem.getSupertag() + family.getPOS(), eItem);
        if (eItem.getStem().length() > 0) {
          _stems.put(eItem.getStem() + family.getPOS(), eItem);
        }
        try {
          // gather features
          eItem.getCat().forall(gatherAttrs);
          // record names
          familyAndEntryNames.add(eItem.getName());
          familyAndEntryNames.add(eItem.getQualifiedName());
        } catch (RuntimeException exc) {
          System.err.println("exception for: " + family.getName() + ": " + exc);
        }
      }

      // scan through data
      for (int j = 0; j < data.length; j++) {
        DataItem dItem = data[j];
        _stems.put(
            dItem.getStem() + family.getPOS(), new Pair<DataItem, EntriesItem[]>(dItem, entries));
        // index non-default preds to words
        if (!dItem.getStem().equals(dItem.getPred())) {
          Collection<Word> words = (Collection<Word>) _predToWords.get(dItem.getStem());
          if (words == null) {
            if (!openlex) {
              System.out.print("Warning: couldn't find words for pred '");
              System.out.println(dItem.getPred() + "' with stem '" + dItem.getStem() + "'");
            }
          } else {
            for (Iterator<Word> it = words.iterator(); it.hasNext(); ) {
              _predToWords.put(dItem.getPred(), it.next());
            }
          }
        }
      }

      // index rels to preds
      // nb: this covers relational (eg @x<GenRel>e) and featural (eg @e<tense>past)
      //     elementary predications
      List<String> indexRels = new ArrayList<String>(3);
      String familyIndexRel = family.getIndexRel();
      if (familyIndexRel.length() > 0) {
        indexRels.add(familyIndexRel);
      }
      for (int j = 0; j < entries.length; j++) {
        EntriesItem eItem = entries[j];
        String indexRel = eItem.getIndexRel();
        if (indexRel.length() > 0 && !indexRel.equals(familyIndexRel)) {
          indexRels.add(indexRel);
        }
      }
      for (Iterator<String> it = indexRels.iterator(); it.hasNext(); ) {
        String indexRel = it.next();
        // nb: not indexing on entries items, b/c some stems are still defaults
        for (int j = 0; j < data.length; j++) {
          DataItem dItem = data[j];
          _relsToPreds.put(indexRel, dItem.getPred());
        }
      }

      // index coart rels (features, really) to preds
      String coartRel = family.getCoartRel();
      if (coartRel.length() > 0) {
        for (int j = 0; j < data.length; j++) {
          _coartRelsToPreds.put(coartRel, data[j].getPred());
        }
      }
    }

    // index the macros
    _macros = new GroupMap<String, FeatureStructure>();
    // nb: could just index MacroItem objects for feature structures too;
    //     this might be a bit cleaner, but life is short
    _macroItems = new HashMap<String, MacroItem>();
    for (MacroItem mi : macroModel) {
      String macName = mi.getName();
      FeatureStructure[] specs = mi.getFeatureStructures();
      for (int j = 0; j < specs.length; j++) {
        _macros.put(macName, specs[j]);
      }
      // this is for handling LF part of macros
      _macroItems.put(macName, mi);
    }

    // with morph items, check POS, macro names, excluded list for xref
    for (MorphItem morphItem : morph) {
      Word w = morphItem.getWord();
      if (!openlex
          && !_stems.containsKey(w.getStem() + w.getPOS())
          && !_posToEntries.containsKey(w.getPOS())) {
        System.err.println(
            "Warning: no entries for stem '"
                + w.getStem()
                + "' and POS '"
                + w.getPOS()
                + "' found for word '"
                + w
                + "'");
      }
      String[] macroNames = morphItem.getMacros();
      for (int j = 0; j < macroNames.length; j++) {
        if (!_macroItems.containsKey(macroNames[j])) {
          System.err.println(
              "Warning: macro "
                  + macroNames[j]
                  + " not found for word '"
                  + morphItem.getWord()
                  + "'");
        }
      }
      String[] excludedNames = morphItem.getExcluded();
      for (int j = 0; j < excludedNames.length; j++) {
        if (!familyAndEntryNames.contains(excludedNames[j])) {
          System.err.println(
              "Warning: excluded family or entry '"
                  + excludedNames[j]
                  + "' not found for word '"
                  + morphItem.getWord()
                  + "'");
        }
      }
    }
  }