Example no. 1
0
  public TIntObjectHashMap<TIntHashSet> resolve(
      DirectedGraph<Integer, RDFEdge> graph,
      Set<Set<Integer>> literalSubjectPairs,
      Document document,
      KnowledgeBase kb) {

    TIntObjectHashMap<TIntHashSet> resolvedSubjects = new TIntObjectHashMap<TIntHashSet>();

    // key 1 ("ham") will hold one randomly chosen subject per ambiguous group,
    // key 0 ("spam") the remaining candidates
    TIntHashSet ham = new TIntHashSet();
    TIntHashSet spam = new TIntHashSet();

    resolvedSubjects.put(0, spam);
    resolvedSubjects.put(1, ham);

    for (Set<Integer> c : literalSubjectPairs) {
      TIntHashSet subjects = getAmbiguousURIRefs(c);

      if (subjects.size() > 1) {
        ham.add(subjects.toArray()[new Random().nextInt(subjects.size())]);
        if (resolvedSubjects.size() < subjects.size()) {
          for (int s : subjects.toArray()) {
            if (!ham.contains(s)) {
              spam.add(s);
            }
          }
        }
      }
    }
    return resolvedSubjects;
  }
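The resolve method above keeps its two result buckets as Trove primitive int sets stored under the keys 0 and 1. Below is a minimal, self-contained sketch of that bucket pattern; the snippets do not show their imports, so it assumes the GNU Trove 2.x package layout (gnu.trove.*; in Trove 3 the same classes live under gnu.trove.map.hash and gnu.trove.set.hash), and the class and variable names are illustrative only.

import gnu.trove.TIntHashSet;
import gnu.trove.TIntObjectHashMap;

import java.util.Random;

public class BucketSketch {
  public static void main(String[] args) {
    // map of bucket id -> primitive int set, as in resolve() above
    TIntObjectHashMap<TIntHashSet> buckets = new TIntObjectHashMap<TIntHashSet>();
    TIntHashSet accepted = new TIntHashSet();
    TIntHashSet rejected = new TIntHashSet();
    buckets.put(1, accepted);
    buckets.put(0, rejected);

    // keep one candidate at random, put the rest into the other bucket
    int[] candidates = {7, 11, 42};
    accepted.add(candidates[new Random().nextInt(candidates.length)]);
    for (int c : candidates) {
      if (!accepted.contains(c)) {
        rejected.add(c);
      }
    }

    System.out.println("accepted: " + accepted.size() + ", rejected: " + rejected.size());
  }
}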
  // returns the forward reference ids if the file was processed successfully, or null if a write
  // action or dumb mode started
  private int[] processFile(
      @NotNull final VirtualFile file, int fileId, @NotNull final ProgressIndicator indicator) {
    final TIntHashSet forward;
    try {
      forward = calcForwardRefs(file, indicator);
    } catch (IndexNotReadyException e) {
      return null;
    } catch (ApplicationUtil.CannotRunReadActionException e) {
      return null;
    } catch (ProcessCanceledException e) {
      throw e;
    } catch (Exception e) {
      log(ExceptionUtil.getThrowableText(e));
      flushLog();
      return null;
    }

    int[] forwardIds = forward.toArray();
    fileIsResolved.set(fileId);
    logf(
        "  ---- " + file.getPresentableUrl() + " processed. forwardIds: " + toVfString(forwardIds));
    for (Listener listener : myListeners) {
      listener.fileResolved(file);
    }
    return forwardIds;
  }
  private void applyChildrenChangeEvents(VirtualFile parent, List<VFileEvent> events) {
    // check the parent before it is used below
    assert parent != null && parent != mySuperRoot;

    final NewVirtualFileSystem delegate = getDelegate(parent);
    TIntArrayList childrenIdsUpdated = new TIntArrayList();
    List<VirtualFile> childrenToBeUpdated = new SmartList<VirtualFile>();

    final int parentId = getFileId(parent);
    assert parentId != 0;
    TIntHashSet parentChildrenIds = new TIntHashSet(FSRecords.list(parentId));
    boolean hasRemovedChildren = false;

    for (VFileEvent event : events) {
      if (event instanceof VFileCreateEvent) {
        String name = ((VFileCreateEvent) event).getChildName();
        final VirtualFile fake = new FakeVirtualFile(parent, name);
        final FileAttributes attributes = delegate.getAttributes(fake);

        if (attributes != null) {
          final int childId = createAndFillRecord(delegate, fake, parentId, attributes);
          assert parent instanceof VirtualDirectoryImpl : parent;
          final VirtualDirectoryImpl dir = (VirtualDirectoryImpl) parent;
          VirtualFileSystemEntry child = dir.createChild(name, childId, dir.getFileSystem());
          childrenToBeUpdated.add(child);
          childrenIdsUpdated.add(childId);
          parentChildrenIds.add(childId);
        }
      } else if (event instanceof VFileDeleteEvent) {
        VirtualFile file = ((VFileDeleteEvent) event).getFile();
        if (!file.exists()) {
          LOG.error("Deleting a file, which does not exist: " + file.getPath());
          continue;
        }

        hasRemovedChildren = true;
        int id = getFileId(file);

        childrenToBeUpdated.add(file);
        childrenIdsUpdated.add(-id);
        parentChildrenIds.remove(id);
      }
    }

    FSRecords.updateList(parentId, parentChildrenIds.toArray());

    if (hasRemovedChildren) clearIdCache();
    VirtualDirectoryImpl parentImpl = (VirtualDirectoryImpl) parent;

    for (int i = 0, len = childrenIdsUpdated.size(); i < len; ++i) {
      final int childId = childrenIdsUpdated.get(i);
      final VirtualFile childFile = childrenToBeUpdated.get(i);

      if (childId > 0) {
        parentImpl.addChild((VirtualFileSystemEntry) childFile);
      } else {
        FSRecords.deleteRecordRecursively(-childId);
        parentImpl.removeChild(childFile);
        invalidateSubtree(childFile);
      }
    }
  }
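applyChildrenChangeEvents folds creations and deletions into a single TIntArrayList by storing the ids of deleted children negated; the sign of each entry then selects the action in the final loop. A small, self-contained sketch of just that encoding (GNU Trove assumed on the classpath; the ids and messages are illustrative):

import gnu.trove.TIntArrayList;

public class SignedIdSketch {
  public static void main(String[] args) {
    TIntArrayList changes = new TIntArrayList();
    changes.add(12);  // child with id 12 was created
    changes.add(-7);  // child with id 7 was deleted, stored negated

    for (int i = 0, len = changes.size(); i < len; ++i) {
      int id = changes.get(i);
      if (id > 0) {
        System.out.println("add child " + id);
      } else {
        System.out.println("remove child " + (-id));
      }
    }
  }
}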
      public void visitEnd() {
        final TIntHashSet s = myAnnotationArguments.get(myType);

        if (s == null) {
          myAnnotationArguments.put(myType, myUsedArguments);
        } else {
          s.retainAll(myUsedArguments.toArray());
        }
      }
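visitEnd above intersects the argument set already recorded for the annotation type with the arguments seen in the current visit via retainAll. A tiny sketch of that intersection semantics on TIntHashSet (GNU Trove assumed; the values are made up):

import gnu.trove.TIntHashSet;

import java.util.Arrays;

public class RetainAllSketch {
  public static void main(String[] args) {
    TIntHashSet recorded = new TIntHashSet();
    recorded.addAll(new int[] {1, 2, 3});
    TIntHashSet seenNow = new TIntHashSet();
    seenNow.addAll(new int[] {2, 3, 4});

    // retainAll(int[]) keeps only the values present in the given array,
    // so 'recorded' becomes the intersection {2, 3}
    recorded.retainAll(seenNow.toArray());

    int[] result = recorded.toArray();
    Arrays.sort(result);
    System.out.println(Arrays.toString(result)); // [2, 3]
  }
}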
 public boolean execute(final String a, final int b) {
   termids[i] = getTermId(a);
   tfs[i] = b;
   final TIntHashSet ids = term_blocks.get(a);
   blockfreqs[i] = ids.size();
   blockTotal += ids.size();
   final int[] bids = ids.toArray();
   Arrays.sort(bids);
   term2blockids.put(termids[i], bids);
   // System.err.println(a+": tid="+termids[i]+" tf="+tfs[i]+" bf="+blockfreqs[i]+" blocks="+Arrays.toString(bids));
   i++;
   return true;
 }
  @Override
  public TIntDoubleHashMap getRating(
      Document document, List<TokenSequence<SemanticEntity>> entities) {

    DirectedGraph<Integer, RDFEdge> graph = document.getGraph();

    TIntHashSet nodes = new TIntHashSet();
    for (TokenSequence<SemanticEntity> ts : entities) {
      nodes.add(ts.getValue().getSubjectIndex());
    }

    final TIntDoubleHashMap m = new TIntDoubleHashMap();

    for (int node : nodes.toArray()) {
      m.put(node, Math.min(graph.getOutEdges(node).size(), graph.getInEdges(node).size()));
    }
    return m;
  }
Example no. 7
0
 public void processTerm(String t) {
   // null means the term has been filtered out (e.g. stopwords)
   if (t != null) {
     // add term to document posting list
     for (String fieldName : termFields) {
       int tmp = fieldNames.get(fieldName);
       if (tmp > 0) {
         fields.add(tmp - 1);
       }
     }
     if (ELSE_ENABLED && fields.size() == 0) {
       fields.add(ELSE_FIELD_ID);
     }
     ((BlockFieldDocumentPostingList) termsInDocument).insert(t, fields.toArray(), blockId);
     numOfTokensInDocument++;
     if (++numOfTokensInBlock >= BLOCK_SIZE && blockId < MAX_BLOCKS) {
       numOfTokensInBlock = 0;
       blockId++;
     }
     fields.clear();
   }
 }
  @Override
  protected void doPut(Key key, ValueContainer<Value> container) throws IOException {
    synchronized (myEnumerator) {
      ChangeTrackingValueContainer<Value> valueContainer =
          (ChangeTrackingValueContainer<Value>) container;
      if (!valueContainer.needsCompacting()) {
        final BufferExposingByteArrayOutputStream bytes = new BufferExposingByteArrayOutputStream();
        //noinspection IOResourceOpenedButNotSafelyClosed
        final DataOutputStream _out = new DataOutputStream(bytes);
        final TIntHashSet set = valueContainer.getInvalidated();
        if (set.size() > 0) {
          for (int inputId : set.toArray()) {
            ValueContainerExternalizer.saveInvalidateCommand(_out, inputId);
          }
        }
        final ValueContainer<Value> toRemove = valueContainer.getRemovedDelta();
        if (toRemove.size() > 0) {
          myValueContainerExternalizer.saveAsRemoved(_out, toRemove);
        }

        final ValueContainer<Value> toAppend = valueContainer.getAddedDelta();
        if (toAppend.size() > 0) {
          myValueContainerExternalizer.save(_out, toAppend);
        }

        appendData(
            key,
            new PersistentHashMap.ValueDataAppender() {
              public void append(final DataOutput out) throws IOException {
                out.write(bytes.getInternalBuffer(), 0, bytes.size());
              }
            });
      } else {
        // rewrite the value container for defragmentation
        super.doPut(key, valueContainer);
      }
    }
  }
Example no. 9
0
  public UNVDomain(UNVParser parser, int id, Color color) {
    this.id = id;
    this.color = color;
    TIntHashSet nodeset = new TIntHashSet();
    tria3 = parser.getTria3FromGroup(id);
    if (tria3.length > 0) {
      nodeset.ensureCapacity(tria3.length);
      nodeset.addAll(tria3);
    }
    quad4 = parser.getQuad4FromGroup(id);
    if (quad4.length > 0) {
      nodeset.ensureCapacity(nodeset.size() + quad4.length);
      nodeset.addAll(quad4);
    }
    beam2 = parser.getBeam2FromGroup(id);
    if (beam2.length > 0) {
      nodeset.ensureCapacity(nodeset.size() + beam2.length);
      nodeset.addAll(beam2);
    }
    tria6 = parser.getTria6FromGroup(id);
    if (tria6.length > 0) {
      nodeset.ensureCapacity(nodeset.size() + tria6.length);
      nodeset.addAll(tria6);
    }

    int[] nodesID = nodeset.toArray();
    nodes = readNodes(nodesID, parser.getNodesCoordinates());

    // Compute inverse relation
    TIntIntHashMap map = new TIntIntHashMap(nodesID.length);
    for (int i = 0; i < nodesID.length; i++) map.put(nodesID[i], i);

    for (int i = 0; i < tria3.length; i++) tria3[i] = map.get(tria3[i]);
    for (int i = 0; i < quad4.length; i++) quad4[i] = map.get(quad4[i]);
    for (int i = 0; i < beam2.length; i++) beam2[i] = map.get(beam2[i]);
    for (int i = 0; i < tria6.length; i++) tria6[i] = map.get(tria6[i]);
  }
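The constructor above compacts the sparse UNV node ids into a dense 0..n-1 range by building an inverse map and rewriting the element connectivity through it. The same renumbering trick in isolation (GNU Trove assumed; the input arrays are made up for illustration):

import gnu.trove.TIntIntHashMap;

import java.util.Arrays;

public class CompactIdsSketch {
  public static void main(String[] args) {
    int[] nodeIds = {105, 17, 9342};            // original, sparse node ids
    int[] elementNodes = {9342, 105, 9342, 17}; // connectivity expressed in original ids

    // inverse relation: original id -> position in the compact node array
    TIntIntHashMap map = new TIntIntHashMap(nodeIds.length);
    for (int i = 0; i < nodeIds.length; i++) map.put(nodeIds[i], i);

    // rewrite the connectivity in place so it indexes the compact array
    for (int i = 0; i < elementNodes.length; i++) elementNodes[i] = map.get(elementNodes[i]);

    System.out.println(Arrays.toString(elementNodes)); // [2, 0, 2, 1]
  }
}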
 /**
  * @param nested the nested term
  * @return ids of the terms in which the provided term is nested, or an empty array if none
  */
 public int[] getNestsOf(String nested) {
   TIntHashSet res = _termNest.getNestIdsOf(nested);
   return res == null ? new int[0] : res.toArray();
 }
  private static void findReadsBeforeWrites(
      Instruction[] flow,
      TIntHashSet[] definitelyAssigned,
      List<ReadWriteVariableInstruction> result,
      TObjectIntHashMap<String> namesIndex,
      int[] postorder,
      int[] invpostorder,
      boolean onlyFirstRead) {
    // skip instructions that are not reachable from the start
    int start = ArrayUtil.find(invpostorder, 0);

    for (int i = start; i < flow.length; i++) {
      int j = invpostorder[i];
      Instruction curr = flow[j];
      if (curr instanceof ReadWriteVariableInstruction) {
        ReadWriteVariableInstruction rw = (ReadWriteVariableInstruction) curr;
        int name = namesIndex.get(rw.getVariableName());
        TIntHashSet vars = definitelyAssigned[j];
        if (rw.isWrite()) {
          if (vars == null) {
            vars = new TIntHashSet();
            definitelyAssigned[j] = vars;
          }
          vars.add(name);
        } else {
          if (vars == null || !vars.contains(name)) {
            result.add(rw);
            if (onlyFirstRead) {
              if (vars == null) {
                vars = new TIntHashSet();
                definitelyAssigned[j] = vars;
              }
              vars.add(name);
            }
          }
        }
      }

      for (Instruction succ : curr.allSuccessors()) {
        if (postorder[succ.num()] > postorder[curr.num()]) {
          TIntHashSet currDefinitelyAssigned = definitelyAssigned[curr.num()];
          TIntHashSet succDefinitelyAssigned = definitelyAssigned[succ.num()];
          if (currDefinitelyAssigned != null) {
            int[] currArray = currDefinitelyAssigned.toArray();
            if (succDefinitelyAssigned == null) {
              succDefinitelyAssigned = new TIntHashSet();
              succDefinitelyAssigned.addAll(currArray);
              definitelyAssigned[succ.num()] = succDefinitelyAssigned;
            } else {
              succDefinitelyAssigned.retainAll(currArray);
            }
          } else {
            if (succDefinitelyAssigned != null) {
              succDefinitelyAssigned.clear();
            } else {
              succDefinitelyAssigned = new TIntHashSet();
              definitelyAssigned[succ.num()] = succDefinitelyAssigned;
            }
          }
        }
      }
    }
  }
Example no. 12
0
 /** Initializes the iterator over a frozen snapshot of the indices; later changes to the set are not seen and the iterator cannot be reused. */
 public void init(final TIntHashSet theIndices, final DeterministicIndicedList<E> theElements) {
   super.init();
   current = 0;
   indices = theIndices.toArray();
   elements = theElements;
 }
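Because init copies the indices with toArray, the iterator walks a frozen snapshot of the set. A tiny sketch of that behaviour (GNU Trove assumed; the values are made up):

import gnu.trove.TIntHashSet;

public class SnapshotSketch {
  public static void main(String[] args) {
    TIntHashSet live = new TIntHashSet();
    live.addAll(new int[] {1, 2, 3});

    // freeze the current contents; later mutations of 'live' are not seen here
    int[] snapshot = live.toArray();
    live.add(99);

    int current = 0;
    while (current < snapshot.length) {
      System.out.println(snapshot[current++]); // prints only the original three values
    }
  }
}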