@NotNull private Pair<TIntHashSet, Integer> findRowsToSelectAndScroll( @NotNull GraphTableModel model, @NotNull VisibleGraph<Integer> visibleGraph) { TIntHashSet rowsToSelect = new TIntHashSet(); if (model.getRowCount() == 0) { // this should have been covered by facade.getVisibleCommitCount, // but if the table is empty (no commits match the filter), the GraphFacade is not updated, // because it can't handle it // => it has previous values set. return Pair.create(rowsToSelect, null); } Integer rowToScroll = null; for (int row = 0; row < visibleGraph.getVisibleCommitCount() && (rowsToSelect.size() < mySelectedCommits.size() || rowToScroll == null); row++) { // stop iterating if found all hashes int commit = visibleGraph.getRowInfo(row).getCommit(); if (mySelectedCommits.contains(commit)) { rowsToSelect.add(row); } if (myVisibleSelectedCommit != null && myVisibleSelectedCommit == commit) { rowToScroll = row; } } return Pair.create(rowsToSelect, rowToScroll); }
/**
 * Partitions ambiguous URI references into two buckets keyed {@code 0} ("spam") and {@code 1}
 * ("ham"): for each ambiguous set one randomly chosen subject goes to ham, and — when the set is
 * large enough — the remaining ones go to spam.
 *
 * <p>NOTE(review): {@code graph}, {@code document} and {@code kb} are currently unused here;
 * presumably kept for interface compatibility — confirm against the declaring interface.
 *
 * @return map with exactly two entries: key 0 → spam subjects, key 1 → ham subjects
 */
public TIntObjectHashMap<TIntHashSet> resolve(
    DirectedGraph<Integer, RDFEdge> graph,
    Set<Set<Integer>> literalSubjectPairs,
    Document document,
    KnowledgeBase kb) {
  TIntObjectHashMap<TIntHashSet> resolvedSubjects = new TIntObjectHashMap<TIntHashSet>();
  TIntHashSet ham = new TIntHashSet();
  TIntHashSet spam = new TIntHashSet();
  resolvedSubjects.put(0, spam);
  resolvedSubjects.put(1, ham);
  // One RNG for the whole call — the original allocated a fresh Random per ambiguous set,
  // which is wasteful and degrades the quality of the pseudo-random sequence.
  Random random = new Random();
  for (Set<Integer> c : literalSubjectPairs) {
    TIntHashSet subjects = getAmbiguousURIRefs(c);
    if (subjects.size() > 1) {
      // Materialize the set once instead of calling toArray() twice.
      int[] candidates = subjects.toArray();
      ham.add(candidates[random.nextInt(candidates.length)]);
      // resolvedSubjects always holds the two buckets, so this triggers for sets of size > 2.
      if (resolvedSubjects.size() < candidates.length) {
        for (int s : candidates) {
          if (!ham.contains(s)) {
            spam.add(s);
          }
        }
      }
    }
  }
  return resolvedSubjects;
}
public boolean execute(final String a, final int b) { termids[i] = getTermId(a); tfs[i] = b; final TIntHashSet ids = term_blocks.get(a); blockfreqs[i] = ids.size(); blockTotal += ids.size(); final int[] bids = ids.toArray(); Arrays.sort(bids); term2blockids.put(termids[i], bids); // System.err.println(a+": tid="+termids[i]+" tf="+tfs[i]+" bf="+blockfreqs[i] +" // blocks="+Arrays.toString(bids)); i++; return true; }
public void testTrailingSoftWrapOffsetShiftOnTyping() throws IOException { // The main idea is to type on a logical line before soft wrap in order to ensure that its // offset is correctly shifted back. String text = "line1<caret>\n" + "second line that is long enough to be soft wrapped"; init(15, text); TIntHashSet offsetsBefore = collectSoftWrapStartOffsets(1); assertTrue(!offsetsBefore.isEmpty()); type('2'); final TIntHashSet offsetsAfter = collectSoftWrapStartOffsets(1); assertSame(offsetsBefore.size(), offsetsAfter.size()); offsetsBefore.forEach( value -> { assertTrue(offsetsAfter.contains(value + 1)); return true; }); }
/**
 * Compares this set with another {@code TIntHashSet} for equality.
 *
 * <p>Two sets are equal when they have the same size and every element of this set is contained
 * in the other — with equal sizes, full containment implies set equality.
 */
public boolean equals(final Object other) {
  if (!(other instanceof TIntHashSet)) {
    return false;
  }
  final TIntHashSet that = (TIntHashSet) other;
  if (that.size() != this.size()) {
    return false;
  }
  // forEach aborts and returns false as soon as an element is missing from `that`
  return this.forEach(that::contains);
}
/**
 * Builds a domain from the UNV group {@code id}: gathers the node ids referenced by all element
 * arrays (tria3, quad4, beam2, tria6), reads the corresponding coordinates, and renumbers each
 * element array from global node ids to local indices into {@code nodes}.
 *
 * @param parser source of element connectivity and node coordinates
 * @param id UNV group id
 * @param color display color for this domain
 */
public UNVDomain(UNVParser parser, int id, Color color) {
  this.id = id;
  this.color = color;

  // Collect the distinct global node ids used by every element type.
  // (The original repeated this stanza four times, computing the first capacity
  // inconsistently with the rest.)
  TIntHashSet nodeset = new TIntHashSet();
  tria3 = parser.getTria3FromGroup(id);
  collectNodes(nodeset, tria3);
  quad4 = parser.getQuad4FromGroup(id);
  collectNodes(nodeset, quad4);
  beam2 = parser.getBeam2FromGroup(id);
  collectNodes(nodeset, beam2);
  tria6 = parser.getTria6FromGroup(id);
  collectNodes(nodeset, tria6);

  int[] nodesID = nodeset.toArray();
  nodes = readNodes(nodesID, parser.getNodesCoordinates());

  // Compute inverse relation: global node id -> local index into `nodes`
  TIntIntHashMap map = new TIntIntHashMap(nodesID.length);
  for (int i = 0; i < nodesID.length; i++) {
    map.put(nodesID[i], i);
  }
  renumber(map, tria3);
  renumber(map, quad4);
  renumber(map, beam2);
  renumber(map, tria6);
}

/** Adds every node id in {@code elems} to {@code set}, pre-sizing it first; no-op when empty. */
private static void collectNodes(TIntHashSet set, int[] elems) {
  if (elems.length > 0) {
    set.ensureCapacity(set.size() + elems.length);
    set.addAll(elems);
  }
}

/** Rewrites each global node id in {@code arr} to its local index as given by {@code map}. */
private static void renumber(TIntIntHashMap map, int[] arr) {
  for (int i = 0; i < arr.length; i++) {
    arr[i] = map.get(arr[i]);
  }
}
/**
 * Filters a batch of VFS events, dropping events that are no longer valid and deletion events
 * made redundant because an ancestor directory is deleted in the same batch.
 *
 * @param events the raw event list
 * @return the surviving events, in their original order
 */
@NotNull
private static List<VFileEvent> validateEvents(@NotNull List<VFileEvent> events) {
  // Collect every still-valid deletion event together with its index in `events`.
  final List<EventWrapper> deletionEvents = ContainerUtil.newArrayList();
  for (int i = 0, size = events.size(); i < size; i++) {
    final VFileEvent event = events.get(i);
    if (event instanceof VFileDeleteEvent && event.isValid()) {
      deletionEvents.add(new EventWrapper((VFileDeleteEvent) event, i));
    }
  }

  // Indices (into `events`) of deletions shadowed by an ancestor-directory deletion.
  final TIntHashSet invalidIDs;
  if (deletionEvents.isEmpty()) {
    invalidIDs = EmptyIntHashSet.INSTANCE;
  } else {
    // Sorted by DEPTH_COMPARATOR — presumably shallower paths first, so a directory is
    // registered in dirsToBeDeleted before any of its descendants are examined; confirm
    // against the comparator's definition.
    ContainerUtil.quickSort(deletionEvents, DEPTH_COMPARATOR);
    invalidIDs = new TIntHashSet(deletionEvents.size());
    final Set<VirtualFile> dirsToBeDeleted = new THashSet<VirtualFile>(deletionEvents.size());
    nextEvent:
    for (EventWrapper wrapper : deletionEvents) {
      final VirtualFile candidate = wrapper.event.getFile();
      VirtualFile parent = candidate;
      // Walk up the hierarchy: if any ancestor is already scheduled for deletion,
      // this deletion is redundant and its index is flagged.
      while (parent != null) {
        if (dirsToBeDeleted.contains(parent)) {
          invalidIDs.add(wrapper.id);
          continue nextEvent;
        }
        parent = parent.getParent();
      }
      if (candidate.isDirectory()) {
        dirsToBeDeleted.add(candidate);
      }
    }
  }

  // Keep events that are still valid and whose index was not flagged above.
  final List<VFileEvent> filtered = new ArrayList<VFileEvent>(events.size() - invalidIDs.size());
  for (int i = 0, size = events.size(); i < size; i++) {
    final VFileEvent event = events.get(i);
    if (event.isValid() && !(event instanceof VFileDeleteEvent && invalidIDs.contains(i))) {
      filtered.add(event);
    }
  }
  return filtered;
}
public void processTerm(String t) { // null means the term has been filtered out (eg stopwords) if (t != null) { // add term to document posting list for (String fieldName : termFields) { int tmp = fieldNames.get(fieldName); if (tmp > 0) { fields.add(tmp - 1); } } if (ELSE_ENABLED && fields.size() == 0) { fields.add(ELSE_FIELD_ID); } ((BlockFieldDocumentPostingList) termsInDocument).insert(t, fields.toArray(), blockId); numOfTokensInDocument++; if (++numOfTokensInBlock >= BLOCK_SIZE && blockId < MAX_BLOCKS) { numOfTokensInBlock = 0; blockId++; } fields.clear(); } }
/**
 * Persists a value container for {@code key}. When the container does not need compacting, only
 * its delta (invalidations, removals, additions) is serialized and appended to the stored data;
 * otherwise the whole container is rewritten via {@code super.doPut} to defragment it.
 *
 * @throws IOException on serialization or storage failure
 */
@Override
protected void doPut(Key key, ValueContainer<Value> container) throws IOException {
  // all mutations of the backing storage are serialized on the enumerator
  synchronized (myEnumerator) {
    ChangeTrackingValueContainer<Value> valueContainer =
        (ChangeTrackingValueContainer<Value>) container;
    if (!valueContainer.needsCompacting()) {
      // Incremental path: build the delta in memory, then append it in one shot.
      final BufferExposingByteArrayOutputStream bytes = new BufferExposingByteArrayOutputStream();
      //noinspection IOResourceOpenedButNotSafelyClosed
      final DataOutputStream _out = new DataOutputStream(bytes);
      // 1) invalidate commands for every input id whose values were dropped
      final TIntHashSet set = valueContainer.getInvalidated();
      if (set.size() > 0) {
        for (int inputId : set.toArray()) {
          ValueContainerExternalizer.saveInvalidateCommand(_out, inputId);
        }
      }
      // 2) explicit removals
      final ValueContainer<Value> toRemove = valueContainer.getRemovedDelta();
      if (toRemove.size() > 0) {
        myValueContainerExternalizer.saveAsRemoved(_out, toRemove);
      }
      // 3) additions
      final ValueContainer<Value> toAppend = valueContainer.getAddedDelta();
      if (toAppend.size() > 0) {
        myValueContainerExternalizer.save(_out, toAppend);
      }
      // append the serialized delta to whatever is already stored under this key
      appendData(
          key,
          new PersistentHashMap.ValueDataAppender() {
            public void append(final DataOutput out) throws IOException {
              out.write(bytes.getInternalBuffer(), 0, bytes.size());
            }
          });
    } else {
      // rewrite the value container for defragmentation
      super.doPut(key, valueContainer);
    }
  }
}