private void applyChildrenChangeEvents(VirtualFile parent, List<VFileEvent> events) {
  final NewVirtualFileSystem delegate = getDelegate(parent);
  TIntArrayList childrenIdsUpdated = new TIntArrayList();
  List<VirtualFile> childrenToBeUpdated = new SmartList<VirtualFile>();

  assert parent != null && parent != mySuperRoot;
  final int parentId = getFileId(parent);
  assert parentId != 0;
  TIntHashSet parentChildrenIds = new TIntHashSet(FSRecords.list(parentId));

  boolean hasRemovedChildren = false;

  for (VFileEvent event : events) {
    if (event instanceof VFileCreateEvent) {
      String name = ((VFileCreateEvent) event).getChildName();
      final VirtualFile fake = new FakeVirtualFile(parent, name);
      final FileAttributes attributes = delegate.getAttributes(fake);

      if (attributes != null) {
        final int childId = createAndFillRecord(delegate, fake, parentId, attributes);
        assert parent instanceof VirtualDirectoryImpl : parent;
        final VirtualDirectoryImpl dir = (VirtualDirectoryImpl) parent;
        VirtualFileSystemEntry child = dir.createChild(name, childId, dir.getFileSystem());
        childrenToBeUpdated.add(child);
        childrenIdsUpdated.add(childId);
        parentChildrenIds.add(childId);
      }
    } else if (event instanceof VFileDeleteEvent) {
      VirtualFile file = ((VFileDeleteEvent) event).getFile();

      if (!file.exists()) {
        LOG.error("Deleting a file, which does not exist: " + file.getPath());
        continue;
      }

      hasRemovedChildren = true;
      int id = getFileId(file);

      childrenToBeUpdated.add(file);
      childrenIdsUpdated.add(-id);
      parentChildrenIds.remove(id);
    }
  }

  FSRecords.updateList(parentId, parentChildrenIds.toArray());

  if (hasRemovedChildren) clearIdCache();
  VirtualDirectoryImpl parentImpl = (VirtualDirectoryImpl) parent;

  for (int i = 0, len = childrenIdsUpdated.size(); i < len; ++i) {
    final int childId = childrenIdsUpdated.get(i);
    final VirtualFile childFile = childrenToBeUpdated.get(i);

    if (childId > 0) {
      parentImpl.addChild((VirtualFileSystemEntry) childFile);
    } else {
      FSRecords.deleteRecordRecursively(-childId);
      parentImpl.removeChild(childFile);
      invalidateSubtree(childFile);
    }
  }
}
@NotNull
private static FontInfo doGetFontAbleToDisplay(int codePoint, int size, @JdkConstants.FontStyle int style) {
  synchronized (lock) {
    if (ourUndisplayableChars.contains(codePoint)) return ourSharedDefaultFont;

    final Collection<FontInfo> descriptors = ourUsedFonts.values();
    for (FontInfo font : descriptors) {
      if (font.getSize() == size && font.getStyle() == style && font.canDisplay(codePoint)) {
        return font;
      }
    }

    for (int i = 0; i < ourFontNames.size(); i++) {
      String name = ourFontNames.get(i);
      FontInfo font = new FontInfo(name, size, style);
      if (font.canDisplay(codePoint)) {
        ourUsedFonts.put(new FontKey(name, size, style), font);
        ourFontNames.remove(i);
        return font;
      }
    }

    ourUndisplayableChars.add(codePoint);
    return ourSharedDefaultFont;
  }
}
private static TIntHashSet collectSoftWrapStartOffsets(int documentLine) {
  TIntHashSet result = new TIntHashSet();
  for (SoftWrap softWrap : myEditor.getSoftWrapModel().getSoftWrapsForLine(documentLine)) {
    result.add(softWrap.getStart());
  }
  return result;
}
public synchronized void addAll(final int id, @NotNull final int[] values) {
  assert 0 < values.length && values.length <= MAX_LIST_LENGTH : values.length;
  assert id > 0;
  Arrays.sort(values);
  try {
    if (id >= pointers.size) {
      pointers = pointers.reallocWith(pointers.toBuffer(), id + 1);
      writeInt(data, 0, pointers.base);
      assert pointers.size > id : id + " > " + pointers.size;
    }
    int arrayBase = pointers.get(id);
    IntArray array = arrayBase == 0 ? EMPTY : new IntArray(data, arrayBase);
    IntArray newArray = array.addAll(values);
    if (newArray != null) {
      pointers.put(id, newArray.base);
    }
  } catch (IOException e) {
    throw new RuntimeException(e);
  }

  int[] ids = get(id);
  assertSorted(ids);
  TIntHashSet set = new TIntHashSet(ids);
  assert set.containsAll(values) : "ids: " + Arrays.toString(ids) + ";\n values:" + Arrays.toString(values);
}
// Returns the ids of the files referenced from this file if it was processed successfully,
// or null if processing could not complete (e.g. a write action or dumb mode started).
private int[] processFile(@NotNull final VirtualFile file, int fileId, @NotNull final ProgressIndicator indicator) {
  final TIntHashSet forward;
  try {
    forward = calcForwardRefs(file, indicator);
  } catch (IndexNotReadyException e) {
    return null;
  } catch (ApplicationUtil.CannotRunReadActionException e) {
    return null;
  } catch (ProcessCanceledException e) {
    throw e;
  } catch (Exception e) {
    log(ExceptionUtil.getThrowableText(e));
    flushLog();
    return null;
  }

  int[] forwardIds = forward.toArray();
  fileIsResolved.set(fileId);
  logf(" ---- " + file.getPresentableUrl() + " processed. forwardIds: " + toVfString(forwardIds));
  for (Listener listener : myListeners) {
    listener.fileResolved(file);
  }
  return forwardIds;
}
@NotNull
private Pair<TIntHashSet, Integer> findRowsToSelectAndScroll(@NotNull GraphTableModel model, @NotNull VisibleGraph<Integer> visibleGraph) {
  TIntHashSet rowsToSelect = new TIntHashSet();

  if (model.getRowCount() == 0) {
    // this should have been covered by facade.getVisibleCommitCount,
    // but if the table is empty (no commits match the filter), the GraphFacade is not updated,
    // because it can't handle it => it has previous values set.
    return Pair.create(rowsToSelect, null);
  }

  Integer rowToScroll = null;
  for (int row = 0;
       row < visibleGraph.getVisibleCommitCount() && (rowsToSelect.size() < mySelectedCommits.size() || rowToScroll == null);
       row++) { // stop iterating if found all hashes
    int commit = visibleGraph.getRowInfo(row).getCommit();
    if (mySelectedCommits.contains(commit)) {
      rowsToSelect.add(row);
    }
    if (myVisibleSelectedCommit != null && myVisibleSelectedCommit == commit) {
      rowToScroll = row;
    }
  }
  return Pair.create(rowsToSelect, rowToScroll);
}
public void testSet() {
  // Long-long
  TLongHashSet llist = new TLongHashSet();
  assertTrue(serializesCorrectly(llist, "set-l-1"));
  llist.add(0);
  llist.add(1);
  assertTrue(serializesCorrectly(llist, "set-l-2"));
  llist.add(Long.MIN_VALUE);
  assertTrue(serializesCorrectly(llist, "set-l-3"));
  llist.add(Long.MAX_VALUE);
  assertTrue(serializesCorrectly(llist, "set-l-4"));

  // Int-int
  TIntHashSet ilist = new TIntHashSet();
  assertTrue(serializesCorrectly(ilist, "set-i-1"));
  ilist.add(0);
  ilist.add(1);
  assertTrue(serializesCorrectly(ilist, "set-i-2"));
  ilist.add(Integer.MIN_VALUE);
  assertTrue(serializesCorrectly(ilist, "set-i-3"));
  ilist.add(Integer.MAX_VALUE);
  assertTrue(serializesCorrectly(ilist, "set-i-4"));

  // Double-double
  TDoubleHashSet dlist = new TDoubleHashSet();
  assertTrue(serializesCorrectly(dlist, "set-d-1"));
  dlist.add(0);
  dlist.add(1);
  assertTrue(serializesCorrectly(dlist, "set-d-2"));
  dlist.add(Double.MIN_VALUE);
  assertTrue(serializesCorrectly(dlist, "set-d-3"));
  dlist.add(Double.MAX_VALUE);
  assertTrue(serializesCorrectly(dlist, "set-d-4"));
  dlist.add(Double.POSITIVE_INFINITY);
  assertTrue(serializesCorrectly(dlist, "set-d-5"));
  dlist.add(Double.NEGATIVE_INFINITY);
  assertTrue(serializesCorrectly(dlist, "set-d-6"));
  // NOTE: trove doesn't deal well with NaN
  // dlist.add( Double.NaN );
  // assertTrue( serializesCorrectly( dlist ) );

  // Float-float
  TFloatHashSet flist = new TFloatHashSet();
  assertTrue(serializesCorrectly(flist, "set-f-1"));
  flist.add(0);
  flist.add(1);
  assertTrue(serializesCorrectly(flist, "set-f-2"));
  flist.add(Float.MIN_VALUE);
  assertTrue(serializesCorrectly(flist, "set-f-3"));
  flist.add(Float.MAX_VALUE);
  assertTrue(serializesCorrectly(flist, "set-f-4"));
  flist.add(Float.POSITIVE_INFINITY);
  assertTrue(serializesCorrectly(flist, "set-f-5"));
  flist.add(Float.NEGATIVE_INFINITY);
  assertTrue(serializesCorrectly(flist, "set-f-6"));
  // NOTE: trove doesn't deal well with NaN
  // flist.add( Float.NaN );
  // assertTrue( serializesCorrectly( flist ) );
}
/** Insert a term into this document, occurring at the given block id. */
public void insert(String t, int blockId) {
  insert(t);
  TIntHashSet blockids = null;
  if ((blockids = term_blocks.get(t)) == null) {
    term_blocks.put(t, blockids = new TIntHashSet(/*TODO */));
  }
  blockids.add(blockId);
  blockCount++;
}
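// A minimal, self-contained sketch of the same "get-or-create a per-term block set" idiom shown
// above, using a plain java.util.HashMap. The class and field names below are hypothetical and
// only illustrate the pattern; they are not the actual posting-list internals. The import assumes
// the legacy gnu.trove package layout used throughout these snippets.
import gnu.trove.TIntHashSet;
import java.util.HashMap;
import java.util.Map;

class BlockOccurrencesSketch {
  private final Map<String, TIntHashSet> termBlocks = new HashMap<String, TIntHashSet>();

  void recordOccurrence(String term, int blockId) {
    TIntHashSet blocks = termBlocks.get(term);
    if (blocks == null) {
      blocks = new TIntHashSet();
      termBlocks.put(term, blocks);
    }
    // TIntHashSet stores each block id at most once, so repeated occurrences of the term
    // inside the same block do not grow the set
    blocks.add(blockId);
  }
}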
public void visitEnd() {
  final TIntHashSet s = myAnnotationArguments.get(myType);

  if (s == null) {
    myAnnotationArguments.put(myType, myUsedArguments);
  } else {
    s.retainAll(myUsedArguments.toArray());
  }
}
@NotNull
@Override
public PsiFunctionalExpression[] findFunExpressionsInFile(@NotNull Integer[] funExpressions, @NotNull PsiFileWithStubSupport file) {
  TIntHashSet requiredIndices = new TIntHashSet(funExpressions.length);
  for (int funExpr : funExpressions) {
    requiredIndices.add(funExpr);
  }
  return JavaCompilerElementRetriever.retrieveFunExpressionsByIndices(requiredIndices, file);
}
private void loadCommitsData(@NotNull final TIntIntHashMap commits, @NotNull final Consumer<List<T>> consumer, @Nullable ProgressIndicator indicator) {
  final List<T> result = ContainerUtil.newArrayList();
  final TIntHashSet toLoad = new TIntHashSet();

  long taskNumber = myCurrentTaskIndex++;

  for (int id : commits.keys()) {
    T details = getFromCache(id);
    if (details == null || details instanceof LoadingDetails) {
      toLoad.add(id);
      cacheCommit(id, taskNumber);
    } else {
      result.add(details);
    }
  }

  if (toLoad.isEmpty()) {
    sortCommitsByRow(result, commits);
    consumer.consume(result);
  } else {
    Task.Backgroundable task =
        new Task.Backgroundable(null, "Loading Selected Details", true, PerformInBackgroundOption.ALWAYS_BACKGROUND) {
          @Override
          public void run(@NotNull final ProgressIndicator indicator) {
            indicator.checkCanceled();
            try {
              TIntObjectHashMap<T> map = preLoadCommitData(toLoad);
              map.forEachValue(value -> {
                result.add(value);
                return true;
              });
              sortCommitsByRow(result, commits);
              notifyLoaded();
            } catch (VcsException e) {
              LOG.error(e);
            }
          }

          @Override
          public void onSuccess() {
            consumer.consume(result);
          }
        };
    if (indicator != null) {
      ProgressManager.getInstance().runProcessWithProgressAsynchronously(task, indicator);
    } else {
      ProgressManager.getInstance().run(task);
    }
  }
}
private void runLoadCommitsData(@NotNull Iterable<Integer> hashes) {
  long taskNumber = myCurrentTaskIndex++;
  TIntIntHashMap commits = getCommitsMap(hashes);
  TIntHashSet toLoad = new TIntHashSet();

  for (int id : commits.keys()) {
    cacheCommit(id, taskNumber);
    toLoad.add(id);
  }

  myLoader.queue(new TaskDescriptor(toLoad));
}
public boolean equals(final Object other) {
  if (!(other instanceof TIntHashSet)) {
    return false;
  }
  final TIntHashSet that = (TIntHashSet) other;
  return that.size() == this.size()
      && this.forEach(new TIntProcedure() {
        public final boolean execute(final int value) {
          return that.contains(value);
        }
      });
}
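// A small usage sketch of the equality check above: the size comparison plus element-wise
// containment via forEach amounts to set equality, independent of insertion order or initial
// capacity. Assumes the legacy gnu.trove package layout used by these snippets.
import gnu.trove.TIntHashSet;

class SetEqualitySketch {
  public static void main(String[] args) {
    TIntHashSet a = new TIntHashSet();
    a.add(1);
    a.add(2);
    a.add(3);

    TIntHashSet b = new TIntHashSet(100); // different capacity and insertion order
    b.add(3);
    b.add(1);
    b.add(2);

    System.out.println(a.equals(b)); // prints true: same elements
  }
}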
// need 'synchronized' to ensure atomic initialization of merged data
// because several threads that acquired read lock may simultaneously execute the method
private ValueContainerImpl<Value> getMergedData() {
  ValueContainerImpl<Value> merged = myMerged;
  if (merged != null) {
    return merged;
  }

  synchronized (myInitializer.getLock()) {
    merged = myMerged;
    if (merged != null) {
      return merged;
    }

    final ValueContainer<Value> fromDisk = myInitializer.compute();
    final ValueContainerImpl<Value> newMerged;

    if (fromDisk instanceof ValueContainerImpl) {
      newMerged = ((ValueContainerImpl<Value>) fromDisk).copy();
    } else {
      newMerged = ((ChangeTrackingValueContainer<Value>) fromDisk).getMergedData().copy();
    }

    TIntHashSet invalidated = myInvalidated;
    if (invalidated != null) {
      invalidated.forEach(new TIntProcedure() {
        @Override
        public boolean execute(int inputId) {
          newMerged.removeAssociatedValue(inputId);
          return true;
        }
      });
    }

    ValueContainerImpl<Value> added = myAdded;
    if (added != null) {
      added.forEach(new ContainerAction<Value>() {
        @Override
        public boolean perform(final int id, final Value value) {
          newMerged.removeAssociatedValue(id); // enforcing "one-value-per-file for particular key" invariant
          newMerged.addValue(id, value);
          return true;
        }
      });
    }

    setNeedsCompacting(fromDisk.needsCompacting());

    myMerged = newMerged;
    return newMerged;
  }
}
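// A stripped-down sketch of the double-checked initialization explained by the comment above:
// read the volatile cache field first, take the lock only on a miss, re-check under the lock,
// and publish the fully built value with a volatile write. The field and method names here are
// illustrative, not the actual index classes.
class LazyMergeSketch<T> {
  private final Object lock = new Object();
  private volatile T merged;

  T getMerged(java.util.function.Supplier<T> computation) {
    T result = merged;
    if (result != null) return result; // fast path: already initialized, no locking

    synchronized (lock) {
      result = merged;
      if (result == null) {
        result = computation.get(); // expensive merge, done by exactly one thread
        merged = result;            // volatile write publishes the finished object
      }
      return result;
    }
  }
}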
public boolean canDisplay(char c) {
  try {
    if (c < 128) return true;
    if (mySafeCharacters.contains(c)) return true;
    if (canDisplayImpl(c)) {
      mySafeCharacters.add(c);
      return true;
    }
    return false;
  } catch (Exception e) {
    // JRE has problems working with the font. Just skip.
    return false;
  }
}
public boolean execute(final String a, final int b) {
  termids[i] = getTermId(a);
  tfs[i] = b;
  final TIntHashSet ids = term_blocks.get(a);
  blockfreqs[i] = ids.size();
  blockTotal += ids.size();
  final int[] bids = ids.toArray();
  Arrays.sort(bids);
  term2blockids.put(termids[i], bids);
  // System.err.println(a+": tid="+termids[i]+" tf="+tfs[i]+" bf="+blockfreqs[i] +" blocks="+Arrays.toString(bids));
  i++;
  return true;
}
@NotNull
public TIntObjectHashMap<T> preLoadCommitData(@NotNull TIntHashSet commits) throws VcsException {
  TIntObjectHashMap<T> result = new TIntObjectHashMap<>();
  final MultiMap<VirtualFile, String> rootsAndHashes = MultiMap.create();
  commits.forEach(commit -> {
    CommitId commitId = myHashMap.getCommitId(commit);
    if (commitId != null) {
      rootsAndHashes.putValue(commitId.getRoot(), commitId.getHash().asString());
    }
    return true;
  });

  for (Map.Entry<VirtualFile, Collection<String>> entry : rootsAndHashes.entrySet()) {
    VcsLogProvider logProvider = myLogProviders.get(entry.getKey());
    if (logProvider != null) {
      List<? extends T> details = readDetails(logProvider, entry.getKey(), ContainerUtil.newArrayList(entry.getValue()));
      for (T data : details) {
        int index = myHashMap.getCommitIndex(data.getId(), data.getRoot());
        result.put(index, data);
      }
      saveInCache(result);
    } else {
      LOG.error("No log provider for root " + entry.getKey().getPath() + ". All known log providers " + myLogProviders);
    }
  }

  return result;
}
public Selection(@NotNull VcsLogGraphTable table) {
  myTable = table;
  List<Integer> selectedRows = ContainerUtil.sorted(toList(myTable.getSelectedRows()));
  Couple<Integer> visibleRows = ScrollingUtil.getVisibleRows(myTable);
  myScrollToTop = visibleRows.first - 1 == 0;

  VisibleGraph<Integer> graph = myTable.getVisibleGraph();

  mySelectedCommits = new TIntHashSet();

  Integer visibleSelectedCommit = null;
  Integer delta = null;
  for (int row : selectedRows) {
    if (row < graph.getVisibleCommitCount()) {
      Integer commit = graph.getRowInfo(row).getCommit();
      mySelectedCommits.add(commit);
      if (visibleRows.first - 1 <= row && row <= visibleRows.second && visibleSelectedCommit == null) {
        visibleSelectedCommit = commit;
        delta = myTable.getCellRect(row, 0, false).y - myTable.getVisibleRect().y;
      }
    }
  }
  if (visibleSelectedCommit == null && visibleRows.first - 1 >= 0) {
    visibleSelectedCommit = graph.getRowInfo(visibleRows.first - 1).getCommit();
    delta = myTable.getCellRect(visibleRows.first - 1, 0, false).y - myTable.getVisibleRect().y;
  }
  myVisibleSelectedCommit = visibleSelectedCommit;
  myDelta = delta;
}
private void updateBreakpoints(@NotNull Document document) {
  Collection<XLineBreakpointImpl> breakpoints = myBreakpoints.getKeysByValue(document);
  if (breakpoints == null) {
    return;
  }

  TIntHashSet lines = new TIntHashSet();
  List<XBreakpoint<?>> toRemove = new SmartList<XBreakpoint<?>>();
  for (XLineBreakpointImpl breakpoint : breakpoints) {
    breakpoint.updatePosition();
    if (!breakpoint.isValid() || !lines.add(breakpoint.getLine())) {
      toRemove.add(breakpoint);
    }
  }

  removeBreakpoints(toRemove);
}
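// The dedupe trick above relies on gnu.trove.TIntHashSet.add returning false when the value is
// already present, so "!lines.add(breakpoint.getLine())" flags a second breakpoint on the same
// line. A minimal illustration (names are illustrative only):
import gnu.trove.TIntHashSet;

class DuplicateLineSketch {
  public static void main(String[] args) {
    TIntHashSet lines = new TIntHashSet();
    System.out.println(lines.add(42)); // true: first breakpoint on line 42 is kept
    System.out.println(lines.add(42)); // false: a duplicate would be scheduled for removal
  }
}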
@Override
public void removeAssociatedValue(int inputId) {
  ValueContainerImpl<Value> merged = myMerged;
  if (merged != null) {
    merged.removeAssociatedValue(inputId);
  }

  ValueContainerImpl<Value> added = myAdded;
  if (added != null) added.removeAssociatedValue(inputId);

  TIntHashSet invalidated = myInvalidated;
  if (invalidated == null) {
    invalidated = new TIntHashSet(1);
  }
  invalidated.add(inputId);
  myInvalidated = invalidated; // volatile write
}
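// A simplified sketch of the writer-side publication idiom above: mutate the set through a local
// reference, then assign it back to the volatile field; the volatile write ensures that an id
// added before the assignment is visible to any reader that subsequently reads the field. As in
// the original, writers themselves are assumed to be serialized externally (e.g. under the index
// write lock); the class and field names are illustrative.
import gnu.trove.TIntHashSet;

class InvalidationSketch {
  private volatile TIntHashSet invalidated;

  void markInvalidated(int inputId) {
    TIntHashSet set = invalidated;
    if (set == null) {
      set = new TIntHashSet(1); // allocate lazily, sized for the common single-entry case
    }
    set.add(inputId);
    invalidated = set; // volatile write publishes the updated set
  }
}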
@NotNull
private static List<VFileEvent> validateEvents(@NotNull List<VFileEvent> events) {
  final List<EventWrapper> deletionEvents = ContainerUtil.newArrayList();
  for (int i = 0, size = events.size(); i < size; i++) {
    final VFileEvent event = events.get(i);
    if (event instanceof VFileDeleteEvent && event.isValid()) {
      deletionEvents.add(new EventWrapper((VFileDeleteEvent) event, i));
    }
  }

  final TIntHashSet invalidIDs;
  if (deletionEvents.isEmpty()) {
    invalidIDs = EmptyIntHashSet.INSTANCE;
  } else {
    ContainerUtil.quickSort(deletionEvents, DEPTH_COMPARATOR);

    invalidIDs = new TIntHashSet(deletionEvents.size());
    final Set<VirtualFile> dirsToBeDeleted = new THashSet<VirtualFile>(deletionEvents.size());
    nextEvent:
    for (EventWrapper wrapper : deletionEvents) {
      final VirtualFile candidate = wrapper.event.getFile();
      VirtualFile parent = candidate;
      while (parent != null) {
        if (dirsToBeDeleted.contains(parent)) {
          invalidIDs.add(wrapper.id);
          continue nextEvent;
        }
        parent = parent.getParent();
      }

      if (candidate.isDirectory()) {
        dirsToBeDeleted.add(candidate);
      }
    }
  }

  final List<VFileEvent> filtered = new ArrayList<VFileEvent>(events.size() - invalidIDs.size());
  for (int i = 0, size = events.size(); i < size; i++) {
    final VFileEvent event = events.get(i);
    if (event.isValid() && !(event instanceof VFileDeleteEvent && invalidIDs.contains(i))) {
      filtered.add(event);
    }
  }
  return filtered;
}
@Override
public TIntDoubleHashMap getRating(Document document, List<TokenSequence<SemanticEntity>> entities) {
  DirectedGraph<Integer, RDFEdge> graph = document.getGraph();
  TIntHashSet nodes = new TIntHashSet();
  for (TokenSequence<SemanticEntity> ts : entities) {
    nodes.add(ts.getValue().getSubjectIndex());
  }
  final TIntDoubleHashMap m = new TIntDoubleHashMap();
  for (int node : nodes.toArray()) {
    m.put(node, Math.min(graph.getOutEdges(node).size(), graph.getInEdges(node).size()));
  }
  return m;
}
public RuleModel() {
  int[] tmp = {0, 1, 2};
  alpha = new TIntHashSet();
  alpha.addAll(tmp);
  work = "(";
  for (int i = 0; i < tmp.length - 1; i++) work += tmp[i] + "|";
  work = work.substring(0, work.length() - 1) + ")";
  all = "(" + tmp[2] + "|" + work.substring(1, work.length());
}
public void visitEnum(String name, String desc, String value) {
  final int methodName = myContext.get(name);
  final String methodDescr = "()" + desc;
  myUsages.add(UsageRepr.createMethodUsage(myContext, methodName, myType.myClassName, methodDescr));
  myUsages.add(UsageRepr.createMetaMethodUsage(myContext, methodName, myType.myClassName, methodDescr));
  myUsedArguments.add(methodName);
}
private void update(@NotNull List<BreakpointWithHighlighter> breakpoints) {
  final TIntHashSet intHash = new TIntHashSet();

  for (BreakpointWithHighlighter breakpoint : breakpoints) {
    SourcePosition sourcePosition = breakpoint.getSourcePosition();
    breakpoint.reload();

    if (breakpoint.isValid()) {
      if (sourcePosition == null || breakpoint.getSourcePosition().getLine() != sourcePosition.getLine()) {
        fireBreakpointChanged(breakpoint);
      }

      if (intHash.contains(breakpoint.getLineIndex())) {
        remove(breakpoint);
      } else {
        intHash.add(breakpoint.getLineIndex());
      }
    } else {
      remove(breakpoint);
    }
  }
}
public void processTerm(String t) {
  // null means the term has been filtered out (eg stopwords)
  if (t != null) {
    // add term to document posting list
    for (String fieldName : termFields) {
      int tmp = fieldNames.get(fieldName);
      if (tmp > 0) {
        fields.add(tmp - 1);
      }
    }
    if (ELSE_ENABLED && fields.size() == 0) {
      fields.add(ELSE_FIELD_ID);
    }
    ((BlockFieldDocumentPostingList) termsInDocument).insert(t, fields.toArray(), blockId);
    numOfTokensInDocument++;
    if (++numOfTokensInBlock >= BLOCK_SIZE && blockId < MAX_BLOCKS) {
      numOfTokensInBlock = 0;
      blockId++;
    }
    fields.clear();
  }
}
private void parseProblemGlyphs() {
  myCheckedForProblemGlyphs = true;
  BufferedImage buffer = UIUtil.createImage(20, 20, BufferedImage.TYPE_INT_RGB);
  final Graphics graphics = buffer.getGraphics();
  if (!(graphics instanceof Graphics2D)) {
    return;
  }
  final FontRenderContext context = ((Graphics2D) graphics).getFontRenderContext();
  char[] charBuffer = new char[1];
  for (char c = 0; c < 128; c++) {
    if (!myFont.canDisplay(c)) {
      continue;
    }
    charBuffer[0] = c;
    final GlyphVector vector = myFont.createGlyphVector(context, charBuffer);
    final float y = vector.getGlyphMetrics(0).getAdvanceY();
    if (Math.round(y) != 0) {
      mySymbolsToBreakDrawingIteration.add(c);
    }
  }
  myHasGlyphsToBreakDrawingIteration = !mySymbolsToBreakDrawingIteration.isEmpty();
}
@Override
protected void doPut(Key key, ValueContainer<Value> container) throws IOException {
  synchronized (myEnumerator) {
    ChangeTrackingValueContainer<Value> valueContainer = (ChangeTrackingValueContainer<Value>) container;

    if (!valueContainer.needsCompacting()) {
      final BufferExposingByteArrayOutputStream bytes = new BufferExposingByteArrayOutputStream();
      //noinspection IOResourceOpenedButNotSafelyClosed
      final DataOutputStream _out = new DataOutputStream(bytes);

      final TIntHashSet set = valueContainer.getInvalidated();
      if (set.size() > 0) {
        for (int inputId : set.toArray()) {
          ValueContainerExternalizer.saveInvalidateCommand(_out, inputId);
        }
      }

      final ValueContainer<Value> toRemove = valueContainer.getRemovedDelta();
      if (toRemove.size() > 0) {
        myValueContainerExternalizer.saveAsRemoved(_out, toRemove);
      }

      final ValueContainer<Value> toAppend = valueContainer.getAddedDelta();
      if (toAppend.size() > 0) {
        myValueContainerExternalizer.save(_out, toAppend);
      }

      appendData(key, new PersistentHashMap.ValueDataAppender() {
        public void append(final DataOutput out) throws IOException {
          out.write(bytes.getInternalBuffer(), 0, bytes.size());
        }
      });
    } else {
      // rewrite the value container for defragmentation
      super.doPut(key, valueContainer);
    }
  }
}
private static void addIdAndSuperClasses(PsiFile file, @NotNull TIntHashSet forward) {
  if (file instanceof PsiJavaFile
      && file.getName().equals("Object.class")
      && ((PsiJavaFile) file).getPackageName().equals("java.lang")) {
    return;
  }
  VirtualFile virtualFile = PsiUtilCore.getVirtualFile(file);
  if (virtualFile instanceof VirtualFileWithId
      && forward.add(getAbsId(virtualFile))
      && file instanceof PsiClassOwner) {
    for (PsiClass aClass : ((PsiClassOwner) file).getClasses()) {
      for (PsiClass superClass : aClass.getSupers()) {
        addIdAndSuperClasses(superClass.getContainingFile(), forward);
      }
    }
  }
}
public void testTrailingSoftWrapOffsetShiftOnTyping() throws IOException {
  // The main idea is to type on a logical line before soft wrap in order to ensure
  // that its offset is correctly shifted back.
  String text =
      "line1<caret>\n" +
      "second line that is long enough to be soft wrapped";
  init(15, text);

  TIntHashSet offsetsBefore = collectSoftWrapStartOffsets(1);
  assertTrue(!offsetsBefore.isEmpty());

  type('2');
  final TIntHashSet offsetsAfter = collectSoftWrapStartOffsets(1);
  assertSame(offsetsBefore.size(), offsetsAfter.size());
  offsetsBefore.forEach(value -> {
    assertTrue(offsetsAfter.contains(value + 1));
    return true;
  });
}