@NotNull
public static char[] adaptiveLoadText(@NotNull Reader reader) throws IOException {
  char[] chars = new char[4096];
  List<char[]> buffers = null;
  int count = 0;
  int total = 0;
  while (true) {
    int n = reader.read(chars, count, chars.length - count);
    if (n <= 0) break;
    count += n;
    if (total > 1024 * 1024 * 10) throw new FileTooBigException("File too big " + reader);
    total += n;
    if (count == chars.length) {
      // Current buffer is full: set it aside and continue reading into a new one,
      // doubling the size up to a cap of 1024 * 1024 chars.
      if (buffers == null) {
        buffers = new ArrayList<char[]>();
      }
      buffers.add(chars);
      int newLength = Math.min(1024 * 1024, chars.length * 2);
      chars = new char[newLength];
      count = 0;
    }
  }
  // Stitch the filled buffers together in order, then append the tail kept in the current buffer.
  char[] result = new char[total];
  if (buffers != null) {
    for (char[] buffer : buffers) {
      System.arraycopy(buffer, 0, result, result.length - total, buffer.length);
      total -= buffer.length;
    }
  }
  System.arraycopy(chars, 0, result, result.length - total, total);
  return result;
}
@NotNull
public static byte[] adaptiveLoadBytes(@NotNull InputStream stream) throws IOException {
  byte[] bytes = new byte[4096];
  List<byte[]> buffers = null;
  int count = 0;
  int total = 0;
  while (true) {
    int n = stream.read(bytes, count, bytes.length - count);
    if (n <= 0) break;
    count += n;
    if (total > 1024 * 1024 * 10) throw new FileTooBigException("File too big " + stream);
    total += n;
    if (count == bytes.length) {
      if (buffers == null) {
        buffers = new ArrayList<byte[]>();
      }
      buffers.add(bytes);
      int newLength = Math.min(1024 * 1024, bytes.length * 2);
      bytes = new byte[newLength];
      count = 0;
    }
  }
  byte[] result = new byte[total];
  if (buffers != null) {
    for (byte[] buffer : buffers) {
      System.arraycopy(buffer, 0, result, result.length - total, buffer.length);
      total -= buffer.length;
    }
  }
  System.arraycopy(bytes, 0, result, result.length - total, total);
  return result;
}
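// Hedged usage sketch (not part of the original sources): one way adaptiveLoadText above might be
// called. The file argument and UTF-8 charset are illustrative assumptions; the loader itself
// throws FileTooBigException once more than ~10 MB has been read.
private static String readWholeTextFile(File file) throws IOException {
  try (Reader reader = new InputStreamReader(new FileInputStream(file), StandardCharsets.UTF_8)) {
    return new String(adaptiveLoadText(reader));
  }
}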
public static void initOffsets(final PsiFile file, final OffsetMap offsetMap) {
  int offset = Math.max(
    offsetMap.getOffset(CompletionInitializationContext.SELECTION_END_OFFSET),
    offsetMap.getOffset(CompletionInitializationContext.IDENTIFIER_END_OFFSET));
  PsiElement element = file.findElementAt(offset);
  // Skip trailing whitespace unless it contains a line break (or the code style allows
  // the '(' of a parameter list to start on the next line).
  if (element instanceof PsiWhiteSpace &&
      (!element.textContains('\n') ||
       CodeStyleSettingsManager.getSettings(file.getProject())
         .getCommonSettings(JavaLanguage.INSTANCE).METHOD_PARAMETERS_LPAREN_ON_NEXT_LINE)) {
    element = file.findElementAt(element.getTextRange().getEndOffset());
  }
  if (element == null) return;

  if (LEFT_PAREN.accepts(element)) {
    // Remember the argument list positions: '(' offset, ')' offset (if present), and list end.
    offsetMap.addOffset(LPAREN_OFFSET, element.getTextRange().getStartOffset());
    PsiElement list = element.getParent();
    PsiElement last = list.getLastChild();
    if (last instanceof PsiJavaToken && ((PsiJavaToken)last).getTokenType() == JavaTokenType.RPARENTH) {
      offsetMap.addOffset(RPAREN_OFFSET, last.getTextRange().getStartOffset());
    }
    offsetMap.addOffset(ARG_LIST_END_OFFSET, list.getTextRange().getEndOffset());
  }
}
private int getNamesMaxLength() {
  int len = 0;
  for (ParameterTableModelItemBase<ParameterInfoImpl> item : myParametersTableModel.getItems()) {
    final String text = item.parameter.getName();
    len = Math.max(len, text == null ? 0 : text.length());
  }
  return len;
}
private int getTypesMaxLength() {
  int len = 0;
  for (ParameterTableModelItemBase<ParameterInfoImpl> item : myParametersTableModel.getItems()) {
    final String text = item.typeCodeFragment == null ? null : item.typeCodeFragment.getText();
    len = Math.max(len, text == null ? 0 : text.length());
  }
  return len;
}
// Copies at most maxSize bytes from inputStream to outputStream, reading into the byte array
// obtained from the shared BUFFER field.
public static void copy(@NotNull InputStream inputStream, int maxSize, @NotNull OutputStream outputStream)
  throws IOException {
  final byte[] buffer = BUFFER.get();
  int toRead = maxSize;
  while (toRead > 0) {
    int read = inputStream.read(buffer, 0, Math.min(buffer.length, toRead));
    if (read < 0) break;
    toRead -= read;
    outputStream.write(buffer, 0, read);
  }
}
@NotNull
public static byte[] loadFirst(@NotNull InputStream stream, int maxLength) throws IOException {
  ByteArrayOutputStream buffer = new ByteArrayOutputStream();
  final byte[] bytes = BUFFER.get();
  while (maxLength > 0) {
    int n = stream.read(bytes, 0, Math.min(maxLength, bytes.length));
    if (n <= 0) break;
    buffer.write(bytes, 0, n);
    maxLength -= n;
  }
  buffer.close();
  return buffer.toByteArray();
}
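// Hedged usage sketch (assumption, not from the original sources): loadFirst above can be used to
// read just the leading bytes of a stream, e.g. to sniff a file header without loading the whole file.
private static byte[] readHeader(File file) throws IOException {
  try (InputStream stream = new FileInputStream(file)) {
    return loadFirst(stream, 8192); // at most the first 8 KB
  }
}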
private void performRemove() {
  final int selectedRow = myInjectionsTable.getSelectedRow();
  if (selectedRow < 0) return;
  final List<InjInfo> selected = getSelectedInjections();
  for (InjInfo info : selected) {
    if (info.bundled) continue;
    info.cfgInfo.injectionInfos.remove(info);
  }
  myInjectionsTable.getListTableModel().setItems(getInjInfoList(myInfos));
  final int index = Math.min(myInjectionsTable.getListTableModel().getRowCount() - 1, selectedRow);
  myInjectionsTable.getSelectionModel().setSelectionInterval(index, index);
  TableUtil.scrollSelectionToVisible(myInjectionsTable);
  updateCountLabel();
}
private static String printCommits(List<VcsCommitMetadata> commits) {
  StringBuilder sb = new StringBuilder();
  for (int i = 0; i < Math.min(commits.size(), 100); i++) {
    GraphCommit<Hash> commit = commits.get(i);
    sb.append(String.format("%s -> %s\n",
                            commit.getId().toShortString(),
                            StringUtil.join(commit.getParents(), new Function<Hash, String>() {
                              @Override
                              public String fun(Hash hash) {
                                return hash.toShortString();
                              }
                            }, ", ")));
  }
  return sb.toString();
}
@NotNull
@Override
public DetailedLogData readFirstBlock(@NotNull VirtualFile root, @NotNull Requirements requirements)
  throws VcsException {
  if (!isRepositoryReady(root)) {
    return LogDataImpl.empty();
  }
  GitRepository repository = ObjectUtils.assertNotNull(myRepositoryManager.getRepositoryForRoot(root));

  // need to query more to sort them manually; this doesn't affect performance:
  // it is equal for -1000 and -2000
  int commitCount = requirements.getCommitCount() * 2;

  String[] params = new String[]{"HEAD", "--branches", "--remotes", "--max-count=" + commitCount};
  // NB: not specifying --tags, because it introduces great slowdown if there are many tags,
  // but makes sense only if there are heads without branch or HEAD labels (rare case).
  // Such cases are partially handled below.

  boolean refresh = requirements instanceof VcsLogProviderRequirementsEx &&
                    ((VcsLogProviderRequirementsEx)requirements).isRefresh();

  DetailedLogData data = GitHistoryUtils.loadMetadata(myProject, root, true, params);

  Set<VcsRef> safeRefs = data.getRefs();
  Set<VcsRef> allRefs = new OpenTHashSet<VcsRef>(safeRefs, DONT_CONSIDER_SHA);
  Set<VcsRef> branches = readBranches(repository);
  addNewElements(allRefs, branches);

  Collection<VcsCommitMetadata> allDetails;
  Set<String> currentTagNames = null;
  DetailedLogData commitsFromTags = null;
  if (!refresh) {
    allDetails = data.getCommits();
  }
  else {
    // on refresh: get new tags, which point to commits not from the first block;
    // then get history, walking down just from these tags.
    // on init: just ignore such tagged-only branches. The price for speed-up.
    VcsLogProviderRequirementsEx rex = (VcsLogProviderRequirementsEx)requirements;
    currentTagNames = readCurrentTagNames(root);
    addOldStillExistingTags(allRefs, currentTagNames, rex.getPreviousRefs());
    allDetails = newHashSet(data.getCommits());
    Set<String> previousTags = newHashSet(ContainerUtil.mapNotNull(rex.getPreviousRefs(), GET_TAG_NAME));
    Set<String> safeTags = newHashSet(ContainerUtil.mapNotNull(safeRefs, GET_TAG_NAME));
    Set<String> newUnmatchedTags = remove(currentTagNames, previousTags, safeTags);
    if (!newUnmatchedTags.isEmpty()) {
      commitsFromTags = loadSomeCommitsOnTaggedBranches(root, commitCount, newUnmatchedTags);
      addNewElements(allDetails, commitsFromTags.getCommits());
      addNewElements(allRefs, commitsFromTags.getRefs());
    }
  }

  StopWatch sw = StopWatch.start("sorting commits in " + root.getName());
  List<VcsCommitMetadata> sortedCommits = VcsLogSorter.sortByDateTopoOrder(allDetails);
  sortedCommits = sortedCommits.subList(0, Math.min(sortedCommits.size(), requirements.getCommitCount()));
  sw.report();

  if (LOG.isDebugEnabled()) {
    validateDataAndReportError(root, allRefs, sortedCommits, data, branches, currentTagNames, commitsFromTags);
  }

  return new LogDataImpl(allRefs, sortedCommits);
}