private void refreshTable() {
  IDevice[] devices = myDetectedDevicesRef.get();
  myDisplayedDevices = devices;

  final IDevice[] selectedDevices = getSelectedDevices();
  final TIntArrayList selectedRows = new TIntArrayList();
  for (int i = 0; i < devices.length; i++) {
    if (ArrayUtil.indexOf(selectedDevices, devices[i]) >= 0) {
      selectedRows.add(i);
    }
  }

  myProcessSelectionFlag = false;
  myDeviceTable.setModel(new MyDeviceTableModel(devices));
  if (selectedRows.size() == 0 && devices.length > 0) {
    myDeviceTable.getSelectionModel().setSelectionInterval(0, 0);
  }
  for (int selectedRow : selectedRows.toNativeArray()) {
    if (selectedRow < devices.length) {
      myDeviceTable.getSelectionModel().addSelectionInterval(selectedRow, selectedRow);
    }
  }
  fireSelectedDevicesChanged();
  myProcessSelectionFlag = true;

  updatePreviouslySelectedSerials();
}
public SelectionData selectTests(String methodName, Edge[] dangerousEdges)
    throws MethodNotFoundException {
  TestHistory th = thHandler.getTestHistory(methodName);
  int maxTestID = th.getHighestTestID();
  TIntArrayList testList = new TIntArrayList();

  for (int i = 0; i < dangerousEdges.length; i++) {
    // int predBlockID = dangerousEdges[i].getPredNodeID();
    int succBlockID = dangerousEdges[i].getSuccNodeID();
    for (int j = 0; j < maxTestID; j++) {
      if (/*th.query(predBlockID, j) &&*/ th.query(succBlockID, j)) {
        testList.add(j + 1);
      }
    }
  }

  SelectionData sd = new SelectionData();
  if (testList.size() > 0) {
    sd.tests = testList.toNativeArray();
  } else {
    sd.tests = new int[0];
  }
  return sd;
}
/** Helper method to represent an Attribute as a single-element vector. */
private int[] toValueIndexArray(Attribute attribute) {
  TIntArrayList out = new TIntArrayList();
  for (String value : values) {
    int valueIndex = attribute.indexOfValue(value);
    if (valueIndex < 0) {
      throw new NoSuchElementException(
          "no such value: '" + value + "' in attribute '" + attribute.toString() + "'");
    }
    out.add(valueIndex);
  }
  return out.toNativeArray();
}
public void removePaths(VirtualFile... paths) {
  final Set<VirtualFile> pathsSet = new java.util.HashSet<VirtualFile>(Arrays.asList(paths));
  int size = getRowCount();
  final TIntArrayList indicesToRemove = new TIntArrayList(paths.length);
  for (int idx = 0; idx < size; idx++) {
    VirtualFile path = getValueAt(idx);
    if (pathsSet.contains(path)) {
      indicesToRemove.add(idx);
    }
  }
  final List list = ListUtil.removeIndices(myList, indicesToRemove.toNativeArray());
  itemsRemoved(list);
}
@NotNull
@Override
protected List<HighlightInfo> doHighlighting() {
  PsiDocumentManager.getInstance(myProject).commitAllDocuments();

  TIntArrayList toIgnore = new TIntArrayList();
  toIgnore.add(Pass.EXTERNAL_TOOLS);
  toIgnore.add(Pass.LOCAL_INSPECTIONS);
  toIgnore.add(Pass.WHOLE_FILE_LOCAL_INSPECTIONS);
  toIgnore.add(Pass.POPUP_HINTS);
  return CodeInsightTestFixtureImpl.instantiateAndRun(
      getFile(), getEditor(), toIgnore.toNativeArray(), false);
}
@NotNull
private static LineSet createLineSet(@NotNull CharSequence text, boolean markModified) {
  TIntArrayList starts = new TIntArrayList();
  TByteArrayList flags = new TByteArrayList();

  LineTokenizer lineTokenizer = new LineTokenizer(text);
  while (!lineTokenizer.atEnd()) {
    starts.add(lineTokenizer.getOffset());
    flags.add(
        (byte) (lineTokenizer.getLineSeparatorLength() | (markModified ? MODIFIED_MASK : 0)));
    lineTokenizer.advance();
  }
  return new LineSet(starts.toNativeArray(), flags.toNativeArray(), text.length());
}
public void forceInitFromModel() {
  Set<ClasspathTableItem<?>> oldSelection = new HashSet<ClasspathTableItem<?>>();
  for (int i : myEntryTable.getSelectedRows()) {
    ContainerUtil.addIfNotNull(getItemAt(i), oldSelection);
  }
  myModel.clear();
  myModel.init();
  myModel.fireTableDataChanged();
  TIntArrayList newSelection = new TIntArrayList();
  for (int i = 0; i < myModel.getRowCount(); i++) {
    if (oldSelection.contains(getItemAt(i))) {
      newSelection.add(i);
    }
  }
  TableUtil.selectRows(myEntryTable, newSelection.toNativeArray());
}
@Override
public boolean setInputFormat(Instances instanceInfo) throws Exception {
  TIntArrayList matchingIndices = new TIntArrayList();
  for (int i = 0; i < instanceInfo.numAttributes(); i++) {
    if (matchClass && i == instanceInfo.classIndex()) {
      matchingIndices.add(i);
      continue;
    }
    if (prefix != null && instanceInfo.attribute(i).name().startsWith(prefix)) {
      matchingIndices.add(i);
      continue;
    }
  }
  super.setAttributeIndicesArray(matchingIndices.toNativeArray());
  return super.setInputFormat(instanceInfo);
}
@Override
public void addItems(List<ClasspathTableItem<?>> toAdd) {
  for (ClasspathTableItem<?> item : toAdd) {
    myModel.addRow(item);
  }
  TIntArrayList toSelect = new TIntArrayList();
  for (int i = myModel.getRowCount() - toAdd.size(); i < myModel.getRowCount(); i++) {
    toSelect.add(myEntryTable.convertRowIndexToView(i));
  }
  TableUtil.selectRows(myEntryTable, toSelect.toNativeArray());
  TableUtil.scrollSelectionToVisible(myEntryTable);

  final StructureConfigurableContext context =
      ModuleStructureConfigurable.getInstance(myState.getProject()).getContext();
  context
      .getDaemonAnalyzer()
      .queueUpdate(new ModuleProjectStructureElement(context, getRootModel().getModule()));
}
private List index(int numEvents, EventStream es, TObjectIntHashMap predicateIndex) {
  TObjectIntHashMap omap = new TObjectIntHashMap();

  int outcomeCount = 0;
  List eventsToCompare = new ArrayList(numEvents);
  TIntArrayList indexedContext = new TIntArrayList();

  while (es.hasNext()) {
    Event ev = es.nextEvent();
    String[] econtext = ev.getContext();
    ComparableEvent ce;

    int ocID;
    String oc = ev.getOutcome();

    if (omap.containsKey(oc)) {
      ocID = omap.get(oc);
    } else {
      ocID = outcomeCount++;
      omap.put(oc, ocID);
    }

    for (int i = 0; i < econtext.length; i++) {
      String pred = econtext[i];
      if (predicateIndex.containsKey(pred)) {
        indexedContext.add(predicateIndex.get(pred));
      }
    }

    // drop events with no active features
    if (indexedContext.size() > 0) {
      ce = new ComparableEvent(ocID, indexedContext.toNativeArray());
      eventsToCompare.add(ce);
    } else {
      System.err.println(
          "Dropped event " + ev.getOutcome() + ":" + Arrays.asList(ev.getContext()));
    }
    // recycle the TIntArrayList
    indexedContext.resetQuick();
  }
  outcomeLabels = toIndexedStringArray(omap);
  predLabels = toIndexedStringArray(predicateIndex);
  return eventsToCompare;
}
private void queueUnresolvedFilesSinceLastRestart() {
  PersistentFS fs = PersistentFS.getInstance();
  int maxId = FSRecords.getMaxId();
  TIntArrayList list = new TIntArrayList();
  for (int id = fileIsResolved.nextClearBit(1);
       id >= 0 && id < maxId;
       id = fileIsResolved.nextClearBit(id + 1)) {
    int nextSetBit = fileIsResolved.nextSetBit(id);
    int endOfRun = Math.min(maxId, nextSetBit == -1 ? maxId : nextSetBit);
    do {
      VirtualFile virtualFile = fs.findFileById(id);
      if (queueIfNeeded(virtualFile, myProject)) {
        list.add(id);
      } else {
        fileIsResolved.set(id);
      }
    } while (++id < endOfRun);
  }
  log("Initially added to resolve " + toVfString(list.toNativeArray()));
}
@NotNull
private FSRecords.NameId[] persistAllChildren(
    @NotNull final VirtualFile file, final int id, @NotNull FSRecords.NameId[] current) {
  assert file != mySuperRoot;

  final NewVirtualFileSystem fs = replaceWithNativeFS(getDelegate(file));

  String[] delegateNames = VfsUtil.filterNames(fs.list(file));
  if (delegateNames.length == 0 && current.length > 0) {
    return current;
  }

  Set<String> toAdd = ContainerUtil.newHashSet(delegateNames);
  for (FSRecords.NameId nameId : current) {
    toAdd.remove(nameId.name);
  }

  final TIntArrayList childrenIds = new TIntArrayList(current.length + toAdd.size());
  final List<FSRecords.NameId> nameIds =
      ContainerUtil.newArrayListWithExpectedSize(current.length + toAdd.size());
  for (FSRecords.NameId nameId : current) {
    childrenIds.add(nameId.id);
    nameIds.add(nameId);
  }
  for (String newName : toAdd) {
    FakeVirtualFile child = new FakeVirtualFile(file, newName);
    FileAttributes attributes = fs.getAttributes(child);
    if (attributes != null) {
      int childId = createAndFillRecord(fs, child, id, attributes);
      childrenIds.add(childId);
      nameIds.add(new FSRecords.NameId(childId, FileNameCache.storeName(newName), newName));
    }
  }

  FSRecords.updateList(id, childrenIds.toNativeArray());
  setChildrenCached(id);

  return nameIds.toArray(new FSRecords.NameId[nameIds.size()]);
}
@NotNull
protected List<HighlightInfo> doHighlighting() {
  PsiDocumentManager.getInstance(myProject).commitAllDocuments();

  TIntArrayList toIgnore = new TIntArrayList();
  if (!doTestLineMarkers()) {
    toIgnore.add(Pass.UPDATE_OVERRIDEN_MARKERS);
    toIgnore.add(Pass.VISIBLE_LINE_MARKERS);
    toIgnore.add(Pass.LINE_MARKERS);
  }

  if (!doExternalValidation()) {
    toIgnore.add(Pass.EXTERNAL_TOOLS);
  }
  if (forceExternalValidation()) {
    toIgnore.add(Pass.LINE_MARKERS);
    toIgnore.add(Pass.LOCAL_INSPECTIONS);
    toIgnore.add(Pass.WHOLE_FILE_LOCAL_INSPECTIONS);
    toIgnore.add(Pass.POPUP_HINTS);
    toIgnore.add(Pass.UPDATE_ALL);
    toIgnore.add(Pass.UPDATE_OVERRIDEN_MARKERS);
    toIgnore.add(Pass.VISIBLE_LINE_MARKERS);
  }

  boolean canChange = canChangeDocumentDuringHighlighting();
  List<HighlightInfo> infos = CodeInsightTestFixtureImpl.instantiateAndRun(
      getFile(), getEditor(), toIgnore.toNativeArray(), canChange);

  if (!canChange) {
    Document document = getDocument(getFile());
    DaemonCodeAnalyzerEx daemonCodeAnalyzer = DaemonCodeAnalyzerEx.getInstanceEx(myProject);
    daemonCodeAnalyzer.getFileStatusMap().assertAllDirtyScopesAreNull(document);
  }
  return infos;
}
/**
 * Returns a 2D array containing the document ids, the term frequencies, the field scores,
 * the block frequencies and the block ids for the given documents.
 *
 * @return int[][] the [4 + fieldCount][] array containing the document ids, frequencies,
 *     field scores and block frequencies, while the last vector contains the block
 *     identifiers and has a different length from the document identifiers.
 * @param pointer start byte and bit offset of the postings in the inverted file, together
 *     with number of postings to expect
 */
public int[][] getDocuments(BitIndexPointer pointer) {
  final long startOffset = pointer.getOffset();
  final byte startBitOffset = pointer.getOffsetBits();
  final int df = pointer.getNumberOfEntries();
  final boolean loadTagInformation = fieldCount > 0;

  final int[][] documentTerms = new int[4 + fieldCount][];
  for (int i = 0; i < fieldCount + 3; i++) documentTerms[i] = new int[df];
  final TIntArrayList blockids = new TIntArrayList(df); // ideally we'd have TF here

  try {
    final BitIn file = this.file[pointer.getFileNumber()].readReset(startOffset, startBitOffset);

    if (loadTagInformation) { // if there are tag information to process
      // documentTerms[2] = new int[df];
      documentTerms[0][0] = file.readGamma() - 1;
      documentTerms[1][0] = file.readUnary();
      for (int fi = 0; fi < fieldCount; fi++) documentTerms[2 + fi][0] = file.readUnary() - 1;
      int blockfreq = documentTerms[2 + fieldCount][0] = file.readUnary() - DocumentBlockCountDelta;
      int[] tmpBlocks = new int[blockfreq];
      int previousBlockId = -1;
      for (int j = 0; j < blockfreq; j++) {
        tmpBlocks[j] = previousBlockId = file.readGamma() + previousBlockId;
      }
      blockids.add(tmpBlocks);

      for (int i = 1; i < df; i++) {
        documentTerms[0][i] = file.readGamma() + documentTerms[0][i - 1];
        documentTerms[1][i] = file.readUnary();
        for (int fi = 0; fi < fieldCount; fi++) documentTerms[2 + fi][i] = file.readUnary() - 1;
        blockfreq = documentTerms[2 + fieldCount][i] = file.readUnary() - DocumentBlockCountDelta;
        tmpBlocks = new int[blockfreq];
        previousBlockId = -1;
        for (int j = 0; j < blockfreq; j++) {
          tmpBlocks[j] = previousBlockId = file.readGamma() + previousBlockId;
        }
        blockids.add(tmpBlocks);
      }
    } else { // no tag information to process
      documentTerms[0][0] = file.readGamma() - 1;
      documentTerms[1][0] = file.readUnary();
      int blockfreq = documentTerms[2][0] = file.readUnary() - DocumentBlockCountDelta;
      int[] tmpBlocks = new int[blockfreq];
      int previousBlockId = -1;
      for (int j = 0; j < blockfreq; j++) {
        tmpBlocks[j] = previousBlockId = file.readGamma() + previousBlockId;
      }
      blockids.add(tmpBlocks);

      for (int i = 1; i < df; i++) {
        documentTerms[0][i] = file.readGamma() + documentTerms[0][i - 1];
        documentTerms[1][i] = file.readUnary();
        blockfreq = documentTerms[2][i] = file.readUnary() - DocumentBlockCountDelta;
        tmpBlocks = new int[blockfreq];
        previousBlockId = -1;
        for (int j = 0; j < blockfreq; j++) {
          tmpBlocks[j] = previousBlockId = file.readGamma() + previousBlockId;
        }
        blockids.add(tmpBlocks);
      }
    }
    documentTerms[documentTerms.length - 1] = blockids.toNativeArray();
    return documentTerms;
  } catch (IOException ioe) {
    logger.error("Problem reading block inverted index", ioe);
    return null;
  }
}
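A minimal usage sketch (not from the source) of how the [4 + fieldCount][] layout documented above is typically unpacked; the postings object, pointer and fieldCount names are assumptions for illustration only.

// Hypothetical sketch: only the row layout follows getDocuments() above.
// 'postings' (an object exposing getDocuments), 'pointer' and 'fieldCount' are assumed.
int[][] documentTerms = postings.getDocuments(pointer);
int[] docIds = documentTerms[0];                          // document ids
int[] termFreqs = documentTerms[1];                       // term frequencies
int[] blockFreqs = documentTerms[2 + fieldCount];         // number of blocks per document
int[] blockIds = documentTerms[documentTerms.length - 1]; // flattened block ids (different length)
for (int i = 0, b = 0; i < docIds.length; i++) {
  for (int j = 0; j < blockFreqs[i]; j++) {
    int blockId = blockIds[b++]; // j-th block of document docIds[i]
  }
}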