public boolean equals(final Object other) {
  if (!(other instanceof TIntObjectHashMap)) {
    return false;
  }
  final TIntObjectHashMap that = (TIntObjectHashMap) other;
  return that.size() == this.size() && this.forEachEntry(new EqProcedure(that));
}
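// The equals() above delegates per-entry comparison to an EqProcedure that is not shown in this
// snippet. A minimal sketch of such a procedure follows; the class name, field name, and the
// gnu.trove package layout are assumptions for illustration, not the actual Trove source.
import gnu.trove.TIntObjectHashMap;
import gnu.trove.TIntObjectProcedure;

final class EqProcedureSketch implements TIntObjectProcedure<Object> {
  private final TIntObjectHashMap<Object> otherMap;

  EqProcedureSketch(TIntObjectHashMap<Object> otherMap) {
    this.otherMap = otherMap;
  }

  @Override
  public boolean execute(int key, Object value) {
    // Returning false aborts forEachEntry, so the equals() above fails fast on the first mismatch.
    if (!otherMap.containsKey(key)) return false;
    Object otherValue = otherMap.get(key);
    return otherValue == value || (otherValue != null && otherValue.equals(value));
  }
}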
public RefsModel(
    @NotNull Map<VirtualFile, CompressedRefs> refs,
    @NotNull Set<Integer> heads,
    @NotNull VcsLogStorage hashMap,
    @NotNull Map<VirtualFile, VcsLogProvider> providers) {
  myRefs = refs;
  myHashMap = hashMap;
  myBestRefForHead = new TIntObjectHashMap<>();
  myRootForHead = new TIntObjectHashMap<>();
  for (int head : heads) {
    CommitId commitId = myHashMap.getCommitId(head);
    if (commitId != null) {
      VirtualFile root = commitId.getRoot();
      myRootForHead.put(head, root);
      // the "best" ref for a head is the one that sorts first by the provider's branch layout comparator
      Optional<VcsRef> bestRef =
          myRefs.get(root).refsToCommit(head).stream()
              .min(providers.get(root).getReferenceManager().getBranchLayoutComparator());
      if (bestRef.isPresent()) {
        myBestRefForHead.put(head, bestRef.get());
      } else {
        LOG.warn("No references at head " + commitId);
      }
    }
  }
}
@Override
protected synchronized void onError() {
  super.onError();
  for (int each : myDataMap.keys()) {
    myDataMap.get(each).remoteId = -1;
  }
}
@Override
public synchronized int registerTextEditorHighlightingPass(
    @NotNull TextEditorHighlightingPassFactory factory,
    @Nullable int[] runAfterCompletionOf,
    @Nullable int[] runAfterOfStartingOf,
    boolean runIntentionsPassAfter,
    int forcedPassId) {
  assert !checkedForCycles;
  PassConfig info =
      new PassConfig(
          factory,
          runAfterCompletionOf == null || runAfterCompletionOf.length == 0
              ? ArrayUtil.EMPTY_INT_ARRAY
              : runAfterCompletionOf,
          runAfterOfStartingOf == null || runAfterOfStartingOf.length == 0
              ? ArrayUtil.EMPTY_INT_ARRAY
              : runAfterOfStartingOf);
  int passId = forcedPassId == -1 ? nextAvailableId++ : forcedPassId;
  PassConfig registered = myRegisteredPassFactories.get(passId);
  assert registered == null
      : "Pass id " + passId + " has already been registered in: " + registered.passFactory;
  myRegisteredPassFactories.put(passId, info);
  if (factory instanceof DirtyScopeTrackingHighlightingPassFactory) {
    myDirtyScopeTrackingFactories.add((DirtyScopeTrackingHighlightingPassFactory) factory);
  }
  return passId;
}
public void merge(final Binding b, final boolean removeObject) {
  for (final PsiTypeVariable var : b.getBoundVariables()) {
    final int index = var.getIndex();
    if (myBindings.get(index) != null) {
      LOG.error("Oops... Binding conflict...");
    } else {
      final PsiType type = b.apply(var);
      final PsiClassType javaLangObject =
          PsiType.getJavaLangObject(
              PsiManager.getInstance(myProject), GlobalSearchScope.allScope(myProject));
      if (removeObject && javaLangObject.equals(type)) {
        final HashSet<PsiTypeVariable> cluster = myFactory.getClusterOf(var.getIndex());
        if (cluster != null) {
          for (final PsiTypeVariable war : cluster) {
            final PsiType wtype = b.apply(war);
            if (!javaLangObject.equals(wtype)) {
              myBindings.put(index, type);
              break;
            }
          }
        }
      } else {
        myBindings.put(index, type);
      }
    }
  }
}
public TIntObjectHashMap<TIntHashSet> resolve(
    DirectedGraph<Integer, RDFEdge> graph,
    Set<Set<Integer>> literalSubjectPairs,
    Document document,
    KnowledgeBase kb) {
  TIntObjectHashMap<TIntHashSet> resolvedSubjects = new TIntObjectHashMap<TIntHashSet>();
  TIntHashSet ham = new TIntHashSet();
  TIntHashSet spam = new TIntHashSet();
  resolvedSubjects.put(0, spam);
  resolvedSubjects.put(1, ham);
  for (Set<Integer> c : literalSubjectPairs) {
    TIntHashSet subjects = getAmbiguousURIRefs(c);
    if (subjects.size() > 1) {
      // pick one candidate at random as "ham"; the remaining candidates become "spam"
      ham.add(subjects.toArray()[new Random().nextInt(subjects.size())]);
      if (resolvedSubjects.size() < subjects.size()) {
        for (int s : subjects.toArray()) {
          if (!ham.contains(s)) {
            spam.add(s);
          }
        }
      }
    }
  }
  return resolvedSubjects;
}
@NotNull
public TIntObjectHashMap<T> preLoadCommitData(@NotNull TIntHashSet commits) throws VcsException {
  TIntObjectHashMap<T> result = new TIntObjectHashMap<>();
  final MultiMap<VirtualFile, String> rootsAndHashes = MultiMap.create();
  commits.forEach(
      commit -> {
        CommitId commitId = myHashMap.getCommitId(commit);
        if (commitId != null) {
          rootsAndHashes.putValue(commitId.getRoot(), commitId.getHash().asString());
        }
        return true;
      });
  for (Map.Entry<VirtualFile, Collection<String>> entry : rootsAndHashes.entrySet()) {
    VcsLogProvider logProvider = myLogProviders.get(entry.getKey());
    if (logProvider != null) {
      List<? extends T> details =
          readDetails(logProvider, entry.getKey(), ContainerUtil.newArrayList(entry.getValue()));
      for (T data : details) {
        int index = myHashMap.getCommitIndex(data.getId(), data.getRoot());
        result.put(index, data);
      }
      saveInCache(result);
    } else {
      LOG.error(
          "No log provider for root "
              + entry.getKey().getPath()
              + ". All known log providers "
              + myLogProviders);
    }
  }
  return result;
}
private void setDirtyScope(int passId, RangeMarker scope) {
  RangeMarker marker = dirtyScopes.get(passId);
  if (marker != scope) {
    if (marker != null) {
      marker.dispose();
    }
    dirtyScopes.put(passId, scope);
  }
}
public final ItemAuction[] getAuctions() {
  final ItemAuction[] auctions;
  synchronized (_auctions) {
    auctions = _auctions.getValues(new ItemAuction[_auctions.size()]);
  }
  return auctions;
}
public void testP2OMap() {
  // long-keyed map
  TLongObjectHashMap<Long> lomap = new TLongObjectHashMap<Long>();
  assertTrue(serializesCorrectly(lomap, "p2o-l-1"));
  lomap.put(0, Long.valueOf(1));
  assertTrue(serializesCorrectly(lomap, "p2o-l-2"));
  lomap.put(Long.MIN_VALUE, Long.valueOf(Long.MIN_VALUE));
  assertTrue(serializesCorrectly(lomap, "p2o-l-3"));
  lomap.put(Long.MAX_VALUE, Long.valueOf(Long.MAX_VALUE));
  assertTrue(serializesCorrectly(lomap, "p2o-l-4"));

  // int-keyed map
  TIntObjectHashMap<Integer> iomap = new TIntObjectHashMap<Integer>();
  assertTrue(serializesCorrectly(iomap, "p2o-i-1"));
  iomap.put(0, Integer.valueOf(1));
  assertTrue(serializesCorrectly(iomap, "p2o-i-2"));
  iomap.put(Integer.MIN_VALUE, Integer.valueOf(Integer.MIN_VALUE));
  assertTrue(serializesCorrectly(iomap, "p2o-i-3"));
  iomap.put(Integer.MAX_VALUE, Integer.valueOf(Integer.MAX_VALUE));
  assertTrue(serializesCorrectly(iomap, "p2o-i-4"));

  // double-keyed map
  TDoubleObjectHashMap<Double> domap = new TDoubleObjectHashMap<Double>();
  assertTrue(serializesCorrectly(domap, "p2o-d-1"));
  domap.put(0, Double.valueOf(1));
  assertTrue(serializesCorrectly(domap, "p2o-d-2"));
  domap.put(Double.MIN_VALUE, Double.valueOf(Double.MIN_VALUE));
  assertTrue(serializesCorrectly(domap, "p2o-d-3"));
  domap.put(Double.MAX_VALUE, Double.valueOf(Double.MAX_VALUE));
  assertTrue(serializesCorrectly(domap, "p2o-d-4"));
  domap.put(Double.POSITIVE_INFINITY, Double.valueOf(Double.POSITIVE_INFINITY));
  assertTrue(serializesCorrectly(domap, "p2o-d-5"));
  domap.put(Double.NEGATIVE_INFINITY, Double.valueOf(Double.NEGATIVE_INFINITY));
  assertTrue(serializesCorrectly(domap, "p2o-d-6"));
  // NOTE: trove doesn't deal well with NaN
  // domap.put( Double.NaN, Double.NaN );
  // assertTrue( serializesCorrectly( domap ) );

  // float-keyed map
  TFloatObjectHashMap<Float> fomap = new TFloatObjectHashMap<Float>();
  assertTrue(serializesCorrectly(fomap, "p2o-f-1"));
  fomap.put(0, Float.valueOf(1));
  assertTrue(serializesCorrectly(fomap, "p2o-f-2"));
  fomap.put(Float.MIN_VALUE, Float.valueOf(Float.MIN_VALUE));
  assertTrue(serializesCorrectly(fomap, "p2o-f-3"));
  fomap.put(Float.MAX_VALUE, Float.valueOf(Float.MAX_VALUE));
  assertTrue(serializesCorrectly(fomap, "p2o-f-4"));
  fomap.put(Float.POSITIVE_INFINITY, Float.valueOf(Float.POSITIVE_INFINITY));
  assertTrue(serializesCorrectly(fomap, "p2o-f-5"));
  fomap.put(Float.NEGATIVE_INFINITY, Float.valueOf(Float.NEGATIVE_INFINITY));
  assertTrue(serializesCorrectly(fomap, "p2o-f-6"));
  // NOTE: trove doesn't deal well with NaN
  // fomap.put( Float.NaN, Float.NaN );
  // assertTrue( serializesCorrectly( fomap ) );
}
private void addToMap(int id, int index) {
  id = unwrap(myFactory.getValue(id)).getID();
  int[] classes = myIdToEqClassesIndices.get(id);
  if (classes == null) {
    classes = new int[] {index};
    myIdToEqClassesIndices.put(id, classes);
  } else {
    classes = ArrayUtil.append(classes, index);
    myIdToEqClassesIndices.put(id, classes);
  }
}
private void removeFromMap(int id, int index) {
  id = unwrap(myFactory.getValue(id)).getID();
  int[] classes = myIdToEqClassesIndices.get(id);
  if (classes != null) {
    int i = ArrayUtil.indexOf(classes, index);
    if (i != -1) {
      classes = ArrayUtil.remove(classes, i);
      myIdToEqClassesIndices.put(id, classes);
    }
  }
}
BindingImpl(final int index, final PsiType type) {
  myBindings = new TIntObjectHashMap<PsiType>();
  myCyclic = type instanceof PsiTypeVariable;
  myBindings.put(index, type);
  if (type instanceof Bottom) {
    final HashSet<PsiTypeVariable> cluster = myFactory.getClusterOf(index);
    if (cluster != null) {
      for (PsiTypeVariable var : cluster) {
        myBindings.put(var.getIndex(), type);
      }
    }
  }
}
public int registerCustomItemName(Plugin plugin, String key) {
  int id = UniqueItemStringMap.getId(key);
  itemPlugin.put(id, plugin.getDescription().getName());
  return id;
}
void afterUnmarshal(Unmarshaller u, Object parent) {
  items = new TIntObjectHashMap<ItemTemplate>();
  for (ItemTemplate it : its) {
    items.put(it.getTemplateId(), it);
  }
  its = null;
}
/**
 * Initialize implementation dependent properties of the RTree. Currently implemented properties
 * are:
 *
 * <ul>
 *   <li>MaxNodeEntries This specifies the maximum number of entries in a node. The default value
 *       is 10, which is used if the property is not specified, or is less than 2.
 *   <li>MinNodeEntries This specifies the minimum number of entries in a node. The default value
 *       is half of the MaxNodeEntries value (rounded down), which is used if the property is not
 *       specified or is less than 1.
 * </ul>
 *
 * @see com.infomatiq.jsi.SpatialIndex#init(Properties)
 */
public void init(Properties props) {
  maxNodeEntries = Integer.parseInt(props.getProperty("MaxNodeEntries", "0"));
  minNodeEntries = Integer.parseInt(props.getProperty("MinNodeEntries", "0"));

  // Obviously a node with less than 2 entries cannot be split.
  // The node splitting algorithm will work with only 2 entries
  // per node, but will be inefficient.
  if (maxNodeEntries < 2) {
    log.warn(
        "Invalid MaxNodeEntries = "
            + maxNodeEntries
            + " Resetting to default value of "
            + DEFAULT_MAX_NODE_ENTRIES);
    maxNodeEntries = DEFAULT_MAX_NODE_ENTRIES;
  }

  // The MinNodeEntries must be less than or equal to (int) (MaxNodeEntries / 2)
  if (minNodeEntries < 1 || minNodeEntries > maxNodeEntries / 2) {
    log.warn("MinNodeEntries must be between 1 and MaxNodeEntries / 2");
    minNodeEntries = maxNodeEntries / 2;
  }

  entryStatus = new byte[maxNodeEntries];
  initialEntryStatus = new byte[maxNodeEntries];
  for (int i = 0; i < maxNodeEntries; i++) {
    initialEntryStatus[i] = ENTRY_STATUS_UNASSIGNED;
  }

  Node root = new Node(rootNodeId, 1, maxNodeEntries);
  nodeMap.put(rootNodeId, root);

  log.info(
      "init() " + " MaxNodeEntries = " + maxNodeEntries + ", MinNodeEntries = " + minNodeEntries);
}
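// A minimal usage sketch for the init(Properties) method documented above. The property keys
// ("MaxNodeEntries", "MinNodeEntries") come from the Javadoc; the RTree/SpatialIndex types and
// package names are assumed from the com.infomatiq.jsi library referenced in the @see tag, and
// the values are illustrative only.
import java.util.Properties;
import com.infomatiq.jsi.SpatialIndex;
import com.infomatiq.jsi.rtree.RTree;

class RTreeInitExample {
  static SpatialIndex newIndex() {
    Properties props = new Properties();
    props.setProperty("MaxNodeEntries", "50"); // values below 2 fall back to the default
    props.setProperty("MinNodeEntries", "20"); // clamped to MaxNodeEntries / 2 if out of range
    SpatialIndex index = new RTree();
    index.init(props);
    return index;
  }
}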
/**
 * Iterate over the map and ensure that, if the same string is already in the cache, the entry
 * refers to the cached instance rather than a new one.
 *
 * @param stringMap map whose String values should be replaced by cached instances where possible
 */
public void mergeStrings(TIntObjectHashMap<String> stringMap) {
  if (stringMap == null) return;

  // FIXME: Investigate using String.intern(); what's its overhead if we do it on all strings?
  // FIXME: Keep track of which integer keys (they should be 0-255) merge well and which don't,
  //        by tracking successes
  for (TIntObjectIterator<String> it = stringMap.iterator(); it.hasNext(); ) {
    it.advance();
    int key = it.key();
    assert key < 256;

    // Eliminate poorly merging strings
    // FIXME: should also remove existing entries by keeping a separate cache per key, which we
    //        can drop once we can see it's a problem. For now, we just stop after it seems that
    //        we're getting no duplicates.
    if (strFailuresByKey[key] > 5000 && strSuccessesByKey[key] < strFailuresByKey[key]) {
      continue; // after 5000 failures with still fewer successes, give up on this key
    }

    String possDupl = it.value();
    String cachedString = stringCache.get(possDupl);
    if (cachedString != null) {
      it.setValue(cachedString); // if we have one cached, use it
      strSuccessesByKey[key]++;
    } else {
      stringCache.put(possDupl, possDupl);
      strFailuresByKey[key]++;
    }
  }
}
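// mergeStrings() above deduplicates String values by routing them through a shared cache.
// Below is a self-contained sketch of the same idea using a plain HashMap as the cache;
// the class and field names are illustrative, and the gnu.trove imports assume the older
// package layout used by the other snippets in this section.
import java.util.HashMap;
import java.util.Map;
import gnu.trove.TIntObjectHashMap;
import gnu.trove.TIntObjectIterator;

class StringMergerSketch {
  private final Map<String, String> cache = new HashMap<String, String>();

  void merge(TIntObjectHashMap<String> stringMap) {
    for (TIntObjectIterator<String> it = stringMap.iterator(); it.hasNext(); ) {
      it.advance();
      String canonical = cache.get(it.value());
      if (canonical != null) {
        it.setValue(canonical); // reuse the cached instance, dropping the duplicate
      } else {
        cache.put(it.value(), it.value()); // first sighting becomes the canonical instance
      }
    }
  }
}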
/**
 * Merge equal IntegerVariables into a single one.
 *
 * @param k number of unique variables
 * @param nbIntVars number of integer variables within the model
 * @param color array of indices of unique variables
 * @param domainByColor domain of unique variables
 */
@Override
protected void apply(
    final int k,
    final int nbIntVars,
    final int[] color,
    final TIntObjectHashMap<IntegerVariableMerger> domainByColor) {
  IntegerVariable vtmp;
  IntegerVariableMerger dtmp;
  final IntegerVariable[] var = new IntegerVariable[k + 1];
  for (int i = 0; i < nbIntVars; i++) {
    final int col = color[i];
    if (col != -1) {
      final IntegerVariable v = model.getIntVar(i);
      if (var[col] == null) {
        dtmp = domainByColor.get(col);
        if (dtmp.values != null) {
          vtmp = new IntegerVariable(StringUtils.randomName(), dtmp.values);
        } else {
          vtmp = new IntegerVariable(StringUtils.randomName(), dtmp.low, dtmp.upp);
        }
        vtmp.addOptions(dtmp.optionsSet);
        var[col] = vtmp;
        add(vtmp);
      }
      replaceBy(v, var[col]);
      delete(v);
    }
  }
}
void afterUnmarshal(Unmarshaller u, Object parent) {
  wrappedItemData = new TIntObjectHashMap<WrapperItem>();
  for (WrapperItem it : list) {
    wrappedItemData.put(it.getItemId(), it);
  }
  list = null;
}
private void executeDelete(@NotNull VirtualFile file) {
  if (!file.exists()) {
    LOG.error("Deleting a file, which does not exist: " + file.getPath());
    return;
  }
  clearIdCache();

  int id = getFileId(file);

  final VirtualFile parent = file.getParent();
  final int parentId = parent == null ? 0 : getFileId(parent);

  if (parentId == 0) {
    String rootUrl = normalizeRootUrl(file.getPath(), (NewVirtualFileSystem) file.getFileSystem());
    myRootsLock.writeLock().lock();
    try {
      myRoots.remove(rootUrl);
      myRootsById.remove(id);
      FSRecords.deleteRootRecord(id);
    } finally {
      myRootsLock.writeLock().unlock();
    }
  } else {
    removeIdFromParentList(parentId, id, parent, file);
    VirtualDirectoryImpl directory = (VirtualDirectoryImpl) file.getParent();
    assert directory != null : file;
    directory.removeChild(file);
  }

  FSRecords.deleteRecordRecursively(id);
  invalidateSubtree(file);
}
public void registerTimeoutHandler(int commandId, int timeout, Runnable onTimeout) {
  synchronized (myLock) {
    myTimeoutHandlers.put(
        commandId, new TimeoutHandler(onTimeout, System.currentTimeMillis() + timeout));
  }
  scheduleTimeoutCheck();
}
public Binding reduceRecursive() {
  final BindingImpl binding = (BindingImpl) create();

  for (final PsiTypeVariable var : myBoundVariables) {
    final int index = var.getIndex();
    final PsiType type = myBindings.get(index);

    if (type != null) {
      class Verifier extends PsiExtendedTypeVisitor<Void> {
        boolean myFlag = false;

        @Override
        public Void visitTypeVariable(final PsiTypeVariable var) {
          if (var.getIndex() == index) {
            myFlag = true;
          }
          return null;
        }
      }

      final Verifier verifier = new Verifier();
      type.accept(verifier);

      if (verifier.myFlag) {
        myBindings.put(index, Bottom.BOTTOM);
        binding.myBindings.put(index, Bottom.BOTTOM);
      } else {
        binding.myBindings.put(index, type);
      }
    } else {
      binding.myBindings.put(index, type);
    }
  }

  for (final PsiTypeVariable var : myBoundVariables) {
    final int index = var.getIndex();
    final PsiType type = myBindings.get(index);

    if (type != null) {
      myBindings.put(index, binding.apply(type));
    }
  }

  return this;
}
public TIntIntHashMap rollItems(int wrapperItemId, int playerLevel, ItemRace race) {
  TIntIntHashMap itemCountMap = new TIntIntHashMap();
  final WrapperItem wrapperItem = wrappedItemData.get(wrapperItemId);
  if (wrapperItem == null) return itemCountMap;
  return wrapperItem.rollItems(playerLevel, race);
}
@Override
public String getPath(int pathId) {
  String path = myAlternativePaths.get(pathId);
  if (path != null) {
    return path;
  }
  return myWrapee.getPath(pathId);
}
public void reset() {
  if (safeToReuse != null) {
    safeToReuse.clear();
  }
  objectsCacheOnWrite.clear();
  objectsCacheOnRead.clear();
  immutableCache.clear();
  objectIdOnCache = 0;
}
@Nullable
private VirtualFileSystemEntry findFileById(
    int id, boolean cachedOnly, TIntArrayList visited, int mask) {
  VirtualFileSystemEntry cached = myIdToDirCache.get(id);
  if (cached != null) return cached;

  if (visited != null
      && (visited.size() >= DEPTH_LIMIT || (mask & id) == id && visited.contains(id))) {
    @NonNls
    String sb =
        "Dead loop detected in persistent FS (id=" + id + " cached-only=" + cachedOnly + "):";
    for (int i = 0; i < visited.size(); i++) {
      int _id = visited.get(i);
      sb +=
          "\n "
              + _id
              + " '"
              + getName(_id)
              + "' "
              + String.format("%02x", getFileAttributes(_id))
              + ' '
              + myIdToDirCache.containsKey(_id);
    }
    LOG.error(sb);
    return null;
  }

  int parentId = getParent(id);
  if (parentId >= id) {
    if (visited == null) visited = new TIntArrayList(DEPTH_LIMIT);
  }
  if (visited != null) visited.add(id);

  VirtualFileSystemEntry result;
  if (parentId == 0) {
    myRootsLock.readLock().lock();
    try {
      result = myRootsById.get(id);
    } finally {
      myRootsLock.readLock().unlock();
    }
  } else {
    VirtualFileSystemEntry parentFile = findFileById(parentId, cachedOnly, visited, mask | id);
    if (parentFile instanceof VirtualDirectoryImpl) {
      result = ((VirtualDirectoryImpl) parentFile).findChildById(id, cachedOnly);
    } else {
      result = null;
    }
  }

  if (result != null && result.isDirectory()) {
    VirtualFileSystemEntry old = myIdToDirCache.put(id, result);
    if (old != null) result = old;
  }
  return result;
}
public ContextRelevantStaticMethod(
    final PsiMethod psiMethod, @Nullable final TIntObjectHashMap<PsiVariable> parameters) {
  this.psiMethod = psiMethod;
  if (parameters == null) {
    this.parameters = null;
  } else {
    this.parameters = new TIntObjectHashMap<>(parameters.size());
    parameters.forEachEntry(
        new TIntObjectProcedure<PsiVariable>() {
          @SuppressWarnings("ConstantConditions")
          @Override
          public boolean execute(final int pos, final PsiVariable var) {
            ContextRelevantStaticMethod.this.parameters.put(pos, new VariableSubLookupElement(var));
            return true;
          }
        });
  }
}
public void saveInCache(@NotNull TIntObjectHashMap<T> details) {
  UIUtil.invokeAndWaitIfNeeded(
      (Runnable)
          () ->
              details.forEachEntry(
                  (key, value) -> {
                    myCache.put(key, value);
                    return true;
                  }));
}
private void checkForCycles() {
  final TIntObjectHashMap<TIntHashSet> transitivePredecessors =
      new TIntObjectHashMap<TIntHashSet>();

  myRegisteredPassFactories.forEachEntry(
      new TIntObjectProcedure<PassConfig>() {
        @Override
        public boolean execute(int passId, PassConfig config) {
          TIntHashSet allPredecessors = new TIntHashSet(config.completionPredecessorIds);
          allPredecessors.addAll(config.startingPredecessorIds);
          transitivePredecessors.put(passId, allPredecessors);
          allPredecessors.forEach(
              new TIntProcedure() {
                @Override
                public boolean execute(int predecessorId) {
                  PassConfig predecessor = myRegisteredPassFactories.get(predecessorId);
                  if (predecessor == null) return true;
                  TIntHashSet transitives = transitivePredecessors.get(predecessorId);
                  if (transitives == null) {
                    transitives = new TIntHashSet();
                    transitivePredecessors.put(predecessorId, transitives);
                  }
                  transitives.addAll(predecessor.completionPredecessorIds);
                  transitives.addAll(predecessor.startingPredecessorIds);
                  return true;
                }
              });
          return true;
        }
      });

  transitivePredecessors.forEachKey(
      new TIntProcedure() {
        @Override
        public boolean execute(int passId) {
          if (transitivePredecessors.get(passId).contains(passId)) {
            throw new IllegalArgumentException(
                "There is a cycle introduced involving pass "
                    + myRegisteredPassFactories.get(passId).passFactory);
          }
          return true;
        }
      });
}
/** returns the postings suitable to be written into the block direct index */
@Override
public int[][] getPostings() {
  final int termCount = occurrences.size();
  final int[] termids = new int[termCount];
  final int[] tfs = new int[termCount];
  final int[] fields = null;
  final int[] blockfreqs = new int[termCount];
  final TIntObjectHashMap<int[]> term2blockids = new TIntObjectHashMap<int[]>();
  int blockTotal = 0; // TODO we already have blockTotal as this.blockCount, so no need to count?

  class PostingVisitor implements TObjectIntProcedure<String> {
    int i = 0;
    int blockTotal = 0;

    public boolean execute(final String a, final int b) {
      termids[i] = getTermId(a);
      tfs[i] = b;
      final TIntHashSet ids = term_blocks.get(a);
      blockfreqs[i] = ids.size();
      blockTotal += ids.size();
      final int[] bids = ids.toArray();
      Arrays.sort(bids);
      term2blockids.put(termids[i], bids);
      // System.err.println(a + ": tid=" + termids[i] + " tf=" + tfs[i] + " bf=" + blockfreqs[i]
      //     + " blocks=" + Arrays.toString(bids));
      i++;
      return true;
    }
  }

  PostingVisitor proc = new PostingVisitor();
  occurrences.forEachEntry(proc);
  blockTotal = proc.blockTotal;
  HeapSortInt.ascendingHeapSort(termids, tfs, blockfreqs);

  final int[] blockids = new int[blockTotal];
  int offset = 0;
  for (int termid : termids) {
    final int[] src = term2blockids.get(termid);
    final int src_l = src.length;
    System.arraycopy(src, 0, blockids, offset, src_l);
    offset += src_l;
  }
  return new int[][] {termids, tfs, fields, blockfreqs, blockids};
}