public void setCustomItemBlock(int id, Integer blockId, Short metaData) {
    // A non-null pair registers the override; nulls clear it. The original
    // condition (blockId != null || metaData == null) contradicted the else
    // branch and would store null values; both arguments must be non-null.
    if (blockId != null && metaData != null) {
        itemBlock.put(id, blockId);
        itemMetaData.put(id, metaData);
    } else {
        itemBlock.remove(id);
        itemMetaData.remove(id);
    }
    updateCustomClientData(id);
}
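A hedged usage sketch of the put/remove contract above; the manager variable and the concrete ids are hypothetical, not from the source:

// Hypothetical usage: a non-null pair registers the override, nulls clear it.
manager.setCustomItemBlock(318, 1, (short) 0); // map custom item 318 to block id 1, meta 0
manager.setCustomItemBlock(318, null, null);   // remove the override again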
/**
 * Tries to calculate given line's indent column assuming that there might be a comment at the
 * given indent offset (see {@link #getCommentPrefix(IElementType)}).
 *
 * @param line           target line
 * @param indentOffset   start indent offset to use for the given line
 * @param lineEndOffset  given line's end offset
 * @param fallbackColumn column to return if it's not possible to apply comment-specific indent
 *                       calculation rules
 * @return given line's indent column to use
 */
private int calcIndent(int line, int indentOffset, int lineEndOffset, int fallbackColumn) {
    final HighlighterIterator it = myEditor.getHighlighter().createIterator(indentOffset);
    IElementType tokenType = it.getTokenType();
    Language language = tokenType.getLanguage();
    TokenSet comments = myComments.get(language);
    if (comments == null) {
        ParserDefinition definition = LanguageParserDefinitions.INSTANCE.forLanguage(language);
        if (definition != null) {
            comments = definition.getCommentTokens();
        }
        if (comments == null) {
            return fallbackColumn;
        } else {
            myComments.put(language, comments);
        }
    }
    if (comments.contains(tokenType) && indentOffset == it.getStart()) {
        String prefix = COMMENT_PREFIXES.get(tokenType);
        if (prefix == null) {
            prefix = getCommentPrefix(tokenType);
        }
        if (!NO_COMMENT_INFO_MARKER.equals(prefix)) {
            final int indentInsideCommentOffset =
                CharArrayUtil.shiftForward(myChars, indentOffset + prefix.length(), lineEndOffset, " \t");
            if (indentInsideCommentOffset < lineEndOffset) {
                int indent = myEditor.calcColumnNumber(indentInsideCommentOffset, line);
                indentAfterUncomment.put(line, indent - prefix.length());
                return indent;
            }
        }
    }
    return fallbackColumn;
}
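The editor-independent core of the calculation above can be sketched with plain strings. This is a simplified approximation (it ignores tab expansion, which calcColumnNumber handles, and assumes the comment starts the line), and every name in it is hypothetical:

// Simplified sketch: column of the first non-blank character after a comment prefix.
// Unlike calcIndent, this treats offset and column as equal (no tab expansion).
static int indentInsideComment(String line, String prefix, int fallbackColumn) {
    if (!line.startsWith(prefix)) {
        return fallbackColumn;
    }
    int i = prefix.length();
    while (i < line.length() && (line.charAt(i) == ' ' || line.charAt(i) == '\t')) {
        i++; // skip blanks between the prefix and the commented-out content
    }
    return i < line.length() ? i : fallbackColumn;
}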
@Override
public void setLightLevel(int id, short data, int level) {
    // Snapshot the vanilla value once, so it can be restored later.
    if (!originalLight.containsKey(id)) {
        originalLight.put(id, getLightLevel(id, data));
    }
    net.minecraft.server.Block.s[id] = level; // Block.s: obfuscated per-block light levels
}
@Override
public void setOpaque(int id, short data, boolean opacity) {
    // Snapshot the vanilla value once, so it can be restored later.
    if (!originalOpacity.containsKey(id)) {
        originalOpacity.put(id, isOpaque(id, data) ? 1 : 0);
    }
    net.minecraft.server.Block.o[id] = opacity; // Block.o: obfuscated per-block opacity flags
}
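Both setters above snapshot the vanilla value before overwriting the static array, which implies a matching restore path. A minimal sketch of what that could look like for the light level, assuming originalLight maps block id to the saved level; the method name is an assumption, not from the source:

// Hypothetical restore: put the vanilla light level back and drop the snapshot.
public void resetLightLevel(int id) {
    if (originalLight.containsKey(id)) {
        net.minecraft.server.Block.s[id] = originalLight.get(id);
        originalLight.remove(id);
    }
}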
@NotNull
private static TIntIntHashMap getCommitsMap(@NotNull Iterable<Integer> hashes) {
    TIntIntHashMap commits = new TIntIntHashMap();
    int row = 0;
    for (Integer commitId : hashes) {
        commits.put(commitId, row);
        row++;
    }
    return commits;
}
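A hypothetical usage sketch; the commit ids are made up:

// Invert an ordered sequence of commit ids into an id -> row-index map.
TIntIntHashMap rows = getCommitsMap(java.util.Arrays.asList(101, 102, 103));
int row = rows.get(102); // 1, because 102 was second in iteration order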
private static TIntIntHashMap getCoverageVersionToCurrentLineMapping(Diff.Change change, int firstNLines) {
    TIntIntHashMap result = new TIntIntHashMap();
    int prevLineInFirst = 0;
    int prevLineInSecond = 0;
    while (change != null) {
        // Lines before the next change are unchanged: map them one-to-one.
        for (int l = 0; l < change.line0 - prevLineInFirst; l++) {
            result.put(prevLineInFirst + l, prevLineInSecond + l);
        }
        prevLineInFirst = change.line0 + change.deleted;
        prevLineInSecond = change.line1 + change.inserted;
        change = change.link;
    }
    // Map the unchanged tail after the last change.
    for (int i = prevLineInFirst; i < firstNLines; i++) {
        result.put(i, prevLineInSecond + i - prevLineInFirst);
    }
    return result;
}
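A small worked example may help; the call below exercises only the null-change path shown above, while the commented scenario is hypothetical:

// With no diff changes, the first N lines map to themselves.
TIntIntHashMap identity = getCoverageVersionToCurrentLineMapping(null, 3);
// identity: {0 -> 0, 1 -> 1, 2 -> 2}
// Had one line been inserted at the top of the second file
// (line0 = 0, line1 = 0, deleted = 0, inserted = 1),
// every old line i would instead map to i + 1.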
public UNVDomain(UNVParser parser, int id, Color color) {
    this.id = id;
    this.color = color;
    TIntHashSet nodeset = new TIntHashSet();
    tria3 = parser.getTria3FromGroup(id);
    if (tria3.length > 0) {
        nodeset.ensureCapacity(tria3.length);
        nodeset.addAll(tria3);
    }
    quad4 = parser.getQuad4FromGroup(id);
    if (quad4.length > 0) {
        nodeset.ensureCapacity(nodeset.size() + quad4.length);
        nodeset.addAll(quad4);
    }
    beam2 = parser.getBeam2FromGroup(id);
    if (beam2.length > 0) {
        nodeset.ensureCapacity(nodeset.size() + beam2.length);
        nodeset.addAll(beam2);
    }
    tria6 = parser.getTria6FromGroup(id);
    if (tria6.length > 0) {
        nodeset.ensureCapacity(nodeset.size() + tria6.length);
        nodeset.addAll(tria6);
    }
    int[] nodesID = nodeset.toArray();
    nodes = readNodes(nodesID, parser.getNodesCoordinates());

    // Compute inverse relation
    TIntIntHashMap map = new TIntIntHashMap(nodesID.length);
    for (int i = 0; i < nodesID.length; i++)
        map.put(nodesID[i], i);
    for (int i = 0; i < tria3.length; i++)
        tria3[i] = map.get(tria3[i]);
    for (int i = 0; i < quad4.length; i++)
        quad4[i] = map.get(quad4[i]);
    for (int i = 0; i < beam2.length; i++)
        beam2[i] = map.get(beam2[i]);
    for (int i = 0; i < tria6.length; i++)
        tria6[i] = map.get(tria6[i]);
}
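The "inverse relation" loop at the end is a general renumbering idiom: collapse sparse global node ids into dense local indices, then rewrite each connectivity array in place. A standalone sketch with made-up values:

int[] nodesID = {10, 42, 7};                    // global ids, in local storage order
TIntIntHashMap map = new TIntIntHashMap(nodesID.length);
for (int i = 0; i < nodesID.length; i++)
    map.put(nodesID[i], i);                     // global id -> local index
int[] connectivity = {42, 7, 10};               // one triangle, referencing global ids
for (int i = 0; i < connectivity.length; i++)
    connectivity[i] = map.get(connectivity[i]); // now {1, 2, 0}, in local indices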
public ResultSet doMatch(String queryNumber, MatchingQueryTerms queryTerms, final FatResultSet fat)
        throws IOException {
    final int numResults = fat.getResultSize();
    final FeaturedQueryResultSet rtr = new FeaturedQueryResultSet(fat);
    if (fat.getResultSize() == 0) {
        rtr.scores = new double[0];
        rtr.docids = new int[0];
        rtr.occurrences = new short[0];
        return rtr;
    }
    if (sampleFeature)
        rtr.putFeatureScores("SAMPLE", fat.getScores());

    // for each WMODEL feature
    for (int fid = 0; fid < wModels.length; fid++) {
        final ResultSet thinChild = wModels[fid].doMatch(queryNumber, queryTerms, fat);
        rtr.putFeatureScores(wModelNames[fid], thinChild.getScores());
    }

    // for each QI feature
    if (qiFeatures.length > 0) {
        WritablePosting[][] postings = fat.getPostings();
        int[] docids = fat.getDocids();
        for (int fid = 0; fid < qiFeatures.length; fid++) {
            WeightingModel wm = qiFeatures[fid];
            double[] scores = new double[numResults];
            for (int di = 0; di < numResults; di++) {
                WritablePosting p = FatUtils.firstPosting(postings[di]);
                if (p == null) { // hack
                    p = new BlockFieldPostingImpl(docids[di], 0, new int[0], new int[4]);
                    ((FieldPosting) p).setFieldLengths(new int[4]);
                }
                scores[di] = wm.score(p);
            }
            rtr.putFeatureScores(qiFeatureNames[fid], scores);
        }
    }

    // for each DSM feature
    if (dsms.length > 0) {
        TIntIntHashMap docidMap = new TIntIntHashMap(numResults);
        int position = 0;
        for (int docid : fat.getDocids()) {
            docidMap.put(docid, position++);
        }
        final Index fatIndex = FatUtils.makeIndex(fat);
        for (int fid = 0; fid < dsms.length; fid++) {
            final double[] scores = new double[numResults];
            final int[] docids = new int[numResults];
            final short[] occurrences = new short[numResults];
            System.arraycopy(fat.getDocids(), 0, docids, 0, numResults);
            System.arraycopy(fat.getOccurrences(), 0, occurrences, 0, numResults);

            // Sort by docid so that the original score ordering can be recovered
            // after the DSM has re-scored the documents
            MultiSort.ascendingHeapSort(docids, scores, occurrences, docids.length);

            final ResultSet thinChild = new QueryResultSet(docids, scores, occurrences);
            final MatchingQueryTerms mqtLocal = new MatchingQueryTerms(queryNumber);
            mqtLocal.setDefaultTermWeightingModel(queryTerms.defaultWeightingModel);
            int ti = 0;
            for (String t : fat.getQueryTerms()) {
                mqtLocal.setTermProperty(t, fat.getKeyFrequencies()[ti]);
                mqtLocal.setTermProperty(t, fat.getEntryStatistics()[ti]);
                ti++;
            }

            // apply the dsm on the temporary resultset
            dsms[fid].modifyScores(fatIndex, mqtLocal, thinChild);

            // map scores back into original ordering
            double[] scoresFinal = new double[numResults];
            for (int i = 0; i < numResults; i++) {
                scoresFinal[docidMap.get(docids[i])] = scores[i];
            }

            // add the feature, regardless of whether it has scores or not
            rtr.putFeatureScores(dsmNames[fid], scoresFinal);
        }
    }

    final String[] labels = new String[rtr.getResultSize()];
    Arrays.fill(labels, "-1");
    rtr.setLabels(labels);
    if (fat.hasMetaItems("docno")) {
        rtr.addMetaItems("docno", fat.getMetaItems("docno"));
    }
    if (fat.hasMetaItems("label"))
        rtr.setLabels(fat.getMetaItems("label"));
    return rtr;
}
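The DSM branch uses a sort-then-scatter idiom worth isolating: remember each docid's original position, let the sorted copy be re-scored, then scatter the scores back. A reduced sketch with illustrative values only:

int[] originalDocids = {30, 10, 20};
TIntIntHashMap docidMap = new TIntIntHashMap(originalDocids.length);
int position = 0;
for (int docid : originalDocids)
    docidMap.put(docid, position++);          // docid -> original position
int[] sortedDocids = {10, 20, 30};            // after ascending sort by docid
double[] sortedScores = {0.1, 0.2, 0.3};      // scores assigned in sorted order
double[] scoresFinal = new double[originalDocids.length];
for (int i = 0; i < sortedDocids.length; i++)
    scoresFinal[docidMap.get(sortedDocids[i])] = sortedScores[i];
// scoresFinal == {0.3, 0.1, 0.2}: back in the original docid order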
/**
 * @param padding end offset that will be set after drawing current text fragment
 * @param align   alignment of the current text fragment, if it is SwingConstants.RIGHT or
 *                SwingConstants.TRAILING then the text fragment will be aligned to the right
 *                at the padding, otherwise it will be aligned to the left
 */
public synchronized void appendTextPadding(int padding, @JdkConstants.HorizontalAlignment int align) {
    final int alignIndex = myFragments.size() - 1;
    myFragmentPadding.put(alignIndex, padding);
    myFragmentAlignment.put(alignIndex, align);
}
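A hedged usage sketch in a renderer built on this class; the component variable and the 200 px padding are assumptions, not from the source:

// Hypothetical: pad after the first fragment so the next one starts at x = 200,
// lining rows up into columns.
component.append("ClassName.java");
component.appendTextPadding(200, javax.swing.SwingConstants.LEFT);
component.append("12 KB"); // drawn starting at the padding offset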