protected int defineSymbol(Movie movie, SWFTagTypes timelineWriter, SWFTagTypes definitionWriter)
    throws IOException {
  // flush all symbol definitions
  for (Iterator<Frame> iter = frames.values().iterator(); iter.hasNext(); ) {
    Frame frame = iter.next();
    frame.flushDefinitions(movie, timelineWriter, definitionWriter);
  }

  int id = getNextId(movie);
  SWFTagTypes spriteWriter = definitionWriter.tagDefineSprite(id);

  int lastFrame = 0;
  for (Iterator<Frame> iter = frames.values().iterator(); iter.hasNext(); ) {
    Frame frame = iter.next();
    int number = frame.getFrameNumber();

    // write any intermediate empty frames
    while (number > (lastFrame + 1)) {
      spriteWriter.tagShowFrame();
      lastFrame++;
    }

    frame.write(movie, definitionWriter, spriteWriter);
    lastFrame = number;
  }

  // end of time line
  spriteWriter.tagEnd();

  return id;
}
private List<ChatMessage> sanitizeMap(final SortedMap<Date, ChatMessage> aMap) {
  if (aMap.isEmpty() || aMap.size() == 1) return Lists.newArrayList(aMap.values());

  final LinkedList<ChatMessage> ret = Lists.newLinkedList(aMap.values());
  final ListIterator<ChatMessage> i = ret.listIterator();
  ChatMessage prevMsg = i.next();
  do {
    ChatMessage msg = i.next();
    if (!msg.getPreviousMessageDate().equals(prevMsg.getDate())) {
      if (msg.getPreviousMessageDate().before(prevMsg.getDate())) {
        msg.setPreviousMessageDate(prevMsg.getDate());
      } else {
        final ChatMessage tmp =
            createLostMessageBetween(
                msg.getRoom(), prevMsg.getDate(), msg.getPreviousMessageDate());
        aMap.put(tmp.getDate(), tmp);
        i.previous();
        i.add(tmp);
        i.next();
        msg = tmp;
      }
    }
    prevMsg = msg;
  } while (i.hasNext());
  return ret;
}
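// A minimal JDK-only sketch (the ChatMessage types above are not available here) of the
// ListIterator positioning used in sanitizeMap: after next() returns an element, previous()
// steps back so that add() inserts the filler element just before it.
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import java.util.ListIterator;

public class ListIteratorInsertDemo {
  public static void main(String[] args) {
    List<String> messages = new LinkedList<>(Arrays.asList("a", "c"));
    ListIterator<String> it = messages.listIterator();
    it.next(); // "a"
    it.next(); // "c" -- suppose a gap between "a" and "c" is detected here
    it.previous(); // step back so the cursor sits just before "c"
    it.add("b"); // insert the filler; cursor is now between "b" and "c"
    it.next(); // returns "c" again, so iteration continues after the original element
    System.out.println(messages); // [a, b, c]
  }
}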
/** Data in the model has been updated. Refresh all derived data. */
private void calculate() {
  // Clear all derived data.
  runStatus = Status.OK;
  suiteDataMap.clear();
  suiteMapByStatus.clear();
  for (Status s : Status.values()) {
    suiteMapByStatus.put(s, new ArrayList<SuiteData>());
  }
  allTests.clear();
  testMapByStatus.clear();
  for (Status s : Status.values()) {
    testMapByStatus.put(s, new ArrayList<TestData>());
  }

  /*
   * Populate the Suite map with all Suites.
   */
  Map<String, SuiteResults> resultsMap = new HashMap<String, SuiteResults>();
  for (SuiteResults result : suiteResults) {
    resultsMap.put(result.getName(), result);
  }
  Map<String, SuiteContents> contentsMap = new HashMap<String, SuiteContents>();
  for (SuiteContents contents : suiteContents) {
    contentsMap.put(contents.getName(), contents);
  }
  for (SuiteContents contents : suiteContents) {
    String name = contents.getName();
    SuiteResults result = resultsMap.get(name);
    boolean ignored = ignoredTestList.isIgnored(name);
    ProcessOutput failureMessages = dataListenerInfo.getFailureMessages().get(name);
    suiteDataMap.put(name, new SuiteData(name, ignored, contents, result, failureMessages));
  }

  /*
   * Map the Suites by status.
   */
  for (SuiteData s : suiteDataMap.values()) {
    getSuites(s.getStatus()).add(s);
  }

  /* Populate the Test map with all Tests, and map by status. */
  for (SuiteData s : suiteDataMap.values()) {
    for (TestData t : s.getTestMap().values()) {
      allTests.add(t);
      getTests(t.getStatus()).add(t);
    }
  }

  if (logStats.hasErrors() || !getSuites(Status.ERROR).isEmpty()) {
    runStatus = Status.ERROR;
  } else if (!getSuites(Status.PENDING).isEmpty()) {
    runStatus = Status.PENDING;
  } else {
    runStatus = Status.OK;
  }
}
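// The status-bucket pattern above (one pre-created empty list per enum constant, so that
// getSuites(...)/getTests(...) never see a missing key) can be sketched with plain JDK types.
// Status and the suite names below are stand-ins, not the types from the snippet.
import java.util.ArrayList;
import java.util.EnumMap;
import java.util.List;
import java.util.Map;

public class StatusBucketsDemo {
  enum Status { OK, PENDING, ERROR }

  public static void main(String[] args) {
    // An EnumMap keeps exactly one bucket per constant, in declaration order.
    Map<Status, List<String>> byStatus = new EnumMap<>(Status.class);
    for (Status s : Status.values()) {
      byStatus.put(s, new ArrayList<>());
    }
    byStatus.get(Status.ERROR).add("LoginSuite");
    byStatus.get(Status.OK).add("SearchSuite");
    System.out.println(byStatus); // {OK=[SearchSuite], PENDING=[], ERROR=[LoginSuite]}
  }
}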
public void train() throws MaltChainedException {
  for (AtomicModel divideModel : divideModels.values()) {
    divideModel.train();
  }
  masterModel.train();
  save();
  for (AtomicModel divideModel : divideModels.values()) {
    divideModel.terminate();
  }
  masterModel.terminate();
}
@Override
public List<ITEM> findGreaterThan(KEY key) {
  key = getKey(key);
  SortedMap<KEY, ITEM> keyMultiValueSortedMap = this.navigableMap.tailMap(key, false);
  return new ArrayList<>(keyMultiValueSortedMap.values());
}
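// findGreaterThan relies on NavigableMap.tailMap(key, false), whose boolean argument excludes the
// key itself. A tiny TreeMap demo of that bound (KEY/ITEM and getKey above are not reproduced):
import java.util.TreeMap;

public class TailMapDemo {
  public static void main(String[] args) {
    TreeMap<Integer, String> map = new TreeMap<>();
    map.put(1, "one");
    map.put(2, "two");
    map.put(3, "three");
    System.out.println(map.tailMap(2, false).values()); // [three]      -- strictly greater than 2
    System.out.println(map.tailMap(2, true).values());  // [two, three] -- greater than or equal
  }
}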
protected void clearItems() {
  for (TableItem i : mItems.values()) {
    i.mValue.deleteObserver(i);
  }
  mModel.setRowCount(0);
  mItems.clear();
}
private SortedSet<Artist> createArtists(
    List<MusicFolder> folders, String[] ignoredArticles, String[] shortcuts) throws IOException {
  SortedMap<String, Artist> artistMap = new TreeMap<String, Artist>();
  Set<String> shortcutSet = new HashSet<String>(Arrays.asList(shortcuts));

  for (MusicFolder folder : folders) {
    MusicFile parent = musicFileService.getMusicFile(folder.getPath());
    List<MusicFile> children = musicFileService.getChildDirectories(parent);
    for (MusicFile child : children) {
      if (shortcutSet.contains(child.getName())) {
        continue;
      }
      String sortableName = createSortableName(child.getName(), ignoredArticles);
      Artist artist = artistMap.get(sortableName);
      if (artist == null) {
        artist = new Artist(child.getName(), sortableName);
        artistMap.put(sortableName, artist);
      }
      artist.addMusicFile(child);
    }
  }
  return new TreeSet<Artist>(artistMap.values());
}
/**
 * This method builds a decision tree model.
 *
 * @param sparkContext JavaSparkContext initialized with the application
 * @param modelID Model ID
 * @param trainingData Training data as a JavaRDD of LabeledPoints
 * @param testingData Testing data as a JavaRDD of LabeledPoints
 * @param workflow Machine learning workflow
 * @param mlModel Deployable machine learning model
 * @param includedFeatures Features included in the model
 * @param categoricalFeatureInfo Categorical feature information passed to the decision tree trainer
 * @return Summary of the built model
 * @throws MLModelBuilderException
 */
private ModelSummary buildDecisionTreeModel(
    JavaSparkContext sparkContext,
    long modelID,
    JavaRDD<LabeledPoint> trainingData,
    JavaRDD<LabeledPoint> testingData,
    Workflow workflow,
    MLModel mlModel,
    SortedMap<Integer, String> includedFeatures,
    Map<Integer, Integer> categoricalFeatureInfo)
    throws MLModelBuilderException {
  try {
    Map<String, String> hyperParameters = workflow.getHyperParameters();
    DecisionTree decisionTree = new DecisionTree();
    DecisionTreeModel decisionTreeModel =
        decisionTree.train(
            trainingData,
            getNoOfClasses(mlModel),
            categoricalFeatureInfo,
            hyperParameters.get(MLConstants.IMPURITY),
            Integer.parseInt(hyperParameters.get(MLConstants.MAX_DEPTH)),
            Integer.parseInt(hyperParameters.get(MLConstants.MAX_BINS)));

    // remove from cache
    trainingData.unpersist();
    // add test data to cache
    testingData.cache();

    JavaPairRDD<Double, Double> predictionsAndLabels =
        decisionTree.test(decisionTreeModel, testingData).cache();
    ClassClassificationAndRegressionModelSummary classClassificationAndRegressionModelSummary =
        SparkModelUtils.getClassClassificationModelSummary(
            sparkContext, testingData, predictionsAndLabels);

    // remove from cache
    testingData.unpersist();

    mlModel.setModel(new MLDecisionTreeModel(decisionTreeModel));

    classClassificationAndRegressionModelSummary.setFeatures(
        includedFeatures.values().toArray(new String[0]));
    classClassificationAndRegressionModelSummary.setAlgorithm(
        SUPERVISED_ALGORITHM.DECISION_TREE.toString());

    MulticlassMetrics multiclassMetrics = getMulticlassMetrics(sparkContext, predictionsAndLabels);

    predictionsAndLabels.unpersist();

    classClassificationAndRegressionModelSummary.setMulticlassConfusionMatrix(
        getMulticlassConfusionMatrix(multiclassMetrics, mlModel));
    Double modelAccuracy = getModelAccuracy(multiclassMetrics);
    classClassificationAndRegressionModelSummary.setModelAccuracy(modelAccuracy);
    classClassificationAndRegressionModelSummary.setDatasetVersion(workflow.getDatasetVersion());

    return classClassificationAndRegressionModelSummary;
  } catch (Exception e) {
    throw new MLModelBuilderException(
        "An error occurred while building decision tree model: " + e.getMessage(), e);
  }
}
public List<Playlist> getReadablePlaylistsForUser(String username) {
  List<Playlist> result1 = getWritablePlaylistsForUser(username);
  List<Playlist> result2 = query("select " + COLUMNS + " from playlist where is_public", rowMapper);
  List<Playlist> result3 =
      query(
          "select " + prefix(COLUMNS, "playlist") + " from playlist, playlist_user where "
              + "playlist.id = playlist_user.playlist_id and "
              + "playlist.username != ? and "
              + "playlist_user.username = ?",
          rowMapper,
          username,
          username);

  // Put in sorted map to avoid duplicates.
  SortedMap<Integer, Playlist> map = new TreeMap<Integer, Playlist>();
  for (Playlist playlist : result1) {
    map.put(playlist.getId(), playlist);
  }
  for (Playlist playlist : result2) {
    map.put(playlist.getId(), playlist);
  }
  for (Playlist playlist : result3) {
    map.put(playlist.getId(), playlist);
  }
  return new ArrayList<Playlist>(map.values());
}
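// The "put in sorted map to avoid duplicates" trick above merges several result lists by a unique
// id: later puts with the same key replace earlier ones, and values() comes back ordered by id.
// A JDK-only sketch with strings standing in for the Playlist rows:
import java.util.SortedMap;
import java.util.TreeMap;

public class MergeByIdDemo {
  public static void main(String[] args) {
    SortedMap<Integer, String> byId = new TreeMap<>();
    // The same id arriving from two different source lists is stored only once.
    byId.put(7, "Road trip (writable)");
    byId.put(3, "Jazz (public)");
    byId.put(7, "Road trip (shared)");
    System.out.println(byId.values()); // [Jazz (public), Road trip (shared)] -- deduplicated, sorted by id
  }
}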
/**
 * Return the contents of the buffer. The order of the chunks returned is determined by the
 * ejection policy, such that the chunk most likely to be ejected is first.
 */
@Override
protected Collection<IChunk> getSourceChunksInternal(Collection<IChunk> container) {
  /*
   * there is nothing to return..
   */
  if (_sourceChunks.size() == 0) return container;

  List<IChunk> list = null;
  if (container instanceof List) list = (List<IChunk>) container;
  else list = new ArrayList<IChunk>(_sourceChunks.size());

  list.addAll(_sourceChunks.values());

  switch (getEjectionPolicy()) {
    case MostRecentlyAdded:
    case MostRecentlyMatched:
    case MostRecentlyUsed:
      Collections.reverse(list);
      break;
  }

  if (!(container instanceof List)) container.addAll(list);

  return container;
}
public List<EntityKnowledge> getAllies() {
  List<EntityKnowledge> res = new LinkedList<>();
  for (SortedMap<Integer, EntityKnowledge> map : allies.values()) {
    res.addAll(map.values());
  }
  return res;
}
/**
 * Prepares the MemoryIndex for querying in a non-lazy way.
 *
 * <p>After calling this you can query the MemoryIndex from multiple threads, but you cannot
 * subsequently add new data.
 */
public void freeze() {
  this.frozen = true;
  for (Info info : fields.values()) {
    info.sortTerms();
    info.getNormDocValues(); // lazily computed
  }
}
public Channel getChannel(String name) {
  synchronized (channels) {
    Channel channel = channels.get(name);
    if (channel == null) {
      boolean resize = false;
      channel = channels.remove(INITIAL);
      if (channel == null) {
        channel = new Channel(name);
        resize = true;
      } else {
        channel.setName(name);
      }
      channels.put(name, channel);
      channelArray = channels.values().toArray(new Channel[channels.size()]);
      if (resize) {
        getDisplay()
            .syncExec(
                new Runnable() {
                  public void run() {
                    controlResized(null);
                  }
                });
      }
    }
    return channel;
  }
}
public Integer[] getShareDistribution() {
  Collection<Integer> vals = stockmap.values();
  Integer[] result = new Integer[vals.size()];
  int i = 0;
  for (Integer val : vals) result[i++] = val;
  return result;
}
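// The manual copy loop in getShareDistribution can also be written with Collection.toArray, which
// sizes the target array itself. stockmap below is a stand-in map, not the field from the snippet.
import java.util.Map;
import java.util.TreeMap;

public class ToArrayDemo {
  public static void main(String[] args) {
    Map<String, Integer> stockmap = new TreeMap<>();
    stockmap.put("ACME", 40);
    stockmap.put("GLOBEX", 60);
    Integer[] result = stockmap.values().toArray(new Integer[0]);
    System.out.println(java.util.Arrays.toString(result)); // [40, 60]
  }
}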
public static final boolean set(final Skill skill, Properties properties) {
  TargetRangeAttribute value = properties.getTargetType();
  int maxcount = properties.getTargetMaxCount();

  switch (value) {
    case AREA:
      int areaCounter = 0;
      final Creature firstTarget = skill.getFirstTarget();
      if (firstTarget == null) {
        return false;
      }

      SortedMap<Double, Creature> sortedMap = new TreeMap<Double, Creature>();
      for (Creature creature : skill.getEffectedList()) {
        sortedMap.put(MathUtil.getDistance(firstTarget, creature), creature);
      }

      skill.getEffectedList().clear();
      for (Creature creature : sortedMap.values()) {
        if (areaCounter >= maxcount) {
          break;
        }
        skill.getEffectedList().add(creature);
        areaCounter++;
      }
  }
  return true;
}
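// Ordering targets by distance with a TreeMap keyed on a Double, as above, keeps only one entry
// per exact distance value (an equal key overwrites the previous mapping). A JDK-only
// illustration of that ordering and of the caveat; the creature names are made up:
import java.util.SortedMap;
import java.util.TreeMap;

public class DistanceOrderDemo {
  public static void main(String[] args) {
    SortedMap<Double, String> byDistance = new TreeMap<>();
    byDistance.put(4.0, "wolf");
    byDistance.put(1.5, "rat");
    byDistance.put(4.0, "boar"); // same distance as "wolf": replaces it
    System.out.println(byDistance.values()); // [rat, boar] -- nearest first, one entry per distance
  }
}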
/**
 * Removes all {@link Cookie cookies} in this HTTP state that have expired by the specified
 * {@link java.util.Date date}.
 *
 * @param date The {@link java.util.Date date} to compare against.
 * @return true if any cookies were purged.
 * @see Cookie#isExpired(java.util.Date)
 * @see #purgeExpiredCookies()
 */
public synchronized boolean purgeExpiredCookies(Date date) {
  LOG.trace("enter HttpState.purgeExpiredCookies(Date)");
  // BEGIN IA/HERITRIX CHANGES
  // PRIOR IMPL & COMPARISON HARNESS LEFT COMMENTED OUT FOR TEMPORARY REFERENCE
  // boolean arrayRemoved = false;
  // Iterator ita = cookiesArrayList.iterator();
  // while (ita.hasNext()) {
  //   if (((Cookie) (ita.next())).isExpired(date)) {
  //     ita.remove();
  //     arrayRemoved = true;
  //   }
  // }
  boolean removed = false;
  Iterator<Cookie> it = cookiesMap.values().iterator();
  while (it.hasNext()) {
    if (it.next().isExpired(date)) {
      it.remove();
      removed = true;
    }
  }
  StoredIterator.close(it);
  // assert removed == arrayRemoved : "discrepancy"
  // END IA/HERITRIX CHANGES
  return removed;
}
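// purgeExpiredCookies removes entries through the iterator of the map's values() view; with the
// standard JDK maps that removes the whole mapping from the backing map. A small sketch of the
// same purge pattern (cookiesMap and StoredIterator belong to the snippet's codebase and are not
// used here):
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;

public class PurgeExpiredDemo {
  public static void main(String[] args) {
    Map<String, Long> expiryByName = new HashMap<>();
    expiryByName.put("session", 100L);
    expiryByName.put("tracking", 300L);
    long now = 200L;

    boolean removed = false;
    Iterator<Long> it = expiryByName.values().iterator();
    while (it.hasNext()) {
      if (it.next() < now) {
        it.remove(); // removes the key and the value, not just the value
        removed = true;
      }
    }
    System.out.println(removed + " " + expiryByName); // true {tracking=300}
  }
}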
private static void determineColumnWidths(List<Integer> columnWidths, List<String> row) {
  SortedMap<Integer, Integer> cws = new TreeMap<Integer, Integer>();
  int columnNo = 0;
  for (Integer width : columnWidths) {
    cws.put(columnNo, width);
    columnNo++;
  }
  columnNo = 0;
  for (String cell : row) {
    int width = get(cell).length();
    int origWidth = 0;
    if (cws.containsKey(columnNo)) {
      origWidth = cws.get(columnNo);
    }
    if (width >= origWidth) {
      cws.put(columnNo, width);
    }
    columnNo++;
  }
  columnWidths.clear();
  columnWidths.addAll(cws.values());
}
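// determineColumnWidths keeps the running maximum cell width per column. Since the width list is
// indexed by column anyway, a similar result can be computed in place without the intermediate
// SortedMap; a sketch under that assumption (it ignores whatever the snippet's get(cell) helper
// does to a cell before measuring it):
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class ColumnWidthsDemo {
  static void widen(List<Integer> columnWidths, List<String> row) {
    for (int col = 0; col < row.size(); col++) {
      int width = row.get(col).length();
      if (col < columnWidths.size()) {
        columnWidths.set(col, Math.max(columnWidths.get(col), width));
      } else {
        columnWidths.add(width); // row has more cells than known columns
      }
    }
  }

  public static void main(String[] args) {
    List<Integer> widths = new ArrayList<>(Arrays.asList(2, 5));
    widen(widths, Arrays.asList("name", "id", "comment"));
    System.out.println(widths); // [4, 5, 7]
  }
}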
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
  builder.startObject(name()).field(Fields.TYPE, CONTENT_TYPE);
  if (indexAnalyzer.name().equals(searchAnalyzer.name())) {
    builder.field(Fields.ANALYZER, indexAnalyzer.name());
  } else {
    builder
        .field(Fields.INDEX_ANALYZER.getPreferredName(), indexAnalyzer.name())
        .field(Fields.SEARCH_ANALYZER.getPreferredName(), searchAnalyzer.name());
  }
  builder.field(Fields.PAYLOADS, this.payloads);
  builder.field(Fields.PRESERVE_SEPARATORS.getPreferredName(), this.preserveSeparators);
  builder.field(
      Fields.PRESERVE_POSITION_INCREMENTS.getPreferredName(), this.preservePositionIncrements);
  builder.field(Fields.MAX_INPUT_LENGTH.getPreferredName(), this.maxInputLength);
  multiFields.toXContent(builder, params);
  if (!contextMapping.isEmpty()) {
    builder.startObject(Fields.CONTEXT);
    for (ContextMapping mapping : contextMapping.values()) {
      builder.value(mapping);
    }
    builder.endObject();
  }
  return builder.endObject();
}
/**
 * Saves the feature divide model settings to the .dsm file.
 *
 * @throws MaltChainedException
 */
protected void save() throws MaltChainedException {
  try {
    final BufferedWriter out =
        new BufferedWriter(
            getGuide()
                .getConfiguration()
                .getConfigurationDir()
                .getOutputStreamWriter(getModelName() + ".dsm"));
    out.write(masterModel.getIndex() + "\t" + masterModel.getFrequency() + "\n");
    if (divideModels != null) {
      for (AtomicModel divideModel : divideModels.values()) {
        out.write(divideModel.getIndex() + "\t" + divideModel.getFrequency() + "\n");
      }
    }
    out.close();
  } catch (IOException e) {
    throw new GuideException(
        "Could not write to the guide model settings file '"
            + getModelName()
            + ".dsm"
            + "', when saving the guide model settings to file. ",
        e);
  }
}
@Override
public void process(Node externs, Node root) {
  assignmentLog = new StringBuilder();

  // Do variable reference counting.
  NodeTraversal.traverse(compiler, externs, new ProcessVars(true));
  NodeTraversal.traverse(compiler, root, new ProcessVars(false));

  // Make sure that new names don't overlap with extern names.
  reservedNames.addAll(externNames);

  // Rename vars, sorted by frequency of occurrence to minimize code size.
  SortedSet<Assignment> varsByFrequency = new TreeSet<Assignment>(FREQUENCY_COMPARATOR);
  varsByFrequency.addAll(assignments.values());

  if (shouldShadow) {
    new ShadowVariables(compiler, assignments, varsByFrequency, pseudoNameMap)
        .process(externs, root);
  }

  // First try to reuse names from an earlier compilation.
  if (prevUsedRenameMap != null) {
    reusePreviouslyUsedVariableMap();
  }

  // Assign names, sorted by descending frequency to minimize code size.
  assignNames(varsByFrequency);

  boolean changed = false;

  // Rename the globals!
  for (Node n : globalNameNodes) {
    String newName = getNewGlobalName(n);
    // Note: if newName is null, then oldName is an extern.
    if (newName != null) {
      n.setString(newName);
      changed = true;
    }
  }

  // Rename the locals!
  int count = 0;
  for (Node n : localNameNodes) {
    String newName = getNewLocalName(n);
    if (newName != null) {
      n.setString(newName);
      changed = true;
    }
    count++;
  }

  if (changed) {
    compiler.reportCodeChange();
  }

  // Lastly, write the name assignments to the debug log.
  compiler.addToDebugLog("JS var assignments:\n" + assignmentLog);
  assignmentLog = null;
}
/** Returns a map containing a copy of the tags in this list. */
@Override
public Map<String, String> asMap() {
  Map<String, String> stringMap = new HashMap<String, String>(size());
  for (Tag t : tagSortedMap.values()) {
    stringMap.put(t.getKey(), t.getValue());
  }
  return stringMap;
}
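// asMap copies into a HashMap, so the sorted order of tagSortedMap is not visible in the copy.
// If callers needed that order, a LinkedHashMap would preserve it; a hypothetical sketch (the tag
// keys and values below are stand-ins, not the Tag type from the snippet):
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.TreeMap;

public class OrderedCopyDemo {
  public static void main(String[] args) {
    Map<String, String> tagSortedMap = new TreeMap<>();
    tagSortedMap.put("env", "prod");
    tagSortedMap.put("app", "billing");
    Map<String, String> copy = new LinkedHashMap<>(tagSortedMap); // keeps the sorted iteration order
    System.out.println(copy); // {app=billing, env=prod}
  }
}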
public EntityKnowledge getEntity(int id) {
  for (SortedMap<Integer, EntityKnowledge> map : allies.values()) {
    for (EntityKnowledge ke : map.values()) {
      if (ke.getID() == id) {
        return ke;
      }
    }
  }
  for (SortedMap<Integer, EntityKnowledge> map : ennemies.values()) {
    for (EntityKnowledge ke : map.values()) {
      if (ke.getID() == id) {
        return ke;
      }
    }
  }
  return null;
}
public int mapSchedullerId(long msgId) {
  for (SchedullerPackage schedullerPackage : messages.values().toArray(new SchedullerPackage[0])) {
    if (schedullerPackage.generatedMessageIds.contains(msgId)) {
      return schedullerPackage.id;
    }
  }
  return 0;
}
public Map<String, SuiteData> getSuitesWithFailureMessages() {
  Map<String, SuiteData> map = new TreeMap<String, SuiteData>();
  for (SuiteData s : suiteDataMap.values()) {
    if (s.getFailureMessages() != null) {
      map.put(s.getName(), s);
    }
  }
  return map;
}
/**
 * Prepares this ServiceInfoList for being saved to a file by copying the values from the map into
 * a collection. The collection holds only encrypted data, the map only decrypted data. The
 * conversion happens when the collection is filled from the map on saving, and when the map is
 * filled from the collection on reading.
 */
public void encrypt(EncryptionHelper encryptionHelper) {
  verifier = String.valueOf(services.hashCode()); // used to verify correctness after reading back in
  encryptedServices = new ArrayList<ServiceInfo>();
  if (services != null) {
    for (ServiceInfo si : services.values()) {
      encryptedServices.add(encrypt(encryptionHelper, si));
    }
  }
}
/** @return the tab-separated list of column header strings. */
public SplitList<String> getHeaderList() {
  SplitList<String> headerList = new SplitList<String>(TAB);
  for (MZTabColumn mzTabColumn : columnMapping.values()) {
    headerList.add(mzTabColumn.getHeader());
  }
  return headerList;
}
@Override
public List<ITEM> findBetween(KEY start, KEY end) {
  start = getKey(start);
  end = getKey(end);
  SortedMap<KEY, ITEM> keyMultiValueSortedMap = this.navigableMap.subMap(start, end);
  return new ArrayList<>(keyMultiValueSortedMap.values());
}
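// findBetween uses the two-argument SortedMap.subMap, which is half-open: the start key is
// included, the end key is not. A TreeMap demo of that boundary behaviour:
import java.util.TreeMap;

public class SubMapDemo {
  public static void main(String[] args) {
    TreeMap<Integer, String> map = new TreeMap<>();
    map.put(1, "one");
    map.put(2, "two");
    map.put(3, "three");
    System.out.println(map.subMap(1, 3).values());             // [one, two]        -- 3 is excluded
    System.out.println(map.subMap(1, true, 3, true).values()); // [one, two, three] -- both ends included
  }
}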
@Override
public void phaseEnd() {
  // End of PlayerOrderPhase,
  // TODO:
  Game game = context.room.getGame();
  // Extract new player order.
  game.setPlayerOrder(rolls.values());
}
@Test
public void testAllRequestsAreEqual() {
  SortedMap<Long, CoapMessage> receivedMessages = testEndpoint.getReceivedCoapMessages();
  CoapMessage firstMessage = receivedMessages.get(receivedMessages.firstKey());

  for (CoapMessage message : receivedMessages.values()) {
    assertEquals("Received requests did not equal.", firstMessage, message);
  }
}
// Note: this Javadoc is copied from javax.management.Descriptor
// due to 6369229.
@Override
public synchronized int hashCode() {
  final int size = descriptorMap.size();
  // descriptorMap is sorted with a comparator that ignores cases.
  //
  return Util.hashCode(
      descriptorMap.keySet().toArray(new String[size]),
      descriptorMap.values().toArray(new Object[size]));
}