/**
 * Provides a "diff report" of how the two sets are similar and how they are different, comparing
 * the entries by some aspect.
 *
 * <p>The transformer is used to generate the value to use to compare the entries by. That is, the
 * entries are not compared by equals but by an attribute or characteristic.
 *
 * <p>The transformer is expected to produce a unique value for each entry in a single set.
 * Behaviour is undefined if this condition is not met.
 *
 * @param left The set on the "left" side of the comparison.
 * @param right The set on the "right" side of the comparison.
 * @param compareBy Provides the value to compare entries from either side by.
 * @param <T> The type of the entry objects
 * @return A representation of the difference
 */
public static <T> SetDiff<T> diffSetsBy(
    Set<? extends T> left, Set<? extends T> right, Transformer<?, T> compareBy) {
  if (left == null) {
    throw new NullPointerException("'left' set is null");
  }
  if (right == null) {
    throw new NullPointerException("'right' set is null");
  }

  SetDiff<T> setDiff = new SetDiff<T>();

  Map<Object, T> indexedLeft = collectMap(left, compareBy);
  Map<Object, T> indexedRight = collectMap(right, compareBy);

  for (Map.Entry<Object, T> leftEntry : indexedLeft.entrySet()) {
    T rightValue = indexedRight.remove(leftEntry.getKey());
    if (rightValue == null) {
      setDiff.leftOnly.add(leftEntry.getValue());
    } else {
      Pair<T, T> pair = Pair.of(leftEntry.getValue(), rightValue);
      setDiff.common.add(pair);
    }
  }

  for (T rightValue : indexedRight.values()) {
    setDiff.rightOnly.add(rightValue);
  }

  return setDiff;
}
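A minimal usage sketch (not from the original codebase) may help show what diffSetsBy returns. It assumes Transformer is a single-method interface of the form OUT transform(IN in), so a lambda can stand in for it, and that SetDiff exposes the leftOnly, rightOnly and common collections populated above.

// Hypothetical example: diff two sets of "id:name" strings by their id prefix.
Set<String> left = new HashSet<>(Arrays.asList("1:alice", "2:bob"));
Set<String> right = new HashSet<>(Arrays.asList("2:bobby", "3:carol"));

SetDiff<String> diff = diffSetsBy(left, right, s -> s.substring(0, s.indexOf(':')));

// diff.leftOnly  -> [ "1:alice" ]             (only present on the left)
// diff.rightOnly -> [ "3:carol" ]             (only present on the right)
// diff.common    -> [ ("2:bob", "2:bobby") ]  (same compare-by value on both sides)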
public void laplace() {
  double alpha = 0.5;
  HashMap<Vertex, Vector3> centerList = new HashMap<>();

  // For every vertex, compute the centroid of its neighbours.
  for (Vertex v : vertexList) {
    Vector3 sum = new Vector3(0, 0, 0);
    ArrayList<Vertex> neighboursVertexes = GetNeighboursVertexes(v);
    for (Vertex neighbour : neighboursVertexes) {
      sum = sum.add(neighbour.getPosition());
    }
    // Average of the neighbour positions (the Laplacian centroid).
    sum = sum.devideValueByVector(neighboursVertexes.size());
    centerList.put(v, sum);
  }

  // Blend each vertex towards its neighbour centroid: p' = alpha * p + (1 - alpha) * centroid.
  for (Map.Entry<Vertex, Vector3> entry : centerList.entrySet()) {
    Vector3 ap = entry.getKey().getPosition().multiply(alpha);
    Vector3 ac = entry.getValue().multiply(1 - alpha);
    Vector3 newPosition = new Vector3(ap).add(ac);
    entry.setValue(newPosition);
  }

  // Rebuild the vertex list with the smoothed positions.
  vertexList = new ArrayList<>();
  for (Map.Entry<Vertex, Vector3> entry : centerList.entrySet()) {
    Vertex vertex = new Vertex(entry.getValue());
    vertex.setHalfEgde(entry.getKey().getHalfEdge());
    vertex.setColor(entry.getKey().getColor());
    vertexList.add(vertex);
  }

  triangulatedMesh.computeTriangleNormals();
  triangulatedMesh.computeVertexNormals();
}
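For reference, the blend applied above is p' = alpha * p + (1 - alpha) * c, where c is the centroid of the vertex's neighbours. A self-contained sketch of the same update on plain 3-component arrays, independent of the Vector3/Vertex types used above (assumes java.util.List is imported):

// Standalone sketch of one Laplacian smoothing step for a single point.
// 'position' is the current vertex position, 'neighbours' holds the positions of its
// adjacent vertices, and 'alpha' is the fraction of the original position to keep.
static double[] laplaceStep(double[] position, List<double[]> neighbours, double alpha) {
  double[] centroid = new double[3];
  for (double[] n : neighbours) {
    for (int i = 0; i < 3; i++) {
      centroid[i] += n[i] / neighbours.size();
    }
  }
  double[] smoothed = new double[3];
  for (int i = 0; i < 3; i++) {
    smoothed[i] = alpha * position[i] + (1 - alpha) * centroid[i];
  }
  return smoothed;
}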
public void do_alarms() {
  // Every entry may be re-added immediately after its method execution, so it's safe
  // to iterate over a copy of the hashmap.
  HashMap<String, Integer> local_alarm = new HashMap<>(alarm);

  // Iterate through the copy.
  for (Map.Entry<String, Integer> a : local_alarm.entrySet()) {
    if (a.getValue() <= 0) {
      // Remove the executed alarm.
      alarm.remove(a.getKey());
      // Execute the alarm method via reflection.
      Method method;
      //noinspection TryWithIdenticalCatches
      try {
        method = this.getClass().getMethod("alarm_" + a.getKey());
        method.invoke(this);
      } catch (NoSuchMethodException e) {
        e.printStackTrace();
      } catch (IllegalAccessException e) {
        e.printStackTrace();
      } catch (InvocationTargetException e) {
        e.printStackTrace();
      }
    } else {
      // Decrease the alarm timer.
      alarm.put(a.getKey(), a.getValue() - 1);
    }
  }
}
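A hypothetical companion sketch (names invented for illustration, not taken from the original class) of how an alarm might be scheduled and handled: do_alarms() counts the stored value down once per call and, when it reaches zero, reflectively invokes the zero-argument method named alarm_<name>.

// Hypothetical usage sketch: schedule a countdown under a name.
public void setAlarm(String name, int ticks) {
  alarm.put(name, ticks);
}

// Invoked by do_alarms() via reflection once the "blink" counter reaches zero.
// The handler may re-arm itself by scheduling the alarm again.
public void alarm_blink() {
  cursorVisible = !cursorVisible; // hypothetical field, for illustration only
  setAlarm("blink", 30);
}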
Entry encode(final T o, final String parentDN) throws LDAPPersistException {
  // Get the attributes that should be included in the entry.
  final LinkedHashMap<String, Attribute> attrMap = new LinkedHashMap<String, Attribute>();
  attrMap.put("objectClass", objectClassAttribute);

  for (final Map.Entry<String, FieldInfo> e : fieldMap.entrySet()) {
    final FieldInfo i = e.getValue();
    if (!i.includeInAdd()) {
      continue;
    }

    final Attribute a = i.encode(o, false);
    if (a != null) {
      attrMap.put(e.getKey(), a);
    }
  }

  for (final Map.Entry<String, GetterInfo> e : getterMap.entrySet()) {
    final GetterInfo i = e.getValue();
    if (!i.includeInAdd()) {
      continue;
    }

    final Attribute a = i.encode(o);
    if (a != null) {
      attrMap.put(e.getKey(), a);
    }
  }

  final String dn = constructDN(o, parentDN, attrMap);
  final Entry entry = new Entry(dn, attrMap.values());

  if (postEncodeMethod != null) {
    try {
      postEncodeMethod.invoke(o, entry);
    } catch (Throwable t) {
      debugException(t);

      if (t instanceof InvocationTargetException) {
        t = ((InvocationTargetException) t).getTargetException();
      }

      throw new LDAPPersistException(
          ERR_OBJECT_HANDLER_ERROR_INVOKING_POST_ENCODE_METHOD.get(
              postEncodeMethod.getName(), type.getName(), getExceptionMessage(t)),
          t);
    }
  }

  setDNAndEntryFields(o, entry);

  if (superclassHandler != null) {
    final Entry e = superclassHandler.encode(o, parentDN);
    for (final Attribute a : e.getAttributes()) {
      entry.addAttribute(a);
    }
  }

  return entry;
}
/**
 * Finds a suitable analyzer class for a magic signature.
 *
 * @param signature the magic signature to look up
 * @return the analyzer factory to use
 */
private static FileAnalyzerFactory find(byte[] signature) throws IOException {
  // XXX this assumes ISO-8859-1 encoding (and should work in most cases
  // for US-ASCII, UTF-8 and other ISO-8859-* encodings, but not always),
  // we should try to be smarter than this...
  char[] chars = new char[signature.length > 8 ? 8 : signature.length];
  for (int i = 0; i < chars.length; i++) {
    chars[i] = (char) (0xFF & signature[i]);
  }
  String sig = new String(chars);

  FileAnalyzerFactory a = magics.get(sig);
  if (a == null) {
    String sigWithoutBOM = stripBOM(signature);
    for (Map.Entry<String, FileAnalyzerFactory> entry : magics.entrySet()) {
      if (sig.startsWith(entry.getKey())) {
        return entry.getValue();
      }
      // See if text files have the magic sequence if we remove the
      // byte-order marker.
      if (sigWithoutBOM != null
          && entry.getValue().getGenre() == Genre.PLAIN
          && sigWithoutBOM.startsWith(entry.getKey())) {
        return entry.getValue();
      }
    }
  }
  return a;
}
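stripBOM(...) is referenced above but not shown. A minimal sketch of what such a helper could look like, under the assumption that it returns the signature as ISO-8859-1 characters with a leading UTF-8 or UTF-16 byte-order mark removed, or null when no BOM is present (an illustration, not the project's actual implementation):

// Assumption: render the signature bytes after a UTF-8/UTF-16 BOM as ISO-8859-1 chars,
// or return null if the signature does not start with a BOM.
private static String stripBOM(byte[] sig) {
  int skip = 0;
  if (sig.length >= 3
      && (sig[0] & 0xFF) == 0xEF && (sig[1] & 0xFF) == 0xBB && (sig[2] & 0xFF) == 0xBF) {
    skip = 3; // UTF-8 BOM
  } else if (sig.length >= 2
      && (((sig[0] & 0xFF) == 0xFE && (sig[1] & 0xFF) == 0xFF)
          || ((sig[0] & 0xFF) == 0xFF && (sig[1] & 0xFF) == 0xFE))) {
    skip = 2; // UTF-16 BE or LE BOM
  }
  if (skip == 0) {
    return null;
  }
  char[] chars = new char[sig.length - skip];
  for (int i = 0; i < chars.length; i++) {
    chars[i] = (char) (0xFF & sig[skip + i]);
  }
  return new String(chars);
}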
private void updateProjectCache() {
  // Ensure we have a cache at all:
  initProjectCache();
  // Loop through project path preference keys:
  for (Map.Entry<String, ?> entry : preferences.getAll().entrySet()) {
    if (entry.getKey().startsWith(PREF_PROJECT_PATH_PREFIX)
        && entry.getKey().endsWith(PREF_PROJECT_PATH_POSTFIX)) {
      int projectID = getProjectID(entry.getKey());
      int projectFingerPrint = getProjectFingerPrint(entry.getKey());
      if (getCachedProject(projectID, projectFingerPrint) == null) {
        // Parse the project if it is not already in the cache:
        Project p = ProjectLoader.ParseProject(entry.getValue().toString());
        if (p != null) {
          if (p.getFingerPrint() != projectFingerPrint) {
            Log.w(
                TAG,
                "XML finger print of project "
                    + p.toString()
                    + " has changed, possibly the "
                    + ProjectLoader.PROJECT_FILE
                    + " file (located in "
                    + entry.getValue().toString()
                    + ") was manually edited!");
            // Remove old pref key:
            removeProjectPathPrefKey(projectID, projectFingerPrint);
            // Add new pref key:
            storeProjectPathPrefKey(p);
          }
          // Cache the project object:
          cacheProject(p);
        }
      }
    }
  }
}
/**
 * Moves the operators from the given process into this one, keeping all connections intact.
 * TODO: Test more rigorously. Do we register/unregister everything correctly?
 *
 * @return the number of ports the connections of which could not be restored
 */
public int stealOperatorsFrom(ExecutionUnit otherUnit) {
  int failedReconnects = 0;

  // Remember source and sink connections so we can reconnect them later.
  Map<String, InputPort> sourceMap = new HashMap<String, InputPort>();
  Map<String, OutputPort> sinkMap = new HashMap<String, OutputPort>();
  for (OutputPort source : otherUnit.getInnerSources().getAllPorts()) {
    if (source.isConnected()) {
      sourceMap.put(source.getName(), source.getDestination());
    }
  }
  otherUnit.getInnerSources().disconnectAll();
  for (InputPort sink : otherUnit.getInnerSinks().getAllPorts()) {
    if (sink.isConnected()) {
      sinkMap.put(sink.getName(), sink.getSource());
    }
  }
  otherUnit.getInnerSinks().disconnectAll();

  // Move operators.
  Iterator<Operator> i = otherUnit.operators.iterator();
  while (i.hasNext()) {
    Operator operator = i.next();
    i.remove();
    otherUnit.unregister(operator);
    Process otherProcess = operator.getProcess();
    if (otherProcess != null) {
      operator.unregisterOperator(otherProcess);
    }
    this.operators.add(operator);
    operator.setEnclosingProcess(null);
    // operator.unregisterOperator(operator.getProcess());
    registerOperator(operator, true);
    // operator.registerOperator(this.getEnclosingOperator().getProcess());
  }

  // Rewire sources and sinks.
  for (Map.Entry<String, InputPort> entry : sourceMap.entrySet()) {
    OutputPort mySource = getInnerSources().getPortByName(entry.getKey());
    if (mySource != null) {
      mySource.connectTo(entry.getValue());
    } else {
      failedReconnects++;
    }
  }
  getInnerSources().unlockPortExtenders();
  for (Map.Entry<String, OutputPort> entry : sinkMap.entrySet()) {
    InputPort mySink = getInnerSinks().getPortByName(entry.getKey());
    if (mySink != null) {
      entry.getValue().connectTo(mySink);
    } else {
      failedReconnects++;
    }
  }
  getInnerSinks().unlockPortExtenders();

  fireUpdate(this);
  return failedReconnects;
}
private Path[] createFiles() throws IOException {
  int numberOfStreams = Math.max(2, rnd.nextInt(10));
  mergeFactor = Math.max(mergeFactor, numberOfStreams);
  LOG.info("No of streams : " + numberOfStreams);

  Path[] paths = new Path[numberOfStreams];
  for (int i = 0; i < numberOfStreams; i++) {
    paths[i] = new Path(baseDir, "ifile_" + i + ".out");
    FSDataOutputStream out = fs.create(paths[i]);
    // Write data with RLE.
    IFile.Writer writer = new IFile.Writer(conf, out, keyClass, valClass, null, null, null, true);
    Map<Writable, Writable> data = createData();

    for (Map.Entry<Writable, Writable> entry : data.entrySet()) {
      writer.append(entry.getKey(), entry.getValue());
      originalData.put(entry.getKey(), entry.getValue());
      if (rnd.nextInt() % 2 == 0) {
        for (int j = 0; j < rnd.nextInt(100); j++) {
          // Add some duplicate keys.
          writer.append(entry.getKey(), entry.getValue());
          originalData.put(entry.getKey(), entry.getValue());
        }
      }
    }
    LOG.info("Wrote " + data.size() + " in " + paths[i]);
    data.clear();

    writer.close();
    out.close();
  }
  return paths;
}
final void remove(String name) {
  RenderObjectHandle obj = renderObjects.get(name);
  if (obj == null) {
    UI.printWarning(Module.API, "Unable to remove \"%s\" - object was not defined yet", name);
    return;
  }
  UI.printDetailed(Module.API, "Removing object \"%s\"", name);
  renderObjects.remove(name);
  // Scan through all objects to make sure we don't have any
  // references to the old object still around.
  switch (obj.type) {
    case SHADER:
      Shader s = obj.getShader();
      for (Map.Entry<String, RenderObjectHandle> e : renderObjects.entrySet()) {
        Instance i = e.getValue().getInstance();
        if (i != null) {
          UI.printWarning(
              Module.API, "Removing shader \"%s\" from instance \"%s\"", name, e.getKey());
          i.removeShader(s);
        }
      }
      break;
    case MODIFIER:
      Modifier m = obj.getModifier();
      for (Map.Entry<String, RenderObjectHandle> e : renderObjects.entrySet()) {
        Instance i = e.getValue().getInstance();
        if (i != null) {
          UI.printWarning(
              Module.API, "Removing modifier \"%s\" from instance \"%s\"", name, e.getKey());
          i.removeModifier(m);
        }
      }
      break;
    case GEOMETRY:
      {
        Geometry g = obj.getGeometry();
        for (Map.Entry<String, RenderObjectHandle> e : renderObjects.entrySet()) {
          Instance i = e.getValue().getInstance();
          if (i != null && i.hasGeometry(g)) {
            UI.printWarning(
                Module.API,
                "Removing instance \"%s\" because it referenced geometry \"%s\"",
                e.getKey(),
                name);
            remove(e.getKey());
          }
        }
        break;
      }
    case INSTANCE:
      rebuildInstanceList = true;
      break;
    case LIGHT:
      rebuildLightList = true;
      break;
    default:
      // No dependencies.
      break;
  }
}
private void loadQueries(IndexShard shard) {
  try {
    shard.refresh(new Engine.Refresh("percolator_load_queries").force(true));
    // Maybe add a mode load? This isn't really a write. We need write b/c state=post_recovery
    Engine.Searcher searcher =
        shard.acquireSearcher("percolator_load_queries", IndexShard.Mode.WRITE);
    try {
      Query query =
          new XConstantScoreQuery(
              indexCache
                  .filter()
                  .cache(
                      new TermFilter(
                          new Term(TypeFieldMapper.NAME, PercolatorService.TYPE_NAME))));
      QueriesLoaderCollector queryCollector =
          new QueriesLoaderCollector(
              PercolatorQueriesRegistry.this, logger, mapperService, indexFieldDataService);
      searcher.searcher().search(query, queryCollector);
      Map<HashedBytesRef, Query> queries = queryCollector.queries();
      for (Map.Entry<HashedBytesRef, Query> entry : queries.entrySet()) {
        Query previousQuery = percolateQueries.put(entry.getKey(), entry.getValue());
        shardPercolateService.addedQuery(entry.getKey(), previousQuery, entry.getValue());
      }
    } finally {
      searcher.release();
    }
  } catch (Exception e) {
    throw new PercolatorException(
        shardId.index(), "failed to load queries from percolator index", e);
  }
}
@Override
public boolean mouseMoved(int x, int y, MouseEvent source) {
  if (model.vlm.getAnnotationLocationVisible().length()
      < Configuration.getInt("geneStructureNucleotideWindow")) {
    ShortReadInsertion sri = null;
    for (java.util.Map.Entry<Rectangle, ShortReadInsertion> e :
        render.meta().paintedBlocks.entrySet()) {
      if (e.getKey().contains(x, y)) {
        sri = e.getValue();
        break;
      }
    }
    if (sri != null) {
      if (!tooltip.isVisible()) {
        tooltip.setVisible(true);
      }
      tooltip.set(source, sri);
    } else {
      if (tooltip.isVisible()) {
        tooltip.setVisible(false);
      }
    }
    for (java.util.Map.Entry<Rectangle, SAMRecord> e : render.meta().hitMap.entrySet()) {
      if (e.getKey().contains(x, y)) {
        readinfo.set(source, e.getValue());
      }
    }
  } else {
    if (tooltip.isVisible()) {
      tooltip.setVisible(false);
    }
  }
  return false;
}
public List<MessageExt> takeMessags(final int batchSize) {
  List<MessageExt> result = new ArrayList<MessageExt>(batchSize);
  final long now = System.currentTimeMillis();
  try {
    this.lockTreeMap.writeLock().lockInterruptibly();
    this.lastConsumeTimestamp = now;
    try {
      if (!this.msgTreeMap.isEmpty()) {
        for (int i = 0; i < batchSize; i++) {
          Map.Entry<Long, MessageExt> entry = this.msgTreeMap.pollFirstEntry();
          if (entry != null) {
            result.add(entry.getValue());
            msgTreeMapTemp.put(entry.getKey(), entry.getValue());
          } else {
            break;
          }
        }
      }

      if (result.isEmpty()) {
        consuming = false;
      }
    } finally {
      this.lockTreeMap.writeLock().unlock();
    }
  } catch (InterruptedException e) {
    log.error("take Messages exception", e);
  }

  return result;
}
/**
 * Returns a {@link StoredBlock} representing the last checkpoint before the given block height;
 * typically you would want the checkpoint before the last block the wallet has seen.
 */
public StoredBlock getCheckpointBeforeOrAtHeight(int height) {
  Map.Entry<Long, StoredBlock> highestCheckpointBeforeHeight = null;

  for (Map.Entry<Long, StoredBlock> loop : checkpoints.entrySet()) {
    if (loop.getValue().getHeight() < height) {
      // This checkpoint is before the specified height.
      if (highestCheckpointBeforeHeight == null) {
        highestCheckpointBeforeHeight = loop;
      } else if (highestCheckpointBeforeHeight.getValue().getHeight()
          < loop.getValue().getHeight()) {
        // This entry is later.
        highestCheckpointBeforeHeight = loop;
      }
    }
  }

  if (highestCheckpointBeforeHeight == null) {
    // No checkpoint found, fall back to the genesis block.
    try {
      return new StoredBlock(params.getGenesisBlock(), params.getGenesisBlock().getWork(), 0);
    } catch (VerificationException e) {
      // Should not happen for the genesis block; rethrow rather than fall through to a
      // guaranteed NullPointerException below.
      throw new RuntimeException(e);
    }
  }
  return highestCheckpointBeforeHeight.getValue();
}
protected String preprocessTextResource(
    String publicName,
    String category,
    String defaultName,
    Map<String, String> pairs,
    boolean verbose,
    File publicRoot)
    throws IOException {
  URL u = locateResource(publicName, category, defaultName, verbose, publicRoot);
  InputStream inp = u.openStream();
  if (inp == null) {
    throw new RuntimeException("Jar corrupt? No " + defaultName + " resource!");
  }

  // Read fully into memory.
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  try {
    byte[] buffer = new byte[1024];
    int length;
    while ((length = inp.read(buffer)) != -1) {
      baos.write(buffer, 0, length);
    }
  } finally {
    inp.close();
  }

  // Substitute the replacement pairs.
  String result = new String(baos.toByteArray());
  for (Map.Entry<String, String> e : pairs.entrySet()) {
    if (e.getValue() != null) {
      result = result.replace(e.getKey(), e.getValue());
    }
  }
  return result;
}
public static Map<String, UiConfigImpl> filterPropertyNamesInCriteria(
    Map<String, UiConfigImpl> uiConfigs) {
  Map<String, UiConfigImpl> propertyNamesInCriterion = new LinkedHashMap<String, UiConfigImpl>();
  for (Map.Entry<String, UiConfigImpl> entry : uiConfigs.entrySet()) {
    if (!entry.getValue().isExcludedFromCriteria()
        && !entry.getKey().endsWith("AsString")
        && !CriterionOperator.getSupportedOperators(entry.getValue().getPropertyType())
            .isEmpty()) {
      UiConfigImpl config = entry.getValue();
      Set<String> cssClasses = config.getCssClasses();
      if (cssClasses.contains("date")) {
        config.getCssClasses().clear();
        config.getCssClasses().add("date");
      } else if (cssClasses.contains("datetime")) {
        config.getCssClasses().clear();
        config.getCssClasses().add("datetime");
      } else if (cssClasses.contains("time")) {
        config.getCssClasses().clear();
        config.getCssClasses().add("time");
      } else {
        config.getCssClasses().clear();
      }
      propertyNamesInCriterion.put(entry.getKey(), config);
    }
  }
  return propertyNamesInCriterion;
}
private int sumHardScore(
    Map<CloudComputer, Integer> cpuPowerUsageMap,
    Map<CloudComputer, Integer> memoryUsageMap,
    Map<CloudComputer, Integer> networkBandwidthUsageMap) {
  int hardScore = 0;
  for (Map.Entry<CloudComputer, Integer> usageEntry : cpuPowerUsageMap.entrySet()) {
    CloudComputer computer = usageEntry.getKey();
    int cpuPowerAvailable = computer.getCpuPower() - usageEntry.getValue();
    if (cpuPowerAvailable < 0) {
      hardScore += cpuPowerAvailable;
    }
  }
  for (Map.Entry<CloudComputer, Integer> usageEntry : memoryUsageMap.entrySet()) {
    CloudComputer computer = usageEntry.getKey();
    int memoryAvailable = computer.getMemory() - usageEntry.getValue();
    if (memoryAvailable < 0) {
      hardScore += memoryAvailable;
    }
  }
  for (Map.Entry<CloudComputer, Integer> usageEntry : networkBandwidthUsageMap.entrySet()) {
    CloudComputer computer = usageEntry.getKey();
    int networkBandwidthAvailable = computer.getNetworkBandwidth() - usageEntry.getValue();
    if (networkBandwidthAvailable < 0) {
      hardScore += networkBandwidthAvailable;
    }
  }
  return hardScore;
}
private Map<String, Double> updateV(
    Map<String, Double> vMap, Map<String, Double> fxiyi, Map<String, Double> fxizi) {
  // Add the values from fxiyi.
  Set<Map.Entry<String, Double>> entrySet = fxiyi.entrySet();
  for (Map.Entry<String, Double> entry : entrySet) {
    String key = entry.getKey();
    Double value = entry.getValue();
    if (vMap.containsKey(key)) {
      vMap.put(key, vMap.get(key) + value);
    } else {
      vMap.put(key, value);
    }
  }
  // Subtract the values from fxizi.
  entrySet = fxizi.entrySet();
  for (Map.Entry<String, Double> entry : entrySet) {
    String key = entry.getKey();
    Double value = entry.getValue();
    if (vMap.containsKey(key)) {
      vMap.put(key, vMap.get(key) - value);
    } else {
      vMap.put(key, -1 * value);
    }
  }
  return vMap;
}
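On Java 8 and later, the same add/subtract accumulation can be expressed with Map.merge; a behaviour-preserving sketch:

// Equivalent formulation of updateV using Map.merge.
private Map<String, Double> updateVWithMerge(
    Map<String, Double> vMap, Map<String, Double> fxiyi, Map<String, Double> fxizi) {
  for (Map.Entry<String, Double> entry : fxiyi.entrySet()) {
    vMap.merge(entry.getKey(), entry.getValue(), Double::sum);
  }
  for (Map.Entry<String, Double> entry : fxizi.entrySet()) {
    vMap.merge(entry.getKey(), -entry.getValue(), Double::sum);
  }
  return vMap;
}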
@Override
public void report(
    Map<String, Counter> counters,
    Map<String, Histogram> histograms,
    Map<String, Timer> timers) {
  try {
    connect();
    long timestamp = System.currentTimeMillis() / 1000;

    for (Map.Entry<String, Counter> entry : counters.entrySet()) {
      reportCounter(entry.getKey(), entry.getValue().snapshot, timestamp);
    }

    for (Map.Entry<String, Histogram> entry : histograms.entrySet()) {
      reportHistogram(entry.getKey(), entry.getValue().snapshot, timestamp);
    }

    for (Map.Entry<String, Timer> entry : timers.entrySet()) {
      reportTimer(entry.getKey(), entry.getValue().snapshot, timestamp);
    }

    flush();
  } catch (IOException e) {
    logger.warn("Unable to report to Graphite", e);
  } finally {
    try {
      close();
    } catch (IOException e) {
      logger.warn("Error disconnecting from Graphite", e);
    }
  }
}
protected Map handleAttributes(Map attributes) {
  Map undo = super.handleAttributes(attributes);
  if (attributes != null) {
    if (undo == null) {
      undo = new Hashtable();
    }
    Iterator it = attributes.entrySet().iterator();
    while (it.hasNext()) {
      Map.Entry entry = (Map.Entry) it.next();
      Object cell = entry.getKey();
      Map properties = (Map) entry.getValue();
      if (cell instanceof JGraphBusinessObject) {
        JGraphBusinessObject bo = (JGraphBusinessObject) cell;
        Map deltaOld = new Hashtable();
        Iterator it2 = properties.entrySet().iterator();
        while (it2.hasNext()) {
          Map.Entry property = (Map.Entry) it2.next();
          Object key = property.getKey();
          Object oldValue = bo.putProperty(key, property.getValue());
          if (oldValue != null) {
            deltaOld.put(key, oldValue);
          }
        }
        undo.put(cell, deltaOld);
      }
    }
  }
  return undo;
}
/**
 * The assumption is that this method will be invoked only by cache.putAll and cache.removeAll
 * methods.
 */
@Override
public Map<K, ValueHolder<V>> bulkCompute(
    final Set<? extends K> keys,
    final Function<
            Iterable<? extends Map.Entry<? extends K, ? extends V>>,
            Iterable<? extends Map.Entry<? extends K, ? extends V>>>
        remappingFunction)
    throws StoreAccessException {
  Map<K, ValueHolder<V>> valueHolderMap = new HashMap<K, ValueHolder<V>>();
  if (remappingFunction instanceof Ehcache.PutAllFunction) {
    Ehcache.PutAllFunction<K, V> putAllFunction = (Ehcache.PutAllFunction) remappingFunction;
    Map<K, V> entriesToRemap = putAllFunction.getEntriesToRemap();
    for (Map.Entry<K, V> entry : entriesToRemap.entrySet()) {
      PutStatus putStatus = silentPut(entry.getKey(), entry.getValue());
      if (putStatus == PutStatus.PUT || putStatus == PutStatus.UPDATE) {
        putAllFunction.getActualPutCount().incrementAndGet();
        valueHolderMap.put(entry.getKey(), new ClusteredValueHolder<V>(entry.getValue()));
      }
    }
  } else if (remappingFunction instanceof Ehcache.RemoveAllFunction) {
    Ehcache.RemoveAllFunction<K, V> removeAllFunction =
        (Ehcache.RemoveAllFunction) remappingFunction;
    for (K key : keys) {
      boolean removed = silentRemove(key);
      if (removed) {
        removeAllFunction.getActualRemoveCount().incrementAndGet();
      }
    }
  } else {
    throw new UnsupportedOperationException(
        "This compute method is not yet capable of handling generic computation functions");
  }
  return valueHolderMap;
}
public String[] getAvailableLanguageIds() {
  Set<String> availableLanguageIds = new TreeSet<String>();

  Map<Locale, String> nameMap = getNameMap();
  for (Map.Entry<Locale, String> entry : nameMap.entrySet()) {
    Locale locale = entry.getKey();
    String value = entry.getValue();
    if (Validator.isNotNull(value)) {
      availableLanguageIds.add(LocaleUtil.toLanguageId(locale));
    }
  }

  Map<Locale, String> descriptionMap = getDescriptionMap();
  for (Map.Entry<Locale, String> entry : descriptionMap.entrySet()) {
    Locale locale = entry.getKey();
    String value = entry.getValue();
    if (Validator.isNotNull(value)) {
      availableLanguageIds.add(LocaleUtil.toLanguageId(locale));
    }
  }

  return availableLanguageIds.toArray(new String[availableLanguageIds.size()]);
}
@Override
protected boolean runTest(DatabaseRegistryEntry dbre) {
  boolean result = true;
  for (Map.Entry<String, String[]> method_tags : getMandatoryTags().entrySet()) {
    Vector<String> quoted_tags = new Vector<String>();
    for (String t : method_tags.getValue()) {
      quoted_tags.add(String.format("'%s'", t));
    }
    List<String> mlsss =
        getTemplate(dbre)
            .queryForDefaultObjectList(
                String.format(
                    QUERY,
                    StringUtils.join(quoted_tags, ","),
                    method_tags.getKey(),
                    method_tags.getValue().length),
                String.class);
    if (mlsss.size() > 0) {
      ReportManager.problem(
          this,
          dbre.getConnection(),
          "MLSSs for "
              + method_tags.getKey()
              + " found with no statistics: "
              + StringUtils.join(mlsss, ","));
      result = false;
    } else {
      ReportManager.correct(this, dbre.getConnection(), "PASSED ");
    }
  }
  return result;
}
public static void printMap(Map mp) {
  Iterator it = mp.entrySet().iterator();
  String symbol = "";
  double highest = 0;
  while (it.hasNext()) {
    Map.Entry pair = (Map.Entry) it.next();
    System.out.println(pair.getKey() + " = " + pair.getValue());
    double std = std((ArrayList<String>) pair.getValue());
    if (highest < std) {
      highest = std;
      symbol = (String) pair.getKey();
    }
    it.remove(); // avoids a ConcurrentModificationException
  }
  System.out.println("Symbol:" + symbol + ": High:" + highest);
}
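The std(...) helper used above is not shown. A sketch of what it might look like, assuming it computes the sample standard deviation of a list of numeric strings (hypothetical; every entry is assumed to parse as a double and the list to hold at least two values):

// Hypothetical sketch of the std helper: sample standard deviation of numeric strings.
private static double std(ArrayList<String> values) {
  double sum = 0;
  for (String v : values) {
    sum += Double.parseDouble(v);
  }
  double mean = sum / values.size();
  double sqDiff = 0;
  for (String v : values) {
    double d = Double.parseDouble(v) - mean;
    sqDiff += d * d;
  }
  return Math.sqrt(sqDiff / (values.size() - 1));
}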
@Test
public void testIteratorWithDeprecatedKeys() {
  Configuration conf = new Configuration();
  Configuration.addDeprecation("dK", new String[] {"nK"});
  conf.set("k", "v");
  conf.set("dK", "V");
  assertEquals("V", conf.get("dK"));
  assertEquals("V", conf.get("nK"));
  conf.set("nK", "VV");
  assertEquals("VV", conf.get("dK"));
  assertEquals("VV", conf.get("nK"));
  boolean kFound = false;
  boolean dKFound = false;
  boolean nKFound = false;
  for (Map.Entry<String, String> entry : conf) {
    if (entry.getKey().equals("k")) {
      assertEquals("v", entry.getValue());
      kFound = true;
    }
    if (entry.getKey().equals("dK")) {
      assertEquals("VV", entry.getValue());
      dKFound = true;
    }
    if (entry.getKey().equals("nK")) {
      assertEquals("VV", entry.getValue());
      nKFound = true;
    }
  }
  assertTrue("regular Key not found", kFound);
  assertTrue("deprecated Key not found", dKFound);
  assertTrue("new Key not found", nKFound);
}
@Override
public List<Category> getCategoryByParameters(Map<String, Object> parameters) {
  CriteriaBuilder cb = em.getCriteriaBuilder();
  CriteriaQuery<Category> cq = cb.createQuery(Category.class);
  Root<Category> r = cq.from(Category.class);
  Join<RssUser, Category> ru = r.join("rssUserList", JoinType.LEFT);
  Join<RssUser, Rss> rss = ru.join("rss", JoinType.LEFT);

  Predicate p = cb.conjunction();
  for (Map.Entry<String, Object> param : parameters.entrySet()) {
    if (param.getKey().equals("rssId")) {
      p = cb.and(p, cb.equal(rss.get("rssId"), param.getValue()));
    } else if (param.getKey().equals("guid")) {
      p = cb.and(p, cb.equal(r.get(param.getKey()), param.getValue()));
    }
  }
  p = cb.or(p, cb.equal(r.get("categoryId"), DEFAULT_CATEGORY_ID));

  cq.distinct(true);
  cq.multiselect().where(p);
  TypedQuery<Category> typedQuery = em.createQuery(cq);
  List<Category> resultList = typedQuery.getResultList();
  return resultList;
}
public static void toXContent(
    IndexTemplateMetaData indexTemplateMetaData, XContentBuilder builder, ToXContent.Params params)
    throws IOException {
  builder.startObject(indexTemplateMetaData.name(), XContentBuilder.FieldCaseConversion.NONE);

  builder.field("order", indexTemplateMetaData.order());
  builder.field("template", indexTemplateMetaData.template());

  builder.startObject("settings");
  for (Map.Entry<String, String> entry : indexTemplateMetaData.settings().getAsMap().entrySet()) {
    builder.field(entry.getKey(), entry.getValue());
  }
  builder.endObject();

  builder.startArray("mappings");
  for (Map.Entry<String, CompressedString> entry : indexTemplateMetaData.mappings().entrySet()) {
    byte[] data = entry.getValue().uncompressed();
    XContentParser parser = XContentFactory.xContent(data).createParser(data);
    Map<String, Object> mapping = parser.map();
    parser.close();
    builder.map(mapping);
  }
  builder.endArray();

  builder.endObject();
}
private static Node createVlanNode(String nodeLabel, Pane canvas, ContextMenu contextMenu) {
  // Count the total number of interfaces across all HUBs; a HUB with no
  // interfaces still contributes one slot.
  int infNumber = 0;
  if (Data.hubMap.size() == 0) {
    infNumber = 1;
  } else {
    for (Map.Entry<String, HUB> entry : Data.hubMap.entrySet()) {
      if (entry.getValue().getInfs().size() == 0) {
        infNumber += 1;
      } else {
        infNumber += entry.getValue().getInfs().size();
      }
    }
  }

  // 100 px per interface plus 50 px spacing between interfaces.
  int vlanHeight = (infNumber * 100) + ((infNumber - 1) * 50);

  Rectangle node = new Rectangle(Data.nodeWidth, vlanHeight);
  node.setFill(Color.ORANGE);
  Label lnodeName = new Label(nodeLabel);

  StackPane nodeContainer = new StackPane();
  nodeContainer.getChildren().addAll(node, lnodeName);
  nodeContainer.relocate(Data.vlanstartPosX, Data.vlanstartPosY);
  return nodeContainer;
}
@Override
public void handleRequest(HttpServerExchange exchange) throws Exception {
  final String incomingSessionId = servletContext.getSessionConfig().findSessionId(exchange);
  if (incomingSessionId == null || !data.containsKey(incomingSessionId)) {
    next.handleRequest(exchange);
    return;
  }

  // We have some old data.
  PersistentSession result = data.remove(incomingSessionId);
  if (result != null) {
    long time = System.currentTimeMillis();
    if (time < result.getExpiration().getTime()) {
      final HttpSessionImpl session = servletContext.getSession(exchange, true);
      final HttpSessionEvent event = new HttpSessionEvent(session);
      for (Map.Entry<String, Object> entry : result.getSessionData().entrySet()) {
        if (entry.getValue() instanceof HttpSessionActivationListener) {
          ((HttpSessionActivationListener) entry.getValue()).sessionDidActivate(event);
        }
        if (entry.getKey().startsWith(HttpSessionImpl.IO_UNDERTOW)) {
          session.getSession().setAttribute(entry.getKey(), entry.getValue());
        } else {
          session.setAttribute(entry.getKey(), entry.getValue());
        }
      }
    }
  }
  next.handleRequest(exchange);
}
public void map(Map.Entry<String, String> e1, Map<Trait, Object> ret) {
  switch (e1.getKey()) {
    case TS_FIELD:
      int i = e1.getValue().indexOf(".");
      ret.put(
          Trait.TIMESTAMP,
          Long.parseLong(e1.getValue().substring(0, i == -1 ? e1.getValue().length() : i)));
      break;
    /* source IP */
    case ORIG_H_FIELD:
      if (e1.getValue().contains(":")) {
        ret.clear();
        return;
      }
      ret.put(Trait.IPv4_SRC, asIPv4(e1.getValue()));
      break;
    case ORIG_BYTES_FIELD:
      ret.put(Trait.PACKET_SIZE, getIntegerOrDefault(e1.getValue()));
      break;
    case ORIG_P_FIELD:
      ret.put(Trait.PORT_SRC, getIntegerOrDefault(e1.getValue()));
      break;
    /* destination IP */
    case RESP_H_FIELD:
      ret.put(Trait.IPv4_DST, asIPv4(e1.getValue()));
      break;
    case RESP_P_FIELD:
      ret.put(Trait.PORT_DST, getIntegerOrDefault(e1.getValue()));
      break;
    case PROTO_FIELD:
      ret.put(Trait.PROTO, getProtoNumber(e1.getValue()));
      break;
    default:
      // Ignore fields we do not map.
  }
}
/**
 * Prints a usage message to the given stream.
 *
 * @param out The output stream to write to.
 */
public void printUsage(OutputStream out) {
  Formatter formatter = new Formatter(out);
  Set<CommandLineOption> orderedOptions = new TreeSet<CommandLineOption>(new OptionComparator());
  orderedOptions.addAll(optionsByString.values());
  Map<String, String> lines = new LinkedHashMap<String, String>();
  for (CommandLineOption option : orderedOptions) {
    Set<String> orderedOptionStrings = new TreeSet<String>(new OptionStringComparator());
    orderedOptionStrings.addAll(option.getOptions());
    List<String> prefixedStrings = new ArrayList<String>();
    for (String optionString : orderedOptionStrings) {
      if (optionString.length() == 1) {
        prefixedStrings.add("-" + optionString);
      } else {
        prefixedStrings.add("--" + optionString);
      }
    }
    lines.put(GUtil.join(prefixedStrings, ", "), GUtil.elvis(option.getDescription(), ""));
  }
  int max = 0;
  for (String optionStr : lines.keySet()) {
    max = Math.max(max, optionStr.length());
  }
  for (Map.Entry<String, String> entry : lines.entrySet()) {
    if (entry.getValue().length() == 0) {
      formatter.format("%s%n", entry.getKey());
    } else {
      formatter.format("%-" + max + "s %s%n", entry.getKey(), entry.getValue());
    }
  }
  formatter.flush();
}