/**
 * Accumulates per-sample, per-reference-sequence calibration statistics into {@code local}.
 *
 * <p>For every (sequence, read group) combination, the calibrator is queried for stats
 * restricted to that pair, and the results are folded into {@code local} keyed by the
 * sample the read group belongs to.
 *
 * @param calibrator source of calibration stats and covariate lookups
 * @param sequenceLengths sequence name to length; only the key set is used here
 * @param readGroupToSampleId maps each read group id to its sample name
 * @param local accumulator: sample name -> sequence name -> stats (mutated)
 * @param restriction if non-null, only the named sequence is processed
 */
private static void findIndividualPerSequenceCoverage(
    Calibrator calibrator,
    Map<String, Integer> sequenceLengths,
    Map<String, String> readGroupToSampleId,
    final Map<String, HashMap<String, CalibrationStats>> local,
    RegionRestriction restriction) {
  // Covariates translate read-group / sequence names into calibrator index values.
  final Covariate rgCovariate = calibrator.getCovariate(calibrator.getCovariateIndex(CovariateEnum.READGROUP));
  final Covariate seqCovariate = calibrator.getCovariate(calibrator.getCovariateIndex(CovariateEnum.SEQUENCE));
  for (final Map.Entry<String, Integer> entry : sequenceLengths.entrySet()) {
    final String sequenceName = entry.getKey();
    // Honor an optional region restriction: only its sequence is processed.
    if (restriction != null && !sequenceName.equals(restriction.getSequenceName())) {
      continue;
    }
    for (final Map.Entry<String, String> e2 : readGroupToSampleId.entrySet()) {
      final String readGroup = e2.getKey();
      final String sampleName = e2.getValue();
      final int rgValue = rgCovariate.valueOf(readGroup);
      final int seqValue = seqCovariate.valueOf(sequenceName);
      if (rgValue == -1 || seqValue == -1) {
        // NOTE(review): presumably -1 means the covariate value is unknown to the
        // calibrator, so empty stats are recorded — confirm against Covariate.valueOf.
        add(local, sampleName, sequenceName, new CalibrationStats(null));
      } else {
        // Query stats restricted to this (read group, sequence) pair and fold them
        // into the local accumulator via the processor callback.
        final Calibrator.QuerySpec spec = calibrator.initQuery();
        spec.setValue(CovariateEnum.READGROUP, rgValue);
        spec.setValue(CovariateEnum.SEQUENCE, seqValue);
        calibrator.processStats(new LocalStatsProcessor(local, sampleName, sequenceName), spec);
      }
    }
  }
}
/**
 * Reads two numbers from stdin and prints, in lowest terms, the ratio of
 * (divisor pairs of n and m) to (common divisors of n and m).
 */
public static void main(String[] args) {
  Scanner input = new Scanner(System.in);
  long first = input.nextLong();
  long second = input.nextLong();

  Map<Integer, Integer> firstFactors = primeDivisorAndCounts(first);
  Map<Integer, Integer> secondFactors = primeDivisorAndCounts(second);

  // Common divisor count = product over shared primes of (min exponent + 1).
  // A prime missing from the second factorization contributes a factor of 1.
  int commonDivisors = 1;
  for (Map.Entry<Integer, Integer> factor : firstFactors.entrySet()) {
    Integer otherExponent = secondFactors.get(factor.getKey());
    int sharedExponent = (otherExponent == null) ? 0 : Math.min(factor.getValue(), otherExponent);
    commonDivisors *= sharedExponent + 1;
  }

  // Total divisor count of each number = product of (exponent + 1).
  int firstDivisorCount = 1;
  for (int exponent : firstFactors.values()) {
    firstDivisorCount *= exponent + 1;
  }
  int secondDivisorCount = 1;
  for (int exponent : secondFactors.values()) {
    secondDivisorCount *= exponent + 1;
  }

  // Reduce the pair-count : common-divisor ratio to lowest terms before printing.
  int pairCount = firstDivisorCount * secondDivisorCount;
  int reducer = gcd(pairCount, commonDivisors);
  System.out.println(pairCount / reducer + " " + commonDivisors / reducer);
}
/**
 * Deserializes a config-loaded structure (a List of Maps) back into ItemStacks.
 *
 * @param o the raw object loaded from storage; must be a List of Maps
 * @return the reconstructed item stacks, in list order
 * @throws SerializationException if {@code o} is not a List, an element is not a Map,
 *     or ItemStack.deserialize rejects an entry
 */
private ItemStack[] deserial(Object o) throws SerializationException {
  try {
    if (o instanceof List) {
      final List<?> data = (List) o;
      List<ItemStack> items = new ArrayList<ItemStack>(data.size());
      for (Object t : data) {
        if (t instanceof Map) {
          final Map<?, ?> mdata = (Map) t;
          // ItemStack.deserialize requires String keys, so re-key the raw map and
          // convert each value.
          final Map<String, Object> conv = new HashMap<String, Object>(mdata.size());
          for (Map.Entry<?, ?> e : mdata.entrySet()) {
            conv.put(String.valueOf(e.getKey()), convert(e.getValue()));
          }
          // Fixed: this is the deserialization path; the message previously said
          // "Serializing Data", which was misleading when debugging.
          LoggingManager.getInstance()
              .log(LoggingManager.Level.DEBUG, "Deserializing Data: " + conv.entrySet().toString());
          items.add(ItemStack.deserialize(conv));
        } else {
          throw new IllegalArgumentException("Not a Map");
        }
      }
      return items.toArray(new ItemStack[items.size()]);
    }
    throw new IllegalArgumentException("Not a List");
  } catch (IllegalArgumentException ex) {
    // Wrap with the offending object so the caller's error report has context.
    throw new SerializationException(o, ex);
  }
}
/**
 * Encodes the provided object as an LDAP entry beneath {@code parentDN}.
 *
 * <p>Attributes are gathered from field and getter metadata, the DN is constructed,
 * an optional post-encode hook runs, DN/entry fields are written back onto the
 * object, and finally any superclass handler's attributes are merged in.
 *
 * @param o the object to encode
 * @param parentDN DN under which the entry's DN is constructed
 * @return the encoded entry
 * @throws LDAPPersistException if the post-encode method fails
 */
Entry encode(final T o, final String parentDN) throws LDAPPersistException {
  // Get the attributes that should be included in the entry.
  final LinkedHashMap<String, Attribute> attrMap = new LinkedHashMap<String, Attribute>();
  attrMap.put("objectClass", objectClassAttribute);
  // Field-backed attributes that participate in add operations.
  for (final Map.Entry<String, FieldInfo> e : fieldMap.entrySet()) {
    final FieldInfo i = e.getValue();
    if (!i.includeInAdd()) {
      continue;
    }
    final Attribute a = i.encode(o, false);
    if (a != null) {
      attrMap.put(e.getKey(), a);
    }
  }
  // Getter-backed attributes that participate in add operations.
  for (final Map.Entry<String, GetterInfo> e : getterMap.entrySet()) {
    final GetterInfo i = e.getValue();
    if (!i.includeInAdd()) {
      continue;
    }
    final Attribute a = i.encode(o);
    if (a != null) {
      attrMap.put(e.getKey(), a);
    }
  }
  final String dn = constructDN(o, parentDN, attrMap);
  final Entry entry = new Entry(dn, attrMap.values());
  if (postEncodeMethod != null) {
    try {
      postEncodeMethod.invoke(o, entry);
    } catch (Throwable t) {
      debugException(t);
      // Unwrap reflection's wrapper so the real cause reaches the exception message.
      if (t instanceof InvocationTargetException) {
        t = ((InvocationTargetException) t).getTargetException();
      }
      throw new LDAPPersistException(
          ERR_OBJECT_HANDLER_ERROR_INVOKING_POST_ENCODE_METHOD.get(
              postEncodeMethod.getName(), type.getName(), getExceptionMessage(t)),
          t);
    }
  }
  // Write the generated DN (and entry, if configured) back onto the object.
  setDNAndEntryFields(o, entry);
  if (superclassHandler != null) {
    // Merge in attributes contributed by the superclass's handler.
    final Entry e = superclassHandler.encode(o, parentDN);
    for (final Attribute a : e.getAttributes()) {
      entry.addAttribute(a);
    }
  }
  return entry;
}
/**
 * Moves the operators from this process to another process, keeping all connections intact. TODO:
 * Test more rigorously. Do we register/unregister everything correctly?
 *
 * @param otherUnit the unit whose operators are drained into this one
 * @return the number of ports the connections of which could not be restored
 */
public int stealOperatorsFrom(ExecutionUnit otherUnit) {
  int failedReconnects = 0;
  // remember source and sink connections so we can reconnect them later.
  Map<String, InputPort> sourceMap = new HashMap<String, InputPort>();
  Map<String, OutputPort> sinkMap = new HashMap<String, OutputPort>();
  for (OutputPort source : otherUnit.getInnerSources().getAllPorts()) {
    if (source.isConnected()) {
      sourceMap.put(source.getName(), source.getDestination());
    }
  }
  otherUnit.getInnerSources().disconnectAll();
  for (InputPort sink : otherUnit.getInnerSinks().getAllPorts()) {
    if (sink.isConnected()) {
      sinkMap.put(sink.getName(), sink.getSource());
    }
  }
  otherUnit.getInnerSinks().disconnectAll();
  // Move operators
  Iterator<Operator> i = otherUnit.operators.iterator();
  while (i.hasNext()) {
    Operator operator = i.next();
    // Iterator.remove avoids ConcurrentModificationException while draining the list.
    i.remove();
    otherUnit.unregister(operator);
    Process otherProcess = operator.getProcess();
    if (otherProcess != null) {
      operator.unregisterOperator(otherProcess);
    }
    this.operators.add(operator);
    // Clear the stale enclosing process before registering with this unit.
    operator.setEnclosingProcess(null);
    // operator.unregisterOperator(operator.getProcess());
    registerOperator(operator, true);
    // operator.registerOperator(this.getEnclosingOperator().getProcess());
  }
  // Rewire sources and sinks
  for (Map.Entry<String, InputPort> entry : sourceMap.entrySet()) {
    OutputPort mySource = getInnerSources().getPortByName(entry.getKey());
    if (mySource != null) {
      mySource.connectTo(entry.getValue());
    } else {
      // No identically-named port exists in this unit — the connection is lost.
      failedReconnects++;
    }
  }
  getInnerSources().unlockPortExtenders();
  for (Map.Entry<String, OutputPort> entry : sinkMap.entrySet()) {
    InputPort mySink = getInnerSinks().getPortByName(entry.getKey());
    if (mySink != null) {
      entry.getValue().connectTo(mySink);
    } else {
      failedReconnects++;
    }
  }
  getInnerSinks().unlockPortExtenders();
  // Notify observers that this unit's structure changed.
  fireUpdate(this);
  return failedReconnects;
}
/**
 * Applies attribute changes and, for cells backed by a business object, copies the
 * changed properties into the business object itself.
 *
 * @param attributes map of cell -> property map to apply (may be null)
 * @return an undo map (cell -> previous property values), combining the superclass's
 *     undo information with the business-object deltas collected here
 */
protected Map handleAttributes(Map attributes) {
  Map undo = super.handleAttributes(attributes);
  if (attributes != null) {
    if (undo == null) undo = new Hashtable();
    Iterator it = attributes.entrySet().iterator();
    while (it.hasNext()) {
      Map.Entry entry = (Map.Entry) it.next();
      Object cell = entry.getKey();
      Map properties = (Map) entry.getValue();
      if (cell instanceof JGraphBusinessObject) {
        JGraphBusinessObject bo = (JGraphBusinessObject) cell;
        // Collect the values being overwritten so the edit can be undone.
        Map deltaOld = new Hashtable();
        Iterator it2 = properties.entrySet().iterator();
        while (it2.hasNext()) {
          Map.Entry property = (Map.Entry) it2.next();
          Object key = property.getKey();
          // putProperty returns the previous value (null when the key was unset).
          Object oldValue = bo.putProperty(key, property.getValue());
          // NOTE(review): properties that were previously unset are not recorded,
          // so an undo cannot remove them again — confirm this is intended.
          if (oldValue != null) deltaOld.put(key, oldValue);
        }
        undo.put(cell, deltaOld);
      }
    }
  }
  return undo;
}
/**
 * Merges two change sets into a combined result.
 *
 * <p>First pass walks the target's IDs, merging against the source where present;
 * second pass picks up IDs that only exist in the source. Results and conflicts are
 * accumulated in instance fields by the two-argument {@code merge} overload.
 *
 * @param target the target change set
 * @param source the source change set
 * @return the merged change set data
 * @throws ConflictException if any ID could not be merged cleanly
 */
public synchronized CDOChangeSetData merge(CDOChangeSet target, CDOChangeSet source) throws ConflictException {
  result = new CDOChangeSetDataImpl();
  conflicts = CDOIDUtil.createMap();
  targetMap = createMap(target);
  sourceMap = createMap(source);
  // IDs already handled in the first pass, so the second pass can skip them.
  Set<CDOID> taken = new HashSet<CDOID>();
  for (Entry<CDOID, Object> entry : targetMap.entrySet()) {
    CDOID id = entry.getKey();
    Object targetData = entry.getValue();
    Object sourceData = sourceMap.get(id);
    if (merge(targetData, sourceData)) {
      taken.add(id);
    }
  }
  for (Entry<CDOID, Object> entry : sourceMap.entrySet()) {
    CDOID id = entry.getKey();
    // Set.add returns true only for IDs not seen in the first pass.
    if (taken.add(id)) {
      Object sourceData = entry.getValue();
      Object targetData = targetMap.get(id);
      merge(targetData, sourceData);
    }
  }
  if (!conflicts.isEmpty()) {
    throw new ConflictException(
        "Merger could not resolve all conflicts: " + conflicts, this, result);
  }
  return result;
}
/**
 * Folds the current weight of every feature touched by either gradient map into its
 * running average.
 *
 * @param vMap current feature weights (read only)
 * @param vMapAvg running (sum, count) average per feature; mutated in place
 * @param fxiyi feature map whose keys mark "changed" features
 * @param fxizi second feature map whose keys mark "changed" features
 */
private void updateVmapAvg(
    Map<String, Double> vMap,
    Map<String, AvgValue> vMapAvg,
    Map<String, Double> fxiyi,
    Map<String, Double> fxizi) {
  // Union of keys touched by either map. Replaces the original
  // HashMap<String, Object> that was populated with dummy Objects purely for
  // key de-duplication — a Set expresses that directly.
  java.util.Set<String> changed = new java.util.HashSet<String>(fxiyi.keySet());
  changed.addAll(fxizi.keySet());
  for (String key : changed) {
    // NOTE(review): assumes vMap contains every changed key — vMap.get(key)
    // would NPE on unboxing otherwise, same as the original behavior.
    AvgValue avg = vMapAvg.get(key);
    if (avg != null) {
      Double sum = avg.getD() + vMap.get(key);
      vMapAvg.put(key, new AvgValue(sum, avg.getCount() + 1));
    } else {
      // First observation of this feature.
      vMapAvg.put(key, new AvgValue(vMap.get(key), 1));
    }
  }
}
/**
 * Pushes a snapshot of every counter, histogram and timer to Graphite, then closes
 * the connection. I/O failures are logged rather than propagated.
 */
@Override
public void report(
    Map<String, Counter> counters,
    Map<String, Histogram> histograms,
    Map<String, Timer> timers) {
  try {
    connect();
    // Graphite expects epoch seconds; the whole batch shares one timestamp.
    final long timestamp = System.currentTimeMillis() / 1000;
    for (Map.Entry<String, Counter> counter : counters.entrySet()) {
      reportCounter(counter.getKey(), counter.getValue().snapshot, timestamp);
    }
    for (Map.Entry<String, Histogram> histogram : histograms.entrySet()) {
      reportHistogram(histogram.getKey(), histogram.getValue().snapshot, timestamp);
    }
    for (Map.Entry<String, Timer> timer : timers.entrySet()) {
      reportTimer(timer.getKey(), timer.getValue().snapshot, timestamp);
    }
    flush();
  } catch (IOException e) {
    logger.warn("Unable to report to Graphite", e);
  } finally {
    // Always attempt to close, even if reporting failed part-way through.
    try {
      close();
    } catch (IOException e) {
      logger.warn("Error disconnecting from Graphite", e);
    }
  }
}
/**
 * Initializes the JXPathContext based on any relevant properties set for the filter.
 *
 * @param context the JXPathContext to initialize
 */
protected void initialise(JXPathContext context) {
  if (namespaces != null) {
    if (logger.isDebugEnabled()) {
      logger.debug("Initializing JXPathContext with namespaces: " + namespaces);
    }
    // Raw map: each entry maps a namespace prefix to its URI.
    for (Object element : namespaces.entrySet()) {
      Map.Entry entry = (Map.Entry) element;
      context.registerNamespace(entry.getKey().toString(), entry.getValue().toString());
    }
  }
  if (contextProperties != null) {
    if (logger.isDebugEnabled()) {
      logger.debug("Initializing JXPathContext with properties: " + contextProperties);
    }
    // Each property key is an XPath expression; the value is assigned through it.
    for (Object element : contextProperties.entrySet()) {
      Map.Entry entry = (Map.Entry) element;
      context.setValue(entry.getKey().toString(), entry.getValue());
    }
  }
  if (factory != null) {
    context.setFactory(factory);
  }
  context.setLenient(lenient);
}
/**
 * Histograms are sampled, but we just update points.
 *
 * <p>Merges an incoming window -> snapshot map for {@code meta} into the stored
 * metrics and the parallel window -> Histogram cache. A brand-new meta is adopted
 * wholesale; otherwise each window is merged individually, with the newer (or equal)
 * timestamp winning.
 *
 * @param metricInfo holder of the persisted per-meta snapshot maps (mutated)
 * @param meta metric identifier
 * @param data incoming window -> snapshot data
 * @param metaCounters per-meta occurrence counters, updated at the end
 * @param histograms meta -> window -> Histogram cache kept in sync with metricInfo
 */
public void mergeHistograms(
    MetricInfo metricInfo,
    String meta,
    Map<Integer, MetricSnapshot> data,
    Map<String, Integer> metaCounters,
    Map<String, Map<Integer, Histogram>> histograms) {
  Map<Integer, MetricSnapshot> existing = metricInfo.get_metrics().get(meta);
  if (existing == null) {
    // First sighting of this meta: adopt the incoming map and build the
    // corresponding histogram cache from scratch.
    metricInfo.put_to_metrics(meta, data);
    Map<Integer, Histogram> histogramMap = new HashMap<>();
    for (Map.Entry<Integer, MetricSnapshot> dataEntry : data.entrySet()) {
      Histogram histogram = MetricUtils.metricSnapshot2Histogram(dataEntry.getValue());
      histogramMap.put(dataEntry.getKey(), histogram);
    }
    histograms.put(meta, histogramMap);
  } else {
    for (Map.Entry<Integer, MetricSnapshot> dataEntry : data.entrySet()) {
      Integer win = dataEntry.getKey();
      MetricSnapshot snapshot = dataEntry.getValue();
      MetricSnapshot old = existing.get(win);
      if (old == null) {
        // New window for a known meta: store snapshot and histogram directly.
        existing.put(win, snapshot);
        histograms.get(meta).put(win, MetricUtils.metricSnapshot2Histogram(snapshot));
      } else {
        // Only fold in data that is at least as fresh as what we already hold.
        if (snapshot.get_ts() >= old.get_ts()) {
          old.set_ts(snapshot.get_ts());
          // update points
          MetricUtils.updateHistogramPoints(histograms.get(meta).get(win), snapshot.get_points());
        }
      }
    }
  }
  updateMetricCounters(meta, metaCounters);
}
/**
 * Verifies that when the buyer offers more than the shop's asking price, the
 * transaction and the remaining shop stock contain the expected product sets.
 */
@Test
public void testPriceProductShopLessBuyPrice() {
  // Shop stock: prices below what the buyer is willing to pay.
  shop.getProductSets().put(ProductName.CHEESE, new ProductInfo(5, 5));
  shop.getProductSets().put(ProductName.FISH, new ProductInfo(10, 10));
  // Buyer's order: higher counts/prices than the shop's stock.
  buys.put(ProductName.CHEESE, new ProductInfo(15, 10));
  buys.put(ProductName.FISH, new ProductInfo(40, 15));
  transaction = deal.deal(buys);
  result.clear();
  // NOTE(review): 'result' was just cleared, so containsAll() over an empty set is
  // vacuously true — this assertion checks nothing. It likely belongs after the
  // expected entries are added below; confirm intent.
  assertTrue(shop.getProductSets().entrySet().containsAll(result.entrySet()));
  result.put(ProductName.CHEESE, new ProductInfo(5, 5));
  result.put(ProductName.FISH, new ProductInfo(10, 10));
  /*System.out.println("|transaction"); for (Map.Entry<ProductName, ProductInfo> entry : transaction.getProductSets().entrySet()){ System.out.print(entry.getKey() + " " + entry.getValue().getCount() + " " + entry.getValue().getPrice() + "; "); } System.out.println("\n|shop"); for (Map.Entry<ProductName, ProductInfo> entry : shop.getProductSets().entrySet()){ System.out.print(entry.getKey() + " " + entry.getValue().getCount() + " " + entry.getValue().getPrice() + "; "); } System.out.println("\n|result"); for (Map.Entry<ProductName, ProductInfo> entry : result.entrySet()){ System.out.print(entry.getKey() + " " + entry.getValue().getCount() + " " + entry.getValue().getPrice() + "; "); } System.out.println("|");*/
  assertTrue(transaction.getProductSets().entrySet().containsAll(result.entrySet()));
}
/**
 * Fetches the remote "info/services" payload and flattens its three-level nesting
 * (type -> vendor -> version -> attributes) into a list of service configurations.
 * Levels that fail to parse as maps are skipped silently.
 */
public List<ServiceConfiguration> getServiceConfigurations() {
  Map<String, Object> configurationAsMap =
      getRestTemplate().getForObject(getUrl("info/services"), Map.class);
  if (configurationAsMap == null) {
    return Collections.emptyList();
  }
  List<ServiceConfiguration> configurations = new ArrayList<ServiceConfiguration>();
  for (Map.Entry<String, Object> typeEntry : configurationAsMap.entrySet()) {
    Map<String, Object> vendorMap = CloudUtil.parse(Map.class, typeEntry.getValue());
    if (vendorMap != null) {
      for (Map.Entry<String, Object> vendorEntry : vendorMap.entrySet()) {
        Map<String, Object> versionMap = CloudUtil.parse(Map.class, vendorEntry.getValue());
        if (versionMap != null) {
          for (Map.Entry<String, Object> serviceEntry : versionMap.entrySet()) {
            Map<String, Object> attributes = CloudUtil.parse(Map.class, serviceEntry.getValue());
            if (attributes != null) {
              configurations.add(new ServiceConfiguration(attributes));
            }
          }
        }
      }
    }
  }
  return configurations;
}
/**
 * Sets the rendering settings associated to the image.
 *
 * <p>The stored map is insertion-ordered with the current user's settings first,
 * followed by every other experimenter's settings.
 *
 * @param viewedBy The value to set; may be null, which stores an empty map.
 */
void setViewedBy(Map viewedBy) {
  Map ordered = new LinkedHashMap();
  if (viewedBy != null) {
    long currentUserId = MetadataViewerAgent.getUserDetails().getId();
    // First pass: the current user's entry leads the insertion-ordered map.
    for (Object element : viewedBy.entrySet()) {
      Entry entry = (Entry) element;
      ExperimenterData exp = (ExperimenterData) entry.getKey();
      if (exp.getId() == currentUserId) {
        ordered.put(exp, entry.getValue());
      }
    }
    // Second pass: everyone else, in the source map's order.
    for (Object element : viewedBy.entrySet()) {
      Entry entry = (Entry) element;
      ExperimenterData exp = (ExperimenterData) entry.getKey();
      if (exp.getId() != currentUserId) {
        ordered.put(exp, entry.getValue());
      }
    }
  }
  this.viewedBy = ordered;
}
// Map<Long, URL> static void filterFromUrls(Map<Long, URL> from, Map<Long, URL> to, Map<String, String> filter) { if (from == null || from.isEmpty()) return; for (Map.Entry<Long, URL> entry : from.entrySet()) { URL url = entry.getValue(); boolean match = true; for (Map.Entry<String, String> e : filter.entrySet()) { String key = e.getKey(); String value = e.getValue(); if (ADDRESS_FILTER_KEY.equals(key)) { if (!value.equals(url.getAddress())) { match = false; break; } } else { if (!value.equals(url.getParameter(key))) { match = false; break; } } } if (match) { to.put(entry.getKey(), url); } } }
final void remove(String name) { RenderObjectHandle obj = renderObjects.get(name); if (obj == null) { UI.printWarning(Module.API, "Unable to remove \"%s\" - object was not defined yet"); return; } UI.printDetailed(Module.API, "Removing object \"%s\"", name); renderObjects.remove(name); // scan through all objects to make sure we don't have any // references to the old object still around switch (obj.type) { case SHADER: Shader s = obj.getShader(); for (Map.Entry<String, RenderObjectHandle> e : renderObjects.entrySet()) { Instance i = e.getValue().getInstance(); if (i != null) { UI.printWarning( Module.API, "Removing shader \"%s\" from instance \"%s\"", name, e.getKey()); i.removeShader(s); } } break; case MODIFIER: Modifier m = obj.getModifier(); for (Map.Entry<String, RenderObjectHandle> e : renderObjects.entrySet()) { Instance i = e.getValue().getInstance(); if (i != null) { UI.printWarning( Module.API, "Removing modifier \"%s\" from instance \"%s\"", name, e.getKey()); i.removeModifier(m); } } break; case GEOMETRY: { Geometry g = obj.getGeometry(); for (Map.Entry<String, RenderObjectHandle> e : renderObjects.entrySet()) { Instance i = e.getValue().getInstance(); if (i != null && i.hasGeometry(g)) { UI.printWarning( Module.API, "Removing instance \"%s\" because it referenced geometry \"%s\"", e.getKey(), name); remove(e.getKey()); } } break; } case INSTANCE: rebuildInstanceList = true; break; case LIGHT: rebuildLightList = true; break; default: // no dependencies break; } }
/**
 * Sums the hard-constraint score across all computers: each resource (CPU, memory,
 * network bandwidth) contributes its shortfall as a negative amount, and nothing
 * when the computer has capacity to spare.
 *
 * @param cpuPowerUsageMap total CPU usage per computer
 * @param memoryUsageMap total memory usage per computer
 * @param networkBandwidthUsageMap total bandwidth usage per computer
 * @return zero or a negative hard score
 */
private int sumHardScore(
    Map<CloudComputer, Integer> cpuPowerUsageMap,
    Map<CloudComputer, Integer> memoryUsageMap,
    Map<CloudComputer, Integer> networkBandwidthUsageMap) {
  int hardScore = 0;
  // min(available, 0) adds the deficit when over capacity and 0 otherwise,
  // which is exactly the original "add only if negative" behavior.
  for (Map.Entry<CloudComputer, Integer> usage : cpuPowerUsageMap.entrySet()) {
    hardScore += Math.min(usage.getKey().getCpuPower() - usage.getValue(), 0);
  }
  for (Map.Entry<CloudComputer, Integer> usage : memoryUsageMap.entrySet()) {
    hardScore += Math.min(usage.getKey().getMemory() - usage.getValue(), 0);
  }
  for (Map.Entry<CloudComputer, Integer> usage : networkBandwidthUsageMap.entrySet()) {
    hardScore += Math.min(usage.getKey().getNetworkBandwidth() - usage.getValue(), 0);
  }
  return hardScore;
}
/**
 * Verifies that fromSystem() exposes system properties containing the list
 * delimiter character without splitting them when a different default delimiter
 * is configured.
 */
@Test
public void testFromSystem_containsListValues() throws Exception {
  // Use '|' as the delimiter so the comma inside the value is NOT treated as a
  // list separator by Commons Configuration.
  AbstractConfiguration.setDefaultListDelimiter('|');
  Map<String, String> properties = Maps.newHashMap();
  properties.put("testProperty", "b,bee");
  // NOTE(review): system properties set here are never restored afterwards,
  // which can leak into other tests — consider save/restore in teardown.
  for (Entry<String, String> entry : properties.entrySet()) {
    System.setProperty(entry.getKey(), entry.getValue());
  }
  Splitter splitter = Splitter.on(',');
  Configuration systemConfiguration = configurationHelper.fromSystem();
  for (Entry<String, String> entry : properties.entrySet()) {
    String[] actualValues = systemConfiguration.getStringArray(entry.getKey());
    String[] expectedValues;
    // NOTE(review): 'properties' only ever contains "testProperty", so this
    // line.separator branch looks unreachable — confirm whether the loop was
    // meant to iterate all system properties instead.
    if ("line.separator".equals(entry.getKey())) {
      expectedValues = new String[] {SystemUtils.LINE_SEPARATOR};
    } else {
      expectedValues = splitter.splitToList(entry.getValue()).toArray(new String[0]);
    }
    assertArrayEquals(
        String.format("Values for key %s do not match", entry.getKey()),
        expectedValues,
        actualValues);
  }
}
/**
 * Builds the JPQL join fragment for the parameter collections (datafile, dataset
 * and sample) referenced by this search.
 *
 * @return JPQL join fragment without its leading ", "
 * @throws NoParametersException if no parameters of any kind were supplied
 */
private String getSampleParameterJPQL() throws NoParametersException {
  // StringBuilder replaces repeated String += concatenation in the loops.
  StringBuilder ret = new StringBuilder();
  for (Map.Entry<String, Parameter> e : datafileParameter.entrySet()) {
    ret.append(", IN(")
        .append(Queries.DATAFILE_NAME)
        .append(".datafileParameterCollection) ")
        .append(e.getKey());
  }
  for (Map.Entry<String, Parameter> e : datasetParameter.entrySet()) {
    ret.append(", IN(")
        .append(Queries.DATASET_NAME)
        .append(".datasetParameterCollection) ")
        .append(e.getKey());
  }
  for (Map.Entry<String, Parameter> e : sampleParameter.entrySet()) {
    ret.append(", IN(")
        .append(Queries.PARAM_NAME_JPQL)
        .append(".sampleParameterCollection) ")
        .append(e.getKey());
  }
  if (ret.length() == 0) throw new NoParametersException();
  // Extra joins needed to reach datafiles/datasets from the investigation.
  StringBuilder parameter = new StringBuilder();
  if (!datafileParameter.isEmpty()) {
    parameter
        .append(", IN(")
        .append(Queries.PARAM_NAME_JPQL)
        .append(".investigationId.datasetCollection) ")
        .append(Queries.DATASET_NAME)
        .append(", IN(")
        .append(Queries.DATASET_NAME)
        .append(".datafileCollection) ")
        .append(Queries.DATAFILE_NAME);
  }
  if (datafileParameter.isEmpty() && !datasetParameter.isEmpty()) {
    parameter
        .append(", IN(")
        .append(Queries.PARAM_NAME_JPQL)
        .append(".investigationId.datasetCollection) ")
        .append(Queries.DATASET_NAME);
  }
  // substring(2) strips the leading ", " of whichever fragment comes first.
  if (parameter.length() == 0) return ret.substring(2);
  return parameter.substring(2) + ret;
}
static Map<Feature, Integer> unionFeaturesForMates( Map<Feature, Integer> mappedFeaturesAcrossChunksForMate1, Map<Feature, Integer> mappedFeaturesAcrossChunksForMate2) { // This map will give us the number of bases overlapping each interval, thus we can fractionally // count Map<Feature, Integer> union = new HashMap<>(); for (Map.Entry<Feature, Integer> entry : mappedFeaturesAcrossChunksForMate1.entrySet()) { if (union.containsKey(entry.getKey())) { int currentVal = union.get(entry.getKey()); union.put(entry.getKey(), entry.getValue() + currentVal); } else { union.put(entry.getKey(), entry.getValue()); } } for (Map.Entry<Feature, Integer> entry : mappedFeaturesAcrossChunksForMate2.entrySet()) { if (union.containsKey(entry.getKey())) { int currentVal = union.get(entry.getKey()); union.put(entry.getKey(), entry.getValue() + currentVal); } else { union.put(entry.getKey(), entry.getValue()); } } return union; }
/**
 * Updates the weight vector in place: adds every contribution from {@code fxiyi}
 * and subtracts every contribution from {@code fxizi} (perceptron-style update).
 *
 * @param vMap feature weights, mutated in place and also returned
 * @param fxiyi features whose values are added
 * @param fxizi features whose values are subtracted
 * @return the same {@code vMap} instance
 */
private Map<String, Double> updateV(
    Map<String, Double> vMap, Map<String, Double> fxiyi, Map<String, Double> fxizi) {
  // Single get() replaces the original containsKey()+get() double lookup; a null
  // result means the feature is new (also avoids NPE on an explicit null value).
  for (Map.Entry<String, Double> entry : fxiyi.entrySet()) {
    Double existing = vMap.get(entry.getKey());
    vMap.put(entry.getKey(), existing == null ? entry.getValue() : existing + entry.getValue());
  }
  for (Map.Entry<String, Double> entry : fxizi.entrySet()) {
    Double existing = vMap.get(entry.getKey());
    vMap.put(entry.getKey(), existing == null ? -entry.getValue() : existing - entry.getValue());
  }
  return vMap;
}
/**
 * Flattens the interest map into a space-separated string of "key_value" tokens,
 * skipping entries whose key or interest value is null/empty.
 *
 * @param input map of interest name to Interest
 * @return the flattened token string; empty when no entry is well-formed
 */
private String getFlattenedValue(Map<String, Interest> input) {
  StringBuilder flattened = new StringBuilder();
  for (Map.Entry<String, Interest> entry : input.entrySet()) {
    String key = entry.getKey();
    Interest interest = entry.getValue();
    // Skip malformed entries entirely.
    if (key == null
        || key.isEmpty()
        || interest == null
        || interest.getValue() == null
        || interest.getValue().isEmpty()) {
      continue;
    }
    // Fixed: the separator is now emitted before each token after the first, so a
    // skipped trailing entry no longer leaves a dangling space (the old index-based
    // check failed whenever any entry was skipped). Also removed the unused
    // entrySet/iterator locals.
    if (flattened.length() > 0) {
      flattened.append(" ");
    }
    flattened.append(key).append("_").append(interest.getValue());
  }
  return flattened.toString();
}
/**
 * Verifies that entrySet iterators fail fast: structurally modifying the map after
 * obtaining an iterator must make the next call to next() throw
 * ConcurrentModificationException. Skipped for maps that are unmodifiable or do not
 * promise fail-fast behavior.
 */
public void testFailFastEntrySet() {
  if (!isAddRemoveModifiable()) {
    return;
  }
  if (!isFailFastExpected()) {
    return;
  }
  resetFull();
  Iterator<Map.Entry> it = map.entrySet().iterator();
  final Map.Entry val = it.next();
  // Structural modification through the map (not the iterator) must invalidate it.
  map.remove(val.getKey());
  try {
    it.next();
    fail();
  } catch (ConcurrentModificationException expected) {
    // expected: the iterator detected the concurrent modification
  }
  resetFull();
  it = map.entrySet().iterator();
  it.next();
  // clear() is also a structural modification.
  map.clear();
  try {
    it.next();
    fail();
  } catch (ConcurrentModificationException expected) {
    // expected: the iterator detected the concurrent modification
  }
}
/**
 * Creates an immutable copy of the given map. Small maps (up to 4 entries) are
 * routed to the fixed-arity factories; larger maps are wrapped in an
 * ImmutableUnifiedMap.
 *
 * @param map the source map; must not be null
 * @return an immutable map with the same entries
 */
public <K, V> ImmutableMap<K, V> withAll(Map<K, V> map) {
  if (map.isEmpty()) {
    return this.of();
  }
  if (map.size() > 4) {
    return new ImmutableUnifiedMap<K, V>(map);
  }
  // Fixed minor redundancy: size via map.size() instead of re-materializing the
  // entry set a second time with map.entrySet().size().
  Map.Entry<K, V>[] entries = map.entrySet().toArray(new Map.Entry[map.size()]);
  switch (entries.length) {
    case 1:
      return this.of(entries[0].getKey(), entries[0].getValue());
    case 2:
      return this.of(
          entries[0].getKey(), entries[0].getValue(), entries[1].getKey(), entries[1].getValue());
    case 3:
      return this.of(
          entries[0].getKey(),
          entries[0].getValue(),
          entries[1].getKey(),
          entries[1].getValue(),
          entries[2].getKey(),
          entries[2].getValue());
    case 4:
      return this.of(
          entries[0].getKey(),
          entries[0].getValue(),
          entries[1].getKey(),
          entries[1].getValue(),
          entries[2].getKey(),
          entries[2].getValue(),
          entries[3].getKey(),
          entries[3].getValue());
    default:
      // Unreachable: sizes 1..4 are the only possibilities at this point.
      throw new AssertionError();
  }
}
/**
 * Gets all the acceptable values for a menu option of a board. The outcome of this
 * method can be used to fill a combobox.
 *
 * @param menuLabel the label of a menu as shown in the IDE
 * @param boardName the name of a board (not the board id)
 * @return the menu item names found for the given menu and board
 */
public String[] getMenuItemNames(String menuLabel, String boardName) {
  String menuID = null;
  String boardID = getBoardIDFromName(boardName);
  HashSet<String> ret = new HashSet<String>();
  // Resolve the human-readable menu label to its internal menu id.
  Map<String, String> menuInfo = mArduinoSupportedBoards.get("menu");
  for (Entry<String, String> e2 : menuInfo.entrySet()) {
    if (e2.getValue().equals(menuLabel)) menuID = e2.getKey();
  }
  // NOTE(review): if the label is unknown, menuID stays null and the search key
  // becomes "null.<boardID>." — the method then silently returns no matches.
  // Consider an early return (or warning) for an unresolved label.
  String SearchKey = menuID + "." + boardID + ".";
  // Pre-1.5.4 layout: values live under the global "menu" section as
  // <menuID>.<boardID>.<item> keys (exactly 3 dot-separated segments).
  for (Entry<String, String> e2 : menuInfo.entrySet()) {
    int numsubkeys = e2.getKey().split("\\.").length;
    boolean startOk = e2.getKey().startsWith(SearchKey);
    if ((numsubkeys == 3) && (startOk)) ret.add(e2.getValue());
  }
  // from Arduino IDE 1.5.4 menu is subset of the board. The previous code will not return a
  // result
  Map<String, String> boardInfo = mArduinoSupportedBoards.get(boardID);
  if (boardInfo != null) {
    // 1.5.4+ layout: values live under the board section as menu.<menuID>.<item>.
    SearchKey = "menu." + menuID + ".";
    for (Entry<String, String> e2 : boardInfo.entrySet()) {
      int numsubkeys = e2.getKey().split("\\.").length;
      boolean startOk = e2.getKey().startsWith(SearchKey);
      if ((numsubkeys == 3) && (startOk)) ret.add(e2.getValue());
    }
  }
  return ret.toArray(new String[ret.size()]);
}
/**
 * Encodes a String-keyed map as a JSON value in the requested style.
 *
 * @param value the map to encode; null yields JSON null
 * @param encoder encoder used for each map value
 * @param style output style (DEFAULT/SIMPLE: plain object; JETTISON_NATURAL:
 *     {"entry": [{"key": ..., "value": ...}, ...]})
 * @return the encoded JSON value
 * @throws UnsupportedOperationException for styles this method does not implement
 */
public static <Type> JSONValue toJSON(
    Map<String, Type> value, AbstractJsonEncoderDecoder<Type> encoder, Style style) {
  if (value == null) {
    return JSONNull.getInstance();
  }
  switch (style) {
    case DEFAULT:
    case SIMPLE:
      {
        // Plain JSON object: each map key becomes a field.
        JSONObject rc = new JSONObject();
        for (Entry<String, Type> t : value.entrySet()) {
          rc.put(t.getKey(), encoder.encode(t.getValue()));
        }
        return rc;
      }
    case JETTISON_NATURAL:
      {
        // Jettison "natural" style: an array of {key, value} objects under "entry".
        JSONObject rc = new JSONObject();
        JSONArray entries = new JSONArray();
        int i = 0;
        for (Entry<String, Type> t : value.entrySet()) {
          JSONObject entry = new JSONObject();
          entry.put("key", new JSONString(t.getKey()));
          entry.put("value", encoder.encode(t.getValue()));
          entries.set(i++, entry);
        }
        rc.put("entry", entries);
        return rc;
      }
    default:
      // Fixed typo in the user-facing message: "suppored" -> "supported".
      throw new UnsupportedOperationException(
          "The encoding style is not yet supported: " + style.name());
  }
}
/**
 * Returns the sorted, de-duplicated set of language ids for which either the name
 * or the description has a non-blank localized value.
 */
public String[] getAvailableLanguageIds() {
  Set<String> availableLanguageIds = new TreeSet<String>();
  collectLanguageIds(getNameMap(), availableLanguageIds);
  collectLanguageIds(getDescriptionMap(), availableLanguageIds);
  return availableLanguageIds.toArray(new String[availableLanguageIds.size()]);
}

/** Adds the language id of every locale whose localized value is non-blank. */
private void collectLanguageIds(Map<Locale, String> localizedValues, Set<String> languageIds) {
  for (Map.Entry<Locale, String> entry : localizedValues.entrySet()) {
    if (Validator.isNotNull(entry.getValue())) {
      languageIds.add(LocaleUtil.toLanguageId(entry.getKey()));
    }
  }
}
/**
 * Commits to Kafka, for every topic partition, the highest offset that is ready
 * (as determined by each OffsetEntry), then lets the entries prune their acked
 * state up to the committed point.
 */
private void commitOffsetsForAckedTuples() {
  // Find offsets that are ready to be committed for every topic partition
  final Map<TopicPartition, OffsetAndMetadata> nextCommitOffsets = new HashMap<>();
  for (Map.Entry<TopicPartition, OffsetEntry> tpOffset : acked.entrySet()) {
    final OffsetAndMetadata nextCommitOffset = tpOffset.getValue().findNextCommitOffset();
    if (nextCommitOffset != null) {
      nextCommitOffsets.put(tpOffset.getKey(), nextCommitOffset);
    }
  }
  // Commit offsets that are ready to be committed for every topic partition
  if (!nextCommitOffsets.isEmpty()) {
    kafkaConsumer.commitSync(nextCommitOffsets);
    LOG.debug("Offsets successfully committed to Kafka [{}]", nextCommitOffsets);
    // Instead of iterating again, it would be possible to commit and update the state for each
    // TopicPartition
    // in the prior loop, but the multiple network calls should be more expensive than iterating
    // twice over a small loop
    for (Map.Entry<TopicPartition, OffsetEntry> tpOffset : acked.entrySet()) {
      final OffsetEntry offsetEntry = tpOffset.getValue();
      // NOTE(review): partitions with nothing committed receive null here —
      // confirm OffsetEntry.commit tolerates a null argument.
      offsetEntry.commit(nextCommitOffsets.get(tpOffset.getKey()));
    }
  } else {
    LOG.trace("No offsets to commit. {}", this);
  }
}
/**
 * Deep-clones this record, using the parent stack to detect and reject cyclic
 * nesting. Field values are cloned recursively; metadata entries are copied; the
 * ResponseStatus is deliberately not carried over.
 *
 * @param parentRecords stack of records currently being cloned (cycle detection)
 * @return the cloned record
 * @throws RecordException if this record is (transitively) nested in itself
 */
@Override
public Record cloneRecord(IdentityRecordStack parentRecords) throws RecordException {
  // Guard against cycles: a record that contains itself cannot be cloned.
  if (parentRecords.contains(this)) {
    throw new RecordException("A record may not be nested in itself: " + id);
  }
  RecordImpl record = new RecordImpl();
  record.id = id;
  record.version = version;
  record.recordTypes.putAll(recordTypes);
  // Track this record on the stack while nested field values are cloned.
  parentRecords.push(this);
  for (Entry<QName, Object> entry : fields.entrySet()) {
    record.fields.put(entry.getKey(), tryCloneValue(parentRecords, entry));
  }
  parentRecords.pop();
  if (fieldsToDelete.size() > 0) {
    // addAll seems expensive even when list is empty
    record.fieldsToDelete.addAll(fieldsToDelete);
  }
  if (metadatas != null) {
    for (Map.Entry<QName, Metadata> metadata : metadatas.entrySet()) {
      record.setMetadata(metadata.getKey(), metadata.getValue());
    }
  }
  // the ResponseStatus is not cloned, on purpose
  return record;
}
/** * Object payload contained in Exchange In case of a single file Exchange Header is populated with * the name of the remote path downloaded In case of a multiple files Exchange Header is populated * with the name of the remote paths downloaded In case of a single file Exchange Body is * populated with the ByteArrayOutputStream downloaded from dropbox. In case of multiple files * Exchange Body is populated with a map containing as key the remote path and as value the linked * ByteArrayOutputStream * * @param exchange */ @Override public void populateExchange(Exchange exchange) { // in case we have only one baos put it directly in body Map<String, ByteArrayOutputStream> map = (Map<String, ByteArrayOutputStream>) resultEntries; if (map.size() == 1) { // set info in exchange String pathExtracted = null; ByteArrayOutputStream baosExtracted = null; for (Map.Entry<String, ByteArrayOutputStream> entry : map.entrySet()) { pathExtracted = entry.getKey(); baosExtracted = entry.getValue(); } exchange.getIn().setHeader(DropboxResultHeader.DOWNLOADED_FILE.name(), pathExtracted); exchange.getIn().setBody(baosExtracted); } else { StringBuffer pathsExtracted = new StringBuffer(); for (Map.Entry<String, ByteArrayOutputStream> entry : map.entrySet()) { pathsExtracted.append(entry.getKey() + "\n"); } exchange .getIn() .setHeader(DropboxResultHeader.DOWNLOADED_FILES.name(), pathsExtracted.toString()); exchange.getIn().setBody(map); } }