static void collectionTest() { // removeIf Collection<String> c = new HashSet<>(); c.add("Content 1"); c.add("Content 2"); c.add("Content 3"); c.add("Content 4"); c.removeIf(s -> s.contains("2")); System.out.println("removeIf : " + c); /// 基本操作 List<Integer> list = new ArrayList(Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11)); list.removeIf(a -> a % 3 == 0); System.out.println("a % 3 == 0 " + list); // OR 操作 Predicate<Integer> predicate2 = a -> a % 3 == 0; Predicate<Integer> predicate3 = a -> a % 5 == 0; list = new ArrayList(Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11)); list.removeIf(predicate2.or(predicate3)); System.out.println("a % 3 == 0 or a % 5 == 0 " + list); // AND 操作 predicate2 = a -> a % 3 == 0; predicate3 = a -> a % 5 == 0; list = new ArrayList(Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11)); list.removeIf(predicate2.and(predicate3)); System.out.println("a % 3 == 0 and a % 5 == 0 " + list); List<String> stringList = Arrays.asList("a", "b"); stringList.forEach(System.out::println); stringList = Arrays.asList("a", "b", "c"); stringList.replaceAll(String::toUpperCase); System.out.println(stringList); // [A, B, C] stringList = Arrays.asList("a", "b", "c"); stringList.sort(String::compareTo); Map<String, Integer> map = new HashMap<>(); map.put("A", 10); map.put("B", 20); map.put("C", 30); map.forEach((k, v) -> System.out.println("Item : " + k + " Count : " + v)); System.out.println(map.getOrDefault("D", 40)); // => 40 }
/** * Two pointers. O(N) time, O(M) space * window的右边界碰到目标元素,这个元素的计数器减减(原先没有出现的元素置成负一),左边界遇到目标元素,这个元素的计数器加一, 原先没有出现的元素变成0 * 如果左边界的计数此刻大于零,证明它原来是目标元素之一, 收缩我们的有效窗口 */ public static String minWindowV1(String s, String t) { if (s == null || s.isEmpty() || t == null || t.isEmpty()) return ""; int start = 0, minLen = Integer.MAX_VALUE, unuse = t.length(); // Count character apperances to build a dictionary Map<Character, Integer> dict = new HashMap<>(); // word size map. Key: character. Value: appearance times in t. for (int i = 0, tLen = t.length(); i < tLen; ++i) { char c = t.charAt(i); dict.put(c, 1 + dict.getOrDefault(c, 0)); } // Minimum window traversing for (int head = 0, tail = 0, sLen = s.length(); tail < sLen; ++tail) { // 外圈是在右端扩展窗口,一次一个 char tailKey = s.charAt(tail); dict.put(tailKey, dict.getOrDefault(tailKey, 0) - 1); for (unuse -= dict.get(tailKey) >= 0 ? 1 : 0; unuse == 0; ++head) { // 发现valid的子串,在左端收缩窗口 if (1 + tail - head < minLen) minLen = 1 + tail - (start = head); char headKey = s.charAt(head); dict.put(headKey, 1 + dict.get(headKey)); if (dict.get(headKey) > 0) ++unuse; // make it invalid } } return minLen == Integer.MAX_VALUE ? "" : s.substring(start, start + minLen); }
@Override public void buildWidgetForm( List<RenderedElement> elements, FieldConfig field, Map<String, String> value, int delta) { Map<String, Object> settings = field.getSettings(); if (settings == null) { settings = getDefaultSettings(); } // Get the default value Object defaultValue = settings.get("value"); String defaultSummaryValue = ""; String defaultTextValue = ""; if (defaultValue != null && Map.class.isAssignableFrom(defaultValue.getClass())) { defaultSummaryValue = ((Map<String, String>) defaultValue).getOrDefault("summary", ""); defaultTextValue = ((Map<String, String>) defaultValue).getOrDefault("text", ""); } else if (defaultValue != null && List.class.isAssignableFrom(defaultValue.getClass()) && ((List) defaultValue).size() > delta) { Object listValue = ((List<Object>) defaultValue).get(delta); if (listValue != null && Map.class.isAssignableFrom(listValue.getClass())) { defaultSummaryValue = ((Map<String, String>) listValue).get("summary"); defaultTextValue = ((Map<String, String>) listValue).get("text"); } else if (String.class.isAssignableFrom(listValue.getClass())) { defaultSummaryValue = ""; defaultTextValue = (String) listValue; } } if (value != null && value.containsKey("summary")) { defaultSummaryValue = value.get("summary"); } if (value != null && value.containsKey("value")) { defaultTextValue = value.get("value"); } // FIXME: Switch for show summary // Text Field is a type of String element TextareaElement summary = new TextareaElement(); summary.setLabel(field.getLabel() + " Summary"); summary.setName(field.getName() + "_summary"); summary.setRequired(field.isRequired()); summary.setDefaultValue(defaultSummaryValue); summary.setRows(Integer.parseInt((String) settings.getOrDefault("summary_rows", 10))); summary.setSize(1024); elements.add(summary); TextareaElement element = new TextareaElement(); element.setLabel(field.getLabel()); element.setName(field.getName() + "_text"); element.setRequired(field.isRequired()); 
element.setDefaultValue(defaultTextValue); element.setRows(Integer.parseInt((String) settings.getOrDefault("rows", 10))); element.setSize(-1); // unlimted elements.add(element); }
/**
 * Reads the list of class names stored under {@code token} in the serialization
 * configuration, falling back to an empty list when the key is absent.
 *
 * @throws IllegalStateException when the configured value cannot be used as a list
 */
private List<String> getClassNamesFromConfig(
    final String token, final Map<String, Object> config) {
  try {
    return (List<String>) config.getOrDefault(token, new ArrayList<String>());
  } catch (Exception ex) {
    final String message =
        String.format(
            "Invalid configuration value of [%s] for [%s] setting on %s serialization configuration",
            config.getOrDefault(token, ""), token, this.getClass().getName());
    throw new IllegalStateException(message, ex);
  }
}
/**
 * Creates a new item from the instruction payload ("date", "name", "data" keys,
 * each defaulting to "") and registers it with the database, attaching any tags
 * present under the "tags" key.
 */
private void createItem(Instruction instr, DatabaseCore databaseCore) {
  final Map<String, Object> data = instr.getData();
  final String date = data.getOrDefault("date", "").toString();
  final String name = data.getOrDefault("name", "").toString();
  final String payload = data.getOrDefault("data", "").toString();
  ModifiableItem item =
      databaseCore.getDataFactory().createItem(instr.getTarget(), date, name, payload);
  if (data.containsKey("tags")) {
    item.addTags(parseTags(data.get("tags"), databaseCore));
  }
  databaseCore.addItem(item);
}
/**
 * Enforces per-query memory limits and refreshes cluster-wide memory bookkeeping:
 * fails any query over its effective limit, publishes the total reserved bytes,
 * then pushes per-pool query counts and node assignments. No-op when disabled.
 */
public void process(Iterable<QueryExecution> queries) {
  if (!enabled) {
    return;
  }
  long clusterBytes = 0;
  for (QueryExecution queryExecution : queries) {
    long reservedBytes = queryExecution.getTotalMemoryReservation();
    DataSize sessionLimit = getQueryMaxMemory(queryExecution.getSession());
    // The effective limit is the tighter of the global and session limits.
    long effectiveLimit = Math.min(maxQueryMemory.toBytes(), sessionLimit.toBytes());
    clusterBytes += reservedBytes;
    if (reservedBytes > effectiveLimit) {
      queryExecution.fail(
          new ExceededMemoryLimitException(
              "Query", DataSize.succinctDataSize(effectiveLimit, Unit.BYTE)));
    }
  }
  clusterMemoryUsageBytes.set(clusterBytes);
  // Tally how many running queries are assigned to each memory pool.
  Map<MemoryPoolId, Integer> countByPool = new HashMap<>();
  for (QueryExecution queryExecution : queries) {
    countByPool.merge(queryExecution.getMemoryPool().getId(), 1, Integer::sum);
  }
  updatePools(countByPool);
  updateNodes(updateAssignments(queries));
}
/*
 * Mitigates unbounded tombstone growth. The oldest successful anti-entropy
 * exchange time across *all* remote peers bounds the purge: any tombstone created
 * at or before that time has been observed by every peer and is safe to drop.
 * Peers with no recorded exchange contribute 0, which blocks purging entirely.
 */
private void purgeTombstones() {
  long safePurgeTime =
      clusterService
          .getNodes()
          .stream()
          .map(ControllerNode::id)
          .filter(id -> !id.equals(localNodeId))
          .mapToLong(id -> antiEntropyTimes.getOrDefault(id, 0L))
          .min()
          .orElse(0L);
  // Nothing has advanced since the last pass; skip the scan entirely.
  if (safePurgeTime == previousTombstonePurgeTime) {
    return;
  }
  List<Map.Entry<K, MapValue<V>>> expired =
      items
          .entrySet()
          .stream()
          .filter(e -> e.getValue().isTombstone())
          .filter(e -> e.getValue().creationTime() <= safePurgeTime)
          .collect(Collectors.toList());
  previousTombstonePurgeTime = safePurgeTime;
  // Two-argument remove avoids deleting an entry that was concurrently replaced.
  expired.forEach(entry -> items.remove(entry.getKey(), entry.getValue()));
}
/**
 * Registers a first group assignment for the given alumn, chosen uniformly at
 * random among the groups that still have capacity. If the alumn is already
 * registered, the existing assignment is returned unchanged (with a warning).
 *
 * NOTE(review): when every group has reached MAX_ALUMNS_PER_GROUP,
 * availableGroups is empty and this.random.nextInt(0) throws
 * IllegalArgumentException — confirm callers guarantee spare capacity.
 */
private Availability firstAssignation(AID alumn) {
  if (this.groups.containsKey(alumn)) {
    System.err.println("WARN: Requested first assignation for already registered alumn " + alumn);
    return this.groups.get(alumn);
  }
  // TODO: This could be more optimized, for example, having the
  // availabilityCount map cached
  // Count how many alumns are currently assigned to each group.
  final Map<Availability, Long> availabilityCount =
      this.groups.values().stream().collect(Collectors.groupingBy(a -> a, Collectors.counting()));
  // Keep only the groups whose occupancy is below the per-group cap.
  final List<Availability> availableGroups =
      TeacherBehaviour.AVAILABLE_GROUPS
          .stream()
          .filter(
              a -> availabilityCount.getOrDefault(a, 0l) < TeacherBehaviour.MAX_ALUMNS_PER_GROUP)
          .collect(Collectors.toList());
  // Pick a random eligible group and record the assignment.
  final Availability result = availableGroups.get(this.random.nextInt(availableGroups.size()));
  this.groups.put(alumn, result);
  return result;
}
public static void main(String[] args) { Map<Integer, String> map = new HashMap<>(); for (int i = 0; i < 10; i++) { map.putIfAbsent(i, "val" + i); } map.forEach((id, val) -> System.out.println(val)); map.computeIfPresent(3, (num, val) -> val + num); System.out.println(map.get(3)); // val33 map.computeIfPresent(9, (num, val) -> null); System.out.println(map.containsKey(9)); // false map.computeIfAbsent(23, num -> "val" + num); System.out.println(map.containsKey(23)); // true map.computeIfAbsent(3, num -> "bam"); System.out.println(map.get(3)); // val33 map.remove(3, "val3"); System.out.println(map.get(3)); // val33 map.remove(3, "val33"); System.out.println(map.get(3)); // null System.out.println(map.getOrDefault(42, "not found")); // not found map.merge(9, "val9", (value, newValue) -> value.concat(newValue)); System.out.println(map.get(9)); // val9 map.merge(9, "concat", (value, newValue) -> value.concat(newValue)); System.out.println(map.get(9)); // val9concat }
private double getScoreBM25(RetrievalModel r) throws IOException { if (!this.docIteratorHasMatchCache()) { return 0d; } else { Map<Qry, Integer> queryMap = new HashMap<>(); for (Qry arg : this.args) { int num = queryMap.getOrDefault(arg, 0) + 1; queryMap.put(arg, num); } countingSetup(); double totalScore = 0d; for (Qry curArg : queryMap.keySet()) { int curQryFreq = queryMap.get(curArg); int docid = this.docIteratorGetMatch(); if (!curArg.docIteratorHasMatch(r) || docid != curArg.docIteratorGetMatch()) { // nothing match , curscore = 0; continue; } else { totalScore += ((QrySop) curArg).getScore(r); } // end of else } // end of for loop return totalScore; } }
/** Records the processing status code and message for a page, keyed by file path. */
private static void setPageStatus(String filePath, int status, String message) {
  final Map<String, Object> entry =
      (Map<String, Object>) config.getOrDefault(filePath, new HashMap<>());
  entry.put("status", status);
  entry.put("msg", message);
  // getOrDefault never inserts its fallback, so store the entry explicitly.
  config.put(filePath, entry);
}
/**
 * Entry point: loads previously saved progress, scans the docs trees for missing
 * pages, and persists the updated status file.
 *
 * <p>Usage: optional arg 0 overrides the snippets path (else the
 * API_SNIPPETS_PATH env var or a hard-coded fallback); optional arg 1 selects the
 * print-status mode.
 */
public static void main(String[] args) {
  try {
    fileConfig = Utils.loadStatusData(LOGFILE_PATH);
    config = fileConfig.getOrDefault("progress", new HashMap<>());
    final String apiSnippetsPath;
    if (args.length == 0) {
      apiSnippetsPath =
          Utils.getenv("API_SNIPPETS_PATH").orElse("/home/ehernandez/twilio/markdown");
    } else {
      apiSnippetsPath = args[0];
    }
    // FIX: guard must be args.length > 1 — the previous `> 0` check threw
    // ArrayIndexOutOfBoundsException when exactly one argument was supplied.
    printStatus = PrintStatus.fromParam((args.length > 1) ? args[1] : "");
    PagesScanner scanner =
        new PagesScanner(
            SearchForMissingPages::checkPageStatus,
            BASE_REPOSITORY_URL,
            apiSnippetsPath,
            new String[] {"md"});
    scanner.test("/docs/api");
    scanner.test("/docs/quickstart");
    Utils.saveStatusData(fileConfig, new File(LOGFILE_PATH));
  } catch (ParsingFileException ex) {
    System.err.println("Error parsing " + LOGFILE_PATH);
  }
}
/** Exercises the Java 8 Map default methods and logs each intermediate result. */
@Test
public void functionTest() {
  Map<Integer, String> map = Maps.newHashMap();
  for (int key = 0; key < 10; key++) {
    map.putIfAbsent(key, "val" + key);
  }
  log.info("key 3 value:'{}'", map.get(3));
  // Returning null from computeIfPresent removes the entry for key 9.
  log.info("map computeIfPresent:'{}'", map.computeIfPresent(9, (num, val) -> null));
  log.info("key 9 containsKey:'{}'", map.containsKey(9));
  log.info("computeIfAbsent 23 :'{}'", map.computeIfAbsent(23, num -> "val" + num));
  log.info("map.containsKey(23):'{}'", map.containsKey(23));
  // Key 3 is present, so the mapping function is ignored and "val3" survives.
  log.info("computeIfAbsent 3:'{}'", map.computeIfAbsent(3, num -> "bam"));
  log.info("get 3:'{}'", map.get(3));
  // Value-conditional removals: only the exact current value removes the entry.
  map.remove(3, "val");
  log.info("remove get 3:'{}'", map.get(3));
  map.remove(3, "val3");
  log.info("remove get 3:'{}'", map.get(3));
  log.info("getDefault:'{}'", map.getOrDefault(43, "not found"));
  // Key 9 was removed above, so the first merge inserts; the second concatenates.
  log.info("merge:'{}'", map.merge(9, "val9", String::concat));
  log.info("merge:'{}'", map.merge(9, "count", String::concat));
}
/**
 * Reads the given config file into the annotated fields of {@code clazz}, then
 * rewrites the file from the in-memory categories so it is complete and
 * canonically formatted. On the first invocation only, the declared fields of
 * {@code clazz} are scanned and each Config.* annotation is registered with its
 * type-specific handler.
 *
 * @param clazz the class to be handled (read from and wrote to)
 * @param file the file to be processed
 */
public void handleConfig(Class<?> clazz, File file) {
  if (!hasRun) {
    hasRun = true;
    // One-time registration: dispatch each annotated field to the handler for its type.
    for (Field field : clazz.getDeclaredFields()) {
      for (Annotation annotation : field.getAnnotations()) {
        if (annotation.annotationType() == Config.String.class)
          handleString(clazz, field, (Config.String) annotation);
        else if (annotation.annotationType() == Config.Integer.class)
          handleInteger(clazz, field, (Config.Integer) annotation);
        else if (annotation.annotationType() == Config.Boolean.class)
          handleBoolean(clazz, field, (Config.Boolean) annotation);
        else if (annotation.annotationType() == Config.List.class)
          handleList(clazz, field, (Config.List) annotation);
        else if (annotation.annotationType() == Config.Map.class)
          handleMap(clazz, field, (Config.Map) annotation);
        else if (annotation.annotationType() == Config.Long.class)
          handleLong(clazz, field, (Config.Long) annotation);
        else if (annotation.annotationType() == Config.Float.class)
          handleFloat(clazz, field, (Config.Float) annotation);
        else if (annotation.annotationType() == Config.Double.class)
          handleDouble(clazz, field, (Config.Double) annotation);
        else if (annotation.annotationType() == Config.Character.class)
          handleCharacter(clazz, field, (Config.Character) annotation);
      }
    }
  }
  try (BufferedReader reader =
      new BufferedReader(new InputStreamReader(new FileInputStream(file), "UTF-8"))) {
    String line;
    while ((line = reader.readLine()) != null) {
      // A '{' marks the start of a category section; its name is the leading token.
      if (line.contains("{")) {
        char[] chars = line.toCharArray();
        boolean hasNotEncounteredText = true;
        StringBuilder stringBuilder = new StringBuilder();
        // Collect the category name, stopping at the first space.
        // NOTE(review): the very first character is appended unconditionally (even
        // if it is whitespace), and a tab neither appends nor terminates — confirm
        // this matches the format produced by the write pass below.
        for (Character character : chars) {
          if ((!character.equals(' ') && !character.equals('\t')) || hasNotEncounteredText) {
            hasNotEncounteredText = false;
            stringBuilder.append(character);
          } else if (character.equals(' ')) break;
        }
        // Unknown category names fall back to the "General" category.
        categories
            .getOrDefault(stringBuilder.toString(), categories.get("General"))
            .read(clazz, reader);
      }
    }
  } catch (IOException ignored) {
    // Best effort: a missing/unreadable file simply leaves defaults in place.
  }
  // Regardless of read success, rewrite the file from the registered categories.
  StringBuilder stringBuilder = new StringBuilder();
  categories.values().forEach(category -> category.write(stringBuilder));
  String fileString = stringBuilder.toString();
  try (BufferedWriter writer =
      new BufferedWriter(new OutputStreamWriter(new FileOutputStream(file), "UTF-8"))) {
    writer.append(fileString);
  } catch (IOException ignored) {
    // Write failures are intentionally swallowed; the config stays in memory.
  }
}
/**
 * Builds an RDA model from the factory properties. "alpha" falls back to
 * DEFAULT_ALPHA when unset; "priori" and "tol" are passed through as found
 * (possibly null).
 */
@Override
protected Model<double[][], int[], double[], Integer> doCreate(
    String id, Map<String, Object> properties) {
  final double[] priori = (double[]) properties.get("priori");
  final Double alpha = (Double) properties.getOrDefault("alpha", DEFAULT_ALPHA);
  final Double tol = (Double) properties.get("tol");
  return new RDAModel(id, def, priori, alpha, tol);
}
/**
 * Returns the effective value of every session property: all system properties
 * first, then each catalog's connector properties with catalogs visited in name
 * order. Properties not explicitly set in the session report their default value.
 */
public List<SessionPropertyValue> getAllSessionProperties(
    Session session, Map<String, ConnectorId> catalogs) {
  requireNonNull(session, "session is null");
  ImmutableList.Builder<SessionPropertyValue> sessionPropertyValues = ImmutableList.builder();
  Map<String, String> systemProperties = session.getSystemProperties();
  for (PropertyMetadata<?> property : systemSessionProperties.values()) {
    // Fall back to the property's declared default, rendered as a string.
    String defaultValue = firstNonNull(property.getDefaultValue(), "").toString();
    String value = systemProperties.getOrDefault(property.getName(), defaultValue);
    sessionPropertyValues.add(
        new SessionPropertyValue(
            value,
            defaultValue,
            property.getName(),
            Optional.empty(), // system properties carry no catalog
            property.getName(),
            property.getDescription(),
            property.getSqlType().getDisplayName(),
            property.isHidden()));
  }
  // TreeMap copies give deterministic, name-ordered iteration.
  for (Entry<String, ConnectorId> entry : new TreeMap<>(catalogs).entrySet()) {
    String catalog = entry.getKey();
    ConnectorId connectorId = entry.getValue();
    Map<String, String> connectorProperties = session.getConnectorProperties(connectorId);
    for (PropertyMetadata<?> property :
        new TreeMap<>(connectorSessionProperties.get(connectorId)).values()) {
      String defaultValue = firstNonNull(property.getDefaultValue(), "").toString();
      String value = connectorProperties.getOrDefault(property.getName(), defaultValue);
      sessionPropertyValues.add(
          new SessionPropertyValue(
              value,
              defaultValue,
              catalog + "." + property.getName(), // fully qualified name
              Optional.of(catalog),
              property.getName(),
              property.getDescription(),
              property.getSqlType().getDisplayName(),
              property.isHidden()));
    }
  }
  return sessionPropertyValues.build();
}
/**
 * Looks up the docker machine name in the supplied environment map.
 *
 * @param environment environment variables to consult
 * @return the value of the DOCKER_MACHINE_NAME variable, or "" when it is
 *     absent, empty, or explicitly mapped to null
 */
public String getDockerMachineName(Map<String, String> environment) {
  String dockerMachineName = environment.getOrDefault(DOCKER_MACHINE_NAME, "");
  // getOrDefault still returns null when the key is present with a null value;
  // normalize that case so callers always receive a non-null string.
  if (dockerMachineName == null || dockerMachineName.isEmpty()) {
    LOG.debug(
        "The environmental variable DOCKER_MACHINE_NAME was not found. Using docker0 address.");
    dockerMachineName = ""; // FIX: never propagate a null mapping to callers
  }
  return dockerMachineName;
}
/**
 * Boots the cocktail-bar actor system and runs the application. The system name
 * may be overridden via the "name" option (default "cocktail-bar").
 */
public static void main(final String[] args) throws IOException {
  final Map<String, String> opts = argsToOpts(Arrays.asList(args));
  applySystemProperties(opts);
  final String baseName = opts.getOrDefault("name", "cocktail-bar");
  final String systemName = String.format("%s-system", baseName);
  final ActorSystem system = ActorSystem.create(systemName);
  new CocktailBarApp(system).run();
}
/**
 * Pushes the thermostat configuration (comfort/eco temperatures, setpoint limits,
 * offset, and window-open behaviour) to the MAX! Cube. Each setting prefers the
 * freshly supplied configuration parameter and falls back to the device's stored
 * property when the parameter is absent. An S command carries the new values and
 * a follow-up C command requests the updated configuration back.
 */
private void sendPropertyUpdate(
    Map<String, Object> configurationParameters, HashMap<String, Object> deviceProperties) {
  try {
    Device device = getMaxCubeBridgeHandler().getDevice(maxDeviceSerial);
    rfAddress = device.getRFAddress();
    int roomId = device.getRoomId();
    BigDecimal tempComfort =
        (BigDecimal)
            configurationParameters.getOrDefault(
                PROPERTY_THERMO_COMFORT_TEMP, deviceProperties.get(PROPERTY_THERMO_COMFORT_TEMP));
    BigDecimal tempEco =
        (BigDecimal)
            configurationParameters.getOrDefault(
                PROPERTY_THERMO_ECO_TEMP, deviceProperties.get(PROPERTY_THERMO_ECO_TEMP));
    BigDecimal tempSetpointMax =
        (BigDecimal)
            configurationParameters.getOrDefault(
                PROPERTY_THERMO_MAX_TEMP_SETPOINT,
                deviceProperties.get(PROPERTY_THERMO_MAX_TEMP_SETPOINT));
    BigDecimal tempSetpointMin =
        (BigDecimal)
            configurationParameters.getOrDefault(
                PROPERTY_THERMO_MIN_TEMP_SETPOINT,
                deviceProperties.get(PROPERTY_THERMO_MIN_TEMP_SETPOINT));
    BigDecimal tempOffset =
        (BigDecimal)
            configurationParameters.getOrDefault(
                PROPERTY_THERMO_OFFSET_TEMP, deviceProperties.get(PROPERTY_THERMO_OFFSET_TEMP));
    BigDecimal tempOpenWindow =
        (BigDecimal)
            configurationParameters.getOrDefault(
                PROPERTY_THERMO_WINDOW_OPEN_TEMP,
                deviceProperties.get(PROPERTY_THERMO_WINDOW_OPEN_TEMP));
    BigDecimal durationOpenWindow =
        (BigDecimal)
            configurationParameters.getOrDefault(
                PROPERTY_THERMO_WINDOW_OPEN_DURATION,
                deviceProperties.get(PROPERTY_THERMO_WINDOW_OPEN_DURATION));
    S_ConfigCommand cmd =
        new S_ConfigCommand(
            rfAddress,
            roomId,
            tempComfort.doubleValue(),
            tempEco.doubleValue(),
            tempSetpointMax.doubleValue(),
            tempSetpointMin.doubleValue(),
            tempOffset.doubleValue(),
            tempOpenWindow.doubleValue(),
            durationOpenWindow.intValue());
    bridgeHandler.queueCommand(
        new SendCommand(maxDeviceSerial, cmd, "Update Thermostat Properties"));
    // Ask the cube to report the updated configuration back.
    sendCCommand();
  } catch (NullPointerException e) {
    // NOTE(review): the NPE doubles as control flow for "bridge handler (or a
    // required property) missing"; an explicit null check would be clearer —
    // confirm intent before changing.
    logger.warn(
        "MAX! Cube LAN gateway bridge handler not found. Cannot handle update without bridge.");
  } catch (Exception e) {
    logger.debug("Exception occurred during execution: {}", e.getMessage(), e);
  }
}
/**
 * Adds a word to the vocabulary and bumps its frequency count, unless it is
 * rejected: too long after trimming, or a short/all-ASCII token that matches the
 * non-alpha-start pattern.
 */
private static void filteredUpdate(Set<String> vocabulary, String word) {
  final String trimmed = word.trim();
  if (trimmed.length() > MAX_STRING_LENGTH) {
    return;
  }
  if ((LuceneNLPUtil.isAllASCII(word) || trimmed.length() <= 1)
      && word.matches(START_WITH_NONALPHA_CHAR)) {
    return;
  }
  vocabulary.add(word);
  vocabDist.merge(word, 1, Integer::sum);
}
/**
 * Maps a bean property to its OptionProperty representation. Enum-typed
 * properties use the dedicated enum mapper; everything else is looked up by
 * qualified type name, defaulting to empty for unregistered types.
 */
private static Optional<OptionProperty> of(Property property) {
  SimpleTypeInfo typeInfo = property.returnTypeInfo();
  if (typeInfo.isEnum()) {
    return EnumOptionProperty.mapper(property);
  }
  Mapper mapper =
      MAPPERS.getOrDefault(typeInfo.qualifiedName(), (p) -> Optional.<OptionProperty>empty());
  return mapper.of(property);
}
/**
 * Moves {@code amount} from the source account to the destination account.
 * Accounts absent from the map are treated as having a zero balance.
 *
 * <p>NOTE(review): negative amounts still pass the balance check — confirm
 * callers validate amount &gt; 0.
 *
 * @return true when the source balance covered the amount, false otherwise
 */
@Override
public boolean transferFunds(
    int amount, String sourceBackAccount, String destinationBankAccount) {
  int sourceBalance = accounts.getOrDefault(sourceBackAccount, 0);
  if (sourceBalance < amount) {
    return false;
  }
  accounts.put(sourceBackAccount, sourceBalance - amount);
  // FIX: an unknown destination previously threw NullPointerException via get();
  // merge treats a missing destination account as a zero balance instead.
  accounts.merge(destinationBankAccount, amount, Integer::sum);
  return true;
}
/**
 * Returns an {@code Anomaly} configured to execute the type of calculation specified by the
 * {@link Mode}, and whether or not to apply a moving average.
 *
 * <p>Must have one of "MODE" = {@link Mode#LIKELIHOOD}, {@link Mode#PURE}, {@link Mode#WEIGHTED}
 *
 * @param params configuration map; KEY_MODE is required, moving-average and
 *     likelihood-tuning keys are optional
 * @return the configured Anomaly, or null when the mode is unrecognized
 * @throws IllegalArgumentException when the mode is null, or a moving average is
 *     requested without a positive window size
 */
public static Anomaly create(Map<String, Object> params) {
  boolean useMovingAvg = (boolean) params.getOrDefault(KEY_USE_MOVING_AVG, false);
  int windowSize = (int) params.getOrDefault(KEY_WINDOW_SIZE, -1);
  // A moving average is meaningless without a positive window.
  if (useMovingAvg && windowSize < 1) {
    throw new IllegalArgumentException("windowSize must be > 0, when using moving average.");
  }
  Mode mode = (Mode) params.get(KEY_MODE);
  if (mode == null) {
    throw new IllegalArgumentException("MODE cannot be null.");
  }
  switch (mode) {
    case PURE:
      // Raw anomaly score, optionally smoothed by the moving average.
      return new Anomaly(useMovingAvg, windowSize) {
        private static final long serialVersionUID = 1L;

        @Override
        public double compute(
            int[] activeColumns, int[] predictedColumns, double inputValue, long timestamp) {
          double retVal = computeRawAnomalyScore(activeColumns, predictedColumns);
          if (this.useMovingAverage) {
            retVal = movingAverage.next(retVal);
          }
          return retVal;
        }
      };
    case LIKELIHOOD:
    case WEIGHTED:
      {
        // Likelihood variants share the same constructor; WEIGHTED is selected by flag.
        boolean isWeighted = (boolean) params.getOrDefault(KEY_IS_WEIGHTED, false);
        int claLearningPeriod = (int) params.getOrDefault(KEY_LEARNING_PERIOD, VALUE_NONE);
        int estimationSamples = (int) params.getOrDefault(KEY_ESTIMATION_SAMPLES, VALUE_NONE);
        return new AnomalyLikelihood(
            useMovingAvg, windowSize, isWeighted, claLearningPeriod, estimationSamples);
      }
    default:
      return null;
  }
}
/**
 * Refreshes the view of cluster memory and pools: aggregates per-node memory
 * info, retires pools no longer reported by any node (unexporting their JMX
 * beans), creates and exports newly observed pools, and pushes node info plus
 * per-pool query counts into each active pool.
 */
private synchronized void updatePools(Map<MemoryPoolId, Integer> queryCounts) {
  // Update view of cluster memory and pools
  List<MemoryInfo> nodeMemoryInfos =
      nodes
          .values()
          .stream()
          .map(RemoteNodeMemory::getInfo)
          .filter(Optional::isPresent)
          .map(Optional::get)
          .collect(toImmutableList());
  long totalClusterMemory =
      nodeMemoryInfos
          .stream()
          .map(MemoryInfo::getTotalNodeMemory)
          .mapToLong(DataSize::toBytes)
          .sum();
  clusterMemoryBytes.set(totalClusterMemory);
  Set<MemoryPoolId> activePoolIds =
      nodeMemoryInfos
          .stream()
          .flatMap(info -> info.getPools().keySet().stream())
          .collect(toImmutableSet());
  // Make a copy to materialize the set difference
  Set<MemoryPoolId> removedPools = ImmutableSet.copyOf(difference(pools.keySet(), activePoolIds));
  for (MemoryPoolId removed : removedPools) {
    unexport(pools.get(removed));
    pools.remove(removed);
  }
  for (MemoryPoolId id : activePoolIds) {
    ClusterMemoryPool pool =
        pools.computeIfAbsent(
            id,
            poolId -> {
              ClusterMemoryPool newPool = new ClusterMemoryPool(poolId);
              String objectName =
                  ObjectNames.builder(ClusterMemoryPool.class, newPool.getId().toString())
                      .build();
              try {
                // Export the new pool as a JMX MBean; failure is logged but non-fatal.
                exporter.export(objectName, newPool);
              } catch (JmxException e) {
                log.error(e, "Error exporting memory pool %s", poolId);
              }
              return newPool;
            });
    // Pools with no running queries default to a count of zero.
    pool.update(nodeMemoryInfos, queryCounts.getOrDefault(pool.getId(), 0));
  }
}
/**
 * Handles one bid request received over UDP: parses the request with the
 * client's public key, appends its digest to the per-item hash chain (under the
 * lock), updates the highest price if the bid wins, and returns a signed
 * acknowledgement to the bidder over TCP.
 */
@Override
protected void handle(DatagramPacket datagramPacket) throws SignatureException {
  KeyManager keyManager = KeyManager.getInstance();
  RSAPublicKey clientPubKey = (RSAPublicKey) keyManager.getPublicKey(Key.CLIENT);
  // Trim the datagram buffer to the actual payload length before parsing.
  byte[] reqBytes = Arrays.copyOf(datagramPacket.getData(), datagramPacket.getLength());
  Request req = new Request(new String(reqBytes), clientPubKey);
  String chainHash = null;
  LOCK.lock();
  try {
    String itemId = req.getItemId().toString();
    System.out.format(
        "received: itemId=%2d, userId=%s, price=%s\n",
        req.getItemId(), req.getUserId(), req.getPrice());
    // Record this request in the item's tamper-evident hash chain.
    chainHash = HASHING_CHAIN_TABLE.getLastChainHash(itemId);
    HASHING_CHAIN_TABLE.chain(itemId, Utils.digest(req.toString()));
  } finally {
    LOCK.unlock();
  }
  // A bid succeeds only when it strictly exceeds the current highest price (0 if none).
  // NOTE(review): this check-then-put on PRICE_TABLE happens outside LOCK —
  // confirm whether concurrent bids can race here.
  int currentPrice = PRICE_TABLE.getOrDefault(req.getItemId(), 0);
  int bidderPrice = Integer.decode(req.getPrice());
  boolean bidSuccess = bidderPrice > currentPrice;
  if (bidSuccess) {
    PRICE_TABLE.put(req.getItemId(), bidderPrice);
  }
  System.out.format(
      "decrypted price: %d, now highest price is %d\n",
      bidderPrice, Math.max(bidderPrice, currentPrice));
  Acknowledgement ack = new Acknowledgement(chainHash, bidSuccess, req);
  ack.sign(keyPair, keyInfo);
  // NOTE(review): 'in' is opened but never read — confirm whether a reply was intended.
  try (Socket s = new Socket(datagramPacket.getAddress(), req.getPort());
      DataOutputStream out = new DataOutputStream(s.getOutputStream());
      DataInputStream in = new DataInputStream(s.getInputStream())) {
    Utils.send(out, ack.toString());
  } catch (IOException ex) {
    LOGGER.log(Level.SEVERE, null, ex);
  }
}
/**
 * Determines the entry type from the element's first "wordType" node.
 *
 * @param element the dictionary entry element to inspect
 * @return the mapped EntryType, or UNKNOWN when the node is missing or its text
 *     has no mapping
 */
private EntryType detectEntryType(@NotNull Element element) {
  Elements wordTypeNodes = element.getElementsByClass("wordType");
  if (wordTypeNodes.size() < 1) {
    LOGGER.debug("No wordType node found - defaulting to {}", EntryType.UNKNOWN);
    return EntryType.UNKNOWN;
  }
  String wordTypeText = wordTypeNodes.first().text();
  EntryType entryType = ENTRY_TYPE_MAP.getOrDefault(wordTypeText, EntryType.UNKNOWN);
  if (entryType == EntryType.UNKNOWN) {
    // FIX: log the unresolved source text; previously this logged the result
    // variable, which is always UNKNOWN in this branch.
    LOGGER.debug("Unable to resolve entry type \"{}\"", wordTypeText);
  }
  return entryType;
}
/**
 * Boyer-Moore substring search starting at startIndex. The pattern is compared
 * right-to-left against the text; on a mismatch the text index advances by the
 * larger of the good-suffix shift (offsetTable) and the bad-character shift
 * (charTable, defaulting to the full pattern length m for characters absent from
 * the pattern). Table semantics are defined by the sibling createCharTable /
 * createOffsetTable builders.
 *
 * @return index of the first match at or after startIndex, or NOT_FOUND_INDEX
 */
private int matchImpl(String text, String pattern, int startIndex) {
  int n = text.length();
  int m = pattern.length();
  Map<Character, Integer> charTable = createCharTable(pattern);
  int offsetTable[] = createOffsetTable(pattern);
  for (int i = m - 1 + startIndex, j; i < n; ) {
    // Compare backwards from the end of the pattern.
    for (j = m - 1; pattern.charAt(j) == text.charAt(i); --i, --j) {
      if (j == 0) {
        return i; // full match; i now points at the first character of the match
      }
    }
    // Shift by the stronger of the two Boyer-Moore heuristics.
    i += Math.max(offsetTable[m - j - 1], charTable.getOrDefault(text.charAt(i), m));
  }
  return NOT_FOUND_INDEX;
}
public List<SessionPropertyValue> getAllSessionProperties(Session session) { requireNonNull(session, "session is null"); List<SessionPropertyValue> properties = new ArrayList<>(); for (SessionProperty<?> sessionProperty : allSessionProperties.values()) { PropertyMetadata<?> propertyMetadata = sessionProperty.getMetadata(); String defaultValue = firstNonNull(propertyMetadata.getDefaultValue(), "").toString(); Map<String, String> values; if (sessionProperty.getCatalogName().isPresent()) { values = session.getCatalogProperties(sessionProperty.getCatalogName().get()); } else { values = session.getSystemProperties(); } String value = values.getOrDefault(sessionProperty.getPropertyName(), defaultValue); properties.add( new SessionPropertyValue( value, defaultValue, sessionProperty.getFullyQualifiedName(), sessionProperty.getCatalogName(), sessionProperty.getPropertyName(), sessionProperty.getMetadata().getDescription(), sessionProperty.getMetadata().getSqlType().getTypeSignature().toString())); } // sort properties by catalog then property Collections.sort( properties, (left, right) -> ComparisonChain.start() .compare( left.getCatalogName().orElse(null), right.getCatalogName().orElse(null), Ordering.natural().nullsFirst()) .compare(left.getPropertyName(), right.getPropertyName()) .result()); return ImmutableList.copyOf(properties); }
/**
 * Reads test cases from /tmp/bu.txt — each case is a "&lt;n&gt;:..." header token
 * followed by n integers; a lone "0" token terminates input — and, for every
 * case, searches for a 3-term arithmetic progression whose members appear at
 * positions consistent with the candidate pair's order.
 *
 * NOTE(review): prints "no" when such a progression IS found and "yes"
 * otherwise — confirm the inverted wording matches the problem statement.
 */
public static void main(final String[] args) throws Exception {
  try (final Scanner in = new Scanner(new File("/tmp/bu.txt"))) {
    while (true) {
      final String testCase = in.next();
      if ("0".equals(testCase)) {
        break;
      }
      // Everything before ':' in the header token is the array length.
      final int index = testCase.indexOf(':');
      final int[] array = new int[Integer.parseInt(testCase.substring(0, index))];
      for (int i = 0; i < array.length; i++) {
        array[i] = in.nextInt();
      }
      // $ presumably maps each value to an index within the array — TODO confirm
      // its contract (e.g. first vs last occurrence) against its definition.
      final Map<Integer, Integer> values = $(array);
      boolean found = false;
      for (int i = 0; (!found) && (i < array.length); i++) {
        final int ai = array[i];
        for (int j = i + 1; (!found) && (j < array.length); j++) {
          final int aj = array[j];
          if (aj > ai) {
            // Candidate third terms completing a progression with ai, aj:
            // 2*ai - aj (before i), (ai + aj)/2 (between i and j), 2*aj - ai (after j).
            final int first = values.getOrDefault(2 * ai - aj, -1);
            found |= ((first >= 0) && (first < i));
            final int middle = values.getOrDefault((ai + aj) / 2, -1);
            found |= (((ai + aj) % 2 == 0) && (middle > i) && (middle < j));
            final int last = values.getOrDefault(2 * aj - ai, -1);
            found |= (last > j);
          } else {
            // Mirror case for a non-increasing pair (aj <= ai).
            final int first = values.getOrDefault(2 * aj - ai, -1);
            found |= (first > j);
            final int middle = values.getOrDefault((ai + aj) / 2, -1);
            found |= (((ai + aj) % 2 == 0) && (middle > j) && (middle < i));
            final int last = values.getOrDefault(2 * ai - aj, -1);
            found |= ((last >= 0) && (last < i));
          }
        }
      }
      System.out.println(found ? "no" : "yes");
    }
  }
}
/**
 * Configures this Gryo serializer from the supplied config map. When
 * TOKEN_USE_MAPPER_FROM_GRAPH names one of the provided graphs, the mapper is
 * built from that graph's IO so implementation-specific classes are registered
 * automatically; otherwise the default GryoMapper is used. Configured
 * IoRegistries, custom classes, and the serialize-to-string flag are then applied.
 *
 * @throws IllegalStateException when a mapper graph is requested but no graphs
 *     were provided, or the named graph does not exist
 */
@Override
public void configure(final Map<String, Object> config, final Map<String, Graph> graphs) {
  final GryoMapper.Builder builder;
  final Object graphToUseForMapper = config.get(TOKEN_USE_MAPPER_FROM_GRAPH);
  if (graphToUseForMapper != null) {
    if (null == graphs)
      throw new IllegalStateException(
          String.format(
              "No graphs have been provided to the serializer and therefore %s is not a valid configuration",
              TOKEN_USE_MAPPER_FROM_GRAPH));
    final Graph g = graphs.get(graphToUseForMapper.toString());
    if (null == g)
      throw new IllegalStateException(
          String.format(
              "There is no graph named [%s] configured to be used in the %s setting",
              graphToUseForMapper, TOKEN_USE_MAPPER_FROM_GRAPH));
    // a graph was found so use the mapper it constructs. this allows gryo to be auto-configured
    // with any custom classes that the implementation allows for
    builder = g.io(GryoIo.build()).mapper();
  } else {
    // no graph was supplied so just use the default - this will likely be the case when using a
    // graph with no custom classes or a situation where the user needs complete control like
    // when using two distinct implementations each with their own custom classes.
    builder = GryoMapper.build();
  }
  addIoRegistries(config, builder);
  addCustomClasses(config, builder);
  // The flag may be stored as a Boolean or a String; toString handles both forms.
  this.serializeToString =
      Boolean.parseBoolean(
          config.getOrDefault(TOKEN_SERIALIZE_RESULT_TO_STRING, "false").toString());
  this.gryoMapper = builder.create();
}