@Override
public List<EndpointAffinity> getOperatorAffinity() {
  watch.reset();
  watch.start();
  Map<String, DrillbitEndpoint> endpointMap = new HashMap<String, DrillbitEndpoint>();
  for (DrillbitEndpoint ep : storagePlugin.getContext().getBits()) {
    endpointMap.put(ep.getAddress(), ep);
  }

  Map<DrillbitEndpoint, EndpointAffinity> affinityMap =
      new HashMap<DrillbitEndpoint, EndpointAffinity>();
  for (ServerName sn : regionsToScan.values()) {
    DrillbitEndpoint ep = endpointMap.get(sn.getHostname());
    if (ep != null) {
      EndpointAffinity affinity = affinityMap.get(ep);
      if (affinity == null) {
        affinityMap.put(ep, new EndpointAffinity(ep, 1));
      } else {
        affinity.addAffinity(1);
      }
    }
  }
  logger.debug("Took {} µs to get operator affinity",
      watch.elapsed(TimeUnit.NANOSECONDS) / 1000);
  return Lists.newArrayList(affinityMap.values());
}
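// A minimal sketch of the per-host tally pattern used above, with plain strings
// standing in for DrillbitEndpoint/ServerName. tallyRegionsPerHost is a hypothetical
// helper for illustration, not part of the Drill plugin: regions whose host has no
// known drillbit are skipped, everything else increments that host's affinity count.
private static Map<String, Integer> tallyRegionsPerHost(
    Collection<String> regionHosts, Set<String> knownDrillbitHosts) {
  Map<String, Integer> affinity = new HashMap<String, Integer>();
  for (String host : regionHosts) {
    if (knownDrillbitHosts.contains(host)) { // ignore hosts without a drillbit
      Integer current = affinity.get(host);
      affinity.put(host, current == null ? 1 : current + 1);
    }
  }
  return affinity;
}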
/**
 * Returns a collection of all existing partitions.
 *
 * @return a list containing the partitions of every interval
 */
public List<Partition> getPartitions() {
  List<Partition> plist = new ArrayList<Partition>();
  for (Interval interval : intervals.values()) {
    plist.addAll(interval.partitions.values());
  }
  return plist;
}
@Override
public DequeueResult<byte[]> dequeue(int maxBatchSize) throws IOException {
  Preconditions.checkArgument(maxBatchSize > 0, "Batch size must be > 0.");

  // Pre-compute the "claimed" state content in the FIFO case.
  byte[] claimedStateValue = null;
  if (consumerConfig.getDequeueStrategy() == DequeueStrategy.FIFO
      && consumerConfig.getGroupSize() > 1) {
    claimedStateValue = encodeStateColumn(ConsumerEntryState.CLAIMED);
  }
  while (consumingEntries.size() < maxBatchSize && getEntries(consumingEntries, maxBatchSize)) {
    // ANDREAS: this while loop should stop once getEntries/populateCache reaches the end of the
    // queue. Currently, it will retry as long as it gets at least one entry in every round, even
    // if that is an entry that must be ignored because it cannot be claimed.
    // ANDREAS: It could be a problem that we always read to the end of the queue. This way one
    // flowlet instance may always consume all entries, while others are idle.

    // For FIFO, need to try claiming the entry if group size > 1.
    if (consumerConfig.getDequeueStrategy() == DequeueStrategy.FIFO
        && consumerConfig.getGroupSize() > 1) {
      Iterator<Map.Entry<byte[], SimpleQueueEntry>> iterator =
          consumingEntries.entrySet().iterator();
      while (iterator.hasNext()) {
        SimpleQueueEntry entry = iterator.next().getValue();
        if (entry.getState() == null
            || QueueEntryRow.getStateInstanceId(entry.getState())
                >= consumerConfig.getGroupSize()) {
          // If not able to claim it, remove it and move on to the next one.
          if (!claimEntry(entry.getRowKey(), claimedStateValue)) {
            iterator.remove();
          }
        }
      }
    }
  }

  // If nothing was dequeued, return the empty result.
  if (consumingEntries.isEmpty()) {
    return EMPTY_RESULT;
  }
  return new SimpleDequeueResult(consumingEntries.values());
}
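// Hedged sketch of the claim step the FIFO path above depends on: claimEntry must be
// a conditional write so that exactly one consumer in the group wins each entry. The
// real implementation presumably does a checkAndPut on the HBase state column
// (assumption); a ConcurrentHashMap stand-in shows the same win-or-skip semantics.
private final ConcurrentMap<String, String> claims = new ConcurrentHashMap<String, String>();

private boolean claimEntrySketch(String rowKey, String consumerId) {
  // Only one consumer's putIfAbsent succeeds; the losers drop the entry and move on,
  // which is why the dequeue loop above removes entries it fails to claim.
  return claims.putIfAbsent(rowKey, consumerId) == null;
}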
public Word getNextWord(String previousWord, boolean limit) throws Exception {
  if (weightsMap.isEmpty()) {
    return new Word(null, 1.0);
  }
  // No previous word, or the previous word was the last one: wrap around to the
  // first word unless the caller asked to stop at the end of the dictionary.
  if (null == previousWord
      || previousWord.equals(weightsMap.lastEntry().getValue().getValue())) {
    if (!limit) {
      return weightsMap.firstEntry().getValue();
    }
    throw new LimitReachedException(
        previousWord, "Last word in dictionary \"" + name + "\" already reached.");
  }
  // Advance until the entry matching previousWord, then return the entry after it.
  // The second next() cannot overrun: the last-word case was handled above.
  Iterator<Word> iterator = weightsMap.values().iterator();
  while (iterator.hasNext()) {
    if (previousWord.equals(iterator.next().getValue())) {
      return iterator.next();
    }
  }
  throw new Exception(
      "No previous word \"" + previousWord + "\" was found in dictionary \"" + name + "\".");
}
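// Hedged usage sketch for getNextWord: walk the dictionary word by word. "dictionary"
// and "consume" are hypothetical stand-ins, not names from the class above.
Word current = dictionary.getNextWord(null, false); // null previous -> first word
try {
  while (true) {
    consume(current);
    // limit == true makes the call after the last word throw instead of wrapping around
    current = dictionary.getNextWord(current.getValue(), true);
  }
} catch (LimitReachedException e) {
  // The last word was already returned; iteration is complete.
}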
/**
 * Recalculates the real bag size.
 *
 * @return real size
 */
private int updateSize() {
  int size = 0;
  if (collectionPointer != null) {
    final OSBTreeBonsai<OIdentifiable, Integer> tree = loadTree();
    try {
      size = tree.getRealBagSize(changes);
    } finally {
      releaseTree();
    }
  } else {
    for (Change change : changes.values()) {
      size += change.applyTo(0);
    }
  }
  for (OModifiableInteger diff : newEntries.values()) {
    size += diff.getValue();
  }
  this.size = size;
  return size;
}
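// Sketch of the delta bookkeeping updateSize relies on: a Change applies a signed
// adjustment to a base count, so summing change.applyTo(0) over all pending changes
// yields the net in-memory delta. DiffChange is a hypothetical illustration, not
// OrientDB's actual Change implementation.
final class DiffChange {
  private final int delta;

  DiffChange(int delta) {
    this.delta = delta;
  }

  int applyTo(int base) {
    return base + delta; // applyTo(0) reports just the delta itself
  }
}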
private static ExecutionScript loadScript(
    String flowId, ExecutionPhase phase, String nodeId, Map<String, String> contents) {
  assert flowId != null;
  assert phase != null;
  assert nodeId != null;
  assert contents != null;
  String prefix = getPrefix(flowId, phase, nodeId);
  String scriptId = extract(contents, prefix, KEY_ID);
  String kindSymbol = extract(contents, prefix, KEY_KIND);
  ExecutionScript.Kind kind = ExecutionScript.Kind.findFromSymbol(kindSymbol);
  String blockersString = extract(contents, prefix, KEY_BLOCKERS);
  Set<String> blockers = parseTokens(blockersString);
  Map<String, String> environmentVariables =
      PropertiesUtil.createPrefixMap(contents, KEY_ENV_PREFIX);
  ExecutionScript script;
  if (kind == ExecutionScript.Kind.COMMAND) {
    String profileName = extract(contents, prefix, KEY_PROFILE);
    String moduleName = extract(contents, prefix, KEY_MODULE);
    NavigableMap<String, String> commandMap =
        PropertiesUtil.createPrefixMap(contents, KEY_COMMAND_PREFIX);
    if (commandMap.isEmpty()) {
      throw new IllegalArgumentException(
          MessageFormat.format("\"{0}*\" is not defined", prefix + KEY_COMMAND_PREFIX));
    }
    List<String> command = new ArrayList<String>(commandMap.values());
    script = new CommandScript(
        scriptId, blockers, profileName, moduleName, command, environmentVariables);
  } else if (kind == ExecutionScript.Kind.HADOOP) {
    String className = extract(contents, prefix, KEY_CLASS_NAME);
    Map<String, String> properties = PropertiesUtil.createPrefixMap(contents, KEY_PROP_PREFIX);
    script = new HadoopScript(scriptId, blockers, className, properties, environmentVariables);
  } else {
    throw new IllegalArgumentException(
        MessageFormat.format("Unsupported kind in \"{0}\": {1}", prefix + KEY_KIND, kindSymbol));
  }
  LOG.trace("Loaded script {}* -> {}", prefix, script);
  return script;
}
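// Hedged usage sketch: feeding loadScript a hand-built flat-properties map. The key
// suffixes below ("id", "kind", "blockers", "command.N") are assumptions standing in
// for the actual KEY_* constants, and the prefix layout depends on getPrefix; the
// point is that numbered command.* entries become the ordered command list because
// PropertiesUtil.createPrefixMap returns a NavigableMap sorted by key.
Map<String, String> contents = new HashMap<String, String>();
contents.put("flow.main.stage1.id", "stage-0001");
contents.put("flow.main.stage1.kind", "command"); // maps to Kind.COMMAND
contents.put("flow.main.stage1.blockers", "stage-0000");
contents.put("flow.main.stage1.command.0", "hadoop");
contents.put("flow.main.stage1.command.1", "jar");
ExecutionScript script = loadScript("flow", ExecutionPhase.MAIN, "stage1", contents);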
@Override
public Collection<Object> values() {
  return configMap.values();
}
public List<Point2D.Double> debugColumnVersion(
    String timestamp, double latitude, double longitude, double radius) {
  this.getStatLog(this.STAT_FILE_NAME);
  long sTime = System.currentTimeMillis();
  // Build up a raster.
  XRaster raster = new XRaster(this.space, this.min_size_of_height, this.max_num_of_column);
  Point2D.Double point = new Point2D.Double(latitude, longitude);
  ResultScanner rScanner = null;
  // Result containers.
  HashMap<String, String> results = new HashMap<String, String>();
  ArrayList<Point2D.Double> returnPoints = new ArrayList<Point2D.Double>();
  try {
    // Match the query rectangle to find the subspace it belongs to.
    XBox[] match_boxes = raster.match(latitude, longitude, radius);
    String[] rowRange = new String[2];
    rowRange[0] = match_boxes[0].getRow();
    rowRange[1] = match_boxes[1].getRow() + "0";
    String[] c = raster.getColumns(match_boxes[0], match_boxes[1]);
    // The max-versions value here is hard-coded because the number of objects
    // per cell is not known at this point.
    FilterList fList = new FilterList();
    fList.addFilter(this.hbaseUtil.getInclusiveFilter(rowRange[1]));
    rScanner = this.hbaseUtil.getResultSet(rowRange, fList, this.familyName, c, 1000000);
    BixiReader reader = new BixiReader();
    int count = 0;
    int accepted = 0;
    int max_column = 0;
    int min_column = 10000;
    int max_version = 0;
    int min_version = 10000;
    int row_count = 0;
    int byte_length = 0;
    for (Result r : rScanner) {
      byte_length = r.getBytes().getLength();
      row_count++;
      NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> resultMap =
          r.getMap();
      int count_column = 0;
      for (byte[] family : resultMap.keySet()) {
        NavigableMap<byte[], NavigableMap<Long, byte[]>> columns = resultMap.get(family);
        count_column = 0;
        for (byte[] col : columns.keySet()) {
          NavigableMap<Long, byte[]> values = columns.get(col);
          count_column++;
          if (values.values().size() > max_version) {
            max_version = values.values().size();
          }
          if (values.values().size() < min_version) {
            min_version = values.values().size();
          }
          for (Long version : values.keySet()) {
            count++;
            // Get the distance between this point and the given point.
            XStation station =
                reader.getStationFromJson(Bytes.toString(values.get(version)));
            Point2D.Double resPoint =
                new Point2D.Double(station.getLatitude(), station.getlongitude());
            double distance = resPoint.distance(point);
            if (Bytes.toString(col).equals("0011")) {
              /*
              System.out.println("!!!! key=>" + Bytes.toString(r.getRow())
                  + ";column=>" + Bytes.toString(col)
                  + ";version=>" + version
                  + ";point=>" + resPoint.toString());
              */
            }
            if (distance <= radius) {
              returnPoints.add(resPoint);
              // System.out.println("row=>" + Bytes.toString(r.getRow())
              //     + ";column=>" + Bytes.toString(col) + ";version=>" + version
              //     + ";station=>" + station.getId() + ";distance=>" + distance);
              accepted++;
              results.put(station.getId(), String.valueOf(distance));
            }
          }
        }
        if (count_column > max_column) {
          max_column = count_column;
        }
        if (count_column < min_column) {
          min_column = count_column;
        }
      }
    }
    System.out.println("byte_length=>" + byte_length + ";row_count=>" + row_count);
    System.out.println("max_column=>" + max_column + ";min_column=>" + min_column
        + ";max_version=>" + max_version + ";min_version=>" + min_version);
    long eTime = System.currentTimeMillis();
    System.out.println("count=>" + count + ";accepted=>" + accepted
        + ";time=>" + (eTime - sTime));
    String outStr = "radius=>" + radius + ";count=>" + count + ";accepted=>" + accepted
        + ";time=>" + (eTime - sTime) + ";row_stride=>" + this.min_size_of_height
        + ";columns=>" + this.max_num_of_column;
    this.writeStat(outStr);
  } catch (Exception e) {
    e.printStackTrace();
  } finally {
    this.hbaseUtil.closeTableHandler();
    this.closeStatLog();
  }
  return returnPoints;
}