/**
 * Solicits a travel quote from every company and returns the quotes ordered by
 * the supplied ranking. A task that fails contributes a failure quote; a task
 * cancelled by the timeout contributes a timeout quote, so the result always
 * has one entry per company.
 *
 * @param travelInfo trip details submitted to each company
 * @param companies companies to solicit quotes from
 * @param ranking comparator that orders the resulting quotes
 * @param time maximum time to wait for all quotes
 * @param unit unit of {@code time}
 * @return one quote per company, sorted by {@code ranking}
 * @throws InterruptedException if interrupted while waiting for the tasks
 */
public List<TravelQuote> getRankedTravelQuotes(
    TravelInfo travelInfo,
    Set<TravelCompany> companies,
    Comparator<TravelQuote> ranking,
    long time,
    TimeUnit unit)
    throws InterruptedException {
  List<QuoteTask> quoteTasks = new ArrayList<QuoteTask>();
  for (TravelCompany company : companies) {
    quoteTasks.add(new QuoteTask(company, travelInfo));
  }

  // invokeAll returns futures in the same order as the submitted task list
  // and cancels any task still running when the timeout elapses.
  List<Future<TravelQuote>> results = exec.invokeAll(quoteTasks, time, unit);

  List<TravelQuote> ranked = new ArrayList<TravelQuote>(quoteTasks.size());
  // Walk tasks and futures by index so each future stays paired with the
  // task that produced it.
  for (int i = 0; i < results.size(); i++) {
    QuoteTask quoteTask = quoteTasks.get(i);
    try {
      ranked.add(results.get(i).get());
    } catch (ExecutionException e) {
      ranked.add(quoteTask.getFailureQuote(e.getCause()));
    } catch (CancellationException e) {
      ranked.add(quoteTask.getTimeoutQuote(e));
    }
  }

  Collections.sort(ranked, ranking);
  return ranked;
}
public static void main(String[] args) { Date start = new Date(); if (args.length < 3) { System.out.println("Wrong number of arguments:\n" + USAGE); return; } // get # threads int tc = Integer.parseInt(args[0]); String outfile = args[1]; // make a threadsafe queue of all files to process ConcurrentLinkedQueue<String> files = new ConcurrentLinkedQueue<String>(); for (int i = 2; i < args.length; i++) { files.add(args[i]); } // hastable for results Hashtable<String, Integer> results = new Hashtable<String, Integer>(HASH_SIZE, LF); // spin up the threads Thread[] workers = new Thread[tc]; for (int i = 0; i < tc; i++) { workers[i] = new Worker(files, results); workers[i].start(); } // wait for them to finish try { for (int i = 0; i < tc; i++) { workers[i].join(); } } catch (Exception e) { System.out.println("Caught Exception: " + e.getMessage()); } // terminal output Date end = new Date(); System.out.println(end.getTime() - start.getTime() + " total milliseconds"); System.out.println(results.size() + " unique words"); // sort results for easy comparison/verification List<Map.Entry<String, Integer>> sorted_results = new ArrayList<Map.Entry<String, Integer>>(results.entrySet()); Collections.sort(sorted_results, new KeyComp()); // file output try { PrintStream out = new PrintStream(outfile); for (int i = 0; i < sorted_results.size(); i++) { out.println(sorted_results.get(i).getKey() + "\t" + sorted_results.get(i).getValue()); } } catch (Exception e) { System.out.println("Caught Exception: " + e.getMessage()); } }
/**
 * Returns all supported capture sizes.
 *
 * @return an array of capture sizes, in bytes, never <code>null</code>.
 */
public Integer[] getCaptureSizes() {
  final String rawValue = this.properties.get(DEVICE_CAPTURESIZES);
  // Guard against a missing or blank property: the contract promises a
  // non-null result, so return an empty array rather than throwing an NPE
  // from split() or a NumberFormatException on an empty token.
  if (rawValue == null || rawValue.trim().isEmpty()) {
    return new Integer[0];
  }

  final String[] values = rawValue.split(",\\s*");

  final List<Integer> result = new ArrayList<Integer>();
  for (String value : values) {
    result.add(Integer.valueOf(value.trim()));
  }

  // Sort descending (aSortAscending == false), as the original did.
  Collections.sort(
      result, NumberUtils.<Integer>createNumberComparator(false /* aSortAscending */));
  return result.toArray(new Integer[result.size()]);
}
/**
 * Returns the loops ordered from outermost to innermost, i.e. ascending by
 * {@code lirLoop().depth}.
 *
 * @return a freshly allocated, sorted list of loops; callers may mutate it
 */
public List<LoopEx> outterFirst() {
  ArrayList<LoopEx> loops = new ArrayList<>(loops());
  Collections.sort(
      loops,
      new Comparator<LoopEx>() {
        @Override
        public int compare(LoopEx o1, LoopEx o2) {
          // Integer.compare avoids the overflow pitfall of subtracting
          // the two depths directly.
          return Integer.compare(o1.lirLoop().depth, o2.lirLoop().depth);
        }
      });
  return loops;
}
/**
 * Prints the cache state of every star associated with the region, visiting
 * stars in alphabetical order of their fact-table alias so the output is
 * deterministic.
 *
 * @return always {@code null}
 */
public Void call() {
  final List<RolapStar> stars = CacheControlImpl.getStarList(region);

  // Order stars by fact-table alias for stable, reproducible output.
  Comparator<RolapStar> byFactTableAlias =
      new Comparator<RolapStar>() {
        public int compare(RolapStar left, RolapStar right) {
          String leftAlias = left.getFactTable().getAlias();
          String rightAlias = right.getFactTable().getAlias();
          return leftAlias.compareTo(rightAlias);
        }
      };
  Collections.sort(stars, byFactTableAlias);

  for (RolapStar star : stars) {
    indexRegistry.getIndex(star).printCacheState(pw);
  }
  return null;
}
/**
 * @param fld Folder with files to match.
 * @param ptrn Pattern to match against file name.
 * @return Collection of matched files, sorted by last-modified time.
 */
public static List<VisorLogFile> matchedFiles(File fld, final String ptrn) {
  // Accept non-hidden directories (so the tree walk can descend into them)
  // and non-hidden regular files whose name matches the pattern.
  FileFilter matcher =
      new FileFilter() {
        @Override
        public boolean accept(File f) {
          if (f.isHidden()) {
            return false;
          }
          if (f.isDirectory()) {
            return true;
          }
          return f.isFile() && f.getName().matches(ptrn);
        }
      };

  List<VisorLogFile> files = fileTree(fld, MAX_FOLDER_DEPTH, matcher);
  Collections.sort(files, LAST_MODIFIED);

  return files;
}
/**
 * Load all the deployment units out of the store. Called on start-up.
 *
 * <p>Loading happens in three phases: (1) read every deployment unit inside a
 * store connection, (2) sort the loaded processes so state-change events fire
 * in DISABLED, RETIRED, ACTIVE order, (3) fire one state-change event per
 * process. A failure on any single unit/process is logged and skipped so it
 * cannot block the rest of start-up.
 */
public void loadAll() {
  final ArrayList<ProcessConfImpl> loaded = new ArrayList<ProcessConfImpl>();
  // Phase 1: load each DU; per-DU failures are logged, not propagated.
  exec(
      new Callable<Object>() {
        public Object call(ConfStoreConnection conn) {
          Collection<DeploymentUnitDAO> dus = conn.getDeploymentUnits();
          for (DeploymentUnitDAO du : dus)
            try {
              loaded.addAll(load(du));
            } catch (Exception ex) {
              __log.error("Error loading DU from store: " + du.getName(), ex);
            }
          return null;
        }
      });
  // Dispatch DISABLED, RETIRED and ACTIVE events in that order
  Collections.sort(
      loaded,
      new Comparator<ProcessConf>() {
        public int compare(ProcessConf o1, ProcessConf o2) {
          // Subtraction is safe: stateValue is bounded to 0..2.
          return stateValue(o1.getState()) - stateValue(o2.getState());
        }

        // Maps each process state to its dispatch rank (lower fires first).
        int stateValue(ProcessState state) {
          if (ProcessState.DISABLED.equals(state)) return 0;
          if (ProcessState.RETIRED.equals(state)) return 1;
          if (ProcessState.ACTIVE.equals(state)) return 2;
          throw new IllegalStateException("Unexpected process state: " + state);
        }
      });
  // Phase 3: fire events; again, one failing process must not prevent the
  // remaining processes from being activated.
  for (ProcessConfImpl p : loaded) {
    try {
      fireStateChange(p.getProcessId(), p.getState(), p.getDeploymentUnit().getName());
    } catch (Exception except) {
      __log.error(
          "Error while activating process: pid="
              + p.getProcessId()
              + " package="
              + p.getDeploymentUnit().getName(),
          except);
    }
  }
}
/**
 * Loads the list of configured servers from the {@code /servers} classpath
 * resource into {@code configuredServers} (replacing any previous contents),
 * sorts them, and logs each entry at debug level.
 *
 * @throws IOException if the resource is missing or contains no entries
 * @throws Exception propagated from reading the resource
 */
private void getServersFile() throws Exception {
  InputStream is = this.getClass().getResourceAsStream("/servers");
  if (is == null) throw new IOException("Cannot find servers file");
  // try-with-resources closes the reader (and the underlying stream) on
  // every exit path; the original leaked it.
  // NOTE(review): reads in the platform default charset, as before — confirm
  // whether the resource should be read as UTF-8 explicitly.
  try (BufferedReader br = new BufferedReader(new InputStreamReader(is))) {
    configuredServers.clear();
    String line;
    while ((line = br.readLine()) != null) {
      configuredServers.add(line);
    }
  }
  Collections.sort(configuredServers);
  if (configuredServers.size() < 1) throw new IOException("No entries found in servers file");
  int lnum = 1;
  for (int i = 0; i < configuredServers.size(); i++) {
    LOG.debug("servers file line " + lnum + " [" + configuredServers.get(i) + "]");
    lnum++;
  }
}
/**
 * Searches the cluster-number range [expMinCluster, expMaxCluster] for the
 * split with the best optimization score, progressively shrinking the hop
 * (step size between tried cluster counts) each iteration.
 *
 * <p>Scores accumulated across iterations are read from the {@code scores}
 * field (presumably populated by {@code computeOptimizationScores} — TODO
 * confirm), and the split with the highest finite, non-NaN score is returned.
 *
 * @param optimizer scorer used to evaluate candidate cluster numbers
 * @param expMinCluster expected minimum number of clusters (inclusive)
 * @param expMaxCluster expected maximum number of clusters (inclusive)
 * @return the best cluster number found, or -1 if no meaningful score exists
 * @throws ExecutionException propagated from score computation
 * @throws InterruptedException propagated from score computation
 */
public int reduce(Optimizer optimizer, int expMinCluster, int expMaxCluster)
    throws ExecutionException, InterruptedException {
  // 1. start with min, max provided, what is oom (order of magnitude)
  int mag = computeMagnitude(expMaxCluster - expMinCluster + 1);
  // 2. setting search range. if oom is 0, set initial range to 1;
  int hop = mag == 0 ? 1 : (int) Math.pow(10.0, mag);
  // 3. find the meaning range, i.e., real min cluster and real max cluster
  // numbers where a real numbered optimisation score can be computed
  int[] range = computeClusterNumberRange(expMinCluster, expMaxCluster, hop, optimizer);
  if (range[0] == -1 && range[1] != -1) {
    System.err.println(
        "[!]No meaningful lower range. Only 1 possible cluster number:" + range[1]);
    return range[1];
  } else if (range[1] == -1 && range[0] != -1) {
    System.err.println(
        "[!]No meaningful upper range. Only 1 possible cluster number:" + range[0]);
    return range[0];
  } else if (range[0] == -1 && range[1] == -1) {
    System.err.println("[!]No meaningful cluster number, cannot cluster");
    return -1;
  }
  System.out.println(
      "[]Input range:"
          + expMinCluster
          + "-"
          + expMaxCluster
          + ", Real range:"
          + range[0]
          + "-"
          + range[1]);
  // Normalize so min <= max regardless of the order returned in range[].
  expMinCluster = range[0] < range[1] ? range[0] : range[1];
  expMaxCluster = range[1] > range[0] ? range[1] : range[0];

  // 4. reset hop based on new range
  mag = computeMagnitude(expMaxCluster - expMinCluster + 1);
  hop = mag == 0 ? 1 : (int) Math.pow(10.0, mag);

  double currentMaxOptimizationScore = 0;
  int current_iteration = 0;
  // todo: for location, why min > max; select range based on best interval, is it correct?
  while (current_iteration < maxIteration) {
    current_iteration++;
    currentMaxOptimizationScore = 0;
    // 5. compute optimization scores based on the search space defined by
    // expMinCluster, expMaxCluster, and range
    Map<Integer, Double> triedSplitsAndScores =
        computeOptimizationScores(expMinCluster, expMaxCluster, hop, optimizer);
    if (triedSplitsAndScores != null) {
      // already using minimum hop, but no meaningful optimisation score can be
      // computed within the range (TODO can this really happen?)
      // what is the real hop, max score?
      List<Integer> intervals = new ArrayList<>(triedSplitsAndScores.keySet());
      Collections.sort(intervals);
      // realHop = distance between the first two tried splits; also find the
      // best finite score of this iteration.
      int realHop = -1, lowerInterval = -1;
      for (int i : intervals) {
        if (lowerInterval != -1 && realHop == -1) realHop = Math.abs(i - lowerInterval);
        lowerInterval = i;
        Double score = triedSplitsAndScores.get(i);
        if (!Double.isInfinite(score)
            && !Double.isNaN(score)
            && score != Double.MAX_VALUE
            && score > currentMaxOptimizationScore) currentMaxOptimizationScore = score;
      }
      // Best finite score seen across all iterations so far.
      double global_max = 0.0;
      for (Double d : scores.values()) {
        if (!Double.isInfinite(d) && !Double.isNaN(d) && d > global_max) global_max = d;
      }
      if (stop(realHop, currentMaxOptimizationScore, global_max)) break;
      if (currentMaxOptimizationScore > global_max)
        // found a new max score, reset iterations to try
        current_iteration = 0;
      int newHop = reduceHop(realHop);
      hop = newHop;
    } else break;
  }

  // Pick the split with the overall highest valid score.
  int bestSplit = -1;
  double maxScore = 0;
  for (Map.Entry<Integer, Double> entry : scores.entrySet()) {
    Double score = entry.getValue();
    if (!score.isNaN() && !score.isInfinite() && score > maxScore) {
      maxScore = score;
      bestSplit = entry.getKey();
    }
  }
  System.out.println("[]Final Best=" + bestSplit + ", footprint:" + scores);
  return bestSplit;
}
/**
 * Returns the children of the given znode, sorted lexicographically, reading
 * them via {@code zkc} (presumably a ZooKeeper client — verify) with the
 * supplied watcher registered.
 *
 * @param znode path whose children are listed
 * @param watcher watcher registered for this read
 * @return the sorted child names; may be empty
 * @throws Exception propagated from the client call
 */
private List<String> getChildren(String znode, Watcher watcher) throws Exception {
  // Declare-and-assign directly; the original's null initializer was dead.
  List<String> children = zkc.getChildren(znode, watcher);
  if (!children.isEmpty()) Collections.sort(children);
  return children;
}
/**
 * Checks if any of the local partitions need to be evicted because this node
 * is no longer an affinity node for them, and starts eviction where needed.
 *
 * <p>Must be called while holding the write lock (asserted below).
 *
 * @param updateSeq Update sequence.
 * @return {@code true} if at least one local partition was evicted.
 */
private boolean checkEvictions(long updateSeq) {
  assert lock.isWriteLockedByCurrentThread();
  boolean changed = false;
  UUID locId = cctx.nodeId();
  for (GridDhtLocalPartition part : locParts.values()) {
    GridDhtPartitionState state = part.state();
    if (state.active()) {
      int p = part.id();
      List<ClusterNode> affNodes = cctx.affinity().nodes(p, topVer);
      // Only partitions this node no longer has affinity for are candidates.
      if (!affNodes.contains(cctx.localNode())) {
        Collection<UUID> nodeIds = F.nodeIds(nodes(p, topVer, OWNING));
        // If all affinity nodes are owners, then evict partition from local node.
        if (nodeIds.containsAll(F.nodeIds(affNodes))) {
          part.rent(false);
          updateLocal(part.id(), locId, part.state(), updateSeq);
          changed = true;
          if (log.isDebugEnabled())
            log.debug("Evicted local partition (all affinity nodes are owners): " + part);
        } else {
          // More owners than affinity nodes: the surplus oldest non-affinity
          // owners give up the partition; evict locally only if this node is
          // among that surplus.
          int ownerCnt = nodeIds.size();
          int affCnt = affNodes.size();
          if (ownerCnt > affCnt) {
            List<ClusterNode> sorted = new ArrayList<>(cctx.discovery().nodes(nodeIds));
            // Sort by node orders in ascending order.
            Collections.sort(sorted, CU.nodeComparator(true));
            int diff = sorted.size() - affCnt;
            for (int i = 0; i < diff; i++) {
              ClusterNode n = sorted.get(i);
              if (locId.equals(n.id())) {
                part.rent(false);
                updateLocal(part.id(), locId, part.state(), updateSeq);
                changed = true;
                if (log.isDebugEnabled())
                  log.debug(
                      "Evicted local partition (this node is oldest non-affinity node): " + part);
                break;
              }
            }
          }
        }
      }
    }
  }
  return changed;
}