protected void split() {
    int halfGuides = guides.size() / 2;
    int halfKids = halfGuides + 1;

    // The left sibling takes the first half of the guides and kids; the right
    // sibling takes everything after the median guide, which moves up to the parent.
    GuideNode<K, V> left =
        new GuideNode<K, V>(comparator, guides.subList(0, halfGuides), kids.subList(0, halfKids));
    GuideNode<K, V> right =
        new GuideNode<K, V>(
            comparator,
            guides.subList(halfGuides + 1, guides.size()),
            kids.subList(halfKids, kids.size()));

    // Splice the new nodes into the sibling chain, guarding against missing
    // neighbors (the leftmost/rightmost node on a level has none).
    left.setLeft(getLeft());
    left.setRight(right);
    if (getLeft() != null) {
        getLeft().setRight(left);
    }
    left.setParent(getParent());
    for (Node<K, V> kid : left.kids) {
        kid.setParent(left);
    }

    right.setLeft(left);
    right.setRight(getRight());
    if (getRight() != null) {
        getRight().setLeft(right);
    }
    right.setParent(getParent());
    for (Node<K, V> kid : right.kids) {
        kid.setParent(right);
    }

    // Promote the median guide, replacing this node with the two halves.
    parent.insertKid(this, guides.get(halfGuides), left, right);
}
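// split() above carves the guides and kids into halves with subList and promotes
// the median guide. The index arithmetic is easy to get wrong by one; a tiny
// standalone check of the slicing (values are illustrative, not from the original):
import java.util.Arrays;
import java.util.List;

public class SplitIndexDemo {
    public static void main(String[] args) {
        List<String> guides = Arrays.asList("g0", "g1", "g2", "g3", "g4");
        int halfGuides = guides.size() / 2; // 2, so "g2" is the median
        System.out.println(guides.subList(0, halfGuides));                 // [g0, g1]
        System.out.println(guides.get(halfGuides));                        // g2 (promoted)
        System.out.println(guides.subList(halfGuides + 1, guides.size())); // [g3, g4]
    }
}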
@Test
public void testReprioritise() {
    LinkedList<SimpleTask> toDo1 = new LinkedList<SimpleTask>();
    final int NUMMODELS = d_numCores + 2;
    for (int i = 0; i < NUMMODELS; ++i) {
        toDo1.add(new SimpleSuspendableTask(new SuspendableTestThread((i + 4) * 400)));
    }
    ThreadHandler th = ThreadHandler.getInstance();
    th.scheduleTasks(toDo1);

    List<SimpleTask> nCoresHeadList = toDo1.subList(0, d_numCores);
    List<SimpleTask> nCoresHeadListComplement = toDo1.subList(d_numCores, NUMMODELS);
    sleepLongEnough();
    assertTrue(th.getRunningTasks().containsAll(nCoresHeadList));
    assertTrue(th.getQueuedTaskList().containsAll(toDo1));

    // NOP: rescheduling already-running tasks should not change anything.
    th.scheduleTasks(nCoresHeadList);
    sleepLongEnough();
    assertTrue(th.getRunningTasks().containsAll(nCoresHeadList));
    assertTrue(th.getQueuedTaskList().containsAll(toDo1));

    // Reprioritise queued tasks by re-adding them; they should displace running tasks.
    th.scheduleTasks(nCoresHeadListComplement);
    sleepLongEnough();
    assertTrue(th.getRunningTasks().containsAll(nCoresHeadListComplement));
    assertTrue(th.getRunningTasks().containsAll(nCoresHeadList.subList(0, d_numCores - 2)));
    assertTrue(th.getQueuedTaskList().containsAll(toDo1));
}
private boolean directedCompute() {
    // Bucket all nodes by out-degree, keeping the distinct degrees sorted.
    SortedSet<Integer> degrees = new TreeSet<Integer>();
    for (IElement iE : g.getNodes()) {
        DirectedNode n = (DirectedNode) iE;
        int degree = n.getOutDegree();
        degrees.add(degree);
        if (nodesSortedByDegree.containsKey(degree)) {
            this.nodesSortedByDegree.get(degree).add(n);
        } else {
            LinkedList<Node> temp = new LinkedList<>();
            temp.add(n);
            this.nodesSortedByDegree.put(degree, temp);
        }
    }

    // Fill the rich club from the highest degree downwards until it holds richClubSize nodes.
    HashSet<Node> currentRichClub = new HashSet<Node>();
    int currentRichClubSize = 0;
    int size = degrees.size();
    for (int i = 0; i < size; i++) {
        int currentDegree = degrees.last();
        degrees.remove(currentDegree);
        LinkedList<Node> current = this.nodesSortedByDegree.get(currentDegree);
        currentRichClubSize += current.size();
        this.nodesSortedByDegree.remove(currentDegree);
        if (currentRichClubSize >= this.richClubSize) {
            // Split the current bucket: the head completes the rich club,
            // the remainder stays in nodesSortedByDegree.
            int separateAt = current.size() - (currentRichClubSize - this.richClubSize);
            LinkedList<Node> head = new LinkedList<>(current.subList(0, separateAt));
            this.richClub.put(currentDegree, head);
            currentRichClub.addAll(head);
            LinkedList<Node> rest = new LinkedList<>(current.subList(separateAt, current.size()));
            if (!rest.isEmpty()) {
                this.nodesSortedByDegree.put(currentDegree, rest);
            }
            break;
        } else {
            richClub.put(currentDegree, current);
            currentRichClub.addAll(current);
        }
    }

    // Count the edges that stay inside the rich club.
    for (Node n : currentRichClub) {
        DirectedNode ne = (DirectedNode) n;
        for (IElement iE : ne.getOutgoingEdges()) {
            DirectedEdge e = (DirectedEdge) iE;
            if (currentRichClub.contains(e.getDst())) {
                edgesBetweenRichClub++;
            }
        }
    }
    return true;
}
@Test
public void testReprioritiseDontTouchNonSuspendable() {
    LinkedList<SimpleTask> toDo1 = new LinkedList<SimpleTask>();
    LinkedList<SimpleTask> toDo2 = new LinkedList<SimpleTask>();
    for (int i = 0; i < d_numCores - 1; ++i) {
        toDo1.add(new SimpleSuspendableTask(new SuspendableTestThread(600)));
        toDo2.add(new SimpleSuspendableTask(new SuspendableTestThread(600)));
    }
    // The last task of the first batch cannot be suspended and must keep running.
    toDo1.add(new SimpleSuspendableTask(new NonSuspendableTestThread(600)));
    toDo2.add(new SimpleSuspendableTask(new SuspendableTestThread(400)));

    ThreadHandler th = ThreadHandler.getInstance();
    th.scheduleTasks(toDo1);
    sleepLongEnough();
    assertEquals(toDo1, th.getRunningTasks());

    // All but the last task of the second batch should displace the suspendable
    // tasks, while the non-suspendable task from the first batch stays put.
    LinkedList<SimpleTask> expected = new LinkedList<SimpleTask>(toDo2.subList(0, toDo2.size() - 1));
    expected.addFirst(toDo1.getLast());
    th.scheduleTasks(toDo2);
    sleepLongEnough();
    assertEquals(expected, th.getRunningTasks());
}
public List<Article> getArticles() {
    // Note: subList returns a view backed by the internal list, not a copy.
    if (articles.size() > MAX_ARTICLE_COUNT) {
        return articles.subList(0, MAX_ARTICLE_COUNT);
    }
    return articles;
}
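// getArticles hands callers a subList view rather than a copy, so later structural
// changes to the backing list invalidate what they hold. A minimal standalone
// sketch of that behaviour (class and variable names here are illustrative):
import java.util.ArrayList;
import java.util.Arrays;
import java.util.ConcurrentModificationException;
import java.util.List;

public class SubListViewDemo {
    public static void main(String[] args) {
        List<String> articles = new ArrayList<>(Arrays.asList("a", "b", "c", "d"));
        List<String> top = articles.subList(0, 2);

        // Non-structural writes to the backing list show through the view...
        articles.set(0, "A");
        System.out.println(top); // [A, b]

        // ...but a structural modification (add/remove) invalidates the view.
        articles.add("e");
        try {
            top.get(0);
        } catch (ConcurrentModificationException e) {
            System.out.println("view invalidated by structural change");
        }
    }
}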
public void setMaxMessages(int max) {
    // If we are shrinking the list, clip the extra, older, messages.
    if (max < this.maxMessages && messages.size() > max) {
        messages.subList(max, messages.size()).clear();
    }
    this.maxMessages = max;
}
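// setMaxMessages relies on the view-based range delete: clearing a subList removes
// that range from the backing list in a single call, with no index loop. A
// self-contained sketch of the same idiom (names are illustrative):
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;

public class ClipOldestDemo {
    public static void main(String[] args) {
        // Assume the newest messages sit at the front, as setMaxMessages above implies.
        List<String> messages = new LinkedList<>(Arrays.asList("m5", "m4", "m3", "m2", "m1"));
        int max = 3;
        if (messages.size() > max) {
            messages.subList(max, messages.size()).clear(); // drops m2 and m1
        }
        System.out.println(messages); // [m5, m4, m3]
    }
}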
public List<Node> getLine(int index) {
    if (index >= 0 && index < lines.size()) {
        int start = lines.get(index);
        int end = getLineEnd(index);
        return nodeList.subList(start, end);
    }
    return null;
}
public LinkedList<TaxiData> findNearestTaxiForUser(User u) {
    LinkedList<TaxiData> copyOfTaxiList = makeCopyListOfTaxi(u);
    Collections.sort(copyOfTaxiList, new DistanceComparator(u.getPos()));
    if (copyOfTaxiList.size() <= numberOfNearestTaxi) {
        return copyOfTaxiList;
    }
    // subList's toIndex is exclusive, so use numberOfNearestTaxi (not minus one)
    // to return exactly that many taxis. The view must also be copied: casting it
    // to LinkedList would throw a ClassCastException at runtime.
    return new LinkedList<TaxiData>(copyOfTaxiList.subList(0, numberOfNearestTaxi));
}
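// The fix above is worth spelling out: List.subList never returns a LinkedList,
// only an internal view class, so the original cast could not succeed. The same
// bug appears again in findEntities below. A minimal demonstration:
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;

public class SubListCastDemo {
    public static void main(String[] args) {
        LinkedList<Integer> list = new LinkedList<>(Arrays.asList(1, 2, 3, 4));

        // Would throw ClassCastException: the view is not a LinkedList.
        // LinkedList<Integer> bad = (LinkedList<Integer>) list.subList(0, 2);

        // Copying the view into a fresh LinkedList is the safe equivalent.
        LinkedList<Integer> good = new LinkedList<>(list.subList(0, 2));
        System.out.println(good); // [1, 2]
    }
}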
public LinkedList<MapLocation> pathFind(MapLocation start, MapLocation target)
        throws GameActionException {
    // for (int i = 0; i < myRobot.allies.length; i++) {
    //     RobotInfo r = rc.senseRobotInfo(myRobot.allies[i]);
    //     if (myRobot.allies[i].getID() == myRobot.ID) continue;
    //     map[r.location.x][r.location.y] = -2;
    // }
    // for (int i = 0; i < myRobot.enemies.length; i++) {
    //     RobotInfo r = rc.senseRobotInfo(myRobot.enemies[i]);
    //     map[r.location.x][r.location.y] = -2;
    // }

    // Walk the search result back to the start, collecting the nodes in order.
    SearchNode bugSearch = bugSearch(start, target);
    SearchNode[] nodes = new SearchNode[bugSearch.length];
    int counter = bugSearch.length - 1;
    while (bugSearch.prevLoc != null) {
        nodes[counter] = bugSearch;
        bugSearch = bugSearch.prevLoc;
        counter--;
    }
    nodes[0] = bugSearch;

    // Keep only the pivot nodes, where the path changes direction.
    LinkedList<MapLocation> pivots = new LinkedList<MapLocation>();
    pivots.add(nodes[0].loc);
    for (int i = 1; i < nodes.length; i++) {
        if (nodes[i].isPivot) {
            pivots.add(nodes[i].loc);
        }
    }

    // Smooth the path: whenever two non-adjacent pivots can see each other,
    // drop every pivot between them via a subList view and continue from there.
    counter = 0;
    ListIterator<MapLocation> li1 = pivots.listIterator(), li2;
    while (li1.hasNext()) {
        li2 = pivots.listIterator(pivots.size());
        while (li2.hasPrevious() && li2.previousIndex() > li1.nextIndex() + 1) {
            if (canTravel(li1.next(), li2.previous())) {
                pivots.subList(li1.nextIndex(), li2.previousIndex() + 1).clear();
                li1 = pivots.listIterator(++counter);
                break;
            }
            li1.previous();
        }
        li1.next();
    }
    return pivots;
}
/**
 * Interface method implementation. For multi-sharded entities, returns data collected from each
 * shard, i.e. by executing the query against each shard and collating the results into a single
 * collection. Limits the search results by {@link Criteria#getMaxResults()} if specified.
 *
 * @see PersistenceManager#findEntities(Criteria)
 */
@SuppressWarnings("unchecked")
public Collection<PersistentEntity> findEntities(Criteria criteria) throws PersistenceException {
    // Linked list to collate results from queries executed on multiple shards, if any
    LinkedList returnedObjects = new LinkedList();
    if (MultiShardAwareCriteria.class.isAssignableFrom(criteria.getClass())) {
        // Multi-sharded criteria: iterate through the shard hints and invoke the persistence
        // call on the delegate once per shard, collating the results from each.
        MultiShardAwareCriteria multiShardAwareCriteria = (MultiShardAwareCriteria) criteria;
        for (String shardHint : multiShardAwareCriteria.getShardHints()) {
            // Set the shard hints one at a time and make persistence calls on the delegate.
            multiShardAwareCriteria.setShardHint(shardHint);
            checkAndPopulateShardedEntityContextHolder(new Criteria[] {multiShardAwareCriteria});
            Collection lookedUpEntities =
                findSuitableProvider(multiShardAwareCriteria.getManagedClass())
                    .findEntities(multiShardAwareCriteria);
            for (Object entity : lookedUpEntities) {
                // If the returned object is a ShardedEntity, record the data store it was
                // loaded from; useful if the object is persisted subsequently.
                if (ShardedEntity.class.isAssignableFrom(entity.getClass())) {
                    ((ShardedEntity) entity).setShardHint(shardHint);
                }
            }
            returnedObjects.addAll(lookedUpEntities);
            // Unset the context using the criteria.
            checkAndUnsetShardedEntityContextHolder(multiShardAwareCriteria);
            // Stop early once the max-results limit is reached. Note that subList returns a
            // view, not a LinkedList, so it must be copied rather than cast.
            if (criteria.getMaxResults() > 0 && returnedObjects.size() >= criteria.getMaxResults()) {
                returnedObjects = new LinkedList(returnedObjects.subList(0, criteria.getMaxResults()));
                break;
            }
        }
        // Return the outcome of the multi-sharded criteria persistence call.
        return returnedObjects;
    }
    // The criteria is not multi-sharded. Proceed to deal with single-sharded (or unsharded)
    // entities.
    checkAndPopulateShardedEntityContextHolder(new Criteria[] {criteria});
    Collection entities = findSuitableProvider(criteria.getManagedClass()).findEntities(criteria);
    // Unset the context using the criteria.
    checkAndUnsetShardedEntityContextHolder(criteria);
    return entities;
}
/**
 * Gets recommendations for the currently logged-in user.
 *
 * @return an array containing the recommendations, or null if nobody is logged in
 */
public Medium[] getRecommendedMedia() {
    User u = Database.getInstance().getLoggedInUser();
    if (u == null) {
        return null;
    }
    LinkedList<Medium> recommendedMedia = new LinkedList<>();

    // Determine the user's favorite genre from their favorite mediums.
    HashMap<Genre, Integer> mediumCounts = new HashMap<>();
    Integer count;
    int maxCount = -1;
    Genre favoriteGenre = null;
    for (Medium m : u.getFavoriteMediums()) {
        count = mediumCounts.get(m.getGenre());
        if (count == null) {
            count = 1;
        } else {
            count++;
        }
        mediumCounts.put(m.getGenre(), count);
        if (count > maxCount) {
            maxCount = count;
            favoriteGenre = m.getGenre();
        }
    }

    // Fall back to the top genre if no favorite genre was found.
    if (favoriteGenre == null) {
        favoriteGenre = Database.getInstance().getTopGenre();
    }

    // Collect all media of the favorite genre that the user has not rated yet.
    for (Medium m : favoriteGenre.getMedia()) {
        if (m.getRatingList().get(u.hashCode()) == null) {
            recommendedMedia.add(m);
        }
    }

    // Return a random selection of at most 10 of them.
    Collections.shuffle(recommendedMedia);
    recommendedMedia =
        new LinkedList<>(recommendedMedia.subList(0, Math.min(recommendedMedia.size(), 10)));
    Medium[] result = new Medium[recommendedMedia.size()];
    recommendedMedia.toArray(result);
    return result;
}
public boolean isContext(YAMLEmitter.ParseContext... eventChain) {
    int pathLength = eventChain.length;
    if (pathLength > context.size()) {
        return false;
    }
    // Compare the event chain against the first pathLength context entries, walking
    // the chain backwards: its last element pairs with the head of the context.
    List<YAMLEmitter.ParseContext> head = context.subList(0, pathLength);
    for (int i = pathLength - 1; i >= 0; i--) {
        if (eventChain[i] != head.get(pathLength - 1 - i)) {
            return false;
        }
    }
    return true;
}
public int getFileListSize() {
    // Counts every entry except the first; equivalent to list.size() - 1.
    return list.subList(1, list.size()).size();
}
/**
 * Look at the various property settings and create the command line for this server.
 *
 * @return the command line for this server.
 */
protected List buildExecCommand() {
    try {
        String cmd = getCommand();
        if (cmd == null) {
            return null;
        }
        mLogger.finest(LogUtil.splitLine(cmd));

        // Get java-specific properties.
        LinkedList execTokens = tokenizeCommand(cmd);
        String firstToken = (String) execTokens.get(0);
        if (firstToken.equals("java")) {
            // This is a java command, so rework it.
            if (execTokens.size() < 2) {
                return null;
            }
            mIsJavaServer = true;

            List props = new LinkedList();
            props.add(mPropertyPrefix + ".nativeLogging");
            props.add(WDConstants.WD_PREFIX + ".nativeLogging");
            String nativeLog = getProperty(props);
            if (nativeLog == null || nativeLog.equals("false")) {
                setNativeLoggingUsed(false);
            } else {
                setNativeLoggingUsed(true);
            }

            // The command has the form: java [flags...] mainClass
            String mainClassName = (String) execTokens.getLast();
            List cmdLineFlags = null;
            if (execTokens.size() > 2) {
                cmdLineFlags = execTokens.subList(1, execTokens.size() - 1);
            }
            String javaClasspath = getJavaClasspath();
            String addCp = getJavaAdditionalClasspath();
            if (addCp != null) {
                javaClasspath += File.pathSeparator + addCp;
            }
            String jvm = getJavaJVM();
            String jvmType = getJVMType();
            List jvmFlags = getJVMFlags();
            List appArgs = getAppArgs();
            mServerCmd =
                new ServerCommand(
                    jvm, jvmType, jvmFlags, javaClasspath, mainClassName, cmdLineFlags, appArgs);
            updateFlags(mServerCmd);
            updateServerCommandForSpecialHandling(mServerCmd);
            execTokens = mServerCmd.getTokens();
            mLogger.finest(LogUtil.splitLine(execTokens.toString()));
        } else {
            setNativeLoggingUsed(true);
        }
        return execTokens;
    } catch (Exception e) {
        mLogger.severe("Failed to buildExecCmd", e);
        return null;
    }
}
public java.util.List subList(int fromIndex, int toIndex) {
    // Specified by: subList in interface java.util.List
    return linkedlist.subList(fromIndex, toIndex);
}
@Test
public void testRemoveOldLogs() throws Exception {
    try {
        DirectUpdateHandler2.commitOnClose = false;
        final Semaphore logReplay = new Semaphore(0);
        final Semaphore logReplayFinish = new Semaphore(0);
        UpdateLog.testing_logReplayHook =
            new Runnable() {
                @Override
                public void run() {
                    try {
                        assertTrue(logReplay.tryAcquire(timeout, TimeUnit.SECONDS));
                    } catch (Exception e) {
                        throw new RuntimeException(e);
                    }
                }
            };
        UpdateLog.testing_logReplayFinishHook =
            new Runnable() {
                @Override
                public void run() {
                    logReplayFinish.release();
                }
            };

        clearIndex();
        assertU(commit());

        String logDir = h.getCore().getUpdateHandler().getUpdateLog().getLogDir();
        h.close();
        String[] files = HdfsUpdateLog.getLogList(fs, new Path(logDir));
        for (String file : files) {
            fs.delete(new Path(logDir, file), false);
        }
        assertEquals(0, HdfsUpdateLog.getLogList(fs, new Path(logDir)).length);
        createCore();

        int start = 0;
        int maxReq = 50;
        LinkedList<Long> versions = new LinkedList<>();

        addDocs(10, start, versions);
        start += 10;
        assertJQ(
            req("qt", "/get", "getVersions", "" + maxReq),
            "/versions==" + versions.subList(0, Math.min(maxReq, start)));
        assertU(commit());
        assertJQ(
            req("qt", "/get", "getVersions", "" + maxReq),
            "/versions==" + versions.subList(0, Math.min(maxReq, start)));

        addDocs(10, start, versions);
        start += 10;
        assertJQ(
            req("qt", "/get", "getVersions", "" + maxReq),
            "/versions==" + versions.subList(0, Math.min(maxReq, start)));
        assertU(commit());
        assertJQ(
            req("qt", "/get", "getVersions", "" + maxReq),
            "/versions==" + versions.subList(0, Math.min(maxReq, start)));
        assertEquals(2, HdfsUpdateLog.getLogList(fs, new Path(logDir)).length);

        addDocs(105, start, versions);
        start += 105;
        assertJQ(
            req("qt", "/get", "getVersions", "" + maxReq),
            "/versions==" + versions.subList(0, Math.min(maxReq, start)));
        assertU(commit());
        assertJQ(
            req("qt", "/get", "getVersions", "" + maxReq),
            "/versions==" + versions.subList(0, Math.min(maxReq, start)));

        // previous two logs should be gone now
        assertEquals(1, HdfsUpdateLog.getLogList(fs, new Path(logDir)).length);

        addDocs(1, start, versions);
        start += 1;
        h.close();
        createCore();

        // Trigger recovery, make sure that tlog reference handling is correct.
        // Test that we can get versions while replay is happening.
        assertJQ(
            req("qt", "/get", "getVersions", "" + maxReq),
            "/versions==" + versions.subList(0, Math.min(maxReq, start)));

        logReplay.release(1000);
        assertTrue(logReplayFinish.tryAcquire(timeout, TimeUnit.SECONDS));
        assertJQ(
            req("qt", "/get", "getVersions", "" + maxReq),
            "/versions==" + versions.subList(0, Math.min(maxReq, start)));

        addDocs(105, start, versions);
        start += 105;
        assertJQ(
            req("qt", "/get", "getVersions", "" + maxReq),
            "/versions==" + versions.subList(0, Math.min(maxReq, start)));
        assertU(commit());
        assertJQ(
            req("qt", "/get", "getVersions", "" + maxReq),
            "/versions==" + versions.subList(0, Math.min(maxReq, start)));

        // previous logs should be gone now
        assertEquals(1, HdfsUpdateLog.getLogList(fs, new Path(logDir)).length);

        //
        // test that a corrupt tlog file doesn't stop us from coming up, or seeing versions
        // before that tlog file.
        //
        // don't add this to the versions list because we are going to lose it...
        addDocs(1, start, new LinkedList<Long>());
        h.close();
        files = HdfsUpdateLog.getLogList(fs, new Path(logDir));
        Arrays.sort(files);
        FSDataOutputStream dos =
            fs.create(new Path(new Path(logDir), files[files.length - 1]), (short) 1);
        dos.writeUTF("This is a trashed log file that really shouldn't work at all, but we'll see..");
        dos.close();

        ignoreException("Failure to open existing");
        createCore();
        // we should still be able to get the list of versions (not including the trashed log file)
        assertJQ(
            req("qt", "/get", "getVersions", "" + maxReq),
            "/versions==" + versions.subList(0, Math.min(maxReq, start)));
        resetExceptionIgnores();
    } finally {
        DirectUpdateHandler2.commitOnClose = true;
        UpdateLog.testing_logReplayHook = null;
        UpdateLog.testing_logReplayFinishHook = null;
    }
}
public List getRecentlyOpenedFiles() {
    // Skip the first entry; return a view of the rest of the list.
    return list.subList(1, list.size());
}
private void updateChordCounts(
        int curNode,
        boolean[] isMarked,
        boolean[] isOuter,
        int[] chordCount,
        LinkedList<Integer> currentOuterNodes,
        Graph graph) {
    // Go through all outer neighbors. If the count is just 2, then reduce the chord
    // count of both of them; otherwise iterate the neighbors of each node and
    // increase counts appropriately.
    ArrayList<Integer> outerSequence = new ArrayList<>();
    for (Edge edge : graph.nodes.get(curNode).neighbors) {
        if (!isMarked[edge.dest]) {
            outerSequence.add(edge.dest);
            isOuter[edge.dest] = true;
        }
    }

    // Find the first and last of these neighbors on the current outer face.
    int minIdx = currentOuterNodes.size(), minIdxNode = -1;
    int maxIdx = -1;
    int curNodeIdx;
    for (Integer node : outerSequence) {
        curNodeIdx = currentOuterNodes.indexOf(node);
        if (curNodeIdx == -1) {
            continue;
        }
        if (curNodeIdx < minIdx) {
            minIdx = curNodeIdx;
            minIdxNode = node;
        }
        if (curNodeIdx > maxIdx) {
            maxIdx = curNodeIdx;
        }
    }

    // Rotate outerSequence until it starts at the node with the smallest outer index.
    int outerSeqSize = outerSequence.size();
    while (outerSequence.get(0) != minIdxNode) {
        int temp = outerSequence.remove(outerSeqSize - 1);
        outerSequence.add(0, temp);
    }

    // Rebuild the outer face: keep the prefix up to minIdx, splice in the interior
    // of the new outer sequence, and keep the suffix from maxIdx on.
    LinkedList<Integer> newOuterNodes = new LinkedList<>();
    newOuterNodes.addAll(currentOuterNodes.subList(0, minIdx + 1));
    newOuterNodes.addAll(outerSequence.subList(1, outerSeqSize - 1));
    newOuterNodes.addAll(currentOuterNodes.subList(maxIdx, currentOuterNodes.size()));
    currentOuterNodes.clear();
    currentOuterNodes.addAll(newOuterNodes);

    if (outerSequence.size() == 2) {
        chordCount[outerSequence.get(0)]--;
        chordCount[outerSequence.get(1)]--;
    } else {
        // Go through all nodes that were not outer in the previous iteration and
        // increase the chord count of each endpoint, counting every chord only once.
        HashSet<String> markedEdges = new HashSet<>();
        for (int i = 1; i < outerSequence.size() - 1; i++) {
            for (Edge edge : graph.nodes.get(outerSequence.get(i)).neighbors) {
                if (isOuter[edge.dest]
                        && edge.dest != outerSequence.get(i - 1)
                        && edge.dest != outerSequence.get(i + 1)
                        && !markedEdges.contains(Helpers.getStringForEdge(edge))) {
                    markedEdges.add(Helpers.getStringForEdge(edge));
                    chordCount[outerSequence.get(i)]++;
                    chordCount[edge.dest]++;
                }
            }
        }
    }
}
@Test
public void testPutSubAbstractList() {
    final LinkedList<Person> origin = new LinkedList<>(genPersonList(100));
    List<Person> sublist = origin.subList(10, 30);
    testReadWriteWithoutClassCheck(sublist);
}
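// The test above feeds a subList view straight into a (presumably custom)
// serializer. Under plain JDK serialization that would fail, because the view
// classes returned by subList do not implement java.io.Serializable; the usual
// workaround is to copy the view first. A self-contained sketch assuming
// standard JDK object streams (Person is replaced by Integer for brevity):
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.LinkedList;
import java.util.List;

public class SerializableSubListDemo {
    public static void main(String[] args) throws Exception {
        LinkedList<Integer> origin = new LinkedList<>();
        for (int i = 0; i < 100; i++) {
            origin.add(i);
        }

        // Copy the view into a real LinkedList, which is Serializable.
        LinkedList<Integer> copy = new LinkedList<>(origin.subList(10, 30));

        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        try (ObjectOutputStream oos = new ObjectOutputStream(bos)) {
            oos.writeObject(copy);
        }
        try (ObjectInputStream ois =
                new ObjectInputStream(new ByteArrayInputStream(bos.toByteArray()))) {
            @SuppressWarnings("unchecked")
            List<Integer> back = (List<Integer>) ois.readObject();
            System.out.println(back.size()); // 20
        }
    }
}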