/** subSet returns set with keys in requested range */ public void testSubSetContents2() { NavigableSet set = set5(); SortedSet sm = set.subSet(two, three); assertEquals(1, sm.size()); assertEquals(two, sm.first()); assertEquals(two, sm.last()); assertFalse(sm.contains(one)); assertTrue(sm.contains(two)); assertFalse(sm.contains(three)); assertFalse(sm.contains(four)); assertFalse(sm.contains(five)); Iterator i = sm.iterator(); Object k; k = (Integer) (i.next()); assertEquals(two, k); assertFalse(i.hasNext()); Iterator j = sm.iterator(); j.next(); j.remove(); assertFalse(set.contains(two)); assertEquals(4, set.size()); assertEquals(0, sm.size()); assertTrue(sm.isEmpty()); assertFalse(sm.remove(three)); assertEquals(4, set.size()); }
/** subSet returns set with keys in requested range */ public void testDescendingSubSetContents2() { NavigableSet set = dset5(); SortedSet sm = set.subSet(m2, m3); assertEquals(1, sm.size()); assertEquals(m2, sm.first()); assertEquals(m2, sm.last()); assertFalse(sm.contains(m1)); assertTrue(sm.contains(m2)); assertFalse(sm.contains(m3)); assertFalse(sm.contains(m4)); assertFalse(sm.contains(m5)); Iterator i = sm.iterator(); Object k; k = (Integer) (i.next()); assertEquals(m2, k); assertFalse(i.hasNext()); Iterator j = sm.iterator(); j.next(); j.remove(); assertFalse(set.contains(m2)); assertEquals(4, set.size()); assertEquals(0, sm.size()); assertTrue(sm.isEmpty()); assertFalse(sm.remove(m3)); assertEquals(4, set.size()); }
/** size changes when elements added and removed */ public void testDescendingSize() { NavigableSet q = populatedSet(SIZE); for (int i = 0; i < SIZE; ++i) { assertEquals(SIZE - i, q.size()); q.pollFirst(); } for (int i = 0; i < SIZE; ++i) { assertEquals(i, q.size()); q.add(new Integer(i)); } }
private static void testAllSlices( String id, NavigableSet<Integer> btree, NavigableSet<Integer> canon, boolean ascending, List<ListenableFuture<?>> results) { testOneSlice(id, btree, canon, results); for (Integer lb : range(canon.size(), Integer.MIN_VALUE, ascending)) { // test head/tail sets; slice ids use [ ] for inclusive and ( ) for exclusive bounds testOneSlice( String.format("%s->(..%d]", id, lb), btree.headSet(lb, true), canon.headSet(lb, true), results); testOneSlice( String.format("%s->(..%d)", id, lb), btree.headSet(lb, false), canon.headSet(lb, false), results); testOneSlice( String.format("%s->[%d..)", id, lb), btree.tailSet(lb, true), canon.tailSet(lb, true), results); testOneSlice( String.format("%s->(%d..)", id, lb), btree.tailSet(lb, false), canon.tailSet(lb, false), results); for (Integer ub : range(canon.size(), lb, ascending)) { // test subsets testOneSlice( String.format("%s->[%d..%d]", id, lb, ub), btree.subSet(lb, true, ub, true), canon.subSet(lb, true, ub, true), results); testOneSlice( String.format("%s->(%d..%d]", id, lb, ub), btree.subSet(lb, false, ub, true), canon.subSet(lb, false, ub, true), results); testOneSlice( String.format("%s->[%d..%d)", id, lb, ub), btree.subSet(lb, true, ub, false), canon.subSet(lb, true, ub, false), results); testOneSlice( String.format("%s->(%d..%d)", id, lb, ub), btree.subSet(lb, false, ub, false), canon.subSet(lb, false, ub, false), results); } } }
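/** Hypothetical sketch of the testOneSlice helper assumed above (not the original implementation): it checks that the slice of the set under test matches the canonical TreeSet slice in size and iteration order; the list of ListenableFutures is accepted only for signature compatibility here, whereas the real helper presumably runs its checks asynchronously and collects them in results. */ private static void testOneSlice( String id, NavigableSet<Integer> test, NavigableSet<Integer> canon, List<ListenableFuture<?>> results) { assertEquals(id + " size", canon.size(), test.size()); Iterator<Integer> testIt = test.iterator(); Iterator<Integer> canonIt = canon.iterator(); while (canonIt.hasNext()) { assertEquals(id + " iteration order", canonIt.next(), testIt.next()); } assertFalse(id + " extra elements", testIt.hasNext()); }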
/** A deserialized serialized set has same elements */ public void testSerialization() throws Exception { NavigableSet x = populatedSet(SIZE); NavigableSet y = serialClone(x); assertNotSame(x, y); assertEquals(x.size(), y.size()); assertEquals(x, y); assertEquals(y, x); while (!x.isEmpty()) { assertFalse(y.isEmpty()); assertEquals(x.pollFirst(), y.pollFirst()); } assertTrue(y.isEmpty()); }
/** A deserialized serialized set has same elements */ public void testDescendingSerialization() throws Exception { NavigableSet x = dset5(); NavigableSet y = serialClone(x); assertNotSame(x, y); assertEquals(x.size(), y.size()); assertEquals(x.toString(), y.toString()); assertEquals(x, y); assertEquals(y, x); while (!x.isEmpty()) { assertFalse(y.isEmpty()); assertEquals(x.pollFirst(), y.pollFirst()); } assertTrue(y.isEmpty()); }
@Test public void WriteDBInt_lastKey_set_middle() { int numberOfRecords = 1000; /* Creates connections to MapDB */ DB db1 = DBMaker.memoryDB().transactionDisable().make(); /* Creates maps */ NavigableSet<Integer> map1 = db1.treeSet("column1"); /* Inserts initial values in maps */ for (int i = 0; i < numberOfRecords; i++) { map1.add(i); } assertEquals((Object) (numberOfRecords - 1), map1.last()); map1.clear(); /* Inserts some values in maps */ for (int i = 100; i < 110; i++) { map1.add(i); } assertEquals(10, map1.size()); assertFalse(map1.isEmpty()); assertEquals((Object) 109, map1.last()); assertEquals((Object) 100, map1.first()); }
/** tailSet returns set with keys in requested range */ public void testTailSetContents() { NavigableSet set = set5(); SortedSet sm = set.tailSet(two); assertFalse(sm.contains(one)); assertTrue(sm.contains(two)); assertTrue(sm.contains(three)); assertTrue(sm.contains(four)); assertTrue(sm.contains(five)); Iterator i = sm.iterator(); Object k; k = (Integer) (i.next()); assertEquals(two, k); k = (Integer) (i.next()); assertEquals(three, k); k = (Integer) (i.next()); assertEquals(four, k); k = (Integer) (i.next()); assertEquals(five, k); assertFalse(i.hasNext()); SortedSet ssm = sm.tailSet(four); assertEquals(four, ssm.first()); assertEquals(five, ssm.last()); assertTrue(ssm.remove(four)); assertEquals(1, ssm.size()); assertEquals(3, sm.size()); assertEquals(4, set.size()); }
/** * Assert that getSplitEditFilesSorted returns files in expected order and that it skips * moved-aside files. * * @throws IOException */ @Test public void testGetSplitEditFilesSorted() throws IOException { FileSystem fs = FileSystem.get(util.getConfiguration()); Path regiondir = util.getDataTestDir("regiondir"); fs.delete(regiondir, true); fs.mkdirs(regiondir); Path recoverededits = WALSplitter.getRegionDirRecoveredEditsDir(regiondir); String first = WALSplitter.formatRecoveredEditsFileName(-1); createFile(fs, recoverededits, first); createFile(fs, recoverededits, WALSplitter.formatRecoveredEditsFileName(0)); createFile(fs, recoverededits, WALSplitter.formatRecoveredEditsFileName(1)); createFile(fs, recoverededits, WALSplitter.formatRecoveredEditsFileName(11)); createFile(fs, recoverededits, WALSplitter.formatRecoveredEditsFileName(2)); createFile(fs, recoverededits, WALSplitter.formatRecoveredEditsFileName(50)); String last = WALSplitter.formatRecoveredEditsFileName(Long.MAX_VALUE); createFile(fs, recoverededits, last); createFile( fs, recoverededits, Long.toString(Long.MAX_VALUE) + "." + System.currentTimeMillis()); final Configuration walConf = new Configuration(util.getConfiguration()); FSUtils.setRootDir(walConf, regiondir); (new WALFactory(walConf, null, "dummyLogName")).getWAL(new byte[] {}, null); NavigableSet<Path> files = WALSplitter.getSplitEditFilesSorted(fs, regiondir); assertEquals(7, files.size()); assertEquals(files.pollFirst().getName(), first); assertEquals(files.pollLast().getName(), last); assertEquals(files.pollFirst().getName(), WALSplitter.formatRecoveredEditsFileName(0)); assertEquals(files.pollFirst().getName(), WALSplitter.formatRecoveredEditsFileName(1)); assertEquals(files.pollFirst().getName(), WALSplitter.formatRecoveredEditsFileName(2)); assertEquals(files.pollFirst().getName(), WALSplitter.formatRecoveredEditsFileName(11)); }
/** * Computes the set of nodes to send to the right node. * * @param addMe whether to include this node itself in the set * @param right the right node * @return the set of nodes to send to the right node. */ Set<Link> computeSetForRNode(boolean addMe, Link right) { NavigableSet<Link> propset = leftNbrSet.clone(); if (addMe) { propset.add(me); } // remove from propset the node equal to the right node and every node beyond it (if any) propset = propset.headSet(right, false); /* // Compare against the previously sent set starting from the nearest node and let i (>= 0) be the first differing position. // If i >= NEIGHBOR_SET_SIZE, do nothing. // If i < IMMED_PROP_THRESHOLD, notify the right node immediately. // If i >= IMMED_PROP_THRESHOLD, set a timer so the right node is notified after a fixed delay. Iterator<Link> it1 = prevset.iterator(); Iterator<Link> it2 = propset.iterator(); int i = 0; for (; it1.hasNext() && it2.hasNext(); i++) { Link link1 = it1.next(); Link link2 = it2.next(); if (!link1.equals(link2)) { break; } } if (i >= NEIGHBOR_SET_SIZE) { return; }*/ while (propset.size() > capacity) { propset.remove(propset.last()); } // create a copy of propset. propset has a reference to our // customized Comparator, which has a reference to NeighborSet. Set<Link> copy = new HashSet<Link>(); copy.addAll(propset); return copy; }
private void mergeStatuses(NavigableSet<Status> originalStatuses, List<Status> newStatuses) { // merge in the new statuses, then evict entries from the head of the set until fewer than 40 remain originalStatuses.addAll(newStatuses); while (originalStatuses.size() >= 40) { originalStatuses.pollFirst(); } }
/** tailSet returns set with keys in requested range */ public void testDescendingTailSetContents() { NavigableSet set = dset5(); SortedSet sm = set.tailSet(m2); assertFalse(sm.contains(m1)); assertTrue(sm.contains(m2)); assertTrue(sm.contains(m3)); assertTrue(sm.contains(m4)); assertTrue(sm.contains(m5)); Iterator i = sm.iterator(); Object k; k = (Integer) (i.next()); assertEquals(m2, k); k = (Integer) (i.next()); assertEquals(m3, k); k = (Integer) (i.next()); assertEquals(m4, k); k = (Integer) (i.next()); assertEquals(m5, k); assertFalse(i.hasNext()); SortedSet ssm = sm.tailSet(m4); assertEquals(m4, ssm.first()); assertEquals(m5, ssm.last()); assertTrue(ssm.remove(m4)); assertEquals(1, ssm.size()); assertEquals(3, sm.size()); assertEquals(4, set.size()); }
public void testKeyPutRandomUniform() throws Exception { final NavigableSet<Integer> keys = new TreeSet<Integer>(); long seed = System.currentTimeMillis(); System.out.println("testKeyPutRandomUniform seed : " + seed); final MersenneTwisterFast random = new MersenneTwisterFast(seed); while (keys.size() < KEYS_COUNT) { int key = random.nextInt(Integer.MAX_VALUE); sbTree.put(key, createValue(key, OSBTreeValuePage.MAX_BINARY_VALUE_SIZE + 4)); keys.add(key); doReset(); } Assert.assertEquals(sbTree.firstKey(), keys.first()); doReset(); Assert.assertEquals(sbTree.lastKey(), keys.last()); doReset(); for (int key : keys) { Assert.assertEquals( sbTree.get(key), createValue(key, OSBTreeValuePage.MAX_BINARY_VALUE_SIZE + 4)); doReset(); } }
/** * Assert that getSplitEditFilesSorted returns files in expected order and that it skips * moved-aside files. * * @throws IOException */ @Test public void testGetSplitEditFilesSorted() throws IOException { FileSystem fs = FileSystem.get(util.getConfiguration()); Path regiondir = util.getDataTestDir("regiondir"); fs.delete(regiondir, true); fs.mkdirs(regiondir); Path recoverededits = HLogUtil.getRegionDirRecoveredEditsDir(regiondir); String first = HLogSplitter.formatRecoveredEditsFileName(-1); createFile(fs, recoverededits, first); createFile(fs, recoverededits, HLogSplitter.formatRecoveredEditsFileName(0)); createFile(fs, recoverededits, HLogSplitter.formatRecoveredEditsFileName(1)); createFile(fs, recoverededits, HLogSplitter.formatRecoveredEditsFileName(11)); createFile(fs, recoverededits, HLogSplitter.formatRecoveredEditsFileName(2)); createFile(fs, recoverededits, HLogSplitter.formatRecoveredEditsFileName(50)); String last = HLogSplitter.formatRecoveredEditsFileName(Long.MAX_VALUE); createFile(fs, recoverededits, last); createFile( fs, recoverededits, Long.toString(Long.MAX_VALUE) + "." + System.currentTimeMillis()); HLogFactory.createHLog(fs, regiondir, "dummyLogName", util.getConfiguration()); NavigableSet<Path> files = HLogUtil.getSplitEditFilesSorted(fs, regiondir); assertEquals(7, files.size()); assertEquals(files.pollFirst().getName(), first); assertEquals(files.pollLast().getName(), last); assertEquals(files.pollFirst().getName(), HLogSplitter.formatRecoveredEditsFileName(0)); assertEquals(files.pollFirst().getName(), HLogSplitter.formatRecoveredEditsFileName(1)); assertEquals(files.pollFirst().getName(), HLogSplitter.formatRecoveredEditsFileName(2)); assertEquals(files.pollFirst().getName(), HLogSplitter.formatRecoveredEditsFileName(11)); }
@Override protected T prefetch() throws Exception { while (buffer.size() < maxBufferSize && it.hasNext()) { buffer.add(it.next()); } return buffer.isEmpty() ? finish() : buffer.pollFirst(); }
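/** Hypothetical sketch of the kind of enclosing class assumed by the prefetch() fragment above (the class name, the finish() hook, and the field names are assumptions, not the original API): a source iterator is drained into a NavigableSet buffer so elements are handed back in sorted order, and finish() is consulted once both the buffer and the source are exhausted. */ abstract class BufferedSortedPrefetcher<T extends Comparable<T>> { private final Iterator<T> it; private final NavigableSet<T> buffer = new TreeSet<T>(); private final int maxBufferSize; BufferedSortedPrefetcher(Iterator<T> it, int maxBufferSize) { this.it = it; this.maxBufferSize = maxBufferSize; } /** Called when no buffered or source elements remain; may return null to signal exhaustion. */ protected abstract T finish() throws Exception; protected T prefetch() throws Exception { while (buffer.size() < maxBufferSize && it.hasNext()) { buffer.add(it.next()); } return buffer.isEmpty() ? finish() : buffer.pollFirst(); } }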
void addAll(Collection<Link> nodes) { NavigableSet<Link> propset = leftNbrSet.clone(); propset.addAll(nodes); while (capacity > 0 && propset.size() > capacity) { propset.remove(propset.last()); } set(propset); }
/** clear removes all elements */ public void testDescendingClear() { NavigableSet q = populatedSet(SIZE); q.clear(); assertTrue(q.isEmpty()); assertEquals(0, q.size()); assertTrue(q.add(new Integer(1))); assertFalse(q.isEmpty()); q.clear(); assertTrue(q.isEmpty()); }
/** * Constructor. * * @param columns the columns specified by the user in the query * @param minVersions minimum number of versions to keep * @param maxVersions maximum versions to return per column * @param oldestUnexpiredTS the oldest timestamp we are interested in, based on TTL */ public ExplicitColumnTracker( NavigableSet<byte[]> columns, int minVersions, int maxVersions, long oldestUnexpiredTS) { this.maxVersions = maxVersions; this.minVersions = minVersions; this.oldestStamp = oldestUnexpiredTS; this.columns = new ColumnCount[columns.size()]; int i = 0; for (byte[] column : columns) { this.columns[i++] = new ColumnCount(column); } reset(); }
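/** Hypothetical usage sketch for the constructor above (the qualifier names and the version/TTL parameters are invented for illustration), assuming HBase's Bytes utility supplies the byte[] comparator: the tracker is fed a NavigableSet of column qualifiers sorted the way HBase compares byte arrays. */ static ExplicitColumnTracker exampleTracker() { NavigableSet<byte[]> trackedColumns = new TreeSet<byte[]>(Bytes.BYTES_COMPARATOR); trackedColumns.add(Bytes.toBytes("qual1")); trackedColumns.add(Bytes.toBytes("qual2")); return new ExplicitColumnTracker(trackedColumns, 0, 1, Long.MIN_VALUE); }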
/** removeAll(c) removes only those elements of c and reports true if changed */ public void testDescendingRemoveAll() { for (int i = 1; i < SIZE; ++i) { NavigableSet q = populatedSet(SIZE); NavigableSet p = populatedSet(i); assertTrue(q.removeAll(p)); assertEquals(SIZE - i, q.size()); for (int j = 0; j < i; ++j) { Integer x = (Integer) (p.pollFirst()); assertFalse(q.contains(x)); } } }
/** Returns a new five-element set (m1 through m5) as a descending-order view of a TreeSet. */ private NavigableSet dset5() { TreeSet q = new TreeSet(); assertTrue(q.isEmpty()); q.add(m1); q.add(m2); q.add(m3); q.add(m4); q.add(m5); NavigableSet s = q.descendingSet(); assertEquals(5, s.size()); return s; }
/** retainAll(c) retains only those elements of c and reports true if changed */ public void testDescendingRetainAll() { NavigableSet q = populatedSet(SIZE); NavigableSet p = populatedSet(SIZE); for (int i = 0; i < SIZE; ++i) { boolean changed = q.retainAll(p); if (i == 0) assertFalse(changed); else assertTrue(changed); assertTrue(q.containsAll(p)); assertEquals(SIZE - i, q.size()); p.pollFirst(); } }
/** Returns a new set of given size containing consecutive Integers 0 ... n - 1, built as a subSet view of a larger TreeSet. */ private NavigableSet<Integer> populatedSet(int n) { TreeSet<Integer> q = new TreeSet<Integer>(); assertTrue(q.isEmpty()); for (int i = n - 1; i >= 0; i -= 2) assertTrue(q.add(new Integer(i))); for (int i = (n & 1); i < n; i += 2) assertTrue(q.add(new Integer(i))); assertTrue(q.add(new Integer(-n))); assertTrue(q.add(new Integer(n))); NavigableSet s = q.subSet(new Integer(0), true, new Integer(n), false); assertFalse(s.isEmpty()); assertEquals(n, s.size()); return s; }
public void testKeyDeleteRandomGaussian() throws Exception { NavigableSet<Integer> keys = new TreeSet<Integer>(); long seed = System.currentTimeMillis(); System.out.println("testKeyDeleteRandomGaussian seed : " + seed); MersenneTwisterFast random = new MersenneTwisterFast(seed); while (keys.size() < KEYS_COUNT) { int key = (int) (random.nextGaussian() * Integer.MAX_VALUE / 2 + Integer.MAX_VALUE); if (key < 0) continue; sbTree.put(key, createValue(key, OSBTreeValuePage.MAX_BINARY_VALUE_SIZE + 4)); keys.add(key); Assert.assertEquals( sbTree.get(key), createValue(key, OSBTreeValuePage.MAX_BINARY_VALUE_SIZE + 4)); doReset(); } Iterator<Integer> keysIterator = keys.iterator(); while (keysIterator.hasNext()) { int key = keysIterator.next(); if (key % 3 == 0) { sbTree.remove(key); keysIterator.remove(); } doReset(); } Assert.assertEquals(sbTree.firstKey(), keys.first()); doReset(); Assert.assertEquals(sbTree.lastKey(), keys.last()); doReset(); for (int key : keys) { if (key % 3 == 0) { Assert.assertNull(sbTree.get(key)); } else { Assert.assertEquals( sbTree.get(key), createValue(key, OSBTreeValuePage.MAX_BINARY_VALUE_SIZE + 4)); } doReset(); } }
/** Returns a new set containing the elements one through five, as a subSet view of a larger TreeSet. */ private NavigableSet set5() { TreeSet q = new TreeSet(); assertTrue(q.isEmpty()); q.add(one); q.add(two); q.add(three); q.add(four); q.add(five); q.add(zero); q.add(seven); NavigableSet s = q.subSet(one, true, seven, false); assertEquals(5, s.size()); return s; }
protected boolean useLogarithmicProblemScale(List<XYSeries> seriesList) { NavigableSet<Double> xValueSet = new TreeSet<Double>(); int xValueListSize = 0; for (XYSeries series : seriesList) { for (XYDataItem dataItem : (List<XYDataItem>) series.getItems()) { xValueSet.add(dataItem.getXValue()); xValueListSize++; } } if (xValueListSize < LOG_SCALE_MIN_DATASETS_COUNT) { return false; } // If 60% of the points are in the lowest 20% of the value space, use a logarithmic scale double threshold = xValueSet.first() + 0.2 * (xValueSet.last() - xValueSet.first()); int belowThresholdCount = xValueSet.headSet(threshold).size(); return belowThresholdCount >= (0.6 * xValueSet.size()); }
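/** Hypothetical illustration of the heuristic above (the test name and data points are invented), assuming JFreeChart's XYSeries API and a LOG_SCALE_MIN_DATASETS_COUNT of at most 5: four of the five x values fall in the lowest 20% of the [1, 1000] range, so at least 60% of the points are clustered there and the logarithmic scale is chosen. */ public void testUseLogarithmicProblemScaleWithClusteredValues() { XYSeries series = new XYSeries("clustered"); series.add(1.0, 10.0); series.add(2.0, 20.0); series.add(3.0, 30.0); series.add(4.0, 40.0); series.add(1000.0, 50.0); assertTrue(useLogarithmicProblemScale(Collections.singletonList(series))); }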
/** * Finds range of server deltas needed to transform against, then transforms all client ops * against the server ops. */ private VersionedWaveletDelta transformSubmittedDelta( WaveletDelta submittedDelta, HashedVersion appliedVersion) throws OperationException, InvalidHashException { NavigableSet<VersionedWaveletDelta> serverDeltas = deserializedTransformedDeltas.tailSet( deserializedTransformedDeltas.floor( emptyDeserializedDeltaAtVersion(appliedVersion.getVersion())), true); if (serverDeltas.size() == 0) { LOG.warning("Got empty server set, but not submitting to head! " + submittedDelta); // Not strictly an invalid hash, but it's a related issue throw new InvalidHashException("Cannot submit to head"); } // Confirm that the target version/hash of this delta is valid. if (!serverDeltas.first().version.equals(appliedVersion)) { LOG.warning( "Mismatched hashes: expected: " + serverDeltas.first().version + " got: " + appliedVersion); // Don't leak the hash to the client in the error message. throw new InvalidHashException("Mismatched hashes at version " + appliedVersion.getVersion()); } ParticipantId clientAuthor = submittedDelta.getAuthor(); List<WaveletOperation> clientOps = submittedDelta.getOperations(); for (VersionedWaveletDelta d : serverDeltas) { // If the client delta transforms to nothing before we've traversed all the server // deltas, return the version at which the delta was obliterated (rather than the // current version) to ensure that delta submission is idempotent. if (clientOps.isEmpty()) { return new VersionedWaveletDelta(new WaveletDelta(clientAuthor, clientOps), d.version); } ParticipantId serverAuthor = d.delta.getAuthor(); List<WaveletOperation> serverOps = d.delta.getOperations(); if (clientAuthor.equals(serverAuthor) && clientOps.equals(serverOps)) { return d; } clientOps = transformOps(clientOps, clientAuthor, serverOps, serverAuthor); } return new VersionedWaveletDelta(new WaveletDelta(clientAuthor, clientOps), currentVersion); }
/** * Creates a new instance of this class while copying all values. * * @param scan The scan instance to copy from. * @throws IOException When copying the values fails. */ public HaeinsaScan(HaeinsaScan scan) throws IOException { startRow = scan.getStartRow(); stopRow = scan.getStopRow(); caching = scan.getCaching(); cacheBlocks = scan.getCacheBlocks(); Map<byte[], NavigableSet<byte[]>> fams = scan.getFamilyMap(); for (Map.Entry<byte[], NavigableSet<byte[]>> entry : fams.entrySet()) { byte[] fam = entry.getKey(); NavigableSet<byte[]> cols = entry.getValue(); if (cols != null && cols.size() > 0) { for (byte[] col : cols) { addColumn(fam, col); } } else { addFamily(fam); } } }
/** headSet returns set with keys in requested range */ public void testHeadSetContents() { NavigableSet set = set5(); SortedSet sm = set.headSet(four); assertTrue(sm.contains(one)); assertTrue(sm.contains(two)); assertTrue(sm.contains(three)); assertFalse(sm.contains(four)); assertFalse(sm.contains(five)); Iterator i = sm.iterator(); Object k; k = (Integer) (i.next()); assertEquals(one, k); k = (Integer) (i.next()); assertEquals(two, k); k = (Integer) (i.next()); assertEquals(three, k); assertFalse(i.hasNext()); sm.clear(); assertTrue(sm.isEmpty()); assertEquals(2, set.size()); assertEquals(four, set.first()); }
/** headSet returns set with keys in requested range */ public void testDescendingHeadSetContents() { NavigableSet set = dset5(); SortedSet sm = set.headSet(m4); assertTrue(sm.contains(m1)); assertTrue(sm.contains(m2)); assertTrue(sm.contains(m3)); assertFalse(sm.contains(m4)); assertFalse(sm.contains(m5)); Iterator i = sm.iterator(); Object k; k = (Integer) (i.next()); assertEquals(m1, k); k = (Integer) (i.next()); assertEquals(m2, k); k = (Integer) (i.next()); assertEquals(m3, k); assertFalse(i.hasNext()); sm.clear(); assertTrue(sm.isEmpty()); assertEquals(2, set.size()); assertEquals(m4, set.first()); }
public void setClusters(Set<SemiClusterDetails> clusters, int graphJobVertexMaxClusterCount) { int clusterCountToBeRemoved = 0; NavigableSet<SemiClusterDetails> setSort = new TreeSet<SemiClusterDetails>( new Comparator<SemiClusterDetails>() { @Override public int compare(SemiClusterDetails o1, SemiClusterDetails o2) { return Double.compare(o1.getSemiClusterScore(), o2.getSemiClusterScore()); } }); setSort.addAll(this.semiClusterContainThis); setSort.addAll(clusters); clusterCountToBeRemoved = setSort.size() - graphJobVertexMaxClusterCount; Iterator<SemiClusterDetails> itr = setSort.descendingIterator(); while (clusterCountToBeRemoved > 0) { itr.next(); itr.remove(); clusterCountToBeRemoved--; } this.semiClusterContainThis = setSort; }