@Test
public void testUnsignedTreeSetSerializable() {
  NavigableSet<Integer> set =
      new TreeSet<>(
          (Comparator<Integer> & Serializable) ((x, y) -> Integer.compareUnsigned(x, y)));
  set.addAll(Arrays.asList(-100, 0, 100));
  assertEquals(0, set.first().intValue());
  assertEquals(-100, set.last().intValue());

  byte[] serializedSet = writeSetToBytes(set, true);
  NavigableSet<Integer> set1 = readSetFromBytes(serializedSet);
  assertEquals(0, set1.first().intValue());
  assertEquals(-100, set1.last().intValue());
  assertEquals(set, set1);
}

public void testKeyPutRandomUniform() throws Exception {
  final NavigableSet<Integer> keys = new TreeSet<Integer>();
  long seed = System.currentTimeMillis();
  System.out.println("testKeyPutRandomUniform seed : " + seed);
  final MersenneTwisterFast random = new MersenneTwisterFast(seed);

  // fill the tree and the reference set with uniformly distributed random keys
  while (keys.size() < KEYS_COUNT) {
    int key = random.nextInt(Integer.MAX_VALUE);
    sbTree.put(key, createValue(key, OSBTreeValuePage.MAX_BINARY_VALUE_SIZE + 4));
    keys.add(key);
    doReset();
  }

  // the tree must agree with the reference set on its boundaries and on every value
  Assert.assertEquals(sbTree.firstKey(), keys.first());
  doReset();
  Assert.assertEquals(sbTree.lastKey(), keys.last());
  doReset();

  for (int key : keys) {
    Assert.assertEquals(
        sbTree.get(key), createValue(key, OSBTreeValuePage.MAX_BINARY_VALUE_SIZE + 4));
    doReset();
  }
}

@Test
public void WriteDBInt_lastKey_set_middle() {
  int numberOfRecords = 1000;

  /* Creates connections to MapDB */
  DB db1 = DBMaker.memoryDB().transactionDisable().make();

  /* Creates maps */
  NavigableSet<Integer> map1 = db1.treeSet("column1");

  /* Inserts initial values in maps */
  for (int i = 0; i < numberOfRecords; i++) {
    map1.add(i);
  }

  assertEquals((Object) (numberOfRecords - 1), map1.last());

  map1.clear();

  /* Inserts some values in maps */
  for (int i = 100; i < 110; i++) {
    map1.add(i);
  }

  assertEquals(10, map1.size());
  assertFalse(map1.isEmpty());
  assertEquals((Object) 109, map1.last());
  assertEquals((Object) 100, map1.first());
}

@Test
public void testUnsignedTreeSetMRef() {
  NavigableSet<Integer> set = new TreeSet<>(Integer::compareUnsigned);
  set.addAll(Arrays.asList(-100, 0, 100));
  assertEquals(0, set.first().intValue());
  assertEquals(-100, set.last().intValue());
}

/**
 * Finds the range of server deltas needed to transform against, then transforms all client ops
 * against the server ops.
 */
private VersionedWaveletDelta transformSubmittedDelta(
    WaveletDelta submittedDelta, HashedVersion appliedVersion)
    throws OperationException, InvalidHashException {
  NavigableSet<VersionedWaveletDelta> serverDeltas =
      deserializedTransformedDeltas.tailSet(
          deserializedTransformedDeltas.floor(
              emptyDeserializedDeltaAtVersion(appliedVersion.getVersion())),
          true);

  if (serverDeltas.size() == 0) {
    LOG.warning("Got empty server set, but not submitting to head! " + submittedDelta);
    // Not strictly an invalid hash, but it's a related issue
    throw new InvalidHashException("Cannot submit to head");
  }

  // Confirm that the target version/hash of this delta is valid.
  if (!serverDeltas.first().version.equals(appliedVersion)) {
    LOG.warning(
        "Mismatched hashes: expected: "
            + serverDeltas.first().version
            + " got: "
            + appliedVersion);
    // Don't leak the hash to the client in the error message.
    throw new InvalidHashException("Mismatched hashes at version " + appliedVersion.getVersion());
  }

  ParticipantId clientAuthor = submittedDelta.getAuthor();
  List<WaveletOperation> clientOps = submittedDelta.getOperations();
  for (VersionedWaveletDelta d : serverDeltas) {
    // If the client delta transforms to nothing before we've traversed all the server
    // deltas, return the version at which the delta was obliterated (rather than the
    // current version) to ensure that delta submission is idempotent.
    if (clientOps.isEmpty()) {
      return new VersionedWaveletDelta(new WaveletDelta(clientAuthor, clientOps), d.version);
    }
    ParticipantId serverAuthor = d.delta.getAuthor();
    List<WaveletOperation> serverOps = d.delta.getOperations();
    if (clientAuthor.equals(serverAuthor) && clientOps.equals(serverOps)) {
      return d;
    }
    clientOps = transformOps(clientOps, clientAuthor, serverOps, serverAuthor);
  }
  return new VersionedWaveletDelta(new WaveletDelta(clientAuthor, clientOps), currentVersion);
}

@Test
public void testUnsignedTreeSetNotSerializable() {
  NavigableSet<Integer> set = new TreeSet<>((x, y) -> Integer.compareUnsigned(x, y));
  set.addAll(Arrays.asList(-100, 0, 100));
  assertEquals(0, set.first().intValue());
  assertEquals(-100, set.last().intValue());

  byte[] serializedSet = writeSetToBytes(set, false);
  assertEquals(null, serializedSet);
}

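Both unsigned-comparator tests above rely on writeSetToBytes and readSetFromBytes helpers whose bodies are not included in this collection. Below is a minimal sketch, assuming plain java.io object serialization (imports from java.io assumed) and assuming the boolean flag records whether the caller expects the set, including its comparator, to serialize; everything beyond what the two tests assert is a guess.

// Hypothetical helpers, consistent with the assertions in the two tests above:
// a TreeSet whose comparator is not Serializable fails to serialize, and the
// helper then returns null, which is what testUnsignedTreeSetNotSerializable expects.
private static byte[] writeSetToBytes(NavigableSet<Integer> set, boolean expectSerializable) {
  try (ByteArrayOutputStream bos = new ByteArrayOutputStream();
      ObjectOutputStream oos = new ObjectOutputStream(bos)) {
    oos.writeObject(set);
    oos.flush();
    return bos.toByteArray();
  } catch (NotSerializableException e) {
    if (expectSerializable) {
      throw new AssertionError("set was expected to be serializable", e);
    }
    return null; // e.g. the comparator lambda was not cast to Serializable
  } catch (IOException e) {
    throw new UncheckedIOException(e);
  }
}

@SuppressWarnings("unchecked")
private static NavigableSet<Integer> readSetFromBytes(byte[] bytes) {
  try (ObjectInputStream ois = new ObjectInputStream(new ByteArrayInputStream(bytes))) {
    return (NavigableSet<Integer>) ois.readObject();
  } catch (IOException | ClassNotFoundException e) {
    throw new RuntimeException(e);
  }
}
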
public void testKeyDeleteRandomGaussian() throws Exception {
  NavigableSet<Integer> keys = new TreeSet<Integer>();
  long seed = System.currentTimeMillis();
  System.out.println("testKeyDeleteRandomGaussian seed : " + seed);
  MersenneTwisterFast random = new MersenneTwisterFast(seed);

  // fill the tree and the reference set with Gaussian-distributed non-negative keys
  while (keys.size() < KEYS_COUNT) {
    int key = (int) (random.nextGaussian() * Integer.MAX_VALUE / 2 + Integer.MAX_VALUE);
    if (key < 0) continue;

    sbTree.put(key, createValue(key, OSBTreeValuePage.MAX_BINARY_VALUE_SIZE + 4));
    keys.add(key);
    Assert.assertEquals(
        sbTree.get(key), createValue(key, OSBTreeValuePage.MAX_BINARY_VALUE_SIZE + 4));
    doReset();
  }

  // delete every third key from both the tree and the reference set
  Iterator<Integer> keysIterator = keys.iterator();
  while (keysIterator.hasNext()) {
    int key = keysIterator.next();
    if (key % 3 == 0) {
      sbTree.remove(key);
      keysIterator.remove();
    }
    doReset();
  }

  // the tree must agree with the reference set on its boundaries and on every remaining value
  Assert.assertEquals(sbTree.firstKey(), keys.first());
  doReset();
  Assert.assertEquals(sbTree.lastKey(), keys.last());
  doReset();

  for (int key : keys) {
    if (key % 3 == 0) {
      Assert.assertNull(sbTree.get(key));
    } else {
      Assert.assertEquals(
          sbTree.get(key), createValue(key, OSBTreeValuePage.MAX_BINARY_VALUE_SIZE + 4));
    }
    doReset();
  }
}

protected boolean useLogarithmicProblemScale(List<XYSeries> seriesList) {
  NavigableSet<Double> xValueSet = new TreeSet<Double>();
  int xValueListSize = 0;
  for (XYSeries series : seriesList) {
    for (XYDataItem dataItem : (List<XYDataItem>) series.getItems()) {
      xValueSet.add(dataItem.getXValue());
      xValueListSize++;
    }
  }
  if (xValueListSize < LOG_SCALE_MIN_DATASETS_COUNT) {
    return false;
  }
  // If 60% of the points are in 20% of the value space, use a logarithmic scale
  double threshold = 0.2 * (xValueSet.last() - xValueSet.first());
  int belowThresholdCount = xValueSet.headSet(threshold).size();
  return belowThresholdCount >= (0.6 * xValueSet.size());
}

public static void routing(Number160 key, Peer[] peers, int start) {
  System.out.println("routing: searching for key " + key);
  NavigableSet<PeerAddress> pa1 = new TreeSet<PeerAddress>(PeerMap.createComparator(key));
  NavigableSet<PeerAddress> queried = new TreeSet<PeerAddress>(PeerMap.createComparator(key));
  Number160 result = Number160.ZERO;
  Number160 resultPeer = new Number160("0xd75d1a3d57841fbc9e2a3d175d6a35dc2e15b9f");
  int round = 0;
  while (!resultPeer.equals(result)) {
    System.out.println("round " + round);
    round++;
    pa1.addAll(peers[start].getPeerBean().peerMap().getAll());
    queried.add(peers[start].getPeerAddress());
    System.out.println("closest so far: " + queried.first());
    PeerAddress next = pa1.pollFirst();
    while (queried.contains(next)) {
      next = pa1.pollFirst();
    }
    result = next.getPeerId();
    start = findNr(next.getPeerId().toString(), peers);
  }
}

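routing(...) also calls a findNr helper that is not shown here. A hypothetical version, using only the accessors already used in the snippet above (getPeerAddress() and getPeerId()), would locate the array index of the next peer to query:

// Hypothetical helper: returns the index in peers of the node whose ID matches the
// given hex string, or -1 if no such peer exists. The real implementation is not shown.
private static int findNr(String peerId, Peer[] peers) {
  for (int i = 0; i < peers.length; i++) {
    if (peers[i].getPeerAddress().getPeerId().toString().equals(peerId)) {
      return i;
    }
  }
  return -1;
}
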
/** headSet returns set with keys in requested range */
public void testDescendingHeadSetContents() {
  NavigableSet set = dset5();
  SortedSet sm = set.headSet(m4);
  assertTrue(sm.contains(m1));
  assertTrue(sm.contains(m2));
  assertTrue(sm.contains(m3));
  assertFalse(sm.contains(m4));
  assertFalse(sm.contains(m5));
  Iterator i = sm.iterator();
  Object k;
  k = (Integer) (i.next());
  assertEquals(m1, k);
  k = (Integer) (i.next());
  assertEquals(m2, k);
  k = (Integer) (i.next());
  assertEquals(m3, k);
  assertFalse(i.hasNext());
  sm.clear();
  assertTrue(sm.isEmpty());
  assertEquals(2, set.size());
  assertEquals(m4, set.first());
}

/** headSet returns set with keys in requested range */
public void testHeadSetContents() {
  NavigableSet set = set5();
  SortedSet sm = set.headSet(four);
  assertTrue(sm.contains(one));
  assertTrue(sm.contains(two));
  assertTrue(sm.contains(three));
  assertFalse(sm.contains(four));
  assertFalse(sm.contains(five));
  Iterator i = sm.iterator();
  Object k;
  k = (Integer) (i.next());
  assertEquals(one, k);
  k = (Integer) (i.next());
  assertEquals(two, k);
  k = (Integer) (i.next());
  assertEquals(three, k);
  assertFalse(i.hasNext());
  sm.clear();
  assertTrue(sm.isEmpty());
  assertEquals(2, set.size());
  assertEquals(four, set.first());
}

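The two headSet tests above use fixtures (set5(), dset5(), one..five, m1..m5) that are defined elsewhere. A hypothetical construction consistent with their assertions follows: set5() holds 1..5 in natural order, while dset5() exposes -1..-5 through a descending view, so headSet(m4) sees m1, m2 and m3, and clearing it leaves two elements with m4 first.

// Hypothetical fixtures, chosen only so that the assertions above hold; the originals
// are not part of this collection.
static final Integer one = 1, two = 2, three = 3, four = 4, five = 5;
static final Integer m1 = -1, m2 = -2, m3 = -3, m4 = -4, m5 = -5;

static NavigableSet<Integer> set5() {
  NavigableSet<Integer> set = new TreeSet<>();
  set.addAll(Arrays.asList(one, two, three, four, five));
  return set;
}

static NavigableSet<Integer> dset5() {
  NavigableSet<Integer> set = new TreeSet<>();
  set.addAll(Arrays.asList(m1, m2, m3, m4, m5));
  // descending view: iteration order is -1, -2, -3, -4, -5
  return set.descendingSet();
}
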
public void testKeyDeleteRandomUniform() throws Exception {
  NavigableSet<Integer> keys = new TreeSet<Integer>();

  // fill the tree and the reference set with sequential keys
  for (int i = 0; i < KEYS_COUNT; i++) {
    sbTree.put(i, createValue(i, OSBTreeValuePage.MAX_BINARY_VALUE_SIZE + 4));
    keys.add(i);
    doReset();
  }

  // delete every third key from both the tree and the reference set
  Iterator<Integer> keysIterator = keys.iterator();
  while (keysIterator.hasNext()) {
    int key = keysIterator.next();
    if (key % 3 == 0) {
      sbTree.remove(key);
      keysIterator.remove();
    }
    doReset();
  }

  // the tree must agree with the reference set on its boundaries and on every remaining value
  Assert.assertEquals(sbTree.firstKey(), keys.first());
  doReset();
  Assert.assertEquals(sbTree.lastKey(), keys.last());
  doReset();

  for (int key : keys) {
    if (key % 3 == 0) {
      Assert.assertNull(sbTree.get(key));
    } else {
      Assert.assertEquals(
          sbTree.get(key), createValue(key, OSBTreeValuePage.MAX_BINARY_VALUE_SIZE + 4));
    }
    doReset();
  }
}

RandomSelection select(boolean narrow, boolean mixInNotPresentItems, boolean permitReversal) {
  ThreadLocalRandom random = ThreadLocalRandom.current();
  NavigableSet<Integer> canonicalSet = this.canonical;
  BTreeSet<Integer> testAsSet = this.test;
  List<Integer> canonicalList = new ArrayList<>(canonicalSet);
  BTreeSet<Integer> testAsList = this.test;
  Assert.assertEquals(canonicalSet.size(), testAsSet.size());
  Assert.assertEquals(canonicalList.size(), testAsList.size());

  // sometimes select keys first, so we cover full range
  List<Integer> allKeys = randomKeys(canonical, mixInNotPresentItems);
  List<Integer> keys = allKeys;

  int narrowCount = random.nextInt(3);
  while (narrow && canonicalList.size() > 10 && keys.size() > 10 && narrowCount-- > 0) {
    boolean useLb = random.nextBoolean();
    boolean useUb = random.nextBoolean();
    if (!(useLb | useUb)) continue;

    // select a range smaller than the total span when we have more narrowing iterations left
    int indexRange = keys.size() / (narrowCount + 1);

    boolean lbInclusive = true;
    Integer lbKey = canonicalList.get(0);
    int lbKeyIndex = 0, lbIndex = 0;

    boolean ubInclusive = true;
    Integer ubKey = canonicalList.get(canonicalList.size() - 1);
    int ubKeyIndex = keys.size(), ubIndex = canonicalList.size();

    if (useLb) {
      lbKeyIndex = random.nextInt(0, indexRange - 1);
      Integer candidate = keys.get(lbKeyIndex);
      if (useLb = (candidate > lbKey && candidate <= ubKey)) {
        lbInclusive = random.nextBoolean();
        lbKey = keys.get(lbKeyIndex);
        lbIndex = Collections.binarySearch(canonicalList, lbKey);
        if (lbIndex >= 0 && !lbInclusive) lbIndex++;
        else if (lbIndex < 0) lbIndex = -1 - lbIndex;
      }
    }
    if (useUb) {
      ubKeyIndex = random.nextInt(Math.max(lbKeyIndex, keys.size() - indexRange), keys.size() - 1);
      Integer candidate = keys.get(ubKeyIndex);
      if (useUb = (candidate < ubKey && candidate >= lbKey)) {
        ubInclusive = random.nextBoolean();
        ubKey = keys.get(ubKeyIndex);
        ubIndex = Collections.binarySearch(canonicalList, ubKey);
        if (ubIndex >= 0 && ubInclusive) {
          ubIndex++;
        } else if (ubIndex < 0) ubIndex = -1 - ubIndex;
      }
    }
    if (ubIndex < lbIndex) {
      ubIndex = lbIndex;
      ubKey = lbKey;
      ubInclusive = false;
    }
    canonicalSet =
        !useLb
            ? canonicalSet.headSet(ubKey, ubInclusive)
            : !useUb
                ? canonicalSet.tailSet(lbKey, lbInclusive)
                : canonicalSet.subSet(lbKey, lbInclusive, ubKey, ubInclusive);
    testAsSet =
        !useLb
            ? testAsSet.headSet(ubKey, ubInclusive)
            : !useUb
                ? testAsSet.tailSet(lbKey, lbInclusive)
                : testAsSet.subSet(lbKey, lbInclusive, ubKey, ubInclusive);
    keys = keys.subList(lbKeyIndex, ubKeyIndex);
    canonicalList = canonicalList.subList(lbIndex, ubIndex);
    testAsList = testAsList.subList(lbIndex, ubIndex);
    Assert.assertEquals(canonicalSet.size(), testAsSet.size());
    Assert.assertEquals(canonicalList.size(), testAsList.size());
  }

  // possibly restore full set of keys, to test case where we are provided existing keys that
  // are out of bounds
  if (keys != allKeys && random.nextBoolean()) keys = allKeys;

  Comparator<Integer> comparator = naturalOrder();
  if (permitReversal && random.nextBoolean()) {
    if (allKeys != keys) keys = new ArrayList<>(keys);
    if (canonicalSet != canonical) canonicalList = new ArrayList<>(canonicalList);
    Collections.reverse(keys);
    Collections.reverse(canonicalList);
    testAsList = testAsList.descendingSet();
    canonicalSet = canonicalSet.descendingSet();
    testAsSet = testAsSet.descendingSet();
    comparator = reverseOrder();
  }

  Assert.assertEquals(canonicalSet.size(), testAsSet.size());
  Assert.assertEquals(canonicalList.size(), testAsList.size());
  if (!canonicalSet.isEmpty()) {
    Assert.assertEquals(canonicalSet.first(), canonicalList.get(0));
    Assert.assertEquals(canonicalSet.last(), canonicalList.get(canonicalList.size() - 1));
    Assert.assertEquals(canonicalSet.first(), testAsSet.first());
    Assert.assertEquals(canonicalSet.last(), testAsSet.last());
    Assert.assertEquals(canonicalSet.first(), testAsList.get(0));
    Assert.assertEquals(canonicalSet.last(), testAsList.get(testAsList.size() - 1));
  }
  return new RandomSelection(
      keys, canonicalSet, testAsSet, canonicalList, testAsList, comparator);
}

/**
 * Attempts to find an EC number for every domain in {@code census}, skipping those which are
 * already listed in this ECFinder. Prints to {@code output} periodically.
 */
public void buildFromCensus(CensusResultList census, File output) {
  ScopDatabase scop = ScopFactory.getSCOP();
  CensusSignificance sig = CensusSignificanceFactory.forCeSymmOrd();
  int i = 0;
  for (CensusResult result : census.getEntries()) {
    try {
      String scopId = result.getId();
      if (this.ecsByAsymmDomain.containsKey(scopId)
          || this.ecsBySymmDomain.containsKey(scopId)
          || ecsByUnknownDomain.contains(scopId)) {
        continue;
      }
      ScopDomain domain = scop.getDomainByScopID(scopId);
      if (domain == null) {
        logger.error(result.getId() + " is null");
        continue;
      }

      // got a result; what's its EC?
      // we need to find the correct polymers corresponding to the domain
      // note that this still isn't perfect, since we don't know what part of the polymer
      // actually does the function
      List<RCSBPolymer> polymers = new ArrayList<RCSBPolymer>();
      Set<String> chains = domain.getChains();
      RCSBDescription desc = RCSBDescriptionFactory.get(domain.getPdbId());
      for (RCSBPolymer polymer : desc.getPolymers()) {
        for (Character chain : polymer.getChains()) {
          if (chains.contains(String.valueOf(chain))) {
            polymers.add(polymer);
            break;
          }
        }
      }

      // get the EC numbers
      // use a set because we don't want > 1 just because we have duplicates
      NavigableSet<String> ecs = new TreeSet<String>();
      for (RCSBPolymer polymer : polymers) {
        String ec = polymer.getEnzClass();
        if (ec != null) ecs.add(ec);
      }

      if (ecs.size() == 1) {
        String ec = ecs.first();
        if (sig.isSignificant(result)) {
          ecsBySymmDomain.put(scopId, ec);
        } else {
          ecsByAsymmDomain.put(scopId, ec);
        }
      } else if (ecs.size() > 1) {
        logger.info(
            "Found different EC numbers for "
                + domain.getScopId()); // technically, this doesn't mean anything's wrong
      } else {
        // logger.debug("Didn't find EC for " + scopId);
        ecsByUnknownDomain.add(scopId);
      }

      if (i > 0 && i % 100 == 0) {
        print(output);
        logger.debug("Working on #" + i);
      }
    } catch (RuntimeException e) {
      e.printStackTrace();
      logger.error(e);
    } finally {
      i++;
    }
  }
}