Example 1
  /**
   * Performance and capacity tests.
   *
   * <p>5 seconds to send all notifications synchronously with 5 peers, 2000 Elements and a 400-byte
   * payload. The numbers given below are for the remote peer tester (java -jar
   * ehcache-1.x-remote-debugger.jar ehcache-distributed1.xml) running on a 10 Mbit Ethernet network
   * and are measured from the time the peer starts receiving to when it has fully received.
   */
  @Test
  public void testBigPutsPropagatesSynchronous() throws CacheException, InterruptedException {

    // Give everything a chance to start up
    StopWatch stopWatch = new StopWatch();
    Integer index;
    for (int i = 0; i < 2; i++) {
      for (int j = 0; j < 1000; j++) {
        index = Integer.valueOf(((1000 * i) + j));
        manager1
            .getCache("sampleCache3")
            .put(
                new Element(
                    index,
                    "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
                        + "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
                        + "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
                        + "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
                        + "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"));
      }
    }
    long elapsed = stopWatch.getElapsedTime();
    long putTime = ((elapsed / 1000));
    LOG.info("Put and Propagate Synchronously Elapsed time: " + putTime + " seconds");

    assertEquals(2000, manager1.getCache("sampleCache3").getSize());
    assertEquals(2000, manager2.getCache("sampleCache3").getSize());
    assertEquals(2000, manager3.getCache("sampleCache3").getSize());
    assertEquals(2000, manager4.getCache("sampleCache3").getSize());
    assertEquals(2000, manager5.getCache("sampleCache3").getSize());
  }
Example 2
  /** Drive everything to the point of breakage within a 64MB VM. */
  public void xTestHugePutsBreaksAsynchronous() throws CacheException, InterruptedException {

    // Give everything a chance to start up
    StopWatch stopWatch = new StopWatch();
    Integer index = null;
    for (int i = 0; i < 500; i++) {
      for (int j = 0; j < 1000; j++) {
        index = Integer.valueOf(((1000 * i) + j));
        cache1.put(
            new Element(
                index,
                "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
                    + "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
                    + "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
                    + "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
                    + "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"));
      }
    }
    long elapsed = stopWatch.getElapsedTime();
    long putTime = ((elapsed / 1000));
    LOG.info("Put Elapsed time: " + putTime);
    // assertTrue(putTime < 8);

    assertEquals(100000, cache1.getSize());

    Thread.sleep(100000);
    assertEquals(20000, manager2.getCache("sampleCache1").getSize());
    assertEquals(20000, manager3.getCache("sampleCache1").getSize());
    assertEquals(20000, manager4.getCache("sampleCache1").getSize());
    assertEquals(20000, manager5.getCache("sampleCache1").getSize());
  }
Example 3
  /** Tests the performance and correctness of gzip. */
  public void testGzipPerformance() throws IOException, InterruptedException {
    long initialMemoryUsed = memoryUsed();
    byte[] gzip = getGzipFileAsBytes();
    byte[] ungzipped = null;
    int size = 0;
    long timeTaken = 0;
    long finalMemoryUsed = 0;
    long incrementalMemoryUsed = 0;
    StopWatch stopWatch = new StopWatch();

    ungzipped = ungzip1(gzip);
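    // Discard the time measured so far so that the next getElapsedTime() call covers only the
    // timed loop below.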
    stopWatch.getElapsedTime();
    for (int i = 0; i < 50; i++) {
      gzip = gzip(ungzipped);
    }
    timeTaken = stopWatch.getElapsedTime() / 50;
    ungzipped = ungzip1(gzip);
    size = ungzipped.length;
    assertEquals(100000, size);
    finalMemoryUsed = memoryUsed();
    incrementalMemoryUsed = finalMemoryUsed - initialMemoryUsed;
    LOG.info(
        "Average gzip time: "
            + timeTaken
            + ". Memory used: "
            + incrementalMemoryUsed
            + ". Size: "
            + size);
  }
Example 4
  public long waitForCacheSize(long size, int maxSeconds, Ehcache... caches)
      throws InterruptedException {

    StopWatch stopWatch = new StopWatch();
    while (checkForCacheSize(size, caches)) {
      Thread.sleep(500);
      if (stopWatch.getElapsedTime() > maxSeconds * 1000) {
        fail("Caches still haven't reached the expected size after " + maxSeconds + " seconds");
      }
    }

    return stopWatch.getElapsedTime();
  }
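
A hedged usage sketch for the helper above; the manager names, cache name, expected size, and timeout are illustrative assumptions rather than values taken from these examples.

  // Hypothetical call site (inside a test that declares throws InterruptedException):
  // block until the replicated caches converge on 2000 entries, failing after 30 seconds.
  long waited =
      waitForCacheSize(
          2000, 30, manager2.getCache("sampleCache1"), manager3.getCache("sampleCache1"));
  LOG.info("Caches reached the expected size after " + waited + " ms");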
Example 5
  //    @Test
  public void testSpeedOfIteration() {
    StopWatch stopWatch = new StopWatch();
    Map map = new ConcurrentHashMap(100000);
    for (int i = 1; i <= 100000; i++) {
      map.put(i, i);
    }
    LOG.info("done putting: " + stopWatch.getElapsedTimeString());

    Collection collection = map.values();
    for (Object o : collection) {
      o.toString();
    }
    LOG.info(stopWatch.getElapsedTimeString());
  }
Example 6
  /**
   * Check we get reasonable results for 2000 entries, where entry 0 is accessed once, increasing up
   * to entry 1999, which is accessed 2000 times.
   *
   * <p>1 to 5000 population, with hit counts ranging from 1 to 500, not selecting lowest half. 5000
   * tests (S / Cost / No): 7 38 99.24% confidence; 8 27 99.46% confidence; 9 10 10 11300 4 99.92%
   * confidence; 12 2 20 11428 0 99.99% confidence.
   *
   * <p>1 to 5000 population, with hit counts ranging from 1 to 500, not selecting lowest quarter.
   * 5000 tests (S / No): 10 291 94.18% confidence; 20 15 30 11536 1 99.99% confidence.
   *
   * <p>For those with a statistical background, the branch of statistics that deals with this is
   * hypothesis testing and the Student's t-distribution. The larger your sample, the greater
   * confidence you can have in a hypothesis, in this case whether or not the "lowest" value lies in
   * the bottom half or quarter of the distribution. Adding samples rapidly increases confidence,
   * but the return from extra sampling rapidly diminishes.
   *
   * <p>Cost is not affected much by sample size. Profiling shows that most of the time is spent in
   * the iteration. If we had access to the array backing the Map, this would all be very fast.
   * Still, it is fast enough.
   *
   * <p>A 99.99% confidence level can be achieved that the "lowest" element is actually in the
   * bottom quarter of the hit count distribution.
   *
   * @throws IOException
   */
  public void testLowest() throws IOException {
    createMemoryStore(MemoryStoreEvictionPolicy.LFU, 5000);
    Element element = null;
    Element newElement = null;
    for (int i = 0; i < 10; i++) {
      newElement = new Element("" + i, new Date());
      store.put(newElement);
      int j;
      for (j = 0; j <= i; j++) {
        store.get("" + i);
      }
      if (i > 0) {
        element = ((LfuMemoryStore) store).findRelativelyUnused(newElement);
        assertTrue(!element.equals(newElement));
        assertTrue(element.getHitCount() < 2);
      }
    }

    int lowestQuarterNotIdentified = 0;

    long findTime = 0;
    StopWatch stopWatch = new StopWatch();
    for (int i = 10; i < 5000; i++) {
      store.put(new Element("" + i, new Date()));
      int j;
      int maximumHitCount = 0;
      for (j = 0; j <= i; j += 10) {
        store.get("" + i);
        maximumHitCount++;
      }

      stopWatch.getElapsedTime();
      element = ((LfuMemoryStore) store).findRelativelyUnused(newElement);
      findTime += stopWatch.getElapsedTime();
      long lowest = element.getHitCount();
      long bottomQuarter = (Math.round(maximumHitCount / 4.0) + 1);
      assertTrue(!element.equals(newElement));
      if (lowest > bottomQuarter) {
        lowestQuarterNotIdentified++;
        // LOG.info(i + " " + maximumHitCount + " " + element);
      }
    }
    LOG.info("Find time: " + findTime);
    assertTrue(findTime < 1500);
    LOG.info("Selections not in lowest quartile: " + lowestQuarterNotIdentified);
    assertTrue(lowestQuarterNotIdentified < 5);
  }
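
A brief aside on the confidence figures quoted in the Javadoc above (an approximation, not part of the original test): if the eviction policy samples n entries uniformly at random, the least-hit entry of that sample misses the bottom quarter of the hit-count distribution only when every sampled entry lands in the top three quarters, which happens with probability (3/4)^n. The confidence is therefore roughly 1 - (3/4)^n for the bottom quarter and 1 - (1/2)^n for the bottom half; n = 10 gives about 94.4% and n = 30 about 99.98%, in line with the empirical 94.18% and 99.99% reported above.

  /** Back-of-envelope check of the sampling confidence levels discussed in the Javadoc above. */
  public static void printSamplingConfidence() {
    for (int n : new int[] {7, 8, 10, 20, 30}) {
      // Probability that the least-hit element of a uniform random sample of size n
      // falls in the bottom half / bottom quarter of the hit-count distribution.
      double bottomHalf = 1 - Math.pow(0.5, n);
      double bottomQuarter = 1 - Math.pow(0.75, n);
      System.out.printf(
          "sample size %d: bottom half %.2f%%, bottom quarter %.2f%%%n",
          n, bottomHalf * 100, bottomQuarter * 100);
    }
  }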
Example 7
 /**
  * 376 µs per operation when gzipping each time; 0.1 µs if we compare hashCodes on the String and
  * only gzip when necessary.
  *
  * @throws java.io.IOException
  * @throws InterruptedException
  */
 @Test
 public void testGzipSanityAndPerformance() throws IOException, InterruptedException {
   String payload = createReferenceString();
   // warmup vm
   for (int i = 0; i < 10; i++) {
     byte[] compressed = PayloadUtil.gzip(payload.getBytes());
     // make sure we don't forget to close the stream
     assertTrue(compressed.length > 300);
     Thread.sleep(20);
   }
   int hashCode = payload.hashCode();
   StopWatch stopWatch = new StopWatch();
   for (int i = 0; i < 10000; i++) {
     if (hashCode != payload.hashCode()) {
       PayloadUtil.gzip(payload.getBytes());
     }
   }
   long elapsed = stopWatch.getElapsedTime();
   LOG.info("Gzip took " + elapsed / 10F + " µs");
 }
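
The Javadoc above argues for gzipping only when the payload has actually changed. Below is a minimal sketch of that idea; only PayloadUtil.gzip is taken from the examples, while the wrapper class itself is hypothetical. Note that matching hashCodes is a cheap heuristic rather than a guarantee that the string is unchanged.

 /** Hypothetical helper, not from the examples: recompress only when the payload appears changed. */
 static final class MemoizingCompressor {
   private int lastHashCode;
   private byte[] lastCompressed;

   byte[] compressIfChanged(String payload) throws java.io.IOException {
     int hashCode = payload.hashCode();
     // Recompress only when nothing is cached yet or the hashCode differs from last time.
     if (lastCompressed == null || hashCode != lastHashCode) {
       lastCompressed = PayloadUtil.gzip(payload.getBytes());
       lastHashCode = hashCode;
     }
     return lastCompressed;
   }
 }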
Example 8
 /**
  * 169 µs per operation.
  *
  * @throws IOException
  * @throws InterruptedException
  */
 @Test
 public void testUngzipPerformance() throws IOException, InterruptedException {
   String payload = createReferenceString();
   int length = payload.toCharArray().length;
   byte[] original = payload.getBytes();
   int byteLength = original.length;
   assertEquals(length, byteLength);
   byte[] compressed = PayloadUtil.gzip(original);
   // warmup vm
   for (int i = 0; i < 10; i++) {
     byte[] uncompressed = PayloadUtil.ungzip(compressed);
     uncompressed.hashCode();
     assertEquals(original.length, uncompressed.length);
     Thread.sleep(20);
   }
   StopWatch stopWatch = new StopWatch();
   for (int i = 0; i < 10000; i++) {
     PayloadUtil.ungzip(compressed);
   }
   long elapsed = stopWatch.getElapsedTime();
   LOG.info("Ungzip took " + elapsed / 10000F + " µs");
 }
Example 9
  /**
   * Based on the gunzip1 implementation.
   *
   * <p>Takes 9 ms for the 100 kB test document on the reference machine.
   *
   * @throws IOException
   * @throws AlreadyGzippedException
   * @throws InterruptedException
   */
  public void testUsedGunzipImplementationPerformance()
      throws IOException, AlreadyGzippedException, InterruptedException {
    byte[] gzip = getGzipFileAsBytes();
    Collection headers = new ArrayList();
    String[] header = new String[] {"Content-Encoding", "gzip"};
    headers.add(header);
    PageInfo pageInfo = new PageInfo(200, "text/plain", headers, new ArrayList(), gzip, true);
    long initialMemoryUsed = memoryUsed();
    StopWatch stopWatch = new StopWatch();
    int size = 0;
    long timeTaken = 0;
    long finalMemoryUsed = 0;
    long incrementalMemoryUsed = 0;
    byte[] ungzipped = null;

    // warmup JVM
    for (int i = 0; i < 5; i++) {
      ungzipped = pageInfo.getUngzippedBody();
      Thread.sleep(200);
    }
    stopWatch.getElapsedTime();

    for (int i = 0; i < 50; i++) {
      ungzipped = pageInfo.getUngzippedBody();
    }
    size = ungzipped.length;
    timeTaken = stopWatch.getElapsedTime() / 50;
    finalMemoryUsed = memoryUsed();
    incrementalMemoryUsed = finalMemoryUsed - initialMemoryUsed;
    LOG.info(
        "Average gunzip time: "
            + timeTaken
            + ". Memory used: "
            + incrementalMemoryUsed
            + ". Size: "
            + size);
    assertEquals(100000, size);
    assertTrue(timeTaken < 30);
  }
Example 10
  /**
   * Check we get reasonable results for 2000 entries, where entry 0 is accessed once, increasing up
   * to entry 1999, which is accessed 2000 times.
   *
   * <p>1 to 5000 population, with hit counts ranging from 1 to 500, not selecting lowest half. 5000
   * tests (Samples / Cost / No): 7 38 99.24% confidence; 8 27 99.46% confidence; 9 10 10 11300 4
   * 99.92% confidence; 12 2 20 11428 0 99.99% confidence.
   *
   * <p>1 to 5000 population, with hit counts ranging from 1 to 500, not selecting lowest quarter.
   * 5000 tests (S / No): 10 291 94.18% confidence; 20 15 30 11536 1 99.99% confidence.
   *
   * <p>For those with a statistical background, the branch of statistics that deals with this is
   * hypothesis testing and the Student's t-distribution. The larger your sample, the greater
   * confidence you can have in a hypothesis, in this case whether or not the "lowest" value lies in
   * the bottom half or quarter of the distribution. Adding samples rapidly increases confidence,
   * but the return from extra sampling rapidly diminishes.
   *
   * <p>Cost is not affected much by sample size. Profiling shows that most of the time is spent in
   * the iteration. If we had access to the array backing the Map, this would all be very fast.
   * Still, it is fast enough.
   *
   * <p>A 99.99% confidence level can be achieved that the "lowest" element is actually in the
   * bottom quarter of the hit count distribution.
   *
   * <p>Performance: with a sample size of 10, 523 ms for 5000 runs = 104 µs per run; with a sample
   * size of 30, 628 ms for 5000 runs = 125 µs per run.
   *
   * @throws java.io.IOException
   */
  @Test
  public void testLowest() throws IOException {
    createMemoryOnlyStore(MemoryStoreEvictionPolicy.LFU, 5000);
    // fully populate the store first, otherwise we just find nulls
    for (int i = 0; i < 5000; i++) {
      Element newElement = new Element("" + i, new Date());
      store.put(newElement);
    }

    Element element = null;

    Element newElement = null;
    for (int i = 0; i < 10; i++) {
      newElement = new Element("" + i, new Date());
      store.put(newElement);
      int j;
      for (j = 0; j <= i; j++) {
        store.get("" + i);
      }
      if (i > 0) {
        try {
          element =
              (Element)
                  GET_EVICTION_TARGET.invoke(
                      PRIMARY_FACTORY.get(store), new Object(), Integer.MAX_VALUE);
        } catch (Exception e) {
          throw new RuntimeException(e);
        }
        assertTrue(!element.equals(newElement));
        assertTrue(element.getHitCount() < 2);
      }
    }

    int lowestQuarterNotIdentified = 0;

    long findTime = 0;
    StopWatch stopWatch = new StopWatch();
    for (int i = 10; i < 5000; i++) {
      store.put(new Element("" + i, new Date()));
      int j;
      int maximumHitCount = 0;
      for (j = 0; j <= i; j += 10) {
        store.get("" + i);
        maximumHitCount++;
      }

      stopWatch.getElapsedTime();
      try {
        element =
            (Element)
                GET_EVICTION_TARGET.invoke(
                    PRIMARY_FACTORY.get(store), new Object(), Integer.MAX_VALUE);
      } catch (Exception e) {
        throw new RuntimeException(e);
      }
      findTime += stopWatch.getElapsedTime();
      long lowest = element.getHitCount();
      long bottomQuarter = (Math.round(maximumHitCount / 4.0) + 1);
      assertTrue(!element.equals(newElement));
      if (lowest > bottomQuarter) {
        LOG.info(
            ""
                + element.getKey()
                + " hit count: "
                + element.getHitCount()
                + " bottomQuarter: "
                + bottomQuarter);
        lowestQuarterNotIdentified++;
      }
    }
    LOG.info("Find time: " + findTime);
    assertTrue(findTime < 200);
    LOG.info("Selections not in lowest quartile: " + lowestQuarterNotIdentified);
    assertTrue(lowestQuarterNotIdentified <= 10);
  }
Example 11
  /** Tests the performance of gunzip using a variety of implementations. */
  public void testGunzipPerformance() throws IOException, InterruptedException {
    long initialMemoryUsed = memoryUsed();
    byte[] gzip = getGzipFileAsBytes();
    byte[] ungzipped = null;
    int size = 0;
    long timeTaken = 0;
    long finalMemoryUsed = 0;
    long incrementalMemoryUsed = 0;
    StopWatch stopWatch = new StopWatch();

    // warmup JVM
    for (int i = 0; i < 5; i++) {
      ungzipped = ungzip1(gzip);
      ungzipped = ungzip2(gzip);
      ungzipped = ungzip3(gzip);
      ungzipped = ungzip4(gzip);
      ungzipped = ungzip5(gzip);
      Thread.sleep(200);
    }

    stopWatch.getElapsedTime();
    for (int i = 0; i < 50; i++) {
      ungzipped = ungzip1(gzip);
    }
    size = ungzipped.length;
    timeTaken = stopWatch.getElapsedTime() / 50;
    finalMemoryUsed = memoryUsed();
    incrementalMemoryUsed = finalMemoryUsed - initialMemoryUsed;
    LOG.info(
        "Average gunzip time: "
            + timeTaken
            + ". Memory used: "
            + incrementalMemoryUsed
            + ". Size: "
            + size);
    assertEquals(100000, size);

    stopWatch.getElapsedTime();
    for (int i = 0; i < 50; i++) {
      ungzipped = ungzip2(gzip);
    }
    size = ungzipped.length;
    timeTaken = stopWatch.getElapsedTime() / 50;
    finalMemoryUsed = memoryUsed();
    incrementalMemoryUsed = finalMemoryUsed - initialMemoryUsed;
    LOG.info(
        "Average gunzip time: "
            + timeTaken
            + ". Memory used: "
            + incrementalMemoryUsed
            + ". Size: "
            + size);
    assertEquals(100000, size);

    stopWatch.getElapsedTime();
    for (int i = 0; i < 50; i++) {
      ungzipped = ungzip3(gzip);
    }
    size = ungzipped.length;
    timeTaken = stopWatch.getElapsedTime() / 50;
    finalMemoryUsed = memoryUsed();
    incrementalMemoryUsed = finalMemoryUsed - initialMemoryUsed;
    LOG.info(
        "Average gunzip time: "
            + timeTaken
            + ". Memory used: "
            + incrementalMemoryUsed
            + ". Size: "
            + size);
    assertEquals(100000, size);

    stopWatch.getElapsedTime();
    for (int i = 0; i < 50; i++) {
      ungzipped = ungzip5(gzip);
    }
    size = ungzipped.length;
    timeTaken = stopWatch.getElapsedTime() / 50;
    finalMemoryUsed = memoryUsed();
    incrementalMemoryUsed = finalMemoryUsed - initialMemoryUsed;
    LOG.info(
        "Average gunzip time: "
            + timeTaken
            + ". Memory used: "
            + incrementalMemoryUsed
            + ". Size: "
            + size);
    assertEquals(100000, size);

    // Throws off the numbers, so it goes last.
    stopWatch.getElapsedTime();
    for (int i = 0; i < 5; i++) {
      ungzipped = ungzip4(gzip);
    }
    size = ungzipped.length;
    timeTaken = stopWatch.getElapsedTime() / 5;
    finalMemoryUsed = memoryUsed();
    incrementalMemoryUsed = finalMemoryUsed - initialMemoryUsed;
    LOG.info(
        "Average gunzip time: "
            + timeTaken
            + ". Memory used: "
            + incrementalMemoryUsed
            + ". Size: "
            + size);
    assertEquals(100000, size);
  }
Example 12
  /** Performance and capacity tests. */
  @Test
  public void testBootstrap() throws CacheException, InterruptedException, RemoteException {

    // load up some data
    StopWatch stopWatch = new StopWatch();
    Integer index = null;
    for (int i = 0; i < 2; i++) {
      for (int j = 0; j < 1000; j++) {
        index = Integer.valueOf(((1000 * i) + j));
        cache1.put(
            new Element(
                index,
                "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
                    + "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
                    + "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
                    + "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
                    + "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"));
      }
    }
    long elapsed = stopWatch.getElapsedTime();
    long putTime = ((elapsed / 1000));
    LOG.info("Put Elapsed time: " + putTime);

    assertEquals(2000, cache1.getSize());

    Thread.sleep(7000);
    assertEquals(2000, manager2.getCache("sampleCache1").getSize());
    assertEquals(2000, manager3.getCache("sampleCache1").getSize());
    assertEquals(2000, manager4.getCache("sampleCache1").getSize());
    assertEquals(2000, manager5.getCache("sampleCache1").getSize());

    // now test bootstrap
    manager1.addCache("bootStrapResults");
    Cache cache = manager1.getCache("bootStrapResults");
    List cachePeers = manager1.getCacheManagerPeerProvider("RMI").listRemoteCachePeers(cache1);
    CachePeer cachePeer = (CachePeer) cachePeers.get(0);

    List keys = cachePeer.getKeys();
    assertEquals(2000, keys.size());

    Element firstElement = cachePeer.getQuiet((Serializable) keys.get(0));
    long size = firstElement.getSerializedSize();
    assertEquals(504, size);

    int chunkSize = (int) (5000000 / size);

    List requestChunk = new ArrayList();
    for (int i = 0; i < keys.size(); i++) {
      Serializable serializable = (Serializable) keys.get(i);
      requestChunk.add(serializable);
      if (requestChunk.size() == chunkSize) {
        fetchAndPutElements(cache, requestChunk, cachePeer);
        requestChunk.clear();
      }
    }
    // get leftovers
    fetchAndPutElements(cache, requestChunk, cachePeer);

    assertEquals(keys.size(), cache.getSize());
  }
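
The chunked bootstrap above delegates to a fetchAndPutElements helper that is not shown. One plausible shape is sketched below, hedged: it assumes CachePeer exposes a bulk getElements(List) call for the remote fetch; if the actual helper in the test differs, treat this purely as an illustration.

  // Plausible sketch of the fetchAndPutElements helper used above (assumed, not shown in the example).
  private void fetchAndPutElements(Ehcache cache, List requestChunk, CachePeer cachePeer)
      throws RemoteException {
    List receivedChunk = cachePeer.getElements(requestChunk);
    for (int i = 0; i < receivedChunk.size(); i++) {
      Element element = (Element) receivedChunk.get(i);
      // The element may have expired or been removed between getKeys() and this fetch.
      if (element != null) {
        cache.put(element);
      }
    }
  }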