@Test
  public void testOverTcpGetPerf() throws IOException, InterruptedException {
    String name = TMP + "/testOverTcpGetPerf0";
    String name2 = TMP + "/testOverTcpGetPerf2";
    ChronicleTools.deleteOnExit(name);
    ChronicleTools.deleteOnExit(name2);

    long start = System.nanoTime();
    int PORT = 12345;

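    // Master side: an IndexedChronicle exposed over TCP on PORT via InProcessChronicleSource.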
    InProcessChronicleSource chronicle =
        new InProcessChronicleSource(new IndexedChronicle(name), PORT);
    DataStore dataStore = new DataStore(chronicle, ModelMode.MASTER);
    MapWrapper<String, String> strings =
        new MapWrapper<String, String>(
            dataStore,
            "strings",
            String.class,
            String.class,
            new LinkedHashMap<String, String>(),
            16);
    MapWrapper<Integer, Integer> ints =
        new MapWrapper<Integer, Integer>(
            dataStore,
            "ints",
            Integer.class,
            Integer.class,
            new LinkedHashMap<Integer, Integer>(),
            16);
    dataStore.start();
    ints.clear();
    strings.clear();

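    // Replica side: an InProcessChronicleSink connects to the source over TCP and drives a read-only DataStore.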
    InProcessChronicleSink chronicle2 =
        new InProcessChronicleSink(new IndexedChronicle(name2), "localhost", PORT);
    DataStore dataStore2 = new DataStore(chronicle2, ModelMode.READ_ONLY);
    MapWrapper<String, String> strings2 =
        new MapWrapper<String, String>(
            dataStore2,
            "strings",
            String.class,
            String.class,
            new LinkedHashMap<String, String>(),
            16);
    MapWrapper<Integer, Integer> ints2 =
        new MapWrapper<Integer, Integer>(
            dataStore2,
            "ints",
            Integer.class,
            Integer.class,
            new LinkedHashMap<Integer, Integer>(),
            16);

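    // Listeners on the replica maps count every replicated update.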
    final AtomicInteger sai = new AtomicInteger();
    MapListener<String, String> stringsListener =
        new AbstractMapListener<String, String>() {
          @Override
          public void update(String key, String oldValue, String newValue) {
            sai.incrementAndGet();
          }
        };
    strings2.addListener(stringsListener);

    final AtomicInteger iai = new AtomicInteger();
    MapListener<Integer, Integer> intsListener =
        new AbstractMapListener<Integer, Integer>() {
          @Override
          public void update(Integer key, Integer oldValue, Integer newValue) {
            iai.incrementAndGet();
          }
        };
    ints2.addListener(intsListener);
    dataStore2.start();

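    // Build identical reference maps; count tracks how many replicated events the sink must see
    // (it starts at 2 because of the two clear() calls issued on the master above).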
    Map<String, String> ssMap = new LinkedHashMap<String, String>();
    Map<Integer, Integer> iiMap = new LinkedHashMap<Integer, Integer>();
    int count = 2; // one clear per collection
    int collectionSize = 2000;
    for (int i = 0; i < collectionSize; i++) {
      iiMap.put(i, i);
      ssMap.put(Integer.toString(i), Integer.toString(i));
    }

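    // Each putAll() below is counted as one replicated event, hence count += 2.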
    strings.putAll(ssMap);
    ints.putAll(iiMap);
    count += 2;

    // Drain replicated events until the sink has caught up with everything the master wrote.
    while (dataStore2.events() < count) {
      dataStore2.nextEvent();
    }
    assertEquals(collectionSize, strings.size());
    assertEquals(collectionSize, strings2.size());
    assertEquals(collectionSize, ints.size());
    assertEquals(collectionSize, ints2.size());

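    // Hammer gets on both the master and the replica maps. The null/equality guards keep the
    // JUnit assertion calls off the hot path unless a check actually fails.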
    int gets = 0;
    for (int j = 0; j < 50000; j++) {
      for (String s : ssMap.keySet()) {
        String s1 = strings.get(s);
        String s2 = strings2.get(s);
        if (s1 == null) assertNotNull(s1);
        if (!s1.equals(s2)) assertEquals(s1, s2);
      }
      gets += ssMap.size();
      for (Integer i : iiMap.keySet()) {
        Integer i1 = ints.get(i);
        Integer i2 = ints2.get(i);
        if (i1 == null) assertNotNull(i1);
        if (!i1.equals(i2)) assertEquals(i1, i2);
      }
      gets += iiMap.size();
    }

    chronicle.close();
    chronicle2.close();
    long end = System.nanoTime();

    System.out.printf(
        "Average get time, including startup, bootstrap and shutdown: %.3f us per key%n",
        (end - start) / gets / 1e3);
  }

  @Test
  public void testOverTcpPutAllClear() throws IOException, InterruptedException {
    String name = TMP + "/testOverTcpPutAllClear0";
    String name2 = TMP + "/testOverTcpPutAllClear2";
    ChronicleTools.deleteOnExit(name);
    ChronicleTools.deleteOnExit(name2);

    long start = System.nanoTime();
    int PORT = 12345;

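    // Master side: source Chronicle published over TCP on PORT, as in the get-perf test above.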
    InProcessChronicleSource chronicle =
        new InProcessChronicleSource(new IndexedChronicle(name), PORT);
    DataStore dataStore = new DataStore(chronicle, ModelMode.MASTER);
    MapWrapper<String, String> strings =
        new MapWrapper<String, String>(
            dataStore,
            "strings",
            String.class,
            String.class,
            new LinkedHashMap<String, String>(),
            16);
    MapWrapper<Integer, Integer> ints =
        new MapWrapper<Integer, Integer>(
            dataStore,
            "ints",
            Integer.class,
            Integer.class,
            new LinkedHashMap<Integer, Integer>(),
            16);
    dataStore.start();
    ints.clear();
    strings.clear();

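    // Replica side: sink connected to the source over TCP, driving a read-only DataStore.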
    InProcessChronicleSink chronicle2 =
        new InProcessChronicleSink(new IndexedChronicle(name2), "localhost", PORT);
    DataStore dataStore2 = new DataStore(chronicle2, ModelMode.READ_ONLY);
    MapWrapper<String, String> strings2 =
        new MapWrapper<String, String>(
            dataStore2,
            "strings",
            String.class,
            String.class,
            new LinkedHashMap<String, String>(),
            16);
    MapWrapper<Integer, Integer> ints2 =
        new MapWrapper<Integer, Integer>(
            dataStore2,
            "ints",
            Integer.class,
            Integer.class,
            new LinkedHashMap<Integer, Integer>(),
            16);

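    // Update counters on the replica, one per map.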
    final AtomicInteger sai = new AtomicInteger();
    MapListener<String, String> stringsListener =
        new AbstractMapListener<String, String>() {
          @Override
          public void update(String key, String oldValue, String newValue) {
            sai.incrementAndGet();
          }
        };
    strings2.addListener(stringsListener);

    final AtomicInteger iai = new AtomicInteger();
    MapListener<Integer, Integer> intsListener =
        new AbstractMapListener<Integer, Integer>() {
          @Override
          public void update(Integer key, Integer oldValue, Integer newValue) {
            iai.incrementAndGet();
          }
        };
    ints2.addListener(intsListener);
    dataStore2.start();

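    // Reference data plus an event count the sink must drain (one event per putAll and per clear).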
    Map<String, String> ssMap = new LinkedHashMap<String, String>();
    Map<Integer, Integer> iiMap = new LinkedHashMap<Integer, Integer>();
    int count = 0;
    int collectionSize = 2000;
    for (int i = 0; i < collectionSize; i++) {
      iiMap.put(i, i);
      ssMap.put(Integer.toString(i), Integer.toString(i));
    }
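    // Repeatedly bulk-load and then clear both maps on the master; each iteration produces
    // four replicated events (two putAll, two clear).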
    for (int j = 0; j < 2500; j++) {
      strings.putAll(ssMap);
      ints.putAll(iiMap);
      strings.clear();
      ints.clear();
      count += 4;
    }
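    // mid marks the point at which all writes have been issued on the master.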
    long mid = System.nanoTime();

    // Drain replicated events until the sink has processed every putAll and clear.
    while (dataStore2.events() < count) {
      dataStore2.nextEvent();
    }

    chronicle.close();
    chronicle2.close();
    long end = System.nanoTime();

    System.out.printf(
        "Startup and write took %.2f us on average per key; read and shutdown took %.2f us on average per key%n",
        (mid - start) / count / collectionSize / 1e3, (end - mid) / count / collectionSize / 1e3);
  }