Example #1
  @Test(timeout = 600000)
  public void expire_maxSize_with_TTL_get() throws InterruptedException {
    if (UtilsTest.scale() == 0) return;

    File f = UtilsTest.tempDbFile();
    for (int o = 0; o < 2; o++) {
      final DB db = DBMaker.fileDB(f).transactionDisable().make();
      final HTreeMap<Object, Object> map =
          db.hashMapCreate("foo")
              .expireMaxSize(1000)
              .expireAfterAccess(3, TimeUnit.SECONDS)
              .makeOrGet();

      map.put("foo", "bar");

      for (int i = 0; i < 10; i++) assertEquals("bar", map.get("foo"));

      Thread.sleep(6000);
      map.get("aa"); // so internal tasks have change to run
      assertEquals(null, map.get("foo"));

      db.commit();
      db.close();
      Thread.sleep(1100);
    }
  }
Example #2
  @Test // (timeout = 10000)
  public void testIteration() {
    HTreeMap m =
        new HTreeMap<Integer, Integer>(recman, true) {
          @Override
          protected int hash(Object key) {
            return (Integer) key;
          }
        };

    final int max = 140;
    final int inc = 111111;

    for (Integer i = 0; i < max; i++) {
      m.put(i, i + inc);
    }

    Iterator<Integer> keys = m.keySet().iterator();
    for (Integer i = 0; i < max; i++) {
      assertTrue(keys.hasNext());
      assertEquals(i, keys.next());
    }
    assertTrue(!keys.hasNext());

    Iterator<Integer> vals = m.values().iterator();
    for (Integer i = inc; i < max + inc; i++) {
      assertTrue(vals.hasNext());
      assertEquals(i, vals.next());
    }
    assertTrue(!vals.hasNext());

    // great it worked, test stuff spread across segments
    m.clear();
    assertTrue(m.isEmpty());

    for (int i = 0; i < max; i++) {
      m.put((1 << 30) + i, i + inc);
      m.put((2 << 30) + i, i + inc);
      m.put((3 << 30) + i, i + inc);
    }

    assertEquals(max * 3, m.size());

    int countSegments = 0;
    for (long segmentRecid : m.segmentRecids) {
      if (recman.recordGet(segmentRecid, HTreeMap.DIR_SERIALIZER) != null) countSegments++;
    }

    assertEquals(3, countSegments);

    keys = m.keySet().iterator();
    for (int i = 1; i <= 3; i++) {
      for (int j = 0; j < max; j++) {
        assertTrue(keys.hasNext());
        assertEquals(Integer.valueOf((i << 30) + j), keys.next());
      }
    }
    assertTrue(!keys.hasNext());
  }
Example #3
 @Test
 public void clear() {
   HTreeMap m = new HTreeMap(recman, true);
   for (Integer i = 0; i < 100; i++) {
     m.put(i, i);
   }
   m.clear();
   assertTrue(m.isEmpty());
   assertEquals(0, m.size());
 }
Example #4
File: DB.java Project: rhauch/MapDB
  /**
   * Creates a new HashSet.
   *
   * @param name name of the set to create
   * @param keepCounter whether a size counter should be kept; without the counter updates are
   *     faster, but the entire collection must be traversed to count items.
   * @param serializer used to convert keys into/from binary form; use null for the default
   *     serializer.
   * @param <K> item type
   * @throws IllegalArgumentException if the name is already used
   */
  public synchronized <K> Set<K> createHashSet(
      String name, boolean keepCounter, Serializer<K> serializer) {
    checkNameNotExists(name);

    Set<K> ret =
        new HTreeMap<K, Object>(
                engine,
                catPut(name + ".counterRecid", !keepCounter ? 0L : engine.put(0L, Serializer.LONG)),
                catPut(name + ".hashSalt", Utils.RANDOM.nextInt()),
                catPut(name + ".segmentRecids", HTreeMap.preallocateSegments(engine)),
                catPut(name + ".serializer", serializer, getDefaultSerializer()),
                null,
                0L,
                0L,
                0L,
                0L,
                null,
                null,
                null)
            .keySet();

    catalog.put(name + ".type", "HashSet");
    collections.put(name, new WeakReference<Object>(ret));
    return ret;
  }
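A minimal usage sketch of the method above, not taken from the project: it assumes a DB instance named db has already been opened via DBMaker, and the set name "demo" is illustrative. Serializer.STRING_NOSIZE is the string serializer already referenced elsewhere in DB.java.

  // Sketch only: "db" and the name "demo" are assumptions for illustration.
  Set<String> names = db.createHashSet("demo", true, Serializer.STRING_NOSIZE);
  names.add("alpha");
  names.add("beta");
  db.commit(); // persist the new collection and its entries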
Example #5
  @Test
  public void test_delete() {
    HTreeMap m =
        new HTreeMap(recman, true) {
          @Override
          protected int hash(Object key) {
            return 0;
          }
        };

    for (long i = 0; i < 20; i++) {
      m.put(i, i + 100);
    }

    for (long i = 0; i < 20; i++) {
      assertTrue(m.containsKey(i));
      assertEquals(i + 100, m.get(i));
    }

    for (long i = 0; i < 20; i++) {
      m.remove(i);
    }

    for (long i = 0; i < 20; i++) {
      assertTrue(!m.containsKey(i));
      assertEquals(null, m.get(i));
    }
  }
Example #6
  /** Deletes the record or collection with the given name. */
  public synchronized void delete(String name) {
    Object r = get(name);
    if (r instanceof Atomic.Boolean) {
      engine.delete(((Atomic.Boolean) r).recid, Serializer.BOOLEAN);
    } else if (r instanceof Atomic.Integer) {
      engine.delete(((Atomic.Integer) r).recid, Serializer.INTEGER);
    } else if (r instanceof Atomic.Long) {
      engine.delete(((Atomic.Long) r).recid, Serializer.LONG);
    } else if (r instanceof Atomic.String) {
      engine.delete(((Atomic.String) r).recid, Serializer.STRING_NOSIZE);
    } else if (r instanceof Atomic.Var) {
      engine.delete(((Atomic.Var) r).recid, ((Atomic.Var) r).serializer);
    } else if (r instanceof Queue) {
      // drain queue
      Queue q = (Queue) r;
      while (q.poll() != null) {
        // do nothing
      }
    } else if (r instanceof HTreeMap || r instanceof HTreeMap.KeySet) {
      HTreeMap m = (r instanceof HTreeMap) ? (HTreeMap) r : ((HTreeMap.KeySet) r).parent();
      m.clear();
      // delete segments
      for (long segmentRecid : m.segmentRecids) {
        engine.delete(segmentRecid, HTreeMap.DIR_SERIALIZER);
      }
    } else if (r instanceof BTreeMap || r instanceof BTreeMap.KeySet) {
      BTreeMap m = (r instanceof BTreeMap) ? (BTreeMap) r : (BTreeMap) ((BTreeMap.KeySet) r).m;

      // TODO on BTreeMap recursively delete all nodes
      m.clear();

      if (m.counter != null) engine.delete(m.counter.recid, Serializer.LONG);
    }

    for (String n : catalog.keySet()) {
      if (!n.startsWith(name)) continue;
      catalog.remove(n);
    }
    namesInstanciated.remove(name);
    namesLookup.remove(r);
  }
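A short usage sketch of delete, under the assumption that a collection named "cache" was created earlier in the same DB; the name is illustrative only.

  // Sketch only: "cache" is an illustrative name for a previously created collection.
  db.delete("cache"); // clears the structure and removes every catalog entry whose key starts with "cache"
  db.commit();        // make the removal durable (when transactions are enabled)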
Example #7
  @Test
  public void test_simple_put() {

    HTreeMap m = new HTreeMap(recman, true);

    m.put(111L, 222L);
    m.put(333L, 444L);
    assertTrue(m.containsKey(111L));
    assertTrue(!m.containsKey(222L));
    assertTrue(m.containsKey(333L));
    assertTrue(!m.containsKey(444L));

    assertEquals(222L, m.get(111L));
    assertEquals(null, m.get(222L));
    assertEquals(444L, m.get(333L));
  }
Example #8
  @Test
  public void expire_maxSize_with_TTL() throws InterruptedException {
    if (UtilsTest.scale() == 0) return;
    File f = UtilsTest.tempDbFile();
    for (int o = 0; o < 2; o++) {
      final DB db = DBMaker.fileDB(f).transactionDisable().make();
      final HTreeMap<Object, Object> map =
          db.hashMapCreate("foo")
              .expireMaxSize(1000)
              .expireAfterWrite(1, TimeUnit.DAYS)
              .makeOrGet();

      map.put("foo", "bar");

      assertEquals("bar", map.get("foo"));

      Thread.sleep(1100);
      assertEquals("bar", map.get("foo"));

      db.commit();
      db.close();
      Thread.sleep(1100);
    }
  }
Example #9
File: DB.java Project: rhauch/MapDB
  /**
   * Creates a new HashMap with more specific arguments.
   *
   * @param <K> key type
   * @param <V> value type
   * @return newly created map
   * @throws IllegalArgumentException if the name is already used
   */
  protected synchronized <K, V> HTreeMap<K, V> createHashMap(HTreeMapMaker m) {
    String name = m.name;
    checkNameNotExists(name);

    long expireTimeStart = 0, expire = 0, expireAccess = 0, expireMaxSize = 0;
    long[] expireHeads = null, expireTails = null;

    if (m.expire != 0 || m.expireAccess != 0 || m.expireMaxSize != 0) {
      expireTimeStart = catPut(name + ".expireTimeStart", System.currentTimeMillis());
      expire = catPut(name + ".expire", m.expire);
      expireAccess = catPut(name + ".expireAccess", m.expireAccess);
      expireMaxSize = catPut(name + ".expireMaxSize", m.expireMaxSize);
      expireHeads = new long[16];
      expireTails = new long[16];
      for (int i = 0; i < 16; i++) {
        expireHeads[i] = engine.put(0L, Serializer.LONG);
        expireTails[i] = engine.put(0L, Serializer.LONG);
      }
      catPut(name + ".expireHeads", expireHeads);
      catPut(name + ".expireTails", expireHeads);
    }

    HTreeMap<K, V> ret =
        new HTreeMap<K, V>(
            engine,
            catPut(name + ".counterRecid", !m.keepCounter ? 0L : engine.put(0L, Serializer.LONG)),
            catPut(name + ".hashSalt", Utils.RANDOM.nextInt()),
            catPut(name + ".segmentRecids", HTreeMap.preallocateSegments(engine)),
            catPut(name + ".keySerializer", m.keySerializer, getDefaultSerializer()),
            catPut(name + ".valueSerializer", m.valueSerializer, getDefaultSerializer()),
            expireTimeStart,
            expire,
            expireAccess,
            expireMaxSize,
            expireHeads,
            expireTails,
            m.valueCreator);

    catalog.put(name + ".type", "HashMap");
    collections.put(name, new WeakReference<Object>(ret));
    return ret;
  }
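This protected method is normally reached through the HTreeMapMaker builder rather than called directly. A sketch mirroring the expiration tests above; the map name "events" and the limits are illustrative, and the claim that makeOrGet() hands the configured maker to createHashMap(...) is an assumption based on the method above.

  // Sketch only: builder-style creation as used in the expiration tests above.
  HTreeMap<Object, Object> events =
      db.hashMapCreate("events")               // illustrative name
          .expireMaxSize(1000)                 // causes expireHeads/expireTails to be allocated
          .expireAfterWrite(1, TimeUnit.DAYS)
          .makeOrGet();                        // assumed to delegate to createHashMap(HTreeMapMaker)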
Example #10
  @Test
  public void test_hash_collision() {
    HTreeMap m =
        new HTreeMap(recman, true) {
          @Override
          protected int hash(Object key) {
            return 0;
          }
        };

    for (long i = 0; i < 20; i++) {
      m.put(i, i + 100);
    }

    for (long i = 0; i < 20; i++) {
      assertTrue(m.containsKey(i));
      assertEquals(i + 100, m.get(i));
    }

    m.put(11L, 1111L);
    assertEquals(1111L, m.get(11L));
  }
Example #11
  @Test
  public void test_hash_dir_expand() {
    HTreeMap m =
        new HTreeMap(recman, true) {
          @Override
          protected int hash(Object key) {
            return 0;
          }
        };

    for (long i = 0; i < HTreeMap.BUCKET_OVERFLOW; i++) {
      m.put(i, i);
    }

    // segment should not be expanded
    long[][] l = recman.recordGet(m.segmentRecids[0], HTreeMap.DIR_SERIALIZER);
    assertNotNull(l[0]);
    assertEquals(1, l[0][0] & 1); // last bite indicates leaf
    for (int j = 1; j < 8; j++) { // all others should be null
      assertEquals(0, l[0][j]);
    }
    long recid = l[0][0] >>> 1;

    for (long i = HTreeMap.BUCKET_OVERFLOW - 1; i >= 0; i--) {
      assertTrue(recid != 0);
      HTreeMap.LinkedNode n = (HTreeMap.LinkedNode) recman.recordGet(recid, m.LN_SERIALIZER);
      assertEquals(i, n.key);
      assertEquals(i, n.value);
      recid = n.next;
    }

    // adding one more item should trigger dir expansion to next level
    m.put((long) HTreeMap.BUCKET_OVERFLOW, (long) HTreeMap.BUCKET_OVERFLOW);

    recid = m.segmentRecids[0];

    l = recman.recordGet(recid, HTreeMap.DIR_SERIALIZER);
    assertNotNull(l[0]);
    for (int j = 1; j < 8; j++) { // all others should be null
      assertEquals(null, l[j]);
    }

    assertEquals(0, l[0][0] & 1); // last bite indicates leaf
    for (int j = 1; j < 8; j++) { // all others should be zero
      assertEquals(0, l[0][j]);
    }

    recid = l[0][0] >>> 1;

    l = recman.recordGet(recid, HTreeMap.DIR_SERIALIZER);
    assertNotNull(l[0]);
    for (int j = 1; j < 8; j++) { // all others should be null
      assertEquals(null, l[j]);
    }

    assertEquals(1, l[0][0] & 1); // last bite indicates leaf
    for (int j = 1; j < 8; j++) { // all others should be zero
      assertEquals(0, l[0][j]);
    }

    recid = l[0][0] >>> 1;

    for (long i = 0; i <= HTreeMap.BUCKET_OVERFLOW; i++) {
      assertTrue(recid != 0);
      HTreeMap.LinkedNode n = (HTreeMap.LinkedNode) recman.recordGet(recid, m.LN_SERIALIZER);

      assertNotNull(n);
      assertEquals(i, n.key);
      assertEquals(i, n.value);
      recid = n.next;
    }
  }