Example #1
 public void serialize(SerializerOutput out, HashNode obj) throws IOException {
   if (obj.getClass() == HashBucket.class) {
     out.write(Serialization.HTREE_BUCKET);
     HashBucket b = (HashBucket) obj;
     b.writeExternal(out);
   } else {
     out.write(Serialization.HTREE_DIRECTORY);
     HashDirectory n = (HashDirectory) obj;
     n.writeExternal(out);
   }
 }
Example #2
    /**
     * Prepare the internal state so we can answer <code>hasMoreElements</code>. Concretely, this
     * method prepares an Enumeration on the next bucket to enumerate. If no following bucket is
     * found, the next Enumeration is set to <code>null</code>.
     */
    private void prepareNext() throws IOException {
      long child_recid = 0;

      // find next bucket/directory to enumerate
      do {
        _child++;
        if (_child >= MAX_CHILDREN) {

          if (_dirStack.isEmpty()) {
            // no more directories in the stack, we're finished
            return;
          }

          // try next page
          _dir = (HashDirectory) _dirStack.remove(_dirStack.size() - 1);
          _child = ((Integer) _childStack.remove(_childStack.size() - 1)).intValue();
          continue;
        }
        child_recid = _dir._children[_child];
      } while (child_recid == 0);

      if (child_recid == 0) {
        // sanity check: the loop above can only exit with a non-zero child_recid
        throw new Error("child_recid cannot be 0");
      }

      HashNode node = (HashNode) _recman.fetch(child_recid, tree.SERIALIZER);
      // System.out.println("HDEnumeration.get() child is : "+node);

      if (node instanceof HashDirectory) {
        // save current position
        _dirStack.add(_dir);
        _childStack.add(new Integer(_child));

        _dir = (HashDirectory) node;
        _child = -1;

        // recurse into
        _dir.setPersistenceContext(_recman, child_recid);
        prepareNext();
      } else {
        // node is a bucket
        HashBucket bucket = (HashBucket) node;
        if (_iterateKeys) {
          ArrayList keys2 = (ArrayList) bucket.getKeys().clone();
          _iter = keys2.iterator();
        } else {
          _iter = bucket.getValues().iterator();
        }
      }
    }
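
For context, the traversal above resumes enumeration by keeping an explicit stack of (directory, child-index) pairs and descending until it reaches a bucket. The following is a minimal, self-contained sketch of that same descent pattern over plain in-memory nodes; it is an illustration only, not JDBM code, and the Dir/Bucket types and nextBucketIterator method are made up for the example.

import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Deque;
import java.util.Iterator;
import java.util.List;

// Illustration only: a tiny in-memory analogue of the HashDirectory/HashBucket
// tree, used to show the explicit-stack descent that prepareNext() performs.
public class TraversalSketch {
  interface Node {}

  static class Dir implements Node {
    final List<Node> children = new ArrayList<Node>(); // a null slot plays the role of recid 0
  }

  static class Bucket implements Node {
    final List<String> values = new ArrayList<String>();
  }

  // Scan forward from (dir, child) to the next non-empty bucket, descending into
  // sub-directories and backing up via the stacks, just like prepareNext().
  static Iterator<String> nextBucketIterator(Deque<Dir> dirStack, Deque<Integer> childStack,
                                             Dir dir, int child) {
    while (true) {
      child++;
      if (child >= dir.children.size()) {
        if (dirStack.isEmpty()) {
          return null;            // no more directories on the stack: enumeration is done
        }
        dir = dirStack.pop();     // resume the parent directory where we left off
        child = childStack.pop();
        continue;
      }
      Node node = dir.children.get(child);
      if (node == null) {
        continue;                 // empty slot (child_recid == 0 in the original)
      }
      if (node instanceof Dir) {
        dirStack.push(dir);       // save the current position before descending
        childStack.push(child);
        dir = (Dir) node;
        child = -1;               // so the next ++ starts at slot 0
      } else {
        return ((Bucket) node).values.iterator();
      }
    }
  }

  public static void main(String[] args) {
    Dir root = new Dir();
    Bucket b = new Bucket();
    b.values.add("hello");
    root.children.add(null);      // empty slot
    root.children.add(b);
    Iterator<String> it =
        nextBucketIterator(new ArrayDeque<Dir>(), new ArrayDeque<Integer>(), root, -1);
    System.out.println(it.next()); // hello
  }
}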
Example #3
  /**
   * Remove the value which is associated with the given key. If the key does not exist, this method
   * simply ignores the operation.
   *
   * @param key key whose associated value is to be removed
   * @return object which was associated with the given key, or <code>null</code> if no association
   *     existed with the given key.
   */
  Object remove(Object key) throws IOException {
    int hash = hashCode(key);
    long child_recid = _children[hash];
    if (child_recid == 0) {
      // no bucket/page here --> not found
      return null;
    } else {
      HashNode node = (HashNode) _recman.fetch(child_recid, tree.SERIALIZER);
      // System.out.println("HashDirectory.remove() child is : "+node);

      if (node instanceof HashDirectory) {
        // recurse into next directory level
        HashDirectory dir = (HashDirectory) node;
        dir.setPersistenceContext(_recman, child_recid);
        Object existing = dir.remove(key);
        if (existing != null) {
          if (dir.isEmpty()) {
            // delete empty directory
            _recman.delete(child_recid);
            _children[hash] = 0;
            _recman.update(_recid, this, tree.SERIALIZER);
          }
        }
        return existing;
      } else {
        // node is a bucket
        HashBucket bucket = (HashBucket) node;
        Object existing = bucket.removeElement(key);
        if (existing != null) {
          if (bucket.getElementCount() >= 1) {
            _recman.update(child_recid, bucket, tree.SERIALIZER);
          } else {
            // delete bucket, it's empty
            _recman.delete(child_recid);
            _children[hash] = 0;
            _recman.update(_recid, this, tree.SERIALIZER);
          }
        }
        return existing;
      }
    }
  }
Example #4
  /**
   * Returns the value which is associated with the given key. Returns <code>null</code> if there is
   * no association for this key.
   *
   * @param key key whose associated value is to be returned
   */
  V get(K key) throws IOException {
    int hash = hashCode(key);
    long child_recid = _children[hash];
    if (child_recid == 0) {
      // no bucket/page here --> not found
      return null;
    } else {
      HashNode<K, V> node = (HashNode<K, V>) _recman.fetch(child_recid, tree.SERIALIZER);
      // System.out.println("HashDirectory.get() child is : "+node);

      if (node instanceof HashDirectory) {
        // recurse into next directory level
        HashDirectory<K, V> dir = (HashDirectory<K, V>) node;
        dir.setPersistenceContext(_recman, child_recid);
        return dir.get(key);
      } else {
        // node is a bucket
        HashBucket<K, V> bucket = (HashBucket<K, V>) node;
        return bucket.getValue(key);
      }
    }
  }
Example #5
 public HashNode deserialize(SerializerInput ds) throws IOException {
   try {
     int i = ds.read();
     if (i == Serialization.HTREE_BUCKET) { // is HashBucket?
       HashBucket ret = new HashBucket(HTree.this);
       ret.readExternal(ds);
      if (ds.available() != 0 && ds.read() != -1) {
        // -1 is fix for compression, not sure what is happening
        throw new InternalError("bytes left: " + ds.available());
      }
       return ret;
     } else if (i == Serialization.HTREE_DIRECTORY) {
       HashDirectory ret = new HashDirectory(HTree.this);
       ret.readExternal(ds);
      if (ds.available() != 0 && ds.read() != -1) {
        // -1 is fix for compression, not sure what is happening
        throw new InternalError("bytes left: " + ds.available());
      }
       return ret;
     } else {
       throw new InternalError("Wrong HTree header: " + i);
     }
   } catch (ClassNotFoundException e) {
     throw new IOException(e);
   }
 }
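
Examples #1 and #5 are the two halves of the same record serializer: a one-byte header (Serialization.HTREE_BUCKET or Serialization.HTREE_DIRECTORY) is written first, then used on read to decide which concrete node type to instantiate. Below is a self-contained sketch of that header-byte dispatch pattern using plain java.io streams; it does not use the JDBM SerializerInput/SerializerOutput classes, and the tag constants and class name are made up for the example.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

// Illustration only: tag-byte dispatch between two record kinds,
// mirroring the HTREE_BUCKET / HTREE_DIRECTORY header check above.
public class TagDispatchSketch {
  static final int BUCKET = 3;      // hypothetical tag values
  static final int DIRECTORY = 4;

  static byte[] write(boolean isBucket, String payload) throws IOException {
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(bos);
    out.write(isBucket ? BUCKET : DIRECTORY);  // header byte first
    out.writeUTF(payload);                     // then the record body
    return bos.toByteArray();
  }

  static String read(byte[] data) throws IOException {
    DataInputStream in = new DataInputStream(new ByteArrayInputStream(data));
    int tag = in.read();                       // dispatch on the header byte
    if (tag == BUCKET) {
      return "bucket:" + in.readUTF();
    } else if (tag == DIRECTORY) {
      return "directory:" + in.readUTF();
    } else {
      throw new IOException("Wrong header: " + tag);
    }
  }

  public static void main(String[] args) throws IOException {
    System.out.println(read(write(true, "payload")));   // prints bucket:payload
  }
}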
Example #6
  /** Basic tests */
  public void testBasics() throws IOException {

    Properties props = new Properties();
    RecordManager recman =
        RecordManagerFactory.createRecordManager(TestRecordFile.testFileName, props);
    HashBucket bucket = new HashBucket(0);

    // add
    bucket.addElement("key", "value");
    String s = (String) bucket.getValue("key");
    assertEquals("value", s);

    // replace
    bucket.addElement("key", "value2");
    s = (String) bucket.getValue("key");
    assertEquals("value2", s);

    // add
    bucket.addElement("key2", "value3");
    s = (String) bucket.getValue("key2");
    assertEquals("value3", s);

    // remove
    bucket.removeElement("key2");
    s = (String) bucket.getValue("key2");
    assertEquals(null, s);
    bucket.removeElement("key");
    s = (String) bucket.getValue("key");
    assertEquals(null, s);

    recman.close();
  }
Example #7
  /**
   * Associates the specified value with the specified key.
   *
   * @param key key with which the specified value is to be associated.
   * @param value value to be associated with the specified key.
   * @return object which was previously associated with the given key, or <code>null</code> if no
   *     association existed.
   */
  Object put(Object key, Object value) throws IOException {
    if (value == null) {
      return remove(key);
    }
    int hash = hashCode(key);
    long child_recid = _children[hash];
    if (child_recid == 0) {
      // no bucket/page here yet, let's create a bucket
      HashBucket bucket = new HashBucket(tree, _depth + 1);

      // insert (key,value) pair in bucket
      Object existing = bucket.addElement(key, value);

      long b_recid = _recman.insert(bucket, tree.SERIALIZER);
      _children[hash] = b_recid;

      _recman.update(_recid, this, tree.SERIALIZER);

      // System.out.println("Added: "+bucket);
      return existing;
    } else {
      HashNode node = (HashNode) _recman.fetch(child_recid, tree.SERIALIZER);

      if (node instanceof HashDirectory) {
        // recursive insert in next directory level
        HashDirectory dir = (HashDirectory) node;
        dir.setPersistenceContext(_recman, child_recid);
        return dir.put(key, value);
      } else {
        // node is a bucket
        HashBucket bucket = (HashBucket) node;
        if (bucket.hasRoom()) {
          Object existing = bucket.addElement(key, value);
          _recman.update(child_recid, bucket, tree.SERIALIZER);
          // System.out.println("Added: "+bucket);
          return existing;
        } else {
          // overflow, so create a new directory
          if (_depth == MAX_DEPTH) {
            throw new RuntimeException("Cannot create deeper directory. Depth=" + _depth);
          }
          HashDirectory dir = new HashDirectory(tree, (byte) (_depth + 1));
          long dir_recid = _recman.insert(dir, tree.SERIALIZER);
          dir.setPersistenceContext(_recman, dir_recid);

          _children[hash] = dir_recid;
          _recman.update(_recid, this, tree.SERIALIZER);

          // discard overflown bucket
          _recman.delete(child_recid);

          // migrate existing bucket elements
          ArrayList keys = bucket.getKeys();
          ArrayList values = bucket.getValues();
          int entries = keys.size();
          for (int i = 0; i < entries; i++) {
            dir.put(keys.get(i), values.get(i));
          }

          // (finally!) insert new element
          return dir.put(key, value);
        }
      }
    }
  }
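
These directory-level put/get/remove methods are normally reached through the HTree front-end rather than called directly. Here is a minimal usage sketch assuming the classic JDBM 1.x API (RecordManagerFactory, HTree.createInstance, setNamedObject); the snippets above may come from a fork whose factory methods and generics differ, so treat the exact calls as an assumption. The database name "demo-db" is made up for the example.

import java.io.IOException;
import java.util.Properties;

import jdbm.RecordManager;
import jdbm.RecordManagerFactory;
import jdbm.htree.HTree;

public class HTreeUsageSketch {
  public static void main(String[] args) throws IOException {
    RecordManager recman =
        RecordManagerFactory.createRecordManager("demo-db", new Properties());

    // Create the hashtable and register its root recid under a name so it can be reloaded.
    HTree map = HTree.createInstance(recman);
    recman.setNamedObject("map", map.getRecid());

    map.put("key", "value");              // ends up in HashDirectory.put(...) shown above
    Object v = map.get("key");            // HashDirectory.get(...)
    System.out.println(v);                // prints value
    map.remove("key");                    // HashDirectory.remove(...)

    recman.commit();
    recman.close();
  }
}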