@Override
public void run() {
  // Scan this shard and claim every record still referenced, adding the
  // claimed count to the shared total.
  map.iterInit();
  long cl;
  try {
    cl = map.claimRecords(bf, nlbf);
    claims.addAndGet(cl);
  } catch (IOException e) {
    SDFSLogger.getLog().error("unable to claim shard", e);
  }
}
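  /**
   * Checks out a write map from the pool of active write maps, replacing it
   * with a freshly created shard if the one taken is already full.
   */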
  private ProgressiveFileByteArrayLongMap getWriteMap() throws IOException {
    ProgressiveFileByteArrayLongMap activeWMap;
    try {
      // Blocks until a write map is available from the pool.
      activeWMap = this.activeWriteMaps.take();
    } catch (InterruptedException e) {
      throw new IOException(e);
    }
    // Retire full maps and replace them with a fresh shard.
    if (activeWMap.isFull()) {
      activeWMap.setActive(false);
      activeWMap = this.createWriteMap();
    }
    return activeWMap;
  }
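  /**
   * Returns the shard containing the given hash, or null if the hash is not
   * present. Shards found outside the active read cache are promoted into it.
   */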
  private AbstractShard getReadMap(byte[] hash) throws IOException {
    Lock l = gcLock.readLock();
    l.lock();
    try {
      // The bloom filter is only a reliable negative cache when no GC is
      // rebuilding it.
      if (!runningGC && !lbf.mightContain(hash)) {
        return null;
      }
    } finally {
      l.unlock();
    }
    Iterator<ProgressiveFileByteArrayLongMap> iter = activeReadMaps.iterator();
    while (iter.hasNext()) {
      ProgressiveFileByteArrayLongMap _m = iter.next();
      if (_m.containsKey(hash)) return _m;
    }
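    // Fall back to scanning every shard on disk.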
    iter = maps.iterator();
    while (iter.hasNext()) {
      ProgressiveFileByteArrayLongMap _m = iter.next();
      if (!activeReadMaps.contains(_m) && _m.containsKey(hash)) {
        al.lock();
        try {
          // Evict the coldest shard when the active read cache is full.
          if (activeReadMaps.remainingCapacity() == 0) {
            ProgressiveFileByteArrayLongMap obf = activeReadMaps.poll();
            if (obf != null) obf.stopRun();
          }
          // Promote this shard into the active read cache, mirroring the
          // promotion done in getPos().
          activeReadMaps.offer(_m);
          try {
            loadCacheExecutor.execute(_m);
          } catch (Exception e) {
            if (SDFSLogger.isDebug()) SDFSLogger.getLog().debug("unable to cache " + _m, e);
          }
        } finally {
          al.unlock();
        }
        return _m;
      }
    }
    return null;
  }
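  /**
   * Creates a new shard file under a random, unused GUID, registers it with
   * the shard list, and marks it active for writes.
   */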
  private ProgressiveFileByteArrayLongMap createWriteMap() throws IOException {
    ProgressiveFileByteArrayLongMap activeWMap = null;
    try {
      String guid = null;
      boolean written = false;
      // Generate GUIDs until one maps to an unused file name.
      while (!written) {
        guid = RandomGUID.getGuid();
        File f = new File(fileName + "-" + guid + ".keys");
        if (!f.exists()) {
          activeWMap = new ProgressiveFileByteArrayLongMap(fileName + "-" + guid, this.hashTblSz);
          activeWMap.setUp();
          this.maps.add(activeWMap);
          activeWMap.setActive(true);
          written = true;
        }
      }
      return activeWMap;
    } catch (Exception e) {
      throw new IOException(e);
    }
  }
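  /**
   * Returns the on-disk position of the given hash, first consulting the
   * active read cache and then every shard.
   */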
  private long getPos(byte[] hash) throws IOException {
    long pos = -1;
    Lock l = gcLock.readLock();
    l.lock();
    try {
      // The bloom filter is only a reliable negative cache when no GC is
      // rebuilding it.
      if (!runningGC && !lbf.mightContain(hash)) return pos;
    } finally {
      l.unlock();
    }
    // Check the active read cache first.
    Iterator<ProgressiveFileByteArrayLongMap> iter = activeReadMaps.iterator();
    while (iter.hasNext()) {
      ProgressiveFileByteArrayLongMap m = iter.next();
      pos = m.get(hash);
      if (pos != -1) {
        return pos;
      }
    }
    // Fall back to scanning every shard, promoting any shard that yields a
    // hit into the active read cache.
    iter = maps.iterator();
    while (iter.hasNext()) {
      ProgressiveFileByteArrayLongMap m = iter.next();
      pos = m.get(hash);
      if (pos != -1) {
        al.lock();
        try {
          if (!activeReadMaps.contains(m)) {
            if (SDFSLogger.isDebug()) SDFSLogger.getLog().debug("adding active " + m.toString());
            // Evict the coldest shard when the cache is full.
            if (activeReadMaps.remainingCapacity() == 0) {
              ProgressiveFileByteArrayLongMap obf = activeReadMaps.poll();
              if (obf != null) obf.stopRun();
            }
            activeReadMaps.offer(m);
            try {
              loadCacheExecutor.execute(m);
            } catch (Exception e) {
              SDFSLogger.getLog().debug("unable to cache " + m, e);
            }
          }
        } finally {
          al.unlock();
        }
        return pos;
      }
    }
    return pos;
  }
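  /**
   * Adds the chunk to the hash table if its hash is not already present,
   * returning true when a new entry was written.
   */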
  @Override
  public boolean put(ChunkData cm, boolean persist) throws IOException, HashtableFullException {
    if (this.isClosed())
      throw new HashtableFullException("Hashtable " + this.fileName + " is closed");
    if (kSz.get() >= this.maxSz) throw new HashtableFullException("maximum size reached");
    boolean added = false;
    Lock l = gcLock.readLock();
    l.lock();
    ProgressiveFileByteArrayLongMap bm = null;
    try {
      // Only write when the hash is not already present in any shard.
      if (this.getReadMap(cm.getHash()) == null) {
        if (persist) {
          try {
            bm = this.getWriteMap();
            added = bm.put(cm);
          } catch (HashtableFullException e) {
            bm.setActive(false);
            bm = this.createWriteMap();
            added = bm.put(cm);
          }
          if (added) {
            this.lbf.put(cm.getHash());
            this.kSz.incrementAndGet();
          }
        } else {
          try {
            bm = this.getWriteMap();
            added = bm.put(cm.getHash(), cm.getcPos());
            this.lbf.put(cm.getHash());
          } catch (HashtableFullException e) {
            bm.setActive(false);
            bm = this.createWriteMap();
            added = bm.put(cm.getHash(), cm.getcPos());

            this.lbf.put(cm.getHash());
          }
        }
      }
    } finally {
      try {
        if (bm != null) {
          bm.setActive(true);
          this.activeWriteMaps.offer(bm);
        }
      } catch (Exception e) {
        // Returning the write map to the pool is best-effort; a new map can
        // be created on demand if this fails.
      } finally {
        l.unlock();
      }
    }
    return added;
  }
  /**
   * Initializes the object set of this hash table, loading existing shard
   * files from disk and rebuilding the bloom filter if it cannot be read.
   *
   * @return the current size of the hash table
   * @throws HashtableFullException
   * @throws FileNotFoundException
   */
  public long setUp() throws Exception {
    File _fs = new File(fileName);
    if (!_fs.getParentFile().exists()) {
      _fs.getParentFile().mkdirs();
    }
    SDFSLogger.getLog().info("Folder = " + _fs.getPath());
    SDFSLogger.getLog().info("Loading freebits bitset");
    long rsz = 0;
    long _tbs = maxSz / (256);
    int max = Integer.MAX_VALUE / ProgressiveFileByteArrayLongMap.EL;
    if (_tbs > max) {
      this.hashTblSz = max;
    } else if (_tbs > this.hashTblSz) {
      this.hashTblSz = (int) _tbs;
    }
    SDFSLogger.getLog()
        .info(
            "table setup max="
                + max
                + " maxsz="
                + this.maxSz
                + " _tbs="
                + _tbs
                + " hashTblSz="
                + this.hashTblSz);
    this.hashTblSz = NextPrime.getNextPrimeI((int) (this.hashTblSz));
    File[] files = _fs.getParentFile().listFiles(new DBFileFilter());
    if (files.length > 0) {
      CommandLineProgressBar bar =
          new CommandLineProgressBar("Loading Existing Hash Tables", files.length, System.out);
      this.loadEvent.maxCt = files.length + 128;

      for (int i = 0; i < files.length; i++) {
        this.loadEvent.curCt = this.loadEvent.curCt + 1;
        int sz = NextPrime.getNextPrimeI((int) (this.hashTblSz));
        String pth = files[i].getPath();
        // Strip the ".keys" extension to recover the shard's base path.
        String pfx = pth.substring(0, pth.length() - 5);
        ProgressiveFileByteArrayLongMap m = new ProgressiveFileByteArrayLongMap(pfx, sz);
        long mep = m.setUp();
        if (mep > endPos) endPos = mep;
        maps.add(m);
        rsz = rsz + m.size();
        bar.update(i);
        if (!m.isFull() && this.activeWriteMaps.remainingCapacity() > 0) {
          m.setActive(true);
          this.activeWriteMaps.add(m);
          this.loadCacheExecutor.execute(m);
        } else {
          m.setActive(false);
          m.full = true;
        }
      }
      bar.finish();
    }

    this.loadEvent.shortMsg = "Loading BloomFilters";
    if (maps.size() == 0) lbf = new LargeBloomFilter(maxSz, .01);
    else {
      try {
        lbf = new LargeBloomFilter(_fs.getParentFile(), maxSz, true);
      } catch (Exception e) {
        SDFSLogger.getLog().warn("Recreating BloomFilters...");
        this.loadEvent.shortMsg = "Recreating BloomFilters";
        lbf = new LargeBloomFilter(maxSz, .01);
        executor =
            new ThreadPoolExecutor(
                Main.writeThreads,
                Main.writeThreads,
                10,
                TimeUnit.SECONDS,
                worksQueue,
                new ProcessPriorityThreadFactory(Thread.MIN_PRIORITY),
                executionHandler);
        CommandLineProgressBar bar =
            new CommandLineProgressBar("ReCreating BloomFilters", maps.size(), System.out);
        Iterator<ProgressiveFileByteArrayLongMap> iter = maps.iterator();
        int i = 0;
        ArrayList<LBFReconstructThread> al = new ArrayList<LBFReconstructThread>();
        while (iter.hasNext()) {
          ProgressiveFileByteArrayLongMap m = iter.next();
          LBFReconstructThread th = new LBFReconstructThread(lbf, m);
          executor.execute(th);
          al.add(th);
          i++;
          bar.update(i);
        }
        executor.shutdown();
        bar.finish();
        try {
          System.out.print("Waiting for all BloomFilters creation threads to finish");
          while (!executor.awaitTermination(10, TimeUnit.SECONDS)) {
            SDFSLogger.getLog().debug("Awaiting fdisk completion of threads.");
            System.out.print(".");
          }
          for (LBFReconstructThread th : al) {
            if (th.ex != null) throw th.ex;
          }
          System.out.println(" done");
        } catch (Exception e1) {
          throw new IOException(e1);
        }
      }
    }
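    // Create fresh, empty shards until the active write map pool is full.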
    while (this.activeWriteMaps.remainingCapacity() > 0) {
      String guid = null;
      boolean written = false;
      while (!written) {
        guid = RandomGUID.getGuid();

        File f = new File(fileName + "-" + guid + ".keys");
        if (!f.exists()) {
          ProgressiveFileByteArrayLongMap activeWMap =
              new ProgressiveFileByteArrayLongMap(fileName + "-" + guid, this.hashTblSz);
          activeWMap.setUp();
          this.maps.add(activeWMap);
          written = true;
          activeWMap.setActive(true);
          this.activeWriteMaps.offer(activeWMap);
        }
      }
    }
    if (SDFSLogger.isDebug()) {
      long mem = MemoryMeasurer.measureBytes(lbf);
      long mmem = MemoryMeasurer.measureBytes(maps);
      SDFSLogger.getLog().debug("Large BloomFilter Size=" + StorageUnit.of(mem).format(mem));
      SDFSLogger.getLog().debug("Maps Size=" + StorageUnit.of(mmem).format(mmem));
    }
    this.loadEvent.endEvent("Loaded entries " + rsz);
    System.out.println("Loaded entries " + rsz);
    SDFSLogger.getLog().info("Active Maps " + this.activeWriteMaps.size());
    SDFSLogger.getLog().info("Loaded entries " + rsz);
    SDFSLogger.getLog().info("Loading BloomFilters " + rsz);
    this.kSz.set(rsz);
    this.closed = false;
    return size;
  }
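  /**
   * Claims all records still referenced in the supplied bloom filter,
   * rebuilds the local bloom filter, and compacts full shards whose occupancy
   * has dropped below 40%. Returns the number of records removed.
   */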
  @Override
  public synchronized long claimRecords(SDFSEvent evt, LargeBloomFilter bf) throws IOException {
    if (this.isClosed()) throw new IOException("Hashtable " + this.fileName + " is closed");
    executor =
        new ThreadPoolExecutor(
            Main.writeThreads + 1,
            Main.writeThreads + 1,
            10,
            TimeUnit.SECONDS,
            worksQueue,
            new ProcessPriorityThreadFactory(Thread.MIN_PRIORITY),
            executionHandler);
    csz = new AtomicLong(0);
    // Swap in a fresh bloom filter under the GC write lock; claimed records
    // will repopulate it as the shards are scanned.
    Lock l = this.gcLock.writeLock();
    l.lock();
    this.runningGC = true;
    lbf = new LargeBloomFilter(maxSz, .01);
    l.unlock();
    try {
      SDFSLogger.getLog()
          .info("Claiming Records [" + this.getSize() + "] from [" + this.fileName + "]");
      SDFSEvent tEvt =
          SDFSEvent.claimInfoEvent(
              "Claiming Records [" + this.getSize() + "] from [" + this.fileName + "]", evt);
      tEvt.maxCt = this.maps.size();
      Iterator<ProgressiveFileByteArrayLongMap> iter = maps.iterator();
      while (iter.hasNext()) {
        tEvt.curCt++;
        ProgressiveFileByteArrayLongMap m = null;
        try {
          m = iter.next();
          executor.execute(new ClaimShard(m, bf, lbf, csz));
        } catch (Exception e) {
          tEvt.endEvent(
              "Unable to claim records for " + m + " because : [" + e.toString() + "]",
              SDFSEvent.ERROR);
          SDFSLogger.getLog().error("Unable to claim records for " + m, e);
          throw new IOException(e);
        }
      }
      executor.shutdown();
      try {
        while (!executor.awaitTermination(10, TimeUnit.SECONDS)) {
          SDFSLogger.getLog().debug("Awaiting fdisk completion of threads.");
        }
      } catch (InterruptedException e) {
        throw new IOException(e);
      }
      this.kSz.getAndAdd(-1 * csz.get());
      tEvt.endEvent("removed [" + csz.get() + "] records");
      SDFSLogger.getLog().info("removed [" + csz.get() + "] records");
      iter = maps.iterator();
      while (iter.hasNext()) {
        ProgressiveFileByteArrayLongMap m = null;
        try {
          m = iter.next();
          if (m.isFull() && !m.isActive()) {
            double pf = (double) m.size() / (double) m.maxSize();
            // Compact shards below 40% occupancy; Double.isNaN guards the
            // degenerate case where maxSize() is 0. (pf == Double.NaN is
            // always false in Java and never detects NaN.)
            if (pf < .4 || Double.isNaN(pf)) {
              m.iterInit();
              KVPair p = m.nextKeyValue();
              while (p != null) {
                ProgressiveFileByteArrayLongMap _m = this.getWriteMap();
                try {
                  _m.put(p.key, p.value);
                } catch (HashtableFullException e) {
                  _m.setActive(false);
                  _m = this.createWriteMap();
                  _m.put(p.key, p.value);
                } finally {
                  this.activeWriteMaps.offer(_m);
                }
                p = m.nextKeyValue();
              }
              maps.remove(m);
              m.vanish();
              m = null;
            }
          }
        } catch (Exception e) {
          tEvt.endEvent(
              "Unable to compact " + m + " because : [" + e.toString() + "]", SDFSEvent.ERROR);
          SDFSLogger.getLog().error("to compact " + m, e);
          throw new IOException(e);
        }
      }
      return csz.get();
    } finally {
      l.lock();
      this.runningGC = false;
      l.unlock();
      executor = null;
    }
  }