/**
 * Pre-fetches ("caches") the data blocks backing this buffer by dispatching one
 * asynchronous read task per {@code Shard} and blocking until every read completes,
 * an error is reported, or the wait times out.
 *
 * <p>Runs only when {@code this.buf == null} (nothing materialized yet) and when
 * {@code HashFunctionPool.max_hash_cluster > 1} — presumably the multi-shard
 * (clustered hash) configuration; single-shard setups skip this entirely.
 * NOTE(review): {@code ar}, {@code sz}, and {@code executor} are instance fields
 * declared outside this view — assumed to be the hash-location list, the shard
 * count, and a task executor respectively; confirm against the enclosing class.
 *
 * @throws IOException            if any shard read fails or the wait loop exits
 *                                before all reads complete
 * @throws InterruptedException   propagated from {@code Object.wait(long)}
 * @throws DataArchivedException  relayed from the listener when a shard's data
 *                                has been moved to archive storage
 */
public void cacheChunk() throws IOException, InterruptedException, DataArchivedException {
    if (this.buf == null) {
        // Mark that no hash-locations have been added while we (re)populate.
        // NOTE(review): hlAdded semantics inferred from the name — verify.
        this.hlAdded = false;
        if (HashFunctionPool.max_hash_cluster > 1) {
            // Build one Shard per contiguous valid HashLocPair. `final` so the
            // anonymous listener below can capture it.
            final ArrayList<Shard> cks = new ArrayList<Shard>();
            int i = 0;
            // long fp = this.position;
            for (HashLocPair p : ar) {
                // hashloc[1] == 0 appears to be the "unset/end" sentinel; the
                // loop stops at the first such entry rather than skipping it.
                if (p.hashloc[1] != 0) {
                    Shard sh = new Shard();
                    sh.hash = p.hash;
                    sh.hashloc = p.hashloc;
                    sh.cache = true; // request cache-populate mode for the read task
                    sh.pos = p.pos;
                    sh.nlen = p.nlen;
                    sh.offset = p.offset;
                    sh.len = p.len;
                    sh.apos = i; // shard's index in cks, echoed back in commandResponse
                    cks.add(i, sh);
                } else
                    break;
                i++;
            }
            // sz is an instance field (not a local) so the anonymous class can
            // read it; it records how many reads we expect to complete.
            sz = cks.size();
            // Completion listener shared by all shard tasks. DN = done count,
            // DNEX = error count, DAR = pending DataArchivedException — all
            // maintained by the AsyncChunkReadActionListener base class
            // (declared elsewhere; method names taken on faith here).
            AsyncChunkReadActionListener l = new AsyncChunkReadActionListener() {
                @Override
                public void commandException(Exception e) {
                    // Count the failure and wake the waiter. NOTE(review): the
                    // exception itself is dropped here; only the count survives.
                    this.incrementAndGetDNEX();
                    synchronized (this) {
                        this.notifyAll();
                    }
                }

                @Override
                public void commandResponse(Shard result) {
                    // Store the fetched chunk back into the shard it belongs to,
                    // then wake the waiter once the last read has landed.
                    cks.get(result.apos).ck = result.ck;
                    if (this.incrementandGetDN() >= sz) {
                        synchronized (this) {
                            this.notifyAll();
                        }
                    }
                }

                @Override
                public void commandArchiveException(DataArchivedException e) {
                    // Archived data is a distinct, recoverable condition: record
                    // the exception so the waiter can rethrow it verbatim.
                    this.incrementAndGetDNEX();
                    this.setDAR(e);
                    synchronized (this) {
                        this.notifyAll();
                    }
                }
            };
            // Submit every shard read; each task reports back through `l`.
            for (Shard sh : cks) {
                sh.l = l;
                executor.execute(sh);
            }
            // Timed-wait loop: wl = total ms waited, al = iterations since the
            // last slow-IO log line (so the debug message fires every ~30 s).
            int wl = 0;
            int al = 0;
            while (l.getDN() < sz && l.getDNEX() == 0) {
                if (al == 30) {
                    int nt = wl / 1000;
                    SDFSLogger.getLog()
                            .debug("Slow io, waited [" + nt + "] seconds for all reads to complete.");
                    al = 0;
                }
                // Fail fast inside the loop as soon as an error is visible.
                if (l.getDAR() != null) {
                    throw l.getDAR();
                }
                if (l.getDNEX() > 0)
                    throw new IOException("error while reading data");
                // The condition is checked outside the lock, so a notify could
                // slip in before wait() — the 1 s timeout bounds that race to a
                // one-second delay rather than a hang.
                synchronized (l) {
                    l.wait(1000);
                }
                wl += 1000;
                al++;
            }
            // Re-check all failure modes after the loop: the loop can also exit
            // because DN reached sz while an error was recorded concurrently.
            if (l.getDAR() != null) {
                throw l.getDAR();
            }
            if (l.getDNEX() > 0) {
                throw new IOException("error while getting blocks " + l.getDNEX() + " errors found");
            }
            if (l.getDN() < sz) {
                throw new IOException("thread timed out before read was complete ");
            }
        }
    }
}