Example 1
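initBuffer() lazily fills this.buf: when hashes are clustered it splits the chunk into Shard reads, fetches them in parallel through the executor, and reassembles the buffer once every read has returned; otherwise it fetches the single chunk directly via HCServiceProxy.fetchChunk().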
  private void initBuffer() throws IOException, InterruptedException, DataArchivedException {
    if (this.buf == null) {
      this.hlAdded = false;
      if (HashFunctionPool.max_hash_cluster > 1) {
        this.buf = ByteBuffer.wrap(new byte[Main.CHUNK_LENGTH]);

        final ArrayList<Shard> cks = new ArrayList<Shard>();
        int i = 0;
        // long fp = this.position;
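        // Build one Shard per populated HashLocPair; stop at the first pair whose
        // hashloc[1] is 0, since the remaining entries carry no stored location.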

        for (HashLocPair p : ar) {

          if (p.hashloc[1] != 0) {
            Shard sh = new Shard();
            sh.hash = p.hash;
            sh.hashloc = p.hashloc;
            sh.pos = p.pos;
            sh.nlen = p.nlen;
            sh.offset = p.offset;
            sh.len = p.len;
            sh.apos = i;
            cks.add(i, sh);
          } else break;
          i++;
        }
        sz = cks.size();
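        // Listener shared by all shard reads: counts completed reads (DN) and
        // errors (DNEX), records any DataArchivedException, and wakes the
        // waiting thread once every outstanding read has reported back.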
        AsyncChunkReadActionListener l =
            new AsyncChunkReadActionListener() {

              @Override
              public void commandException(Exception e) {
                SDFSLogger.getLog().error("error getting block", e);
                this.incrementAndGetDNEX();
                synchronized (this) {
                  this.notifyAll();
                }
              }

              @Override
              public void commandResponse(Shard result) {
                cks.get(result.apos).ck = result.ck;
                if (this.incrementandGetDN() >= sz) {

                  synchronized (this) {
                    this.notifyAll();
                  }
                }
              }

              @Override
              public void commandArchiveException(DataArchivedException e) {
                this.incrementAndGetDNEX();
                this.setDAR(e);

                synchronized (this) {
                  this.notifyAll();
                }
              }
            };
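        // Hand each shard to the executor; results come back through the listener above.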
        for (Shard sh : cks) {
          sh.l = l;
          executor.execute(sh);
        }
        int wl = 0;
        int tm = 1000;
        int al = 0;
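        // Wait for all reads to finish, logging roughly every 30 seconds of slow I/O
        // and failing once the configured read timeout is exceeded.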
        while (l.getDN() < sz && l.getDNEX() == 0) {
          if (al == 30) {
            int nt = wl / 1000;
            SDFSLogger.getLog()
                .debug("Slow io, waited [" + nt + "] seconds for all reads to complete.");
            al = 0;
          }
          // Compare the elapsed wait (ms) against the configured read timeout (s).
          if (Main.readTimeoutSeconds > 0 && wl > (Main.readTimeoutSeconds * tm)) {
            int nt = wl / 1000;
            throw new IOException(
                "read Timed Out after ["
                    + nt
                    + "] seconds. Expected ["
                    + sz
                    + "] block read but only ["
                    + l.getDN()
                    + "] were completed");
          }
          if (l.getDAR() != null) {
            throw l.getDAR();
          }
          if (l.getDNEX() > 0) throw new IOException("error while reading data");
          synchronized (l) {
            l.wait(1000);
          }
          wl += 1000;
          al++;
        }
        if (l.getDAR() != null) {
          throw l.getDAR();
        }
        if (l.getDNEX() > 0) {
          throw new IOException("error while getting blocks " + l.getDNEX() + " errors found");
        }
        if (l.getDN() < sz) {
          throw new IOException("thread timed out before read was complete ");
        }
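        // All reads completed: copy each shard's chunk into the buffer, either
        // appended in order (pos == -1) or written at its recorded position and offset.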
        buf.position(0);
        for (Shard sh : cks) {
          if (sh.pos == -1) {
            try {
              buf.put(sh.ck);
            } catch (Exception e) {
              // SDFSLogger.getLog().info("pos = " + this.position
              // + "ck sz=" + sh.ck.length + " hcb sz=" +
              // hcb.position() + " cks sz=" +cks.size() + " len="
              // + (hcb.position() +sh.ck.length));
              throw new IOException(e);
            }
          } else {
            try {

              buf.position(sh.pos);
              buf.put(sh.ck, sh.offset, sh.nlen);
            } catch (Exception e) {
              SDFSLogger.getLog()
                  .error(
                      "pos = "
                          + this.position
                          + " ck nlen="
                          + sh.nlen
                          + " ck offset="
                          + sh.offset
                          + " ck len="
                          + sh.ck.length
                          + " hcb pos="
                          + buf.position()
                          + " ck slen="
                          + sh.len
                          + " len="
                          + (buf.capacity()));
              throw new IOException(e);
            }
          }
        }

      } else {
        this.buf =
            ByteBuffer.wrap(HCServiceProxy.fetchChunk(this.ar.get(0).hash, this.ar.get(0).hashloc));
      }
    }
  }
Example 2
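cacheChunk() performs the same parallel shard reads as initBuffer(), but with sh.cache set so the fetched blocks are only pulled into the chunk cache; no buffer is assembled.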
  public void cacheChunk() throws IOException, InterruptedException, DataArchivedException {
    if (this.buf == null) {
      this.hlAdded = false;
      if (HashFunctionPool.max_hash_cluster > 1) {

        final ArrayList<Shard> cks = new ArrayList<Shard>();
        int i = 0;
        // long fp = this.position;
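        // Same shard construction as in initBuffer(), but flagged with cache = true
        // so the reads only warm the chunk cache.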

        for (HashLocPair p : ar) {

          if (p.hashloc[1] != 0) {
            Shard sh = new Shard();
            sh.hash = p.hash;
            sh.hashloc = p.hashloc;
            sh.cache = true;
            sh.pos = p.pos;
            sh.nlen = p.nlen;
            sh.offset = p.offset;
            sh.len = p.len;
            sh.apos = i;
            cks.add(i, sh);
          } else break;
          i++;
        }
        sz = cks.size();
        AsyncChunkReadActionListener l =
            new AsyncChunkReadActionListener() {

              @Override
              public void commandException(Exception e) {
                this.incrementAndGetDNEX();
                synchronized (this) {
                  this.notifyAll();
                }
              }

              @Override
              public void commandResponse(Shard result) {
                cks.get(result.apos).ck = result.ck;
                if (this.incrementandGetDN() >= sz) {

                  synchronized (this) {
                    this.notifyAll();
                  }
                }
              }

              @Override
              public void commandArchiveException(DataArchivedException e) {
                this.incrementAndGetDNEX();
                this.setDAR(e);

                synchronized (this) {
                  this.notifyAll();
                }
              }
            };
        for (Shard sh : cks) {
          sh.l = l;
          executor.execute(sh);
        }
        int wl = 0;
        int al = 0;
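        // Wait for all cache reads to complete; unlike initBuffer() there is no
        // read-timeout check here, only error and archive-exception handling.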
        while (l.getDN() < sz && l.getDNEX() == 0) {
          if (al == 30) {
            int nt = wl / 1000;
            SDFSLogger.getLog()
                .debug("Slow io, waited [" + nt + "] seconds for all reads to complete.");
            al = 0;
          }

          if (l.getDAR() != null) {
            throw l.getDAR();
          }
          if (l.getDNEX() > 0) throw new IOException("error while reading data");
          synchronized (l) {
            l.wait(1000);
          }
          wl += 1000;
          al++;
        }
        if (l.getDAR() != null) {
          throw l.getDAR();
        }
        if (l.getDNEX() > 0) {
          throw new IOException("error while getting blocks " + l.getDNEX() + " errors found");
        }
        if (l.getDN() < sz) {
          throw new IOException("thread timed out before read was complete ");
        }
      }
    }
  }