Example 1
  @Test
  public void testCountsFromLocusTraversal() {
    final GenomeAnalysisEngine engine = new GenomeAnalysisEngine();
    engine.setGenomeLocParser(genomeLocParser);

    final Collection<SAMReaderID> samFiles = new ArrayList<>();
    final SAMReaderID readerID = new SAMReaderID(testBAM, new Tags());
    samFiles.add(readerID);

    final SAMDataSource dataSource =
        new SAMDataSource(
            samFiles,
            new ThreadAllocation(),
            null,
            genomeLocParser,
            false,
            SAMFileReader.ValidationStringency.STRICT,
            null,
            null,
            new ValidationExclusion(),
            new ArrayList<ReadFilter>(),
            new ArrayList<ReadTransformer>(),
            false,
            (byte) 30,
            false,
            true);

    engine.setReadsDataSource(dataSource);
    final Set<String> samples = SampleUtils.getSAMFileSamples(dataSource.getHeader());

    final TraverseLociNano traverseLociNano = new TraverseLociNano(1);
    final DummyLocusWalker walker = new DummyLocusWalker();
    traverseLociNano.initialize(engine, walker, null);

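    // Walk every shard of the BAM, opening a per-locus window over each and traversing it.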
    for (final Shard shard : dataSource.createShardIteratorOverAllReads(new LocusShardBalancer())) {
      final WindowMaker windowMaker =
          new WindowMaker(
              shard, genomeLocParser, dataSource.seek(shard), shard.getGenomeLocs(), samples);
      for (final WindowMaker.WindowMakerIterator window : windowMaker) {
        final LocusShardDataProvider dataProvider =
            new LocusShardDataProvider(
                shard,
                shard.getReadProperties(),
                genomeLocParser,
                window.getLocus(),
                window,
                reference,
                new ArrayList<ReferenceOrderedDataSource>());
        traverseLociNano.traverse(walker, dataProvider, 0);
        dataProvider.close();
      }
      windowMaker.close();
    }

    // dataSource.close();
    Assert.assertEquals(
        engine.getCumulativeMetrics().getNumReadsSeen(), contigs.size() * numReadsPerContig);
    Assert.assertEquals(
        engine.getCumulativeMetrics().getNumIterations(), contigs.size() * numReadsPerContig);
  }
Example 2
  private void twoAddressesTest(
      String shard1Id, String shard2Id, int node1Weight, int node2Weight) {

    DataSource testDataSource =
        new DataSource() {
          public RedisConnection getConnection() {
            return null;
          }
        };

    TestNode node1 = new TestNode();
    TestNode node2 = new TestNode();

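    // One DataSource and one node are expected per configured address; both factory
    // calls return the same stub DataSource, whose getConnection() yields null.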
    expect(dataSourceFactoryMock.create("addr1", 1)).andReturn(testDataSource);
    expect(nodeFactoryMock.create(testDataSource)).andReturn(node1);

    expect(dataSourceFactoryMock.create("addr2", 2)).andReturn(testDataSource);
    expect(nodeFactoryMock.create(testDataSource)).andReturn(node2);

    replay(nodeFactoryMock, dataSourceFactoryMock);
    List<Shard<TestNode>> shards = new ArrayList<Shard<TestNode>>(shardsFactory.create());
    verify(nodeFactoryMock, dataSourceFactoryMock);

    Shard<TestNode> shard = shards.get(0);
    assertEquals(node1, shard.getNode());
    assertEquals(shard1Id, shard.getShardId());
    assertEquals(node1Weight, shard.getWeight());

    shard = shards.get(1);
    assertEquals(node2, shard.getNode());
    assertEquals(shard2Id, shard.getShardId());
    assertEquals(node2Weight, shard.getWeight());
  }
Example 3
  /* (non-Javadoc)
   * @see org.apache.hadoop.mapred.MapReduceBase#configure(org.apache.hadoop.mapred.JobConf)
   */
  public void configure(JobConf job) {
    iconf = new IndexUpdateConfiguration(job);
    analyzer = (Analyzer) ReflectionUtils.newInstance(iconf.getDocumentAnalyzerClass(), job);

    localAnalysis =
        (ILocalAnalysis) ReflectionUtils.newInstance(iconf.getLocalAnalysisClass(), job);
    localAnalysis.configure(job);

    shards = Shard.getIndexShards(iconf);

    distributionPolicy =
        (IDistributionPolicy) ReflectionUtils.newInstance(iconf.getDistributionPolicyClass(), job);
    distributionPolicy.init(shards);

    LOG.info("sea.document.analyzer = " + analyzer.getClass().getName());
    LOG.info("sea.local.analysis = " + localAnalysis.getClass().getName());
    LOG.info(shards.length + " shards = " + iconf.getIndexShards());
    LOG.info("sea.distribution.policy = " + distributionPolicy.getClass().getName());
  }
Example 4
  public Shard unmarshall(JsonUnmarshallerContext context) throws Exception {
    Shard shard = new Shard();

    int originalDepth = context.getCurrentDepth();
    String currentParentElement = context.getCurrentParentElement();
    int targetDepth = originalDepth + 1;

    JsonToken token = context.currentToken;
    if (token == null) token = context.nextToken();
    if (token == VALUE_NULL) return null;

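    // Walk the token stream, mapping fields found at targetDepth onto the Shard and
    // stopping once the parser climbs back out of the object it started in.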
    while (true) {
      if (token == null) break;

      if (token == FIELD_NAME || token == START_OBJECT) {
        if (context.testExpression("ShardId", targetDepth)) {
          context.nextToken();
          shard.setShardId(StringJsonUnmarshaller.getInstance().unmarshall(context));
        }
        if (context.testExpression("ParentShardId", targetDepth)) {
          context.nextToken();
          shard.setParentShardId(StringJsonUnmarshaller.getInstance().unmarshall(context));
        }
        if (context.testExpression("AdjacentParentShardId", targetDepth)) {
          context.nextToken();
          shard.setAdjacentParentShardId(StringJsonUnmarshaller.getInstance().unmarshall(context));
        }
        if (context.testExpression("HashKeyRange", targetDepth)) {
          context.nextToken();
          shard.setHashKeyRange(HashKeyRangeJsonUnmarshaller.getInstance().unmarshall(context));
        }
        if (context.testExpression("SequenceNumberRange", targetDepth)) {
          context.nextToken();
          shard.setSequenceNumberRange(
              SequenceNumberRangeJsonUnmarshaller.getInstance().unmarshall(context));
        }
      } else if (token == END_ARRAY || token == END_OBJECT) {
        if (context.getLastParsedParentElement() == null
            || context.getLastParsedParentElement().equals(currentParentElement)) {
          if (context.getCurrentDepth() <= originalDepth) break;
        }
      }

      token = context.nextToken();
    }

    return shard;
  }
Example 5
    @SuppressWarnings("unchecked")
    public GeneratorT withInstance(int aValue) {
      instance.setInstance(aValue);

      return (GeneratorT) this;
    }
Example 6
    @SuppressWarnings("unchecked")
    public GeneratorT withFloc(Floc aValue) {
      instance.setFloc(aValue);

      return (GeneratorT) this;
    }
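
Both setters follow the self-typed builder idiom: each returns this cast to the GeneratorT type parameter, so a concrete subclass can chain the calls without losing its own type. A minimal usage sketch, assuming a hypothetical ConcreteGenerator that binds GeneratorT to itself; AbstractGenerator, Thing, and build() are likewise illustrative names, not part of the example above:

  // Hypothetical subclass binding GeneratorT to itself.
  class ConcreteGenerator extends AbstractGenerator<ConcreteGenerator> {
    public Thing build() {
      return instance;
    }
  }

  // Chaining keeps the concrete type, so build() stays reachable after withFloc():
  //   Thing thing = new ConcreteGenerator().withInstance(42).withFloc(new Floc()).build();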
Example 7
  private void initBuffer() throws IOException, InterruptedException, DataArchivedException {
    if (this.buf == null) {
      this.hlAdded = false;
      if (HashFunctionPool.max_hash_cluster > 1) {
        this.buf = ByteBuffer.wrap(new byte[Main.CHUNK_LENGTH]);

        final ArrayList<Shard> cks = new ArrayList<Shard>();
        int i = 0;
        // long fp = this.position;

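        // Build one asynchronous read request (Shard) per populated HashLocPair;
        // the loop stops at the first pair with no hash location set.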
        for (HashLocPair p : ar) {

          if (p.hashloc[1] != 0) {
            Shard sh = new Shard();
            sh.hash = p.hash;
            sh.hashloc = p.hashloc;
            sh.pos = p.pos;
            sh.nlen = p.nlen;
            sh.offset = p.offset;
            sh.len = p.len;
            sh.apos = i;
            cks.add(i, sh);
          } else break;
          i++;
        }
        sz = cks.size();
        AsyncChunkReadActionListener l =
            new AsyncChunkReadActionListener() {

              @Override
              public void commandException(Exception e) {
                SDFSLogger.getLog().error("error getting block", e);
                this.incrementAndGetDNEX();
                synchronized (this) {
                  this.notifyAll();
                }
              }

              @Override
              public void commandResponse(Shard result) {
                cks.get(result.apos).ck = result.ck;
                if (this.incrementandGetDN() >= sz) {

                  synchronized (this) {
                    this.notifyAll();
                  }
                }
              }

              @Override
              public void commandArchiveException(DataArchivedException e) {
                this.incrementAndGetDNEX();
                this.setDAR(e);

                synchronized (this) {
                  this.notifyAll();
                }
              }
            };
        for (Shard sh : cks) {
          sh.l = l;
          executor.execute(sh);
        }
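        // Poll until every shard has responded, logging slow IO roughly every 30
        // seconds and failing fast on errors, archived data, or a read timeout.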
        int wl = 0;
        int tm = 1000;
        int al = 0;
        while (l.getDN() < sz && l.getDNEX() == 0) {
          if (al == 30) {
            int nt = wl / 1000;
            SDFSLogger.getLog()
                .debug("Slow io, waited [" + nt + "] seconds for all reads to complete.");
            al = 0;
          }
          if (Main.readTimeoutSeconds > 0 && wl > (Main.readTimeoutSeconds * tm)) {
            int nt = wl / 1000;
            throw new IOException(
                "read Timed Out after ["
                    + nt
                    + "] seconds. Expected ["
                    + sz
                    + "] block read but only ["
                    + l.getDN()
                    + "] were completed");
          }
          if (l.getDAR() != null) {
            throw l.getDAR();
          }
          if (l.getDNEX() > 0) throw new IOException("error while reading data");
          synchronized (l) {
            l.wait(1000);
          }
          wl += 1000;
          al++;
        }
        if (l.getDAR() != null) {
          throw l.getDAR();
        }
        if (l.getDNEX() > 0) {
          throw new IOException("error while getting blocks " + l.getDNEX() + " errors found");
        }
        if (l.getDN() < sz) {
          throw new IOException("thread timed out before read was complete ");
        }
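        // Reassemble the chunk: pos == -1 means the block is appended in order;
        // otherwise its bytes are spliced in at the recorded position and offset.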
        buf.position(0);
        for (Shard sh : cks) {
          if (sh.pos == -1) {
            try {
              buf.put(sh.ck);
            } catch (Exception e) {
              // SDFSLogger.getLog().info("pos = " + this.position
              // + "ck sz=" + sh.ck.length + " hcb sz=" +
              // hcb.position() + " cks sz=" +cks.size() + " len="
              // + (hcb.position() +sh.ck.length));
              throw new IOException(e);
            }
          } else {
            try {

              buf.position(sh.pos);
              buf.put(sh.ck, sh.offset, sh.nlen);
            } catch (Exception e) {
              SDFSLogger.getLog()
                  .error(
                      "pos = "
                          + this.position
                          + " ck nlen="
                          + sh.nlen
                          + " ck offset="
                          + sh.offset
                          + " ck len="
                          + sh.ck.length
                          + " hcb pos="
                          + buf.position()
                          + " ck slen="
                          + sh.len
                          + " len="
                          + (buf.capacity()));
              throw new IOException(e);
            }
          }
        }

      } else {
        this.buf =
            ByteBuffer.wrap(HCServiceProxy.fetchChunk(this.ar.get(0).hash, this.ar.get(0).hashloc));
      }
    }
  }
Example 8
  public void cacheChunk() throws IOException, InterruptedException, DataArchivedException {
    if (this.buf == null) {
      this.hlAdded = false;
      if (HashFunctionPool.max_hash_cluster > 1) {

        final ArrayList<Shard> cks = new ArrayList<Shard>();
        int i = 0;
        // long fp = this.position;

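        // Same shard construction and fan-out as initBuffer, but with cache = true
        // each Shard only warms the chunk cache; no buffer is assembled afterwards.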
        for (HashLocPair p : ar) {

          if (p.hashloc[1] != 0) {
            Shard sh = new Shard();
            sh.hash = p.hash;
            sh.hashloc = p.hashloc;
            sh.cache = true;
            sh.pos = p.pos;
            sh.nlen = p.nlen;
            sh.offset = p.offset;
            sh.len = p.len;
            sh.apos = i;
            cks.add(i, sh);
          } else break;
          i++;
        }
        sz = cks.size();
        AsyncChunkReadActionListener l =
            new AsyncChunkReadActionListener() {

              @Override
              public void commandException(Exception e) {
                this.incrementAndGetDNEX();
                synchronized (this) {
                  this.notifyAll();
                }
              }

              @Override
              public void commandResponse(Shard result) {
                cks.get(result.apos).ck = result.ck;
                if (this.incrementandGetDN() >= sz) {

                  synchronized (this) {
                    this.notifyAll();
                  }
                }
              }

              @Override
              public void commandArchiveException(DataArchivedException e) {
                this.incrementAndGetDNEX();
                this.setDAR(e);

                synchronized (this) {
                  this.notifyAll();
                }
              }
            };
        for (Shard sh : cks) {
          sh.l = l;
          executor.execute(sh);
        }
        int wl = 0;
        int al = 0;
        while (l.getDN() < sz && l.getDNEX() == 0) {
          if (al == 30) {
            int nt = wl / 1000;
            SDFSLogger.getLog()
                .debug("Slow io, waited [" + nt + "] seconds for all reads to complete.");
            al = 0;
          }

          if (l.getDAR() != null) {
            throw l.getDAR();
          }
          if (l.getDNEX() > 0) throw new IOException("error while reading data");
          synchronized (l) {
            l.wait(1000);
          }
          wl += 1000;
          al++;
        }
        if (l.getDAR() != null) {
          throw l.getDAR();
        }
        if (l.getDNEX() > 0) {
          throw new IOException("error while getting blocks " + l.getDNEX() + " errors found");
        }
        if (l.getDN() < sz) {
          throw new IOException("thread timed out before read was complete ");
        }
      }
    }
  }
Example 9
  public void deleteNode(Shard realShard) {
    for (int i = 0; i < num_virtual; i++) {
      virtual_nodes.remove(hash.hash(realShard.toString() + "_" + i));
    }
  }

  public void addNode(Shard realShard) {
    for (int i = 0; i < num_virtual; i++) {
      virtual_nodes.put(hash.hash(realShard.toString() + "_" + i), realShard);
    }
  }
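
Each real Shard is projected onto num_virtual points of the hash ring, so adding or removing a node only remaps the keys that fell on its virtual points. A minimal lookup sketch, assuming virtual_nodes is a java.util.TreeMap<Long, Shard> and hash.hash(String) returns a long; getNode itself is a hypothetical addition, not part of the example above:

  public Shard getNode(String key) {
    if (virtual_nodes.isEmpty()) {
      return null;
    }
    // Clockwise walk: take the first virtual node at or after the key's hash,
    // wrapping around to the lowest entry when the key hashes past the last node.
    java.util.Map.Entry<Long, Shard> entry = virtual_nodes.ceilingEntry(hash.hash(key));
    return entry != null ? entry.getValue() : virtual_nodes.firstEntry().getValue();
  }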