HistogramGenerator addRead(final GATKRead read) {
    final byte[] quals = (useOriginalQualities ? ReadUtils.getOriginalBaseQualities(read) : read.getBaseQualities());
    if (quals == null) {
        return this;
    }

    final int length = quals.length;
    final boolean isReverseStrand = read.isReverseStrand();
    ensureArraysBigEnough(length + 1);

    for (int i = 0; i < length; ++i) {
        // reverse-strand reads store bases in reference order, which is the reverse of
        // sequencing order, so the stored index must be flipped to recover the machine cycle
        final int cycle = isReverseStrand ? length - i : i + 1;

        if (read.isPaired() && read.isSecondOfPair()) {
            secondReadTotalsByCycle[cycle] += quals[i];
            secondReadCountsByCycle[cycle] += 1;
        } else {
            firstReadTotalsByCycle[cycle] += quals[i];
            firstReadCountsByCycle[cycle] += 1;
        }
    }
    return this;
}
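// Usage sketch for the accumulator above, hedged: the HistogramGenerator
// constructor argument is an assumption (not shown in this snippet), and the
// reads are fabricated with GATK's ArtificialReadUtils test helper. It
// illustrates the cycle flip for reverse-strand reads.
final HistogramGenerator gen = new HistogramGenerator(false);  // assumed: flag selects original vs. current quals
final GATKRead fwd = ArtificialReadUtils.createArtificialRead(TextCigarCodec.decode("10M"));
gen.addRead(fwd);               // forward strand: quals[0] lands in cycle 1, quals[9] in cycle 10
fwd.setIsReverseStrand(true);
gen.addRead(fwd);               // reverse strand: mirrored, quals[0] lands in cycle 10, quals[9] in cycle 1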
public void setRead(final GATKRead read) {
    if (!read.isEmpty()) {
        this.read = read;
        if (!read.isUnmapped()) {
            // the location spans the soft-clipped bases too, not just the aligned ones
            loc = genomeLocParser.createGenomeLoc(read.getContig(),
                                                  ReadUtils.getSoftStart(read),
                                                  ReadUtils.getSoftEnd(read));
        }
    }
}
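// Worked example for the soft-clip-aware boundaries, as a sketch (the contig
// name "1" is arbitrary): a read with CIGAR 2S8M placed at position 100 gets
// soft boundaries [98, 107], because getSoftStart subtracts the 2 leading
// soft-clipped bases and getSoftEnd covers the 8 aligned bases (no trailing clip).
final GATKRead clipped = ArtificialReadUtils.createArtificialRead(TextCigarCodec.decode("2S8M"));
clipped.setPosition("1", 100);
// ReadUtils.getSoftStart(clipped) == 98, ReadUtils.getSoftEnd(clipped) == 107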
@Override
public void processElement(ProcessContext c) throws Exception {
    String dest = c.element();
    logger.info("Saving to " + dest);
    Iterable<GATKRead> reads = c.sideInput(iterableView);

    OutputStream outputStream = BucketUtils.createFile(dest, c.getPipelineOptions());
    // closing the writer via try-with-resources also flushes and closes the underlying stream
    try (SAMFileWriter writer = new SAMFileWriterFactory().makeBAMWriter(header, false, outputStream)) {
        for (GATKRead r : reads) {
            final SAMRecord sr = r.convertToSAMRecord(header);
            writer.addAlignment(sr);
        }
    }
}
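// Wiring sketch, hedged: the names below (reads, destinations, SaveReadsFn)
// are hypothetical. The DoFn above would be applied to a PCollection<String>
// of destination paths, with the reads materialized as an Iterable side input
// via the Dataflow SDK 1.x View.asIterable() transform.
final PCollectionView<Iterable<GATKRead>> iterableView = reads.apply(View.asIterable());
destinations.apply(ParDo.of(new SaveReadsFn(header, iterableView)).withSideInputs(iterableView));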
@Override
public void recordValues(final GATKRead read, final SAMFileHeader header, final ReadCovariates values) {
    final SAMReadGroupRecord rg = ReadUtils.getSAMReadGroupRecord(read, header);
    final String readGroupId = getID(rg);
    final int key = keyForReadGroup(readGroupId);

    final int readLength = read.getLength();
    for (int i = 0; i < readLength; i++) {
        // the read group is position-independent, so every offset gets the same
        // key for the mismatch, insertion, and deletion event types
        values.addCovariate(key, key, key, i);
    }
}
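// Sketch of keyForReadGroup, hedged: the field and lambda below are my
// illustration of the usual interning pattern, not the class's actual
// internals. Each distinct read-group ID maps to a dense int so downstream
// recalibration tables can be indexed by plain arrays.
private final Map<String, Integer> readGroupLookupTable = new HashMap<>();

private int keyForReadGroup(final String readGroupId) {
    return readGroupLookupTable.computeIfAbsent(readGroupId, id -> readGroupLookupTable.size());
}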
/**
 * Add a read to the manager
 *
 * @param read the read to add
 */
public void addRead(final GATKRead read) {
    if (read == null) {
        throw new IllegalArgumentException("read added to manager is null, which is not allowed");
    }

    // if the new read is on a different contig or we have too many reads, then we need
    // to flush the queue and clear the map
    final boolean tooManyReads = getNReadsInQueue() >= MAX_RECORDS_IN_MEMORY;
    final boolean encounteredNewContig = getNReadsInQueue() > 0
            && !waitingReads.peek().read.getContig().equals(read.getContig());

    if (tooManyReads || encounteredNewContig) {
        if (DEBUG) {
            logger.warn("Flushing queue on " + (tooManyReads ? "too many reads" :
                    ("move to new contig: " + read.getContig() + " from " + waitingReads.peek().read.getContig()))
                    + " at " + read.getStart());
        }

        final int targetQueueSize = encounteredNewContig ? 0 : MAX_RECORDS_IN_MEMORY / 2;

        // write the required number of waiting reads to disk
        while (getNReadsInQueue() > targetQueueSize) {
            writer.addRead(waitingReads.poll().read);
        }
    }

    final SplitRead splitRead = new SplitRead(read);

    // fix overhangs, as needed
    for (final Splice splice : splices) {
        fixSplit(splitRead, splice);
    }

    // add the new read to the queue
    waitingReads.add(splitRead);
}
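// The flush policy above, extracted as a pure function for illustration only
// (the helper name is mine, not the class's): a contig change drains the queue
// completely, since buffered reads are only comparable within one contig,
// while plain overflow drains to half capacity to amortize the cost of flushes.
static int targetQueueSize(final boolean encounteredNewContig, final int maxRecordsInMemory) {
    return encounteredNewContig ? 0 : maxRecordsInMemory / 2;
}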
@Test
public void testPerReadAlleleLikelihoodMap() {
    final PerReadAlleleLikelihoodMap map = new PerReadAlleleLikelihoodMap();

    final Allele alleleA = Allele.create("A");
    final double lik = -1.0;  // ignored

    final int[] MQs = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, QualityUtils.MAPPING_QUALITY_UNAVAILABLE};
    final List<Integer> MQsList = Arrays.asList(ArrayUtils.toObject(MQs));

    // reads with MQ 255 (unavailable) are excluded from the calculation; we test that here
    final List<Integer> MQsListOK = new ArrayList<>(MQsList);
    // NOTE: if we just call remove(i), Java thinks i is an index.
    // A workaround for this overloading bogosity is to call removeAll and pass a collection
    // (casting i to (Object) would work too, but it's more error prone)
    MQsListOK.removeAll(Collections.singleton(QualityUtils.MAPPING_QUALITY_UNAVAILABLE));

    final int n1A = MQs.length;
    for (int i = 0; i < n1A; i++) {
        final GATKRead read = ArtificialReadUtils.createArtificialRead(TextCigarCodec.decode("10M"));
        read.setMappingQuality(MQs[i]);
        map.add(read, alleleA, lik);
    }

    final Map<String, PerReadAlleleLikelihoodMap> perReadAlleleLikelihoodMap = Collections.singletonMap("sample1", map);
    final VariantContext vc = makeVC();
    final ReferenceContext referenceContext = null;

    final Map<String, Object> annotate = new RMSMappingQuality().annotate(referenceContext, vc, perReadAlleleLikelihoodMap);
    Assert.assertEquals(annotate.size(), 1, "size");
    Assert.assertEquals(annotate.keySet(), Collections.singleton(VCFConstants.RMS_MAPPING_QUALITY_KEY), "annots");

    final double rms = MathUtils.rms(MQsListOK);  // RMS over the MQs that actually count (255 excluded)
    Assert.assertEquals(annotate.get(VCFConstants.RMS_MAPPING_QUALITY_KEY), String.format("%.2f", rms));
}
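// Worked check for the final assertion above, as a reasoning aid: with the
// 255 excluded, rms = sqrt((1^2 + 2^2 + ... + 10^2) / 10) = sqrt(385.0 / 10)
// = sqrt(38.5) ≈ 6.2048, so the annotation value is expected to be the
// string "6.20" after the %.2f formatting.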