private Collection<VariantContext> getVariantContexts(RefMetaDataTracker tracker, ReferenceContext ref) {
    List<Feature> features = tracker.getValues(variants, ref.getLocus());
    List<VariantContext> VCs = new ArrayList<VariantContext>(features.size());

    for (Feature record : features) {
        if (VariantContextAdaptors.canBeConvertedToVariantContext(record)) {
            // we need to special-case the HapMap format because indels aren't handled correctly
            if (record instanceof RawHapMapFeature) {

                // is it an indel?
                RawHapMapFeature hapmap = (RawHapMapFeature) record;
                if (hapmap.getAlleles()[0].equals(RawHapMapFeature.NULL_ALLELE_STRING)
                        || hapmap.getAlleles()[1].equals(RawHapMapFeature.NULL_ALLELE_STRING)) {
                    // get the dbSNP object corresponding to this record (needed to help us
                    // distinguish between insertions and deletions)
                    VariantContext dbsnpVC = getDbsnp(hapmap.getName());
                    if (dbsnpVC == null || dbsnpVC.isMixed())
                        continue;

                    Map<String, Allele> alleleMap = new HashMap<String, Allele>(2);
                    alleleMap.put(RawHapMapFeature.DELETION,
                            Allele.create(ref.getBase(), dbsnpVC.isSimpleInsertion()));
                    alleleMap.put(RawHapMapFeature.INSERTION,
                            Allele.create((char) ref.getBase() + hapmap.getAlleles()[1], !dbsnpVC.isSimpleInsertion()));
                    hapmap.setActualAlleles(alleleMap);

                    // also, use the correct positioning for insertions
                    hapmap.updatePosition(dbsnpVC.getStart());

                    if (hapmap.getStart() < ref.getWindow().getStart()) {
                        logger.warn("HapMap record at " + ref.getLocus()
                                + " represents an indel too large to be converted; skipping...");
                        continue;
                    }
                }
            }

            // ok, we might actually be able to turn this record into a variant context
            VariantContext vc = VariantContextAdaptors.toVariantContext(variants.getName(), record, ref);

            if (vc != null) // sometimes the track has odd stuff in it that can't be converted
                VCs.add(vc);
        }
    }

    return VCs;
}
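// ----------------------------------------------------------------------------
// A minimal, self-contained sketch of the HapMap indel allele remapping done
// above. The names here (HapmapAlleleRemapSketch, SimpleAllele, remapAlleles,
// isDbsnpInsertion) are hypothetical illustrations, not GATK API: SimpleAllele
// stands in for GATK's Allele, and isDbsnpInsertion stands in for
// dbsnpVC.isSimpleInsertion(). The point is the isRef bookkeeping: for an
// insertion the bare anchor base is the REF allele, for a deletion the longer
// (anchor + sequence) allele is.
// ----------------------------------------------------------------------------
import java.util.HashMap;
import java.util.Map;

class HapmapAlleleRemapSketch {

    /** Tiny stand-in for GATK's Allele: a base string plus an is-reference flag. */
    static final class SimpleAllele {
        final String bases;
        final boolean isRef;
        SimpleAllele(String bases, boolean isRef) { this.bases = bases; this.isRef = isRef; }
        @Override public String toString() { return bases + (isRef ? "*" : ""); }
    }

    /**
     * @param refBase          reference base at the locus (cf. ref.getBase())
     * @param insertedSeq      the HapMap record's second allele string (the inserted bases)
     * @param isDbsnpInsertion cf. dbsnpVC.isSimpleInsertion(): true if dbSNP calls this an insertion
     */
    static Map<String, SimpleAllele> remapAlleles(char refBase, String insertedSeq, boolean isDbsnpInsertion) {
        Map<String, SimpleAllele> alleleMap = new HashMap<String, SimpleAllele>(2);
        // exactly one of the two entries is flagged as the reference allele,
        // mirroring the isRef arguments passed to Allele.create() above
        alleleMap.put("DELETION", new SimpleAllele(String.valueOf(refBase), isDbsnpInsertion));
        alleleMap.put("INSERTION", new SimpleAllele(refBase + insertedSeq, !isDbsnpInsertion));
        return alleleMap;
    }

    public static void main(String[] args) {
        // an insertion of "TG" after reference base 'A': the DELETION entry (A)
        // is flagged as REF (shown with '*'), the INSERTION entry (ATG) as ALT
        System.out.println(remapAlleles('A', "TG", true));
    }
}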
public Event map(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) {
    boolean hasIndel = false;
    boolean hasInsertion = false;
    boolean hasPointEvent = false;

    int furthestStopPos = -1;

    // look at the rods for indels or SNPs
    if (tracker != null) {
        for (VariantContext vc : tracker.getValues(known)) {
            switch (vc.getType()) {
                case INDEL:
                    hasIndel = true;
                    if (vc.isSimpleInsertion())
                        hasInsertion = true;
                    break;
                case SNP:
                    hasPointEvent = true;
                    break;
                case MIXED:
                    hasPointEvent = true;
                    hasIndel = true;
                    if (vc.isSimpleInsertion())
                        hasInsertion = true;
                    break;
                default:
                    break;
            }
            if (hasIndel)
                furthestStopPos = vc.getEnd();
        }
    }

    // look at the normal context to get deletions and positions with high entropy
    final ReadBackedPileup pileup = context.getBasePileup();

    int mismatchQualities = 0, totalQualities = 0;
    final byte refBase = ref.getBase();
    for (PileupElement p : pileup) {

        // check the ends of the reads to see how far they extend
        furthestStopPos = Math.max(furthestStopPos, p.getRead().getAlignmentEnd());

        // is it a deletion or insertion?
        if (p.isDeletion() || p.isBeforeInsertion()) {
            hasIndel = true;
            if (p.isBeforeInsertion())
                hasInsertion = true;
        }

        // look for mismatches
        else if (lookForMismatchEntropy) {
            if (p.getBase() != refBase)
                mismatchQualities += p.getQual();
            totalQualities += p.getQual();
        }
    }

    // make sure we're supposed to look for high entropy
    if (lookForMismatchEntropy
            && pileup.getNumberOfElements() >= minReadsAtLocus
            && (double) mismatchQualities / (double) totalQualities >= mismatchThreshold)
        hasPointEvent = true;

    // return null if no event occurred
    if (!hasIndel && !hasPointEvent)
        return null;

    // return null if we didn't find any usable reads/rods associated with the event
    if (furthestStopPos == -1)
        return null;

    GenomeLoc eventLoc = context.getLocation();
    if (hasInsertion)
        eventLoc = getToolkit().getGenomeLocParser()
                .createGenomeLoc(eventLoc.getContig(), eventLoc.getStart(), eventLoc.getStart() + 1);

    EVENT_TYPE eventType = (hasIndel
            ? (hasPointEvent ? EVENT_TYPE.BOTH : EVENT_TYPE.INDEL_EVENT)
            : EVENT_TYPE.POINT_EVENT);

    return new Event(eventLoc, furthestStopPos, eventType);
}
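// ----------------------------------------------------------------------------
// A hypothetical stand-alone sketch of the mismatch-"entropy" test used in
// map() above: a locus counts as a point event when the quality-weighted
// mismatch fraction crosses a threshold. The names (MismatchEntropySketch,
// Observation, isHighEntropy) and the 0.15 threshold in main() are
// illustrative assumptions, not GATK API or GATK defaults. Unlike the walker
// code, which relies on real pileups providing coverage, this sketch adds an
// explicit totalQualities > 0 guard before dividing.
// ----------------------------------------------------------------------------
class MismatchEntropySketch {

    /** One aligned base observation: the called base and its Phred quality. */
    static final class Observation {
        final byte base;
        final byte qual;
        Observation(char base, int qual) { this.base = (byte) base; this.qual = (byte) qual; }
    }

    /**
     * @param pileup            base observations overlapping the locus
     * @param refBase           the reference base at the locus
     * @param minReadsAtLocus   minimum coverage before the test applies
     * @param mismatchThreshold fraction of total base quality that must come from mismatches
     */
    static boolean isHighEntropy(Observation[] pileup, char refBase,
                                 int minReadsAtLocus, double mismatchThreshold) {
        int mismatchQualities = 0, totalQualities = 0;
        for (Observation o : pileup) {
            if (o.base != (byte) refBase)
                mismatchQualities += o.qual;   // quality-weight the mismatching bases...
            totalQualities += o.qual;          // ...against all observed bases
        }
        return pileup.length >= minReadsAtLocus
                && totalQualities > 0
                && (double) mismatchQualities / (double) totalQualities >= mismatchThreshold;
    }

    public static void main(String[] args) {
        Observation[] pileup = {
            new Observation('A', 30), new Observation('A', 30),
            new Observation('C', 30), new Observation('C', 30)  // half the quality mass mismatches
        };
        // 60/120 = 0.5 >= 0.15, so this locus would be flagged as a point event: prints true
        System.out.println(isHighEntropy(pileup, 'A', 4, 0.15));
    }
}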